In [4]:
from os import environ

# Hyperparameter configuration for the run, passed to utils.ipynb via
# environment variables — every value must be a string.
environ['optimizer'] = 'Adam'
environ['num_workers'] = '2'        # DataLoader worker processes
environ['maxsize'] = '10000'        # presumably a dataset/cache size cap — confirm in utils.ipynb
environ['batch_size'] = '512'       # was str(512); plain literal for consistency with the rest
environ['n_epochs'] = '500'
environ['batch_norm'] = 'True'
environ['loss_func'] = 'mse'
environ['layers'] = '300 200 120 80 30'          # hidden layer sizes, space-separated
environ['dropouts'] = '0.05 0.05 0.1 0.1 0.05'   # dropout rate per hidden layer
environ['log'] = 'True'
environ['weight_decay'] = '0.00'
%run utils.ipynb
In [6]:
# Load previously saved weights; the checkpoint name encodes the run's
# hyperparameters (optimizer, batch norm flag, loss, network depth).
checkpoint = f"speedup_{optimizer}_batch_norm_{batch_norm}_{loss_func}_nlayers_{len(layers_sizes)}"
l = l.load(checkpoint)
In [11]:
# Sweep learning rates to pick a starting value — assumes `l` is a fastai
# Learner built in utils.ipynb (TODO confirm).
l.lr_find()
LR Finder is complete, type {learner_name}.recorder.plot() to see the graph.
In [12]:
# Plot loss vs. learning rate from the LR-finder sweep above.
l.recorder.plot()
In [77]:
# Learning rate — presumably chosen from the LR-finder plot above.
lr = 1e-5
In [78]:
# Train with the 1-cycle policy. The epoch count is taken from the
# n_epochs environment variable set in the config cell (it was a
# hard-coded 500, silently duplicating that setting).
l.fit_one_cycle(int(environ['n_epochs']), lr)
Total time: 14:30
epoch
train_loss
valid_loss
1
20.607269
20.838434
2
20.572981
21.046347
3
20.524773
20.490526
4
20.505909
20.579035
5
20.655590
21.425268
6
20.648302
21.915030
7
20.586678
21.569431
8
20.568081
21.216703
9
20.645319
20.922182
10
20.574635
21.783293
11
20.596874
20.740532
12
20.551191
20.847963
13
20.596256
20.724800
14
20.666773
20.442526
15
20.573994
20.658924
16
20.671957
21.091988
17
20.621586
21.453350
18
20.656174
21.072149
19
20.545038
21.264757
20
20.575459
20.479015
21
20.612585
20.493549
22
20.539351
20.606779
23
20.564121
20.890619
24
20.528551
20.783043
25
20.559774
21.207277
26
20.632204
20.393938
27
20.639574
20.705160
28
20.646963
20.859894
29
20.552710
20.795172
30
20.582239
21.594843
31
20.594866
20.834402
32
20.697765
21.099491
33
20.652569
22.344519
34
20.618380
21.054741
35
20.557102
21.002459
36
20.696537
21.835106
37
20.626034
20.693962
38
20.563517
20.259537
39
20.568518
20.970797
40
20.654987
21.790991
41
20.674469
20.753553
42
20.728075
21.237421
43
20.598841
20.663191
44
20.539843
20.762337
45
20.508173
20.768824
46
20.558529
21.088552
47
20.588724
21.719040
48
20.622728
21.041071
49
20.596592
20.675835
50
20.635792
22.430376
51
20.590536
22.489637
52
20.527681
20.745251
53
20.598450
22.087662
54
20.523333
20.475044
55
20.626856
22.160774
56
20.612770
20.743677
57
20.575693
20.808659
58
20.484156
20.798901
59
20.544609
20.403509
60
20.484343
20.302809
61
20.529348
20.679399
62
20.507113
20.366316
63
20.567446
20.414413
64
20.619913
21.095001
65
20.561901
21.172934
66
20.569757
20.900305
67
20.688343
21.553343
68
20.572655
21.232622
69
20.599983
21.551390
70
20.549898
21.118109
71
20.618891
21.319365
72
20.566437
20.653809
73
20.735268
21.953854
74
20.711903
22.351116
75
20.667377
20.696796
76
20.655218
20.497982
77
20.556057
20.625341
78
20.582209
21.944653
79
20.569637
20.930765
80
20.527393
22.026384
81
20.596756
21.372671
82
20.545309
20.444218
83
20.490793
20.715952
84
20.556601
21.083141
85
20.593964
21.054325
86
20.553545
21.413429
87
20.593237
21.327913
88
20.584585
21.055294
89
20.618729
21.101822
90
20.632666
21.177870
91
20.656960
21.344622
92
20.669706
20.992977
93
20.683855
21.993916
94
20.598246
20.414518
95
20.721254
21.183012
96
20.610006
21.765135
97
20.615194
21.486853
98
20.523235
20.703354
99
20.521646
21.072803
100
20.639738
21.465292
101
20.654497
22.623938
102
20.698883
21.334509
103
20.564266
20.357147
104
20.541786
20.479210
105
20.510851
20.065987
106
20.529362
20.896204
107
20.637552
21.365253
108
20.690794
21.824327
109
20.719900
21.702887
110
20.607908
20.957062
111
20.554186
20.265390
112
20.559122
20.632854
113
20.650814
21.110462
114
20.668655
21.244087
115
20.640675
21.295397
116
20.659777
21.496460
117
20.584902
20.967642
118
20.617334
21.176598
119
20.656698
21.493027
120
20.516771
21.010300
121
20.598055
19.822821
122
20.622589
21.272488
123
20.704453
21.983259
124
20.627676
20.660385
125
20.670876
20.734215
126
20.670862
21.159206
127
20.626898
20.652794
128
20.645580
21.797438
129
20.591467
21.544443
130
20.621534
21.179888
131
20.640463
20.540905
132
20.679895
20.939751
133
20.623487
21.289764
134
20.576012
21.284367
135
20.521719
20.482613
136
20.548624
20.857098
137
20.568367
21.408447
138
20.603348
21.470140
139
20.591772
23.670437
140
20.580727
21.201271
141
20.560848
20.820139
142
20.625954
21.152451
143
20.625313
21.448095
144
20.586678
20.873539
145
20.596029
20.761850
146
20.605215
20.540462
147
20.639317
21.384226
148
20.561966
21.161978
149
20.652905
21.125637
150
20.596119
22.210815
151
20.594181
21.180601
152
20.578173
20.298925
153
20.613653
20.689610
154
20.587433
21.544329
155
20.669098
20.575083
156
20.635822
21.084221
157
20.657354
21.173323
158
20.618073
21.016474
159
20.547745
21.016079
160
20.590302
20.774078
161
20.572023
20.696463
162
20.603125
21.988413
163
20.570793
21.228306
164
20.574944
21.061075
165
20.560587
20.670279
166
20.547615
20.245943
167
20.567657
21.013418
168
20.563820
20.484203
169
20.655249
21.560583
170
20.554256
20.982363
171
20.585035
21.688641
172
20.594481
21.206144
173
20.492689
20.846071
174
20.558376
21.565050
175
20.541201
20.800011
176
20.614769
21.361645
177
20.549784
20.387743
178
20.519354
21.414488
179
20.476963
21.024582
180
20.565201
21.394466
181
20.622059
20.775200
182
20.664085
21.171022
183
20.667171
22.139368
184
20.621006
21.622063
185
20.616180
21.358952
186
20.666616
21.278389
187
20.620228
21.203371
188
20.555191
20.663719
189
20.615126
20.802279
190
20.601587
21.582958
191
20.552458
20.829403
192
20.545095
20.914989
193
20.514317
20.738642
194
20.556709
21.207209
195
20.542336
20.552679
196
20.478416
21.718130
197
20.494570
21.574339
198
20.564299
21.287565
199
20.519068
20.911009
200
20.491951
20.629429
201
20.569242
21.459984
202
20.613674
20.767336
203
20.536959
20.660881
204
20.609756
21.452549
205
20.596937
21.328585
206
20.587822
20.849018
207
20.549479
20.489069
208
20.553270
20.753893
209
20.547443
20.844301
210
20.514967
21.061138
211
20.553551
20.798832
212
20.545780
20.660559
213
20.532608
21.681669
214
20.484739
20.575909
215
20.551735
21.108181
216
20.488781
22.054947
217
20.484320
20.293480
218
20.488991
20.517233
219
20.555607
20.503254
220
20.513224
22.259174
221
20.525099
21.186075
222
20.516676
20.634571
223
20.448048
20.857418
224
20.456705
20.487246
225
20.542784
20.958275
226
20.622183
20.920097
227
20.567980
20.907732
228
20.569351
21.483484
229
20.589825
20.817820
230
20.502117
20.648817
231
20.508759
20.485054
232
20.504700
21.888409
233
20.550848
22.240910
234
20.524178
21.303186
235
20.527571
20.538563
236
20.530972
21.385149
237
20.499716
20.785219
238
20.560843
21.459974
239
20.614378
21.144815
240
20.651049
20.821918
241
20.586336
21.609409
242
20.566395
20.968782
243
20.625244
21.098911
244
20.591244
21.752848
245
20.521875
21.290493
246
20.432865
20.788084
247
20.595173
21.067244
248
20.652546
21.038731
249
20.631620
20.836647
250
20.609175
22.135216
251
20.726162
20.974831
252
20.614429
19.973419
253
20.592777
21.056654
254
20.531185
21.245359
255
20.598816
20.786482
256
20.533178
21.159231
257
20.581810
22.074635
258
20.547016
20.853115
259
20.474644
20.618874
260
20.496342
21.627234
261
20.443926
22.322187
262
20.507927
21.137804
263
20.562332
20.804411
264
20.570076
20.432232
265
20.634115
20.841770
266
20.508503
20.824272
267
20.540594
21.817913
268
20.561024
22.102795
269
20.526646
20.933123
270
20.524420
21.558962
271
20.488874
20.196037
272
20.543896
21.464846
273
20.578465
20.828424
274
20.514183
21.656050
275
20.561806
21.283739
276
20.704594
20.621649
277
20.613092
20.502398
278
20.592310
21.441301
279
20.566401
20.616922
280
20.523397
20.146410
281
20.534157
20.710827
282
20.576832
20.644699
283
20.496584
21.022102
284
20.488987
20.488802
285
20.507149
20.756081
286
20.471554
20.555368
287
20.619667
20.885145
288
20.522015
20.427654
289
20.509638
20.633211
290
20.480055
20.140516
291
20.499559
20.910147
292
20.483589
20.577147
293
20.442804
20.739645
294
20.509590
21.981775
295
20.533411
21.300303
296
20.473837
20.352194
297
20.510353
20.955696
298
20.486599
21.834835
299
20.612247
21.461443
300
20.542898
20.705400
301
20.529541
21.381552
302
20.459169
20.719250
303
20.523739
20.898382
304
20.512886
20.540319
305
20.520706
20.574024
306
20.528303
20.770388
307
20.481323
20.741982
308
20.487762
20.742624
309
20.523890
21.119165
310
20.476629
20.881372
311
20.538088
20.807213
312
20.522789
20.636974
313
20.523056
20.676819
314
20.419144
20.609339
315
20.473732
20.732458
316
20.508406
20.690987
317
20.520863
20.711363
318
20.551344
21.413570
319
20.536905
20.979322
320
20.537853
20.695499
321
20.449329
20.522995
322
20.518435
20.620306
323
20.414707
21.040291
324
20.478148
21.065767
325
20.493593
21.348661
326
20.441633
20.274168
327
20.511864
20.698812
328
20.523409
20.470928
329
20.620409
21.097342
330
20.574564
20.503950
331
20.507385
21.808704
332
20.511387
21.308426
333
20.511755
20.754969
334
20.505112
20.499279
335
20.485382
20.499569
336
20.495184
20.979742
337
20.538607
21.544779
338
20.611595
22.587662
339
20.578985
20.585941
340
20.499022
20.555531
341
20.491795
20.452644
342
20.522884
20.871740
343
20.501022
22.040995
344
20.552366
20.618763
345
20.615292
23.628202
346
20.550066
20.631115
347
20.488691
20.328398
348
20.454550
19.985676
349
20.549919
21.593689
350
20.601273
21.273312
351
20.567694
21.032532
352
20.578087
22.482668
353
20.523138
21.388563
354
20.449772
20.960531
355
20.424742
21.045780
356
20.463720
21.359241
357
20.459076
21.150404
358
20.619961
21.720943
359
20.544230
21.691837
360
20.537447
20.786932
361
20.492418
20.591845
362
20.533516
20.805410
363
20.526325
20.865335
364
20.529188
21.681438
365
20.468151
21.112238
366
20.533190
20.875454
367
20.542471
21.105318
368
20.536692
21.374168
369
20.521191
21.003809
370
20.510376
21.180471
371
20.465904
20.891272
372
20.554163
21.854069
373
20.539417
20.599249
374
20.567215
20.563559
375
20.453228
20.370560
376
20.437983
21.073029
377
20.415255
20.168070
378
20.491062
20.713860
379
20.478802
19.808945
380
20.434748
20.461617
381
20.447197
20.501854
382
20.485693
20.300303
383
20.590534
21.036024
384
20.583567
21.227268
385
20.466801
22.474966
386
20.551924
20.933502
387
20.550797
20.356073
388
20.504517
21.305183
389
20.497095
20.969870
390
20.584656
22.178967
391
20.589289
21.315163
392
20.544561
20.239464
393
20.541731
21.668407
394
20.500305
21.257271
395
20.501150
21.168016
396
20.528376
20.559822
397
20.525129
20.881901
398
20.526522
20.547657
399
20.441919
20.864910
400
20.493013
20.946150
401
20.499523
20.616106
402
20.468393
20.866760
403
20.511173
21.356649
404
20.513014
20.672491
405
20.449314
20.185658
406
20.484995
20.631012
407
20.547701
21.116159
408
20.516890
20.307775
409
20.571693
20.766439
410
20.478575
20.914413
411
20.550665
22.419510
412
20.561626
21.096813
413
20.628773
20.528494
414
20.529884
20.762648
415
20.548023
21.346928
416
20.512774
21.376934
417
20.496325
21.151041
418
20.551649
20.421577
419
20.567078
20.483103
420
20.475363
20.180418
421
20.503384
20.744904
422
20.536526
21.071440
423
20.461073
20.320944
424
20.527000
21.719448
425
20.492056
20.704920
426
20.442841
21.194595
427
20.473093
21.239716
428
20.480682
21.484175
429
20.527878
21.041607
430
20.504305
21.463398
431
20.524313
20.608906
432
20.670282
21.559641
433
20.553219
20.975891
434
20.486238
20.891129
435
20.530012
22.012146
436
20.501539
20.863115
437
20.580256
21.717224
438
20.492067
20.767260
439
20.500467
21.078976
440
20.549334
20.800041
441
20.527761
21.530228
442
20.518375
21.328007
443
20.460171
20.921850
444
20.524340
20.010700
445
20.506981
20.997822
446
20.508821
20.403872
447
20.493961
20.885704
448
20.562300
21.716665
449
20.470722
21.101768
450
20.451139
20.297087
451
20.451588
20.728174
452
20.448027
20.172487
453
20.459747
21.496222
454
20.450733
20.736216
455
20.441868
21.223970
456
20.397539
20.307388
457
20.416033
20.499640
458
20.478661
21.220243
459
20.505924
21.317669
460
20.471926
20.926838
461
20.512255
21.447382
462
20.475349
20.857805
463
20.450018
20.895466
464
20.502691
20.812387
465
20.392063
20.632687
466
20.487408
20.375412
467
20.436972
20.485043
468
20.510950
21.065561
469
20.456909
20.490477
470
20.565201
21.422085
471
20.543552
20.480860
472
20.549644
20.967941
473
20.500441
21.166311
474
20.461721
21.515579
475
20.371813
20.635532
476
20.422823
21.819668
477
20.476374
20.854437
478
20.577082
21.169706
479
20.551367
19.911783
480
20.465246
21.289417
481
20.560362
22.181309
482
20.497925
21.183908
483
20.458633
21.300220
484
20.521997
20.662741
485
20.504639
20.729963
486
20.603355
20.776943
487
20.541727
21.181351
488
20.591154
21.215719
489
20.529676
20.899109
490
20.507282
20.770788
491
20.448151
20.250435
492
20.436842
20.225109
493
20.470255
20.322369
494
20.464331
21.538782
495
20.489517
21.428734
496
20.442972
20.109222
497
20.490007
20.180887
498
20.515217
21.044651
499
20.516928
22.093464
500
20.443981
20.795254
In [79]:
# Plot the train/valid loss curves recorded during fit_one_cycle.
l.recorder.plot_losses()
In [86]:
# Persist the trained weights under the same hyperparameter-encoded
# name used by the load cell.
checkpoint = f"speedup_{optimizer}_batch_norm_{batch_norm}_{loss_func}_nlayers_{len(layers_sizes)}"
l.save(checkpoint)
In [7]:
def _prediction_frame(ds_type):
    """Run inference on one dataset split and summarize per-sample error.

    Returns a DataFrame with columns: prediction, target, abs_diff
    (absolute error), and APE (absolute percentage error, in percent).
    Note: APE is undefined (inf/NaN) when a target is 0.
    """
    preds, targets = l.get_preds(ds_type)
    preds = preds.reshape((-1,)).numpy()
    targets = targets.reshape((-1,)).numpy()
    df = pd.DataFrame({'prediction': preds, 'target': targets})
    df['abs_diff'] = np.abs(preds - targets)
    # Reuse abs_diff instead of recomputing |target - prediction|.
    df['APE'] = df.abs_diff / df.target * 100
    return df

# The original cell copy-pasted the same 8-line pipeline for both splits;
# factored into a single helper called twice.
val_df = _prediction_frame(fai.basic_data.DatasetType.Valid)
train_df = _prediction_frame(fai.basic_data.DatasetType.Train)
In [8]:
val_df.describe()  # validation error summary — the mean of the APE column is the model's MAPE
Out[8]:
prediction
target
abs_diff
APE
count
1000.000000
1000.000000
1000.000000
1000.000000
mean
1.583753
2.030625
0.761118
39.932007
std
1.351380
1.775879
0.890533
32.495262
min
0.048963
0.038235
0.000405
0.203580
25%
0.574952
0.684233
0.156295
16.902315
50%
1.131436
1.518185
0.448943
33.866941
75%
2.293313
2.785208
1.084316
57.161997
max
6.302624
9.649230
5.606473
384.061310
In [10]:
# Joint plot of predictions vs. targets on the training split
# (joint_plot is defined in utils.ipynb — contract not visible here).
joint_plot(train_df, f"Training dataset, {loss_func} loss")
In [11]:
# Fetch one schedule's features/label from the validation loader —
# presumably (0, 0, 0) identifies a single schedule; verify against
# get_schedule_data in utils.ipynb.
x, y = get_schedule_data(val_dl, (0, 0, 0))
In [ ]:
In [8]:
# Plot prediction vs. target for a single randomly chosen program.
# Fix: rand_prog was computed but never used — a hard-coded 'function46'
# was passed instead, so the random sampling was dead code.
rand_prog = 'function' + str(np.random.randint(0, 400))
joint_plot_one_program(val_dl, rand_prog, l.model)
In [ ]:
Content source: rbaghdadi/COLi
Similar notebooks: