In [1]:
from os import environ
environ['optimizer'] = 'Adam'
environ['num_workers'] = '2'
environ['batch_size'] = '2048'
environ['n_epochs'] = '1000'
environ['batch_norm'] = 'True'
environ['loss_func'] = 'MSE'
environ['layers'] = '500 300 120 80 30'
environ['dropouts'] = '0.2' + ' 0.3' * 4
environ['log'] = 'False'
environ['weight_decay'] = '0.01'
environ['cuda_device'] = 'cuda:0'
environ['dataset'] = 'data/speedup_dataset3.pkl'
%run utils.ipynb
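%run utils.ipynb consumes these environment strings and turns them into the typed globals used below (batch_size, layers_sizes, drops, log, batch_norm, loss_func, optimizer, ...). The parsing itself is not shown in this notebook; a minimal sketch of what it presumably does, assuming these are the variable names utils.ipynb defines:

# Sketch only: assumed parsing in utils.ipynb, not verbatim code.
batch_size = int(environ['batch_size'])
n_epochs = int(environ['n_epochs'])
batch_norm = environ['batch_norm'] == 'True'
log = environ['log'] == 'True'
loss_func = environ['loss_func']
optimizer = environ['optimizer']
layers_sizes = [int(s) for s in environ['layers'].split()]      # [500, 300, 120, 80, 30]
drops = [float(d) for d in environ['dropouts'].split()]         # [0.2, 0.3, 0.3, 0.3, 0.3]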
In [2]:
train_dl, val_dl, test_dl = train_dev_split(dataset, batch_size, num_workers, log=log)
db = fai.basic_data.DataBunch(train_dl, val_dl, test_dl, device=device)
In [3]:
input_size = train_dl.dataset.X.shape[1]
output_size = train_dl.dataset.Y.shape[1]
# Build the architecture and loss selected through the environment variables.
if batch_norm:
    model = Model_BN(input_size, output_size, hidden_sizes=layers_sizes, drops=drops)
else:
    model = Model(input_size, output_size)
if loss_func == 'MSE':
    criterion = nn.MSELoss()
else:
    criterion = mape_criterion
l = fai.Learner(db, model, loss_func=criterion, metrics=[mape_criterion, rmse_criterion],
                callback_fns=[partial(EarlyStoppingCallback, mode='min',
                                      monitor='mape_criterion', min_delta=0.1, patience=500)])
if optimizer == 'SGD':
    l.opt_func = optim.SGD
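mape_criterion and rmse_criterion come from utils.ipynb and are not shown here. A sketch of typical implementations consistent with the magnitudes in the logs below (MAPE reported in percent, RMSE on the raw speedup scale); these are assumptions, not the notebook's verbatim definitions:

import torch

def mape_criterion(pred, target):
    # Mean absolute percentage error, in percent.
    return (torch.abs((target - pred) / target)).mean() * 100

def rmse_criterion(pred, target):
    # Root mean squared error.
    return torch.sqrt(((pred - target) ** 2).mean())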
In [4]:
l = l.load("training_tuning_l2")
In [50]:
l.lr_find()
LR Finder is complete, type {learner_name}.recorder.plot() to see the graph.
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-50-d3b7136227cf> in <module>
----> 1 l.lr_find()
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/train.py in lr_find(learn, start_lr, end_lr, num_it, stop_div, **kwargs)
29 cb = LRFinder(learn, start_lr, end_lr, num_it, stop_div)
30 a = int(np.ceil(num_it/len(learn.data.train_dl)))
---> 31 learn.fit(a, start_lr, callbacks=[cb], **kwargs)
32
33 def to_fp16(learn:Learner, loss_scale:float=512., flat_master:bool=False)->Learner:
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/basic_train.py in fit(self, epochs, lr, wd, callbacks)
164 callbacks = [cb(self) for cb in self.callback_fns] + listify(callbacks)
165 fit(epochs, self.model, self.loss_func, opt=self.opt, data=self.data, metrics=self.metrics,
--> 166 callbacks=self.callbacks+callbacks)
167
168 def create_opt(self, lr:Floats, wd:Floats=0.)->None:
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/basic_train.py in fit(epochs, model, loss_func, opt, data, callbacks, metrics)
92 except Exception as e:
93 exception = e
---> 94 raise e
95 finally: cb_handler.on_train_end(exception)
96
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/basic_train.py in fit(epochs, model, loss_func, opt, data, callbacks, metrics)
89 cb_handler=cb_handler, pbar=pbar)
90 else: val_loss=None
---> 91 if cb_handler.on_epoch_end(val_loss): break
92 except Exception as e:
93 exception = e
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/callback.py in on_epoch_end(self, val_loss)
251 met.on_epoch_end(**self.state_dict)
252 self.state_dict['last_metrics'].append(met.metric)
--> 253 return np.any(self('epoch_end', False))
254
255 def on_train_end(self, exception:Union[bool,Exception])->None:
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/callback.py in __call__(self, cb_name, call_mets, **kwargs)
186 "Call through to all of the `CallbakHandler` functions."
187 if call_mets: [getattr(met, f'on_{cb_name}')(**self.state_dict, **kwargs) for met in self.metrics]
--> 188 return [getattr(cb, f'on_{cb_name}')(**self.state_dict, **kwargs) for cb in self.callbacks]
189
190 def on_train_begin(self, epochs:int, pbar:PBar, metrics:MetricFuncList)->None:
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/callback.py in <listcomp>(.0)
186 "Call through to all of the `CallbakHandler` functions."
187 if call_mets: [getattr(met, f'on_{cb_name}')(**self.state_dict, **kwargs) for met in self.metrics]
--> 188 return [getattr(cb, f'on_{cb_name}')(**self.state_dict, **kwargs) for cb in self.callbacks]
189
190 def on_train_begin(self, epochs:int, pbar:PBar, metrics:MetricFuncList)->None:
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/callbacks/tracker.py in on_epoch_end(self, epoch, **kwargs)
69 def on_epoch_end(self, epoch, **kwargs:Any)->None:
70 "Compare the value monitored to its best score and maybe stop training."
---> 71 current = self.get_monitor_value()
72 if current is None: return
73 if self.operator(current - self.min_delta, self.best):
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/fastai/callbacks/tracker.py in get_monitor_value(self)
45 "Pick the monitored value."
46 values = {'trn_loss':self.learn.recorder.losses[-1:][0].cpu().numpy(),
---> 47 'val_loss':self.learn.recorder.val_losses[-1:][0]}
48 for i, name in enumerate(self.learn.recorder.names[3:]):
49 values[name]=self.learn.recorder.metrics[-1:][0][i]
IndexError: list index out of range
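The IndexError comes from the EarlyStoppingCallback attached via callback_fns: during lr_find fastai skips validation (the val_loss=None branch visible in the trace), so learn.recorder.val_losses is empty and get_monitor_value indexes into an empty list. A minimal workaround sketch, reusing the db, model, and criterion defined above: run the LR finder on a throwaway Learner that has no early-stopping callback.

lr_probe = fai.Learner(db, model, loss_func=criterion)  # no EarlyStoppingCallback
lr_probe.lr_find()
lr_probe.recorder.plot()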
In [ ]:
l.recorder.plot()
In [8]:
losses = []
# Sweep four learning rates; a fresh model and learner are built after each
# run so every rate starts from an untrained network.
for lr in [0.1, 0.01, 0.001, 0.0001]:
    l.fit_one_cycle(150, lr)
    losses.append(l.recorder.losses)
    model = Model_BN(input_size, output_size, hidden_sizes=layers_sizes, drops=drops)
    l = fai.Learner(db, model, loss_func=criterion, metrics=[mape_criterion, rmse_criterion],
                    callback_fns=[partial(EarlyStoppingCallback, mode='min',
                                          monitor='mape_criterion', min_delta=0.1, patience=500)])
Total time: 11:00
epoch  train_loss  valid_loss  mape_criterion  rmse_criterion
1      74.129715   71.761269   71.761269       0.825816
2      68.933952   66.092979   66.092979       0.657644
3      67.739174   65.797287   65.797287       0.597088
...    (epochs 4-148 elided)
149    78.227036   78.411560   78.411560       0.856556
150    78.185745   78.407059   78.407059       0.857438
Total time: 11:07
epoch  train_loss  valid_loss  mape_criterion  rmse_criterion
1      78.732628   74.668945   74.668945       0.785543
2      73.747482   68.938980   68.938980       0.721840
3      71.185043   68.270363   68.270363       0.703981
...    (epochs 4-148 elided)
149    33.290821   65.708214   65.708214       0.574521
150    33.330116   71.221626   71.221626       0.586031
Total time: 10:59
epoch  train_loss  valid_loss  mape_criterion  rmse_criterion
1      85.475044   78.203087   78.203087       0.859930
2      81.747810   77.755051   77.755051       0.858159
3      78.602066   75.115341   75.115341       0.829927
...    (epochs 4-148 elided)
149    29.842262   26.606310   26.606310       0.506298
150    29.795200   25.286749   25.286749       0.503587
Total time: 11:29
epoch  train_loss   valid_loss  mape_criterion  rmse_criterion
1      104.514160   77.406235   77.406235       0.669803
2      98.514900    77.565689   77.565689       0.644120
3      95.915794    77.693497   77.693497       0.704989
...    (epochs 4-148 elided)
149    33.531635    27.995834   27.995834       0.503392
150    33.543018    26.573143   26.573143       0.492198
In [9]:
len(losses[3])
Out[9]:
37200
In [11]:
for i in range(4):
    losses[i] = [sum(losses[i][j:j+120])/120 for j in range(0, len(losses[i]), 120)]
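recorder.losses stores one training loss per batch, so the 37200 entries reported above for losses[3] match 150 epochs of 248 batches each (the per-epoch batch count is visible in the progress line near the end of the long run below). The window average compresses those per-batch values into one point per 120 iterations for plotting. A standalone version of the same smoothing, written so the final (possibly shorter) window is divided by its true length:

def smooth(vals, window=120):
    # Average consecutive windows of per-batch losses for plotting.
    chunks = (vals[j:j + window] for j in range(0, len(vals), window))
    return [sum(chunk) / len(chunk) for chunk in chunks]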
In [36]:
len(losses[0])
Out[36]:
100
In [14]:
plt.rcParams['figure.figsize'] = [40, 30]
In [30]:
df = pd.DataFrame()
df['0.1'] = losses[0]
df['0.01'] = losses[1]
df['0.001'] = losses[2]
df['0.0001'] = losses[3]
df['x'] = df.index
df1 = df
plt.plot('x', '0.1', data=df1, color='blue')
plt.plot('x', '0.01', data=df1, color='orange')
plt.plot('x', '0.001', data=df1, color='green')
plt.plot('x', '0.0001', data=df1, color='red')
plt.ylabel('loss', fontsize=30)
plt.yticks(fontsize=30)
plt.xlabel('epoch', fontsize=30)
plt.xticks(fontsize=30)
plt.legend(prop={"size": 35}, title="learning rate", title_fontsize=30)
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/ipykernel_launcher.py:12: RuntimeWarning: Second argument '0.1' is ambiguous: could be a color spec but is in data; using as data. Either rename the entry in data or use three arguments to plot.
if sys.path[0] == '':
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/ipykernel_launcher.py:13: RuntimeWarning: Second argument '0.01' is ambiguous: could be a color spec but is in data; using as data. Either rename the entry in data or use three arguments to plot.
del sys.path[0]
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/ipykernel_launcher.py:14: RuntimeWarning: Second argument '0.001' is ambiguous: could be a color spec but is in data; using as data. Either rename the entry in data or use three arguments to plot.
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/ipykernel_launcher.py:15: RuntimeWarning: Second argument '0.0001' is ambiguous: could be a color spec but is in data; using as data. Either rename the entry in data or use three arguments to plot.
from ipykernel import kernelapp as app
Out[30]:
<matplotlib.legend.Legend at 0x7f62c128ee10>
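The RuntimeWarnings above are matplotlib flagging that the positionally passed column names ('0.1', '0.01', ...) could be parsed as color specs. A warning-free sketch of the same plot using keyword arguments:

for col, color in zip(['0.1', '0.01', '0.001', '0.0001'],
                      ['blue', 'orange', 'green', 'red']):
    plt.plot(df1['x'], df1[col], color=color, label=col)
plt.legend(prop={"size": 35}, title="learning rate", title_fontsize=30)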
In [34]:
lr = 1e-03
In [ ]:
l.fit_one_cycle(500, lr)
95.80% [479/500 38:19<01:40]
epoch  train_loss  valid_loss  mape_criterion  rmse_criterion
1      0.475406    0.420576    81.282562       0.647358
2      0.405994    0.366841    96.016930       0.604133
3      0.344525    0.339823    118.622673      0.581630
...    (epochs 4-477 elided)
478    0.054517    0.054195    43.240952       0.231611
479    0.057016    0.057290    45.269768       0.239083
48.39% [120/248 00:02<00:02 0.0569]
In [38]:
l.recorder.plot_losses()
In [39]:
l.save("training_tuning_l2")
In [5]:
val_df = get_results_df(val_dl, l.model)
train_df = get_results_df(train_dl, l.model)
test_df = get_results_df(test_dl, l.model)
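get_results_df is defined in utils.ipynb and not shown here. Judging by the columns used below (prediction, target, abs_diff, APE, name), it runs the model over a DataLoader and tabulates per-sample results. A rough sketch under those assumptions (the real helper also attaches the program name, which this sketch omits; torch, pd, and device are assumed to be in scope):

def get_results_df_sketch(dl, model):
    # Sketch only: assumed behavior of utils.ipynb's get_results_df.
    rows = []
    model.eval()
    with torch.no_grad():
        for x, y in dl:
            preds = model(x.to(device)).cpu().view(-1)
            for p, t in zip(preds.tolist(), y.view(-1).tolist()):
                rows.append({'prediction': p,
                             'target': t,
                             'abs_diff': abs(p - t),
                             'APE': abs(p - t) / abs(t) * 100})
    return pd.DataFrame(rows)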
In [49]:
df = val_df
In [51]:
df[['prediction', 'target', 'abs_diff', 'APE']].describe()
Out[51]:
          prediction        target      abs_diff           APE
count   10000.000000  10000.000000  10000.000000  10000.000000
mean        0.482927      0.469633      0.111734     45.108513
std         0.679316      0.748854      0.203059     59.437885
min         0.000000      0.011766      0.000004      0.000876
25%         0.160788      0.119905      0.030099     10.576813
50%         0.342015      0.299996      0.059738     24.266205
75%         0.629870      0.609846      0.122063     59.659491
max         8.527840     11.273418      3.859247    831.365295
In [52]:
df2 = df
joint_plot(df2, f"Validation dataset, {loss_func} loss")
In [14]:
df_ = df.sort_values(by=["APE"])
df_['x'] = range(len(df_))
In [15]:
plt.plot('x', 'APE', 'bo', data=df_)
plt.xlabel('scheduled program')
plt.ylabel('APE')
plt.legend()
Out[15]:
<matplotlib.legend.Legend at 0x7f57e51ff6d8>
In [134]:
plt.plot('x', 'APE', 'go', data=df_)
Out[134]:
[<matplotlib.lines.Line2D at 0x7f67e6ceb2e8>]
In [6]:
df = pd.concat([val_df, test_df])
In [7]:
df1 = df[['prediction', 'target', 'name']]
In [8]:
def evaluation_df(df1, n=1):
    """For each program, measure how close the best of the model's top-n
    predicted schedules comes to the program's true best speedup."""
    eval_df = pd.DataFrame(columns=['performance', 'achieved_speedup', 'max_speedup', 'schedules_count'])
    for prog in set(df1.name):
        # Rank this program's schedules by predicted speedup, best first.
        tmp_df = df1[df1.name == prog].sort_values(by=["prediction"], ascending=False)
        # Best true speedup among the top-n predictions vs. the overall best.
        speedup = tmp_df.iloc[list(range(n))].target.max()
        perf = speedup / tmp_df.target.max()
        new = pd.DataFrame()
        new['achieved_speedup'] = [speedup]
        new['performance'] = [perf]
        new['max_speedup'] = [tmp_df.target.max()]
        new['schedules_count'] = [len(tmp_df.target)]
        eval_df = pd.concat([eval_df, new], ignore_index=True)
    return eval_df
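A tiny synthetic check of evaluation_df with hypothetical data: the model's single best-predicted schedule achieves a 1.8x true speedup against a 2.0x optimum, so the performance score is 0.9.

demo = pd.DataFrame({'name': ['matmul'] * 3,          # hypothetical program
                     'prediction': [0.9, 0.5, 0.1],
                     'target': [1.8, 2.0, 1.0]})
evaluation_df(demo, n=1)  # performance = 1.8 / 2.0 = 0.9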
In [13]:
eval_df = evaluation_df(df1, n=5)
/data/scratch/henni-mohammed/anaconda3/lib/python3.7/site-packages/ipykernel_launcher.py:20: FutureWarning: Sorting because non-concatenation axis is not aligned. A future version
of pandas will change to not sort by default.
To accept the future behavior, pass 'sort=False'.
To retain the current behavior and silence the warning, pass 'sort=True'.
In [14]:
eval_df.performance.describe()
Out[14]:
count 29.000000
mean 0.893790
std 0.096194
min 0.553684
25% 0.835778
50% 0.907576
75% 0.962869
max 1.000000
Name: performance, dtype: float64