In [1]:
import os, sys
sys.path.insert(0, "/Users/magnusax/AutoML")
from gazer import GazerMetaLearner
from gazer.optimization import param_search
from sklearn.datasets import load_digits
from scipy.stats import uniform, randint
from sklearn.model_selection import train_test_split
from keras.utils import to_categorical
Using TensorFlow backend.
/Users/magnusax/AutoML/gazer/__init__.py:16: RuntimeWarning: xgboost import failed; 'xgboost' will be unavailable.
In [2]:
X, y = load_digits(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.5, random_state=0)
#y_train = to_categorical(y_train)
#y_test = to_categorical(y_test)
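The one-hot encoding above is left commented out because the targets are passed to the learner as plain integer labels. The snippet below is only a reminder of what to_categorical would produce if a standalone Keras model with categorical_crossentropy were trained directly (illustrative example, not part of the search):

import numpy as np
from keras.utils import to_categorical

# Three integer digit labels -> rows of a 10-column one-hot matrix
print(to_categorical(np.array([0, 3, 9]), num_classes=10))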
In [3]:
learner = GazerMetaLearner(
    method='select',
    estimators=['neuralnet'],
    verbose=0)
param_grid = {
    'epochs': randint(5, 31),           # scipy's randint(low, high) samples integers in [low, high)
    'input_units': randint(200, 1201),
    'dropout': [True, False],
    'p': uniform(0, 0.7),               # scipy's uniform(loc, scale) samples floats in [loc, loc + scale]
    'batch_size': randint(16, 129),
    'batch_norm': [False, True],
    'n_hidden': randint(2, 4),
    'decay_units': [True, False],
    'learning_rate': [1e-3 * x for x in range(1, 11)],
    'optimizer': ['adam', 'adagrad'],
    'gamma': uniform(1.5, 1.0),         # i.e. values in [1.5, 2.5]
}
data = {
    'train': (X_train, y_train),
    'val': (X_test, y_test)
}
type_of_search = 'random'
n_iter = 100
name = learner.names[0]
modelfiles = ["tmp/model{}.hdf5".format(i) for i in range(1,6)]
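With type_of_search='random' and n_iter=100, the search draws 100 parameter settings from param_grid: scipy distributions are sampled via their .rvs() method and plain lists uniformly. The sketch below uses scikit-learn's ParameterSampler purely to illustrate what such draws look like; it is not gazer's internal implementation.

from sklearn.model_selection import ParameterSampler

# Three example draws from the same grid the random search will sample from
for params in ParameterSampler(param_grid, n_iter=3, random_state=0):
    print(params)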
In [4]:
config, df = param_search(learner, param_grid, data,
                          type_of_search=type_of_search,
                          n_iter=n_iter, name=name,
                          modelfiles=modelfiles, top_n=5)
In [5]:
df
Out[5]:
    batch_norm  batch_size  decay_units  dropout  epochs  gamma     input_units  learning_rate  n_hidden  optimizer  p         train_loss  val_loss  train_score  val_score
32  True        92          True         False    25      2.164772  718          0.003          3         adam       0.199357  0.0004      0.0531    1.0000       0.9844
20  True        50          False        True     9       2.157223  761          0.007          2         adagrad    0.406312  0.0008      0.0637    1.0000       0.9833
31  True        115         True         False    28      2.451649  892          0.003          3         adam       0.265402  0.0005      0.0575    1.0000       0.9833
7   True        65          False        False    14      1.982446  671          0.009          3         adagrad    0.666452  0.0005      0.0662    1.0000       0.9811
45  True        39          False        True     18      1.617226  359          0.005          3         adagrad    0.277546  0.0003      0.0688    1.0000       0.9811
5   True        92          True         True     25      1.670565  936          0.005          3         adam       0.346420  0.0018      0.0948    0.9989       0.9800
72  True        86          True         False    24      2.440254  452          0.003          2         adam       0.052628  0.0005      0.0623    1.0000       0.9800
61  True        83          False        False    20      2.445365  305          0.001          2         adam       0.691845  0.0006      0.0705    1.0000       0.9800
88  False       22          True         False    30      1.970324  646          0.001          3         adam       0.400966  0.0000      0.1147    1.0000       0.9800
38  True        40          False        True     30      2.372189  827          0.005          2         adagrad    0.237888  0.0000      0.0858    1.0000       0.9800
89  True        93          True         False    16      1.802900  552          0.003          2         adam       0.689255  0.0007      0.0652    1.0000       0.9800
86  False       58          False        False    24      1.515386  266          0.002          2         adagrad    0.218241  0.0062      0.0751    1.0000       0.9789
19  True        49          True         False    10      1.655869  1167         0.003          3         adagrad    0.457649  0.0008      0.0625    1.0000       0.9789
87  True        34          False        False    16      1.912526  568          0.001          3         adam       0.074532  0.0002      0.0898    1.0000       0.9789
23  False       125         True         False    15      1.685536  453          0.007          2         adagrad    0.138185  0.0039      0.0747    1.0000       0.9789
11  True        107         False        True     13      1.896324  819          0.004          2         adagrad    0.634315  0.0152      0.0792    0.9944       0.9789
36  True        51          False        False    14      1.920328  584          0.009          2         adagrad    0.053240  0.0003      0.0954    1.0000       0.9778
41  False       67          False        True     28      2.081769  1119         0.002          3         adam       0.029261  0.0000      0.1230    1.0000       0.9778
6   True        101         True         False    25      1.673758  271          0.004          2         adam       0.636489  0.0004      0.0600    1.0000       0.9778
66  True        107         True         False    19      1.616126  773          0.001          3         adam       0.163225  0.0003      0.0826    1.0000       0.9778
8   False       71          True         False    22      2.441546  1156         0.004          2         adagrad    0.408861  0.0014      0.0716    1.0000       0.9778
52  True        126         True         False    21      1.834490  932          0.001          2         adam       0.010813  0.0027      0.0769    1.0000       0.9778
27  False       101         False        False    15      1.880408  882          0.001          3         adam       0.298583  0.0008      0.1105    1.0000       0.9766
33  False       115         False        True     13      2.127296  776          0.006          2         adagrad    0.525248  0.0206      0.1052    0.9978       0.9766
34  True        19          True         False    11      1.576127  304          0.007          3         adagrad    0.145355  0.0058      0.0795    1.0000       0.9766
44  True        61          False        False    13      1.586635  584          0.002          3         adagrad    0.319783  0.0003      0.0766    1.0000       0.9766
97  True        56          False        False    22      1.769392  707          0.001          2         adam       0.308908  0.0010      0.0831    1.0000       0.9766
80  False       47          True         False    21      1.706127  1022         0.002          3         adam       0.063555  0.0000      0.0978    1.0000       0.9766
78  False       102         True         False    26      1.751615  359          0.003          2         adam       0.525131  0.0004      0.1041    1.0000       0.9766
58  False       128         False        True     21      1.504867  620          0.003          2         adagrad    0.044342  0.0015      0.0891    1.0000       0.9755
..  ...         ...         ...          ...      ...     ...       ...          ...            ...       ...        ...       ...         ...       ...          ...
83  True        81          False        True     22      1.750174  234          0.006          3         adam       0.036993  0.0248      0.1830    0.9911       0.9588
64  True        95          False        True     29      1.520706  360          0.008          3         adam       0.058633  0.0092      0.2099    0.9989       0.9566
53  False       23          True         False    8       2.312634  406          0.001          3         adam       0.518446  0.0314      0.1513    0.9878       0.9544
62  False       100         True         False    14      2.474577  250          0.010          3         adagrad    0.640502  0.0509      0.1627    0.9911       0.9511
29  True        107         True         True     9       1.574681  477          0.009          2         adam       0.157113  0.0535      0.2420    0.9878       0.9477
15  True        124         False        True     23      2.137996  1141         0.003          3         adam       0.361793  0.0528      0.3778    0.9878       0.9477
39  True        27          True         False    25      2.018783  478          0.007          3         adam       0.150018  0.0400      0.2091    0.9889       0.9399
73  False       92          True         True     29      2.271539  218          0.003          2         adagrad    0.216845  0.1048      0.1905    0.9777       0.9388
71  True        73          False        False    7       1.802877  424          0.007          2         adam       0.258735  0.1437      0.4429    0.9644       0.9155
26  False       97          True         True     9       2.470406  829          0.002          2         adagrad    0.475919  0.4155      0.4959    0.9042       0.8799
90  True        100         True         True     14      2.236906  655          0.001          3         adagrad    0.377263  0.3788      0.4926    0.9265       0.8754
76  True        25          False        False    22      2.353056  407          0.005          3         adam       0.582093  0.3525      0.7134    0.9232       0.8699
54  False       36          True         True     6       2.387656  1068         0.001          3         adam       0.602475  2.0612      2.0848    0.5568       0.5117
21  False       50          True         True     19      1.826826  835          0.002          3         adagrad    0.679326  2.2397      2.2459    0.3853       0.3660
16  False       114         True         False    13      2.294137  598          0.010          2         adagrad    0.451073  13.1386     12.5704   0.1849       0.2191
25  False       66          False        True     6       1.794173  499          0.010          2         adagrad    0.146728  12.8693     12.9088   0.2016       0.1991
50  False       48          False        False    30      2.035419  723          0.008          3         adam       0.312689  14.5027     14.4507   0.1002       0.1034
18  False       92          True         True     24      1.581434  1108         0.009          2         adam       0.444805  14.5924     14.4686   0.0947       0.1023
70  False       70          False        False    12      1.555037  468          0.010          2         adam       0.092374  14.6463     14.4686   0.0913       0.1023
79  False       46          False        False    21      1.705495  921          0.005          2         adam       0.487813  14.5386     14.4686   0.0980       0.1023
63  False       29          False        True     12      2.245408  686          0.010          3         adagrad    0.011566  14.5386     14.4686   0.0980       0.1023
92  False       58          False        False    30      1.524129  1089         0.010          2         adagrad    0.221097  14.6463     14.4686   0.0913       0.1023
37  False       63          False        True     13      1.950359  469          0.010          3         adam       0.166119  14.4668     14.5045   0.1024       0.1001
69  False       36          True         False    28      2.011719  1025         0.007          2         adagrad    0.276951  14.4668     14.5224   0.1024       0.0990
49  False       67          False        False    24      2.324231  1093         0.006          2         adam       0.305027  14.5206     14.5224   0.0991       0.0990
57  False       98          False        False    19      1.846793  965          0.005          2         adagrad    0.235114  14.4668     14.5224   0.1024       0.0990
84  False       44          True         True     19      1.600724  949          0.010          2         adagrad    0.405949  14.5206     14.5224   0.0991       0.0990
35  False       126         True         True     9       2.369172  440          0.009          3         adam       0.630923  2.2994      2.3102    0.1125       0.0868
85  False       61          False        False    22      2.299105  1082         0.008          2         adam       0.243700  14.3053     14.7196   0.1125       0.0868
12  False       102         False        True     29      1.749254  1170         0.008          3         adagrad    0.472058  14.2335     14.7555   0.1169       0.0845
100 rows × 15 columns
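df is an ordinary pandas DataFrame with one row per sampled configuration, already ordered by validation score, so the usual pandas tools apply. A few illustrative calls (the CSV path is just an example):

df.head()                                                  # best configurations
df.groupby(['optimizer', 'n_hidden'])['val_score'].mean()  # quick aggregate view
df.to_csv('tmp/search_results.csv')                        # persist for later analysis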
In [6]:
from keras.models import Sequential, load_model
In [7]:
model = load_model(modelfiles[0])
In [8]:
model.evaluate(X_test, to_categorical(y_test))
899/899 [==============================] - 5s 5ms/step
Out[8]:
[0.05312484056671752, 0.98442714126807562]
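The aggregate loss and accuracy can be broken down per digit class with standard scikit-learn metrics; nothing here is gazer-specific, the predictions come straight from the reloaded Keras model:

import numpy as np
from sklearn.metrics import classification_report, confusion_matrix

y_pred = np.argmax(model.predict(X_test), axis=1)  # class index with highest probability
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))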
In [9]:
model.summary()
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense_144 (Dense) (None, 718) 46670
_________________________________________________________________
activation_112 (Activation) (None, 718) 0
_________________________________________________________________
batch_normalization_72 (Batc (None, 718) 2872
_________________________________________________________________
dense_145 (Dense) (None, 331) 237989
_________________________________________________________________
activation_113 (Activation) (None, 331) 0
_________________________________________________________________
batch_normalization_73 (Batc (None, 331) 1324
_________________________________________________________________
dense_146 (Dense) (None, 153) 50796
_________________________________________________________________
activation_114 (Activation) (None, 153) 0
_________________________________________________________________
batch_normalization_74 (Batc (None, 153) 612
_________________________________________________________________
dense_147 (Dense) (None, 70) 10780
_________________________________________________________________
activation_115 (Activation) (None, 70) 0
_________________________________________________________________
batch_normalization_75 (Batc (None, 70) 280
_________________________________________________________________
dense_148 (Dense) (None, 10) 710
=================================================================
Total params: 352,033
Trainable params: 349,489
Non-trainable params: 2,544
_________________________________________________________________
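The remaining files in modelfiles hold the other top-5 configurations saved by param_search; they can be compared with the same evaluate call, assuming the checkpoints are still on disk:

# Score all five checkpointed models on the held-out split
y_test_cat = to_categorical(y_test)
for path in modelfiles:
    m = load_model(path)
    loss, acc = m.evaluate(X_test, y_test_cat, verbose=0)
    print("{}: loss = {:.4f}, accuracy = {:.4f}".format(path, loss, acc))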
In [ ]:
Content source: magnusax/ml-meta-wrapper