In [1]:
from environment import ex
import clades
import pandas as pd
import os
# create a new Sacred run object, which carries the config dictionary
n1e1p1b2_dict = ex.run(config_updates={'population_size': 30,
                                       'environment': 'lab3000_n1e1p1b2',
                                       'max_train_time': 120})
# create a new clade object, passing in the config dictionary
n1e1p1b2_clade = clades.GAFC1(n1e1p1b2_dict.config)
# loading the data creates train, test, and validation sets
# and also creates a folder to store the output of clade activity
n1e1p1b2_clade.load_data()
Using TensorFlow backend.
WARNING - DLGn1e1p1 - No observers have been added to this run
INFO - DLGn1e1p1 - Running command 'main'
INFO - DLGn1e1p1 - Started
INFO - DLGn1e1p1 - Completed after 0:00:00
Vectorizing sequence data...
x_ shape: (8982, 10000)
46 classes
Converting class vector to binary class matrix (for use with categorical_crossentropy)
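The run object returned by ex.run() carries the fully resolved Sacred configuration, so the overrides above can be checked directly (standard Sacred API: Run.config is a plain dict):

```python
# inspect the resolved config on the Sacred run object
print(n1e1p1b2_dict.config['population_size'])  # 30 genes per generation
print(n1e1p1b2_dict.config['max_train_time'])   # 120-second training budget per model
```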
In [2]:
n1e1p1b2_clade.current_generation
Out[2]:
0
In [3]:
n1e1p1b2_clade.spawn()
In [4]:
n1e1p1b2_clade.genotypes
Out[4]:
|   | LR | activations | batch_size | epochs | gene_name | layer_units | loss | model_name | nb_layers | optimizer |
|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.091625 | [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta... | 512 | 16 | lab3000_n1e1p1b2+Gen0+gene0 | [494, 283, 25, 33, 308, 95, 59, 186, 500] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene0+model.h5 | 9 | RMSProp |
| 0 | 0.001332 | [softmax, sigmoid, softsign, hard_sigmoid, elu... | 128 | 15 | lab3000_n1e1p1b2+Gen0+gene1 | [270, 367, 409, 280, 94, 345, 400, 452, 475, 1... | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene1+model.h5 | 11 | Nadam |
| 0 | 0.053918 | [softplus, hard_sigmoid, softplus] | 32 | 19 | lab3000_n1e1p1b2+Gen0+gene2 | [14, 392, 25] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene2+model.h5 | 3 | RMSProp |
| 0 | 0.005913 | [softplus, elu, elu, softmax, softsign, softma... | 512 | 12 | lab3000_n1e1p1b2+Gen0+gene3 | [85, 216, 95, 39, 92, 466, 435, 399, 124, 197,... | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene3+model.h5 | 12 | Adamax |
| 0 | 0.483776 | [softmax, elu, softmax, softsign, softmax, lin... | 128 | 2 | lab3000_n1e1p1b2+Gen0+gene4 | [511, 278, 417, 457, 37, 333, 331, 299, 16] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene4+model.h5 | 9 | Adamax |
| 0 | 0.053448 | [tanh, hard_sigmoid, sigmoid, softsign, linear... | 8 | 5 | lab3000_n1e1p1b2+Gen0+gene5 | [149, 159, 2, 125, 155, 351, 99, 384, 351, 263] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene5+model.h5 | 10 | Adagrad |
| 0 | 0.002698 | [linear, softplus, relu] | 512 | 9 | lab3000_n1e1p1b2+Gen0+gene6 | [139, 158, 491] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene6+model.h5 | 3 | Adam |
| 0 | 0.026426 | [softplus, softmax, tanh, softsign, hard_sigmo... | 16 | 15 | lab3000_n1e1p1b2+Gen0+gene7 | [487, 36, 144, 3, 250, 508, 244, 240, 490, 480... | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene7+model.h5 | 12 | sgd |
| 0 | 0.119733 | [tanh, softsign, softsign, softmax, sigmoid, r... | 8 | 5 | lab3000_n1e1p1b2+Gen0+gene8 | [252, 481, 165, 512, 323, 85, 25, 415, 351, 123] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene8+model.h5 | 10 | sgd |
| 0 | 0.429790 | [tanh, tanh, elu, relu, softsign, softmax, elu] | 16 | 2 | lab3000_n1e1p1b2+Gen0+gene9 | [306, 484, 292, 411, 183, 127, 402] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene9+model.h5 | 7 | Adam |
| 0 | 0.129539 | [sigmoid, softsign, hard_sigmoid, softplus, so... | 32 | 13 | lab3000_n1e1p1b2+Gen0+gene10 | [62, 123, 424, 41, 32, 147, 178, 412, 161] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene10+model.h5 | 9 | Adagrad |
| 0 | 0.013870 | [linear, hard_sigmoid] | 128 | 16 | lab3000_n1e1p1b2+Gen0+gene11 | [4, 204] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene11+model.h5 | 2 | Nadam |
| 0 | 0.026689 | [sigmoid, elu, elu, softmax, softsign, sigmoid] | 64 | 5 | lab3000_n1e1p1b2+Gen0+gene12 | [130, 272, 291, 170, 511, 381] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene12+model.h5 | 6 | RMSProp |
| 0 | 0.002635 | [softsign] | 128 | 5 | lab3000_n1e1p1b2+Gen0+gene13 | [287] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene13+model.h5 | 1 | Adam |
| 0 | 0.442461 | [hard_sigmoid, elu, tanh, hard_sigmoid, sigmoid] | 8 | 19 | lab3000_n1e1p1b2+Gen0+gene14 | [444, 290, 174, 391, 327] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene14+model.h5 | 5 | Adamax |
| 0 | 0.344473 | [softsign, relu, linear, hard_sigmoid, softmax... | 32 | 6 | lab3000_n1e1p1b2+Gen0+gene15 | [343, 390, 472, 325, 386, 318, 162, 411, 357, ... | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene15+model.h5 | 12 | Adagrad |
| 0 | 0.008660 | [softsign, relu, relu, linear, elu, tanh, soft... | 512 | 16 | lab3000_n1e1p1b2+Gen0+gene16 | [386, 378, 42, 114, 154, 287, 178, 101, 202, 2... | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene16+model.h5 | 11 | Adamax |
| 0 | 0.318930 | [elu, tanh, hard_sigmoid, sigmoid, elu, relu, ... | 128 | 10 | lab3000_n1e1p1b2+Gen0+gene17 | [104, 482, 283, 188, 56, 473, 457, 90, 340, 28... | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene17+model.h5 | 11 | Nadam |
| 0 | 0.004464 | [softplus, softsign, softsign, sigmoid, sigmoi... | 512 | 19 | lab3000_n1e1p1b2+Gen0+gene18 | [308, 456, 483, 16, 163, 106, 289, 242, 86, 20... | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene18+model.h5 | 11 | sgd |
| 0 | 0.170069 | [softplus, softmax, linear, tanh, softmax, sof... | 32 | 6 | lab3000_n1e1p1b2+Gen0+gene19 | [292, 138, 107, 35, 338, 367, 313, 308, 133, 36] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene19+model.h5 | 10 | Nadam |
| 0 | 0.012467 | [hard_sigmoid, elu, elu, softmax, softplus, so... | 64 | 6 | lab3000_n1e1p1b2+Gen0+gene20 | [234, 304, 99, 323, 474, 201] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene20+model.h5 | 6 | Adam |
| 0 | 0.009289 | [linear, linear, linear, hard_sigmoid, softsig... | 256 | 16 | lab3000_n1e1p1b2+Gen0+gene21 | [130, 333, 452, 435, 469, 237, 468, 437, 266] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene21+model.h5 | 9 | Adadelta |
| 0 | 0.297231 | [linear, linear, linear, softmax, linear, soft... | 128 | 4 | lab3000_n1e1p1b2+Gen0+gene22 | [244, 29, 155, 505, 28, 328, 75] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene22+model.h5 | 7 | RMSProp |
| 0 | 0.281495 | [sigmoid, softplus, softplus] | 8 | 8 | lab3000_n1e1p1b2+Gen0+gene23 | [245, 282, 171] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene23+model.h5 | 3 | Adamax |
| 0 | 0.354097 | [softplus, hard_sigmoid, elu] | 8 | 9 | lab3000_n1e1p1b2+Gen0+gene24 | [416, 89, 497] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene24+model.h5 | 3 | Adamax |
| 0 | 0.252698 | [softsign, relu, softmax, softsign, elu] | 8 | 9 | lab3000_n1e1p1b2+Gen0+gene25 | [96, 345, 345, 198, 276] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene25+model.h5 | 5 | Adam |
| 0 | 0.034179 | [elu, elu, softplus, hard_sigmoid, softmax, li... | 32 | 2 | lab3000_n1e1p1b2+Gen0+gene26 | [245, 331, 431, 115, 97, 168, 235, 255, 247, 429] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene26+model.h5 | 10 | Adadelta |
| 0 | 0.006189 | [hard_sigmoid, softplus, softsign, relu, hard_... | 32 | 2 | lab3000_n1e1p1b2+Gen0+gene27 | [182, 150, 56, 501, 278, 406] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene27+model.h5 | 6 | Nadam |
| 0 | 0.394899 | [softplus, softmax, hard_sigmoid, softplus] | 512 | 17 | lab3000_n1e1p1b2+Gen0+gene28 | [462, 81, 243, 499] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene28+model.h5 | 4 | Adam |
| 0 | 0.334299 | [sigmoid, softmax, softmax, sigmoid, elu, elu] | 32 | 8 | lab3000_n1e1p1b2+Gen0+gene29 | [206, 375, 283, 394, 415, 271] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene29+model.h5 | 6 | sgd |
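Each row above is one gene: a learning rate, per-layer activations and widths, a batch size, an epoch budget, a loss, and an optimizer. The value ranges suggest spawn() samples each field independently; a hypothetical sketch with ranges inferred from the Gen0 table (the actual clades.GAFC1.spawn() may differ):

```python
import random

ACTIVATIONS = ['relu', 'sigmoid', 'elu', 'hard_sigmoid', 'tanh',
               'softmax', 'softsign', 'softplus', 'linear']
OPTIMIZERS = ['sgd', 'RMSProp', 'Adam', 'Adamax', 'Adadelta', 'Adagrad', 'Nadam']

def random_gene(env='lab3000_n1e1p1b2', gen=0, idx=0):
    nb_layers = random.randint(1, 12)
    return {
        'LR': 10 ** random.uniform(-3, -0.3),  # roughly 0.001-0.5
        'activations': [random.choice(ACTIVATIONS) for _ in range(nb_layers)],
        'batch_size': random.choice([8, 16, 32, 64, 128, 256, 512]),
        'epochs': random.randint(2, 19),
        'gene_name': '%s+Gen%d+gene%d' % (env, gen, idx),
        'layer_units': [random.randint(2, 512) for _ in range(nb_layers)],
        'loss': 'categorical_crossentropy',
        'model_name': '%s+Gen%d+gene%d+model.h5' % (env, gen, idx),
        'nb_layers': nb_layers,
        'optimizer': random.choice(OPTIMIZERS),
    }
```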
In [5]:
n1e1p1b2_clade.seed_models()
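seed_models() builds an untrained Keras model from each gene. A minimal sketch of how a gene could map onto a Sequential network for this 10,000-feature, 46-class problem; the real GAFC1 construction (including how it applies the gene's LR to the optimizer) may differ:

```python
from keras.models import Sequential
from keras.layers import Dense

def build_model(gene, input_dim=10000, nb_classes=46):
    model = Sequential()
    for i in range(gene['nb_layers']):
        # only the first layer needs the input dimension
        kwargs = {'input_dim': input_dim} if i == 0 else {}
        model.add(Dense(gene['layer_units'][i],
                        activation=gene['activations'][i], **kwargs))
    model.add(Dense(nb_classes, activation='softmax'))  # classifier head
    model.compile(loss=gene['loss'], optimizer=gene['optimizer'],
                  metrics=['accuracy'])
    return model
```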
In [6]:
n1e1p1b2_clade.grow_models()
this is the index: 0
and this is the gene: LR 0.0916252
activations [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta...
batch_size 512
epochs 16
gene_name lab3000_n1e1p1b2+Gen0+gene0
layer_units [494, 283, 25, 33, 308, 95, 59, 186, 500]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene0+model.h5
nb_layers 9
optimizer RMSProp
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 10s - loss: 2.5475 - acc: 0.3111 - val_loss: 2.3936 - val_acc: 0.3537
Epoch 2/16
8083/8083 [==============================] - 9s - loss: 2.2894 - acc: 0.3334 - val_loss: 1.9464 - val_acc: 0.3860
Epoch 3/16
8083/8083 [==============================] - 10s - loss: 1.9488 - acc: 0.3658 - val_loss: 1.9705 - val_acc: 0.2481
Epoch 4/16
8083/8083 [==============================] - 10s - loss: 1.9537 - acc: 0.3303 - val_loss: 1.9452 - val_acc: 0.3982
Epoch 5/16
8083/8083 [==============================] - 9s - loss: 1.8822 - acc: 0.3713 - val_loss: 1.9150 - val_acc: 0.3826
Epoch 6/16
8083/8083 [==============================] - 8s - loss: 1.8749 - acc: 0.3853 - val_loss: 1.9093 - val_acc: 0.3993
Epoch 7/16
8083/8083 [==============================] - 9s - loss: 1.9220 - acc: 0.3789 - val_loss: 1.9146 - val_acc: 0.3904
Epoch 8/16
8083/8083 [==============================] - 9s - loss: 1.8592 - acc: 0.3827 - val_loss: 1.9124 - val_acc: 0.3882
Epoch 9/16
8083/8083 [==============================] - 8s - loss: 1.8836 - acc: 0.3726 - val_loss: 1.9088 - val_acc: 0.4016
Epoch 10/16
8083/8083 [==============================] - 9s - loss: 1.8559 - acc: 0.3733 - val_loss: 2.0361 - val_acc: 0.3960
Epoch 11/16
8083/8083 [==============================] - 8s - loss: 1.8568 - acc: 0.3718 - val_loss: 1.9191 - val_acc: 0.3938
Epoch 12/16
8083/8083 [==============================] - 8s - loss: 1.8074 - acc: 0.3793 - val_loss: 1.8555 - val_acc: 0.4004
Epoch 13/16
7680/8083 [===========================>..] - ETA: 0s - loss: 1.7713 - acc: 0.3997_______Stopping after 120 seconds.
8083/8083 [==============================] - 8s - loss: 1.7755 - acc: 0.4005 - val_loss: 1.8739 - val_acc: 0.4194
2176/2246 [============================>.] - ETA: 0sthis is the index: 1
and this is the gene: LR 0.00133161
activations [softmax, sigmoid, softsign, hard_sigmoid, elu...
batch_size 128
epochs 15
gene_name lab3000_n1e1p1b2+Gen0+gene1
layer_units [270, 367, 409, 280, 94, 345, 400, 452, 475, 1...
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene1+model.h5
nb_layers 11
optimizer Nadam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/15
8083/8083 [==============================] - 14s - loss: 2.5108 - acc: 0.3392 - val_loss: 2.4327 - val_acc: 0.3537
Epoch 2/15
8083/8083 [==============================] - 14s - loss: 2.4170 - acc: 0.3515 - val_loss: 2.4158 - val_acc: 0.3537
Epoch 3/15
8083/8083 [==============================] - 13s - loss: 2.4162 - acc: 0.3515 - val_loss: 2.4765 - val_acc: 0.2191
Epoch 4/15
8083/8083 [==============================] - 13s - loss: 2.4196 - acc: 0.3471 - val_loss: 2.4515 - val_acc: 0.3537
Epoch 5/15
8083/8083 [==============================] - 13s - loss: 2.4129 - acc: 0.3515 - val_loss: 2.4316 - val_acc: 0.3537
Epoch 6/15
8083/8083 [==============================] - 13s - loss: 2.4133 - acc: 0.3515 - val_loss: 2.4133 - val_acc: 0.3537
Epoch 7/15
8083/8083 [==============================] - 13s - loss: 2.4107 - acc: 0.3515 - val_loss: 2.4256 - val_acc: 0.3537
Epoch 8/15
8083/8083 [==============================] - 13s - loss: 2.4127 - acc: 0.3515 - val_loss: 2.4196 - val_acc: 0.3537
Epoch 9/15
8064/8083 [============================>.] - ETA: 0s - loss: 2.4097 - acc: 0.3516_______Stopping after 120 seconds.
8083/8083 [==============================] - 14s - loss: 2.4111 - acc: 0.3515 - val_loss: 2.4209 - val_acc: 0.3537
2246/2246 [==============================] - 1s
in the else
this is the index: 2
and this is the gene: LR 0.0539181
activations [softplus, hard_sigmoid, softplus]
batch_size 32
epochs 19
gene_name lab3000_n1e1p1b2+Gen0+gene2
layer_units [14, 392, 25]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene2+model.h5
nb_layers 3
optimizer RMSProp
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 4s - loss: 1.9948 - acc: 0.4982 - val_loss: 1.6111 - val_acc: 0.5884
Epoch 2/19
8083/8083 [==============================] - 3s - loss: 1.4908 - acc: 0.6520 - val_loss: 1.4678 - val_acc: 0.6607
Epoch 3/19
8083/8083 [==============================] - 3s - loss: 1.2644 - acc: 0.7063 - val_loss: 1.2968 - val_acc: 0.6930
Epoch 4/19
8083/8083 [==============================] - 3s - loss: 1.0797 - acc: 0.7450 - val_loss: 1.2022 - val_acc: 0.7230
Epoch 5/19
8083/8083 [==============================] - 3s - loss: 0.9359 - acc: 0.7823 - val_loss: 1.1710 - val_acc: 0.7353
Epoch 6/19
8083/8083 [==============================] - 3s - loss: 0.8107 - acc: 0.8103 - val_loss: 1.1720 - val_acc: 0.7397
Epoch 7/19
8083/8083 [==============================] - 3s - loss: 0.7132 - acc: 0.8306 - val_loss: 1.1732 - val_acc: 0.7353
Epoch 8/19
8083/8083 [==============================] - 3s - loss: 0.6283 - acc: 0.8512 - val_loss: 1.1979 - val_acc: 0.7531
Epoch 9/19
8083/8083 [==============================] - 3s - loss: 0.5569 - acc: 0.8695 - val_loss: 1.2244 - val_acc: 0.7486
Epoch 10/19
8083/8083 [==============================] - 3s - loss: 0.5027 - acc: 0.8815 - val_loss: 1.2890 - val_acc: 0.7475
Epoch 11/19
8083/8083 [==============================] - 3s - loss: 0.4559 - acc: 0.8963 - val_loss: 1.3194 - val_acc: 0.7631
Epoch 12/19
8083/8083 [==============================] - 3s - loss: 0.4114 - acc: 0.9060 - val_loss: 1.3820 - val_acc: 0.7508
Epoch 13/19
8083/8083 [==============================] - 3s - loss: 0.3764 - acc: 0.9161 - val_loss: 1.3915 - val_acc: 0.7664
Epoch 14/19
8083/8083 [==============================] - 3s - loss: 0.3498 - acc: 0.9217 - val_loss: 1.4739 - val_acc: 0.7408
Epoch 15/19
8083/8083 [==============================] - 3s - loss: 0.3232 - acc: 0.9260 - val_loss: 1.4873 - val_acc: 0.7464
Epoch 16/19
8083/8083 [==============================] - 3s - loss: 0.3045 - acc: 0.9313 - val_loss: 1.5670 - val_acc: 0.7508
Epoch 17/19
8083/8083 [==============================] - 3s - loss: 0.2810 - acc: 0.9369 - val_loss: 1.5833 - val_acc: 0.7508
Epoch 18/19
8083/8083 [==============================] - 3s - loss: 0.2667 - acc: 0.9379 - val_loss: 1.5898 - val_acc: 0.7386
Epoch 19/19
8083/8083 [==============================] - 3s - loss: 0.2480 - acc: 0.9430 - val_loss: 1.6500 - val_acc: 0.7375
2016/2246 [=========================>....] - ETA: 0sin the else
this is the index: 3
and this is the gene: LR 0.00591316
activations [softplus, elu, elu, softmax, softsign, softma...
batch_size 512
epochs 12
gene_name lab3000_n1e1p1b2+Gen0+gene3
layer_units [85, 216, 95, 39, 92, 466, 435, 399, 124, 197,...
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene3+model.h5
nb_layers 12
optimizer Adamax
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/12
8083/8083 [==============================] - 6s - loss: 2.5795 - acc: 0.3090 - val_loss: 2.4266 - val_acc: 0.3537
Epoch 2/12
8083/8083 [==============================] - 4s - loss: 2.4143 - acc: 0.3515 - val_loss: 2.4178 - val_acc: 0.3537
Epoch 3/12
8083/8083 [==============================] - 3s - loss: 2.4100 - acc: 0.3515 - val_loss: 2.4095 - val_acc: 0.3537
Epoch 4/12
8083/8083 [==============================] - 3s - loss: 2.4057 - acc: 0.3515 - val_loss: 2.4102 - val_acc: 0.3537
Epoch 5/12
8083/8083 [==============================] - 3s - loss: 2.4038 - acc: 0.3515 - val_loss: 2.4017 - val_acc: 0.3537
Epoch 6/12
8083/8083 [==============================] - 3s - loss: 2.3642 - acc: 0.3515 - val_loss: 2.2810 - val_acc: 0.3537
Epoch 7/12
8083/8083 [==============================] - 3s - loss: 2.0805 - acc: 0.5080 - val_loss: 1.9416 - val_acc: 0.5462
Epoch 8/12
8083/8083 [==============================] - 5s - loss: 1.9240 - acc: 0.5442 - val_loss: 1.9550 - val_acc: 0.5428
Epoch 9/12
8083/8083 [==============================] - 4s - loss: 1.9026 - acc: 0.5461 - val_loss: 1.9584 - val_acc: 0.5384
Epoch 10/12
8083/8083 [==============================] - 4s - loss: 1.8936 - acc: 0.5479 - val_loss: 1.9396 - val_acc: 0.5428
Epoch 11/12
8083/8083 [==============================] - 5s - loss: 1.8878 - acc: 0.5486 - val_loss: 1.9388 - val_acc: 0.5428
Epoch 12/12
8083/8083 [==============================] - 5s - loss: 1.8863 - acc: 0.5493 - val_loss: 1.9366 - val_acc: 0.5417
2240/2246 [============================>.] - ETA: 0sin the else
this is the index: 4
and this is the gene: LR 0.483776
activations [softmax, elu, softmax, softsign, softmax, lin...
batch_size 128
epochs 2
gene_name lab3000_n1e1p1b2+Gen0+gene4
layer_units [511, 278, 417, 457, 37, 333, 331, 299, 16]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene4+model.h5
nb_layers 9
optimizer Adamax
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/2
8083/8083 [==============================] - 17s - loss: 3.0447 - acc: 0.2143 - val_loss: 2.6331 - val_acc: 0.3537
Epoch 2/2
8083/8083 [==============================] - 17s - loss: 2.5148 - acc: 0.3515 - val_loss: 2.4518 - val_acc: 0.3537
2246/2246 [==============================] - 2s
in the else
this is the index: 5
and this is the gene: LR 0.053448
activations [tanh, hard_sigmoid, sigmoid, softsign, linear...
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen0+gene5
layer_units [149, 159, 2, 125, 155, 351, 99, 384, 351, 263]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene5+model.h5
nb_layers 10
optimizer Adagrad
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 55s - loss: 3.1154 - acc: 0.3515 - val_loss: 2.8753 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 49s - loss: 2.7651 - acc: 0.3515 - val_loss: 2.6826 - val_acc: 0.3537
Epoch 3/5
8072/8083 [============================>.] - ETA: 0s - loss: 2.6279 - acc: 0.3512_______Stopping after 120 seconds.
8083/8083 [==============================] - 51s - loss: 2.6275 - acc: 0.3515 - val_loss: 2.5859 - val_acc: 0.3537
2112/2246 [===========================>..] - ETA: 0sin the else
this is the index: 6
and this is the gene: LR 0.00269799
activations [linear, softplus, relu]
batch_size 512
epochs 9
gene_name lab3000_n1e1p1b2+Gen0+gene6
layer_units [139, 158, 491]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene6+model.h5
nb_layers 3
optimizer Adam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 5s - loss: 2.3531 - acc: 0.4439 - val_loss: 1.8489 - val_acc: 0.5250
Epoch 2/9
8083/8083 [==============================] - 3s - loss: 1.5709 - acc: 0.6244 - val_loss: 1.4289 - val_acc: 0.6585
Epoch 3/9
8083/8083 [==============================] - 3s - loss: 1.1662 - acc: 0.7235 - val_loss: 1.1954 - val_acc: 0.7230
Epoch 4/9
8083/8083 [==============================] - 3s - loss: 0.8730 - acc: 0.8043 - val_loss: 1.0789 - val_acc: 0.7675
Epoch 5/9
8083/8083 [==============================] - 3s - loss: 0.6221 - acc: 0.8562 - val_loss: 1.0149 - val_acc: 0.7998
Epoch 6/9
8083/8083 [==============================] - 3s - loss: 0.4235 - acc: 0.9040 - val_loss: 1.0342 - val_acc: 0.7898
Epoch 7/9
8083/8083 [==============================] - 3s - loss: 0.2953 - acc: 0.9323 - val_loss: 1.0828 - val_acc: 0.8020
Epoch 8/9
8083/8083 [==============================] - 3s - loss: 0.2186 - acc: 0.9437 - val_loss: 1.1050 - val_acc: 0.8109
Epoch 9/9
8083/8083 [==============================] - 3s - loss: 0.1878 - acc: 0.9514 - val_loss: 1.1573 - val_acc: 0.7942
2240/2246 [============================>.] - ETA: 0sin the else
this is the index: 7
and this is the gene: LR 0.0264261
activations [softplus, softmax, tanh, softsign, hard_sigmo...
batch_size 16
epochs 15
gene_name lab3000_n1e1p1b2+Gen0+gene7
layer_units [487, 36, 144, 3, 250, 508, 244, 240, 490, 480...
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene7+model.h5
nb_layers 12
optimizer sgd
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/15
8083/8083 [==============================] - 47s - loss: 2.4910 - acc: 0.3473 - val_loss: 2.4242 - val_acc: 0.3537
Epoch 2/15
8083/8083 [==============================] - 43s - loss: 2.4110 - acc: 0.3509 - val_loss: 2.4147 - val_acc: 0.3537
Epoch 3/15
8080/8083 [============================>.] - ETA: 0s - loss: 2.4101 - acc: 0.3501_______Stopping after 120 seconds.
8083/8083 [==============================] - 42s - loss: 2.4100 - acc: 0.3500 - val_loss: 2.4427 - val_acc: 0.2191
2208/2246 [============================>.] - ETA: 0sin the else
this is the index: 8
and this is the gene: LR 0.119733
activations [tanh, softsign, softsign, softmax, sigmoid, r...
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen0+gene8
layer_units [252, 481, 165, 512, 323, 85, 25, 415, 351, 123]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene8+model.h5
nb_layers 10
optimizer sgd
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 49s - loss: 2.7437 - acc: 0.3509 - val_loss: 2.4582 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 49s - loss: 2.4306 - acc: 0.3515 - val_loss: 2.4244 - val_acc: 0.3537
Epoch 3/5
8080/8083 [============================>.] - ETA: 0s - loss: 2.4127 - acc: 0.3515_______Stopping after 120 seconds.
8083/8083 [==============================] - 48s - loss: 2.4129 - acc: 0.3515 - val_loss: 2.4157 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0sin the else
this is the index: 9
and this is the gene: LR 0.42979
activations [tanh, tanh, elu, relu, softsign, softmax, elu]
batch_size 16
epochs 2
gene_name lab3000_n1e1p1b2+Gen0+gene9
layer_units [306, 484, 292, 411, 183, 127, 402]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene9+model.h5
nb_layers 7
optimizer Adam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/2
8083/8083 [==============================] - 61s - loss: 2.4803 - acc: 0.3509 - val_loss: 2.4206 - val_acc: 0.3537
Epoch 2/2
8083/8083 [==============================] - 57s - loss: 2.4157 - acc: 0.3515 - val_loss: 2.4106 - val_acc: 0.3537
2240/2246 [============================>.] - ETA: 0sin the else
this is the index: 10
and this is the gene: LR 0.129539
activations [sigmoid, softsign, hard_sigmoid, softplus, so...
batch_size 32
epochs 13
gene_name lab3000_n1e1p1b2+Gen0+gene10
layer_units [62, 123, 424, 41, 32, 147, 178, 412, 161]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene10+model.h5
nb_layers 9
optimizer Adagrad
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/13
8083/8083 [==============================] - 9s - loss: 1.9186 - acc: 0.5165 - val_loss: 1.6574 - val_acc: 0.5651
Epoch 2/13
8083/8083 [==============================] - 9s - loss: 1.6197 - acc: 0.5804 - val_loss: 1.6199 - val_acc: 0.5795
Epoch 3/13
8083/8083 [==============================] - 8s - loss: 1.5370 - acc: 0.5988 - val_loss: 1.5924 - val_acc: 0.5851
Epoch 4/13
8083/8083 [==============================] - 8s - loss: 1.4227 - acc: 0.6250 - val_loss: 1.5311 - val_acc: 0.6029
Epoch 5/13
8083/8083 [==============================] - 8s - loss: 1.3285 - acc: 0.6436 - val_loss: 1.5157 - val_acc: 0.6018
Epoch 6/13
8083/8083 [==============================] - 8s - loss: 1.2590 - acc: 0.6658 - val_loss: 1.5830 - val_acc: 0.6007
Epoch 7/13
8083/8083 [==============================] - 9s - loss: 1.1973 - acc: 0.6853 - val_loss: 1.6707 - val_acc: 0.5862
Epoch 8/13
8083/8083 [==============================] - 8s - loss: 1.1488 - acc: 0.6976 - val_loss: 1.5600 - val_acc: 0.6118
Epoch 9/13
8083/8083 [==============================] - 8s - loss: 1.1016 - acc: 0.7058 - val_loss: 1.5905 - val_acc: 0.6085
Epoch 10/13
8083/8083 [==============================] - 8s - loss: 1.0613 - acc: 0.7143 - val_loss: 1.6894 - val_acc: 0.6118
Epoch 11/13
8083/8083 [==============================] - 8s - loss: 1.0257 - acc: 0.7205 - val_loss: 1.5813 - val_acc: 0.6073
Epoch 12/13
8083/8083 [==============================] - 8s - loss: 0.9973 - acc: 0.7287 - val_loss: 1.6717 - val_acc: 0.6051
Epoch 13/13
8083/8083 [==============================] - 8s - loss: 0.9723 - acc: 0.7319 - val_loss: 1.6255 - val_acc: 0.6185
2112/2246 [===========================>..] - ETA: 0sin the else
this is the index: 11
and this is the gene: LR 0.0138704
activations [linear, hard_sigmoid]
batch_size 128
epochs 16
gene_name lab3000_n1e1p1b2+Gen0+gene11
layer_units [4, 204]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene11+model.h5
nb_layers 2
optimizer Nadam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 2s - loss: 2.2925 - acc: 0.4367 - val_loss: 1.7502 - val_acc: 0.5706
Epoch 2/16
8083/8083 [==============================] - 1s - loss: 1.6002 - acc: 0.6237 - val_loss: 1.5419 - val_acc: 0.6285
Epoch 3/16
8083/8083 [==============================] - 1s - loss: 1.3726 - acc: 0.6966 - val_loss: 1.3848 - val_acc: 0.6830
Epoch 4/16
8083/8083 [==============================] - 1s - loss: 1.1485 - acc: 0.7336 - val_loss: 1.2658 - val_acc: 0.6941
Epoch 5/16
8083/8083 [==============================] - 1s - loss: 0.9803 - acc: 0.7605 - val_loss: 1.2052 - val_acc: 0.7075
Epoch 6/16
8083/8083 [==============================] - 1s - loss: 0.8489 - acc: 0.7857 - val_loss: 1.1698 - val_acc: 0.7241
Epoch 7/16
8083/8083 [==============================] - 1s - loss: 0.7378 - acc: 0.8138 - val_loss: 1.1702 - val_acc: 0.7353
Epoch 8/16
8083/8083 [==============================] - 1s - loss: 0.6486 - acc: 0.8362 - val_loss: 1.1627 - val_acc: 0.7397
Epoch 9/16
8083/8083 [==============================] - 1s - loss: 0.5742 - acc: 0.8520 - val_loss: 1.2034 - val_acc: 0.7330
Epoch 10/16
8083/8083 [==============================] - 1s - loss: 0.5129 - acc: 0.8664 - val_loss: 1.2283 - val_acc: 0.7330
Epoch 11/16
8083/8083 [==============================] - 1s - loss: 0.4617 - acc: 0.8774 - val_loss: 1.2634 - val_acc: 0.7286
Epoch 12/16
8083/8083 [==============================] - 1s - loss: 0.4199 - acc: 0.8884 - val_loss: 1.3257 - val_acc: 0.7208
Epoch 13/16
8083/8083 [==============================] - 1s - loss: 0.3828 - acc: 0.8977 - val_loss: 1.3328 - val_acc: 0.7253
Epoch 14/16
8083/8083 [==============================] - 1s - loss: 0.3521 - acc: 0.9083 - val_loss: 1.3558 - val_acc: 0.7286
Epoch 15/16
8083/8083 [==============================] - 1s - loss: 0.3265 - acc: 0.9138 - val_loss: 1.4054 - val_acc: 0.7308
Epoch 16/16
8083/8083 [==============================] - 1s - loss: 0.3047 - acc: 0.9180 - val_loss: 1.4507 - val_acc: 0.7230
2176/2246 [============================>.] - ETA: 0sin the else
this is the index: 12
and this is the gene: LR 0.0266889
activations [sigmoid, elu, elu, softmax, softsign, sigmoid]
batch_size 64
epochs 5
gene_name lab3000_n1e1p1b2+Gen0+gene12
layer_units [130, 272, 291, 170, 511, 381]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene12+model.h5
nb_layers 6
optimizer RMSProp
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 10s - loss: 2.0162 - acc: 0.4612 - val_loss: 2.0235 - val_acc: 0.4383
Epoch 2/5
8083/8083 [==============================] - 9s - loss: 1.6416 - acc: 0.5735 - val_loss: 2.1175 - val_acc: 0.3782
Epoch 3/5
8083/8083 [==============================] - 9s - loss: 1.5256 - acc: 0.6117 - val_loss: 1.5800 - val_acc: 0.5996
Epoch 4/5
8083/8083 [==============================] - 9s - loss: 1.3671 - acc: 0.6531 - val_loss: 1.5344 - val_acc: 0.6307
Epoch 5/5
8083/8083 [==============================] - 9s - loss: 1.2201 - acc: 0.6833 - val_loss: 1.3904 - val_acc: 0.6574
2112/2246 [===========================>..] - ETA: 0sin the else
this is the index: 13
and this is the gene: LR 0.00263484
activations [softsign]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen0+gene13
layer_units [287]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene13+model.h5
nb_layers 1
optimizer Adam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 10s - loss: 1.5607 - acc: 0.6755 - val_loss: 0.9889 - val_acc: 0.7953
Epoch 2/5
8083/8083 [==============================] - 9s - loss: 0.5990 - acc: 0.8790 - val_loss: 0.8006 - val_acc: 0.8287
Epoch 3/5
8083/8083 [==============================] - 9s - loss: 0.3286 - acc: 0.9322 - val_loss: 0.7863 - val_acc: 0.8276
Epoch 4/5
8083/8083 [==============================] - 9s - loss: 0.2217 - acc: 0.9478 - val_loss: 0.7881 - val_acc: 0.8309
Epoch 5/5
8083/8083 [==============================] - 9s - loss: 0.1713 - acc: 0.9527 - val_loss: 0.8174 - val_acc: 0.8242
2144/2246 [===========================>..] - ETA: 0sin the else
this is the index: 14
and this is the gene: LR 0.442461
activations [hard_sigmoid, elu, tanh, hard_sigmoid, sigmoid]
batch_size 8
epochs 19
gene_name lab3000_n1e1p1b2+Gen0+gene14
layer_units [444, 290, 174, 391, 327]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene14+model.h5
nb_layers 5
optimizer Adamax
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8080/8083 [============================>.] - ETA: 0s - loss: 1.8570 - acc: 0.5272_______Stopping after 120 seconds.
8083/8083 [==============================] - 124s - loss: 1.8566 - acc: 0.5274 - val_loss: 1.5781 - val_acc: 0.6007
2208/2246 [============================>.] - ETA: 0sin the else
this is the index: 15
and this is the gene: LR 0.344473
activations [softsign, relu, linear, hard_sigmoid, softmax...
batch_size 32
epochs 6
gene_name lab3000_n1e1p1b2+Gen0+gene15
layer_units [343, 390, 472, 325, 386, 318, 162, 411, 357, ...
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene15+model.h5
nb_layers 12
optimizer Adagrad
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/6
8083/8083 [==============================] - 33s - loss: 2.4360 - acc: 0.3502 - val_loss: 2.4112 - val_acc: 0.3537
Epoch 2/6
8083/8083 [==============================] - 32s - loss: 2.4067 - acc: 0.3515 - val_loss: 2.4088 - val_acc: 0.3537
Epoch 3/6
8083/8083 [==============================] - 32s - loss: 2.4051 - acc: 0.3515 - val_loss: 2.4094 - val_acc: 0.3537
Epoch 4/6
8064/8083 [============================>.] - ETA: 0s - loss: 2.4051 - acc: 0.3516_______Stopping after 120 seconds.
8083/8083 [==============================] - 32s - loss: 2.4047 - acc: 0.3515 - val_loss: 2.4089 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0sin the else
this is the index: 16
and this is the gene: LR 0.00865982
activations [softsign, relu, relu, linear, elu, tanh, soft...
batch_size 512
epochs 16
gene_name lab3000_n1e1p1b2+Gen0+gene16
layer_units [386, 378, 42, 114, 154, 287, 178, 101, 202, 2...
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene16+model.h5
nb_layers 11
optimizer Adamax
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 9s - loss: 3.7701 - acc: 0.2493 - val_loss: 3.7224 - val_acc: 0.3537
Epoch 2/16
8083/8083 [==============================] - 8s - loss: 3.6954 - acc: 0.3515 - val_loss: 3.6631 - val_acc: 0.3537
Epoch 3/16
8083/8083 [==============================] - 7s - loss: 3.6331 - acc: 0.3515 - val_loss: 3.6005 - val_acc: 0.3537
Epoch 4/16
8083/8083 [==============================] - 7s - loss: 3.5701 - acc: 0.3515 - val_loss: 3.5384 - val_acc: 0.3537
Epoch 5/16
8083/8083 [==============================] - 8s - loss: 3.5082 - acc: 0.3515 - val_loss: 3.4779 - val_acc: 0.3537
Epoch 6/16
8083/8083 [==============================] - 7s - loss: 3.4481 - acc: 0.3515 - val_loss: 3.4191 - val_acc: 0.3537
Epoch 7/16
8083/8083 [==============================] - 7s - loss: 3.3900 - acc: 0.3515 - val_loss: 3.3625 - val_acc: 0.3537
Epoch 8/16
8083/8083 [==============================] - 7s - loss: 3.3341 - acc: 0.3515 - val_loss: 3.3082 - val_acc: 0.3537
Epoch 9/16
8083/8083 [==============================] - 7s - loss: 3.2803 - acc: 0.3515 - val_loss: 3.2558 - val_acc: 0.3537
Epoch 10/16
8083/8083 [==============================] - 8s - loss: 3.2283 - acc: 0.3515 - val_loss: 3.2050 - val_acc: 0.3537
Epoch 11/16
8083/8083 [==============================] - 7s - loss: 3.1781 - acc: 0.3515 - val_loss: 3.1559 - val_acc: 0.3537
Epoch 12/16
8083/8083 [==============================] - 7s - loss: 3.1297 - acc: 0.3515 - val_loss: 3.1086 - val_acc: 0.3537
Epoch 13/16
8083/8083 [==============================] - 7s - loss: 3.0830 - acc: 0.3515 - val_loss: 3.0633 - val_acc: 0.3537
Epoch 14/16
8083/8083 [==============================] - 7s - loss: 3.0383 - acc: 0.3515 - val_loss: 3.0197 - val_acc: 0.3537
Epoch 15/16
8083/8083 [==============================] - 7s - loss: 2.9956 - acc: 0.3515 - val_loss: 2.9784 - val_acc: 0.3537
Epoch 16/16
7680/8083 [===========================>..] - ETA: 0s - loss: 2.9566 - acc: 0.3521_______Stopping after 120 seconds.
8083/8083 [==============================] - 8s - loss: 2.9552 - acc: 0.3515 - val_loss: 2.9390 - val_acc: 0.3537
2240/2246 [============================>.] - ETA: 0sin the else
this is the index: 17
and this is the gene: LR 0.31893
activations [elu, tanh, hard_sigmoid, sigmoid, elu, relu, ...
batch_size 128
epochs 10
gene_name lab3000_n1e1p1b2+Gen0+gene17
layer_units [104, 482, 283, 188, 56, 473, 457, 90, 340, 28...
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene17+model.h5
nb_layers 11
optimizer Nadam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/10
8083/8083 [==============================] - 10s - loss: 2.1343 - acc: 0.3590 - val_loss: 2.0504 - val_acc: 0.2614
Epoch 2/10
8083/8083 [==============================] - 10s - loss: 1.9886 - acc: 0.3735 - val_loss: 1.9944 - val_acc: 0.3893
Epoch 3/10
8083/8083 [==============================] - 9s - loss: 2.0222 - acc: 0.3697 - val_loss: 2.0903 - val_acc: 0.3604
Epoch 4/10
8083/8083 [==============================] - 9s - loss: 2.0303 - acc: 0.3673 - val_loss: 2.0117 - val_acc: 0.3860
Epoch 5/10
8083/8083 [==============================] - 9s - loss: 1.9986 - acc: 0.3881 - val_loss: 1.9957 - val_acc: 0.3793
Epoch 6/10
8083/8083 [==============================] - 8s - loss: 1.9624 - acc: 0.3778 - val_loss: 2.0559 - val_acc: 0.3960
Epoch 7/10
8083/8083 [==============================] - 8s - loss: 1.9703 - acc: 0.3890 - val_loss: 1.9720 - val_acc: 0.4004
Epoch 8/10
8083/8083 [==============================] - 8s - loss: 2.0561 - acc: 0.3886 - val_loss: 2.1069 - val_acc: 0.3993
Epoch 9/10
8083/8083 [==============================] - 8s - loss: 2.3725 - acc: 0.3499 - val_loss: 2.4332 - val_acc: 0.3537
Epoch 10/10
8083/8083 [==============================] - 9s - loss: 2.4195 - acc: 0.3515 - val_loss: 2.4759 - val_acc: 0.3537
2144/2246 [===========================>..] - ETA: 0sin the else
this is the index: 18
and this is the gene: LR 0.00446407
activations [softplus, softsign, softsign, sigmoid, sigmoi...
batch_size 512
epochs 19
gene_name lab3000_n1e1p1b2+Gen0+gene18
layer_units [308, 456, 483, 16, 163, 106, 289, 242, 86, 20...
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene18+model.h5
nb_layers 11
optimizer sgd
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 9s - loss: 3.6735 - acc: 0.0795 - val_loss: 2.9374 - val_acc: 0.3537
Epoch 2/19
8083/8083 [==============================] - 5s - loss: 2.7372 - acc: 0.3515 - val_loss: 2.5701 - val_acc: 0.3537
Epoch 3/19
8083/8083 [==============================] - 5s - loss: 2.5680 - acc: 0.3515 - val_loss: 2.5111 - val_acc: 0.3537
Epoch 4/19
8083/8083 [==============================] - 6s - loss: 2.5241 - acc: 0.3515 - val_loss: 2.4837 - val_acc: 0.3537
Epoch 5/19
8083/8083 [==============================] - 6s - loss: 2.4975 - acc: 0.3515 - val_loss: 2.4651 - val_acc: 0.3537
Epoch 6/19
8083/8083 [==============================] - 5s - loss: 2.4780 - acc: 0.3515 - val_loss: 2.4515 - val_acc: 0.3537
Epoch 7/19
8083/8083 [==============================] - 6s - loss: 2.4636 - acc: 0.3515 - val_loss: 2.4414 - val_acc: 0.3537
Epoch 8/19
8083/8083 [==============================] - 5s - loss: 2.4523 - acc: 0.3515 - val_loss: 2.4335 - val_acc: 0.3537
Epoch 9/19
8083/8083 [==============================] - 5s - loss: 2.4432 - acc: 0.3515 - val_loss: 2.4276 - val_acc: 0.3537
Epoch 10/19
8083/8083 [==============================] - 5s - loss: 2.4362 - acc: 0.3515 - val_loss: 2.4227 - val_acc: 0.3537
Epoch 11/19
8083/8083 [==============================] - 5s - loss: 2.4306 - acc: 0.3515 - val_loss: 2.4189 - val_acc: 0.3537
Epoch 12/19
8083/8083 [==============================] - 5s - loss: 2.4259 - acc: 0.3515 - val_loss: 2.4161 - val_acc: 0.3537
Epoch 13/19
8083/8083 [==============================] - 5s - loss: 2.4223 - acc: 0.3515 - val_loss: 2.4138 - val_acc: 0.3537
Epoch 14/19
8083/8083 [==============================] - 5s - loss: 2.4193 - acc: 0.3515 - val_loss: 2.4121 - val_acc: 0.3537
Epoch 15/19
8083/8083 [==============================] - 5s - loss: 2.4172 - acc: 0.3515 - val_loss: 2.4108 - val_acc: 0.3537
Epoch 16/19
8083/8083 [==============================] - 6s - loss: 2.4153 - acc: 0.3515 - val_loss: 2.4099 - val_acc: 0.3537
Epoch 17/19
8083/8083 [==============================] - 5s - loss: 2.4138 - acc: 0.3515 - val_loss: 2.4096 - val_acc: 0.3537
Epoch 18/19
8083/8083 [==============================] - 5s - loss: 2.4127 - acc: 0.3515 - val_loss: 2.4087 - val_acc: 0.3537
Epoch 19/19
8083/8083 [==============================] - 5s - loss: 2.4116 - acc: 0.3515 - val_loss: 2.4081 - val_acc: 0.3537
2176/2246 [============================>.] - ETA: 0sin the else
this is the index: 19
and this is the gene: LR 0.170069
activations [softplus, softmax, linear, tanh, softmax, sof...
batch_size 32
epochs 6
gene_name lab3000_n1e1p1b2+Gen0+gene19
layer_units [292, 138, 107, 35, 338, 367, 313, 308, 133, 36]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene19+model.h5
nb_layers 10
optimizer Nadam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/6
8083/8083 [==============================] - 39s - loss: 2.3997 - acc: 0.3812 - val_loss: 2.4533 - val_acc: 0.3537
Epoch 2/6
8083/8083 [==============================] - 38s - loss: 2.1128 - acc: 0.4876 - val_loss: 2.0783 - val_acc: 0.4894
Epoch 3/6
8083/8083 [==============================] - 38s - loss: 2.0223 - acc: 0.5153 - val_loss: 2.0292 - val_acc: 0.5161
Epoch 4/6
8064/8083 [============================>.] - ETA: 0s - loss: 2.1469 - acc: 0.4914_______Stopping after 120 seconds.
8083/8083 [==============================] - 38s - loss: 2.1463 - acc: 0.4915 - val_loss: 2.0709 - val_acc: 0.5206
2208/2246 [============================>.] - ETA: 0sin the else
this is the index: 20
and this is the gene: LR 0.0124672
activations [hard_sigmoid, elu, elu, softmax, softplus, so...
batch_size 64
epochs 6
gene_name lab3000_n1e1p1b2+Gen0+gene20
layer_units [234, 304, 99, 323, 474, 201]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene20+model.h5
nb_layers 6
optimizer Adam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/6
8083/8083 [==============================] - 16s - loss: 2.3685 - acc: 0.4036 - val_loss: 2.0161 - val_acc: 0.5373
Epoch 2/6
8083/8083 [==============================] - 15s - loss: 1.9674 - acc: 0.5385 - val_loss: 1.9655 - val_acc: 0.5473
Epoch 3/6
8083/8083 [==============================] - 16s - loss: 1.9094 - acc: 0.5466 - val_loss: 1.9421 - val_acc: 0.5462
Epoch 4/6
8083/8083 [==============================] - 18s - loss: 1.8409 - acc: 0.5539 - val_loss: 1.8164 - val_acc: 0.5462
Epoch 5/6
8083/8083 [==============================] - 15s - loss: 1.6340 - acc: 0.5930 - val_loss: 1.7669 - val_acc: 0.5651
Epoch 6/6
8083/8083 [==============================] - 14s - loss: 1.4827 - acc: 0.6287 - val_loss: 1.6429 - val_acc: 0.6073
2240/2246 [============================>.] - ETA: 0sin the else
this is the index: 21
and this is the gene: LR 0.00928905
activations [linear, linear, linear, hard_sigmoid, softsig...
batch_size 256
epochs 16
gene_name lab3000_n1e1p1b2+Gen0+gene21
layer_units [130, 333, 452, 435, 469, 237, 468, 437, 266]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene21+model.h5
nb_layers 9
optimizer Adadelta
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 9s - loss: 2.6358 - acc: 0.3136 - val_loss: 2.4286 - val_acc: 0.3537
Epoch 2/16
8083/8083 [==============================] - 8s - loss: 2.4167 - acc: 0.3460 - val_loss: 2.4227 - val_acc: 0.3537
Epoch 3/16
8083/8083 [==============================] - 8s - loss: 2.4095 - acc: 0.3516 - val_loss: 2.4158 - val_acc: 0.3537
Epoch 4/16
8083/8083 [==============================] - 7s - loss: 2.4102 - acc: 0.3453 - val_loss: 2.4159 - val_acc: 0.3537
Epoch 5/16
8083/8083 [==============================] - 7s - loss: 2.3252 - acc: 0.3767 - val_loss: 2.0783 - val_acc: 0.4917
Epoch 6/16
8083/8083 [==============================] - 7s - loss: 1.9918 - acc: 0.5012 - val_loss: 2.0823 - val_acc: 0.4650
Epoch 7/16
8083/8083 [==============================] - 7s - loss: 1.9362 - acc: 0.5103 - val_loss: 1.7691 - val_acc: 0.5462
Epoch 8/16
8083/8083 [==============================] - 7s - loss: 1.8613 - acc: 0.5211 - val_loss: 1.7102 - val_acc: 0.5684
Epoch 9/16
8083/8083 [==============================] - 8s - loss: 1.6975 - acc: 0.5640 - val_loss: 2.0270 - val_acc: 0.4772
Epoch 10/16
8083/8083 [==============================] - 7s - loss: 1.6776 - acc: 0.5670 - val_loss: 1.7424 - val_acc: 0.5673
Epoch 11/16
8083/8083 [==============================] - 7s - loss: 1.6513 - acc: 0.5710 - val_loss: 1.7061 - val_acc: 0.5684
Epoch 12/16
8083/8083 [==============================] - 7s - loss: 1.6249 - acc: 0.5827 - val_loss: 1.6543 - val_acc: 0.5740
Epoch 13/16
8083/8083 [==============================] - 7s - loss: 1.5854 - acc: 0.5899 - val_loss: 1.6496 - val_acc: 0.5640
Epoch 14/16
8083/8083 [==============================] - 7s - loss: 1.5783 - acc: 0.5951 - val_loss: 1.6329 - val_acc: 0.5795
Epoch 15/16
8083/8083 [==============================] - 7s - loss: 1.5285 - acc: 0.6094 - val_loss: 1.6685 - val_acc: 0.5717
Epoch 16/16
7936/8083 [============================>.] - ETA: 0s - loss: 1.5528 - acc: 0.5988_______Stopping after 120 seconds.
8083/8083 [==============================] - 7s - loss: 1.5447 - acc: 0.6008 - val_loss: 1.5918 - val_acc: 0.5973
2176/2246 [============================>.] - ETA: 0sin the else
this is the index: 22
and this is the gene: LR 0.297231
activations [linear, linear, linear, softmax, linear, soft...
batch_size 128
epochs 4
gene_name lab3000_n1e1p1b2+Gen0+gene22
layer_units [244, 29, 155, 505, 28, 328, 75]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene22+model.h5
nb_layers 7
optimizer RMSProp
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/4
8083/8083 [==============================] - 11s - loss: 2.4221 - acc: 0.3650 - val_loss: 1.8015 - val_acc: 0.5651
Epoch 2/4
8083/8083 [==============================] - 8s - loss: 1.6904 - acc: 0.5841 - val_loss: 1.6896 - val_acc: 0.5729
Epoch 3/4
8083/8083 [==============================] - 8s - loss: 1.5416 - acc: 0.6209 - val_loss: 1.5838 - val_acc: 0.6140
Epoch 4/4
8083/8083 [==============================] - 7s - loss: 1.4220 - acc: 0.6442 - val_loss: 1.5171 - val_acc: 0.6151
2176/2246 [============================>.] - ETA: 0sin the else
this is the index: 23
and this is the gene: LR 0.281495
activations [sigmoid, softplus, softplus]
batch_size 8
epochs 8
gene_name lab3000_n1e1p1b2+Gen0+gene23
layer_units [245, 282, 171]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene23+model.h5
nb_layers 3
optimizer Adamax
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/8
8083/8083 [==============================] - 65s - loss: 1.6452 - acc: 0.6107 - val_loss: 1.3107 - val_acc: 0.6952
Epoch 2/8
8080/8083 [============================>.] - ETA: 0s - loss: 1.1295 - acc: 0.7329_______Stopping after 120 seconds.
8083/8083 [==============================] - 62s - loss: 1.1297 - acc: 0.7329 - val_loss: 1.1768 - val_acc: 0.7141
2176/2246 [============================>.] - ETA: 0sin the else
this is the index: 24
and this is the gene: LR 0.354097
activations [softplus, hard_sigmoid, elu]
batch_size 8
epochs 9
gene_name lab3000_n1e1p1b2+Gen0+gene24
layer_units [416, 89, 497]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene24+model.h5
nb_layers 3
optimizer Adamax
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 98s - loss: 1.4872 - acc: 0.6463 - val_loss: 1.1333 - val_acc: 0.7442
Epoch 2/9
8080/8083 [============================>.] - ETA: 0s - loss: 0.9198 - acc: 0.7814_______Stopping after 120 seconds.
8083/8083 [==============================] - 96s - loss: 0.9196 - acc: 0.7814 - val_loss: 0.9701 - val_acc: 0.7642
2176/2246 [============================>.] - ETA: 0sin the else
this is the index: 25
and this is the gene: LR 0.252698
activations [softsign, relu, softmax, softsign, elu]
batch_size 8
epochs 9
gene_name lab3000_n1e1p1b2+Gen0+gene25
layer_units [96, 345, 345, 198, 276]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene25+model.h5
nb_layers 5
optimizer Adam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 46s - loss: 1.6483 - acc: 0.5765 - val_loss: 1.3847 - val_acc: 0.6507
Epoch 2/9
8083/8083 [==============================] - 44s - loss: 1.1133 - acc: 0.7214 - val_loss: 1.1191 - val_acc: 0.7319
Epoch 3/9
8080/8083 [============================>.] - ETA: 0s - loss: 0.8192 - acc: 0.7965_______Stopping after 120 seconds.
8083/8083 [==============================] - 50s - loss: 0.8203 - acc: 0.7964 - val_loss: 1.0447 - val_acc: 0.7575
2208/2246 [============================>.] - ETA: 0sin the else
this is the index: 26
and this is the gene: LR 0.0341792
activations [elu, elu, softplus, hard_sigmoid, softmax, li...
batch_size 32
epochs 2
gene_name lab3000_n1e1p1b2+Gen0+gene26
layer_units [245, 331, 431, 115, 97, 168, 235, 255, 247, 429]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene26+model.h5
nb_layers 10
optimizer Adadelta
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/2
8083/8083 [==============================] - 44s - loss: 2.4550 - acc: 0.3443 - val_loss: 2.4259 - val_acc: 0.3537
Epoch 2/2
8083/8083 [==============================] - 40s - loss: 2.4187 - acc: 0.3496 - val_loss: 2.4224 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0sin the else
this is the index: 27
and this is the gene: LR 0.0061892
activations [hard_sigmoid, softplus, softsign, relu, hard_...
batch_size 32
epochs 2
gene_name lab3000_n1e1p1b2+Gen0+gene27
layer_units [182, 150, 56, 501, 278, 406]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene27+model.h5
nb_layers 6
optimizer Nadam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/2
8083/8083 [==============================] - 29s - loss: 1.9906 - acc: 0.4935 - val_loss: 1.7335 - val_acc: 0.5784
Epoch 2/2
8083/8083 [==============================] - 25s - loss: 1.5154 - acc: 0.6045 - val_loss: 1.3800 - val_acc: 0.6429
2240/2246 [============================>.] - ETA: 0sin the else
this is the index: 28
and this is the gene: LR 0.394899
activations [softplus, softmax, hard_sigmoid, softplus]
batch_size 512
epochs 17
gene_name lab3000_n1e1p1b2+Gen0+gene28
layer_units [462, 81, 243, 499]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene28+model.h5
nb_layers 4
optimizer Adam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/17
8083/8083 [==============================] - 15s - loss: 2.6706 - acc: 0.3202 - val_loss: 2.4899 - val_acc: 0.3537
Epoch 2/17
8083/8083 [==============================] - 10s - loss: 2.4274 - acc: 0.3515 - val_loss: 2.4046 - val_acc: 0.3537
Epoch 3/17
8083/8083 [==============================] - 11s - loss: 2.3718 - acc: 0.3515 - val_loss: 2.3463 - val_acc: 0.3537
Epoch 4/17
8083/8083 [==============================] - 13s - loss: 2.3219 - acc: 0.3772 - val_loss: 2.2863 - val_acc: 0.3537
Epoch 5/17
8083/8083 [==============================] - 13s - loss: 2.2350 - acc: 0.4440 - val_loss: 2.2014 - val_acc: 0.3537
Epoch 6/17
8083/8083 [==============================] - 11s - loss: 2.1312 - acc: 0.5253 - val_loss: 2.0915 - val_acc: 0.5417
Epoch 7/17
8083/8083 [==============================] - 10s - loss: 2.0159 - acc: 0.5533 - val_loss: 1.9858 - val_acc: 0.5451
Epoch 8/17
8083/8083 [==============================] - 11s - loss: 1.9355 - acc: 0.5550 - val_loss: 1.9594 - val_acc: 0.5451
Epoch 9/17
8083/8083 [==============================] - 11s - loss: 1.8941 - acc: 0.5562 - val_loss: 1.9336 - val_acc: 0.5451
Epoch 10/17
7680/8083 [===========================>..] - ETA: 0s - loss: 1.8801 - acc: 0.5577_______Stopping after 120 seconds.
8083/8083 [==============================] - 10s - loss: 1.8805 - acc: 0.5564 - val_loss: 1.9515 - val_acc: 0.5406
2240/2246 [============================>.] - ETA: 0sin the else
this is the index: 29
and this is the gene: LR 0.334299
activations [sigmoid, softmax, softmax, sigmoid, elu, elu]
batch_size 32
epochs 8
gene_name lab3000_n1e1p1b2+Gen0+gene29
layer_units [206, 375, 283, 394, 415, 271]
loss categorical_crossentropy
model_name lab3000_n1e1p1b2+Gen0+gene29+model.h5
nb_layers 6
optimizer sgd
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/8
8083/8083 [==============================] - 21s - loss: 2.4583 - acc: 0.3434 - val_loss: 2.4188 - val_acc: 0.3537
Epoch 2/8
8083/8083 [==============================] - 15s - loss: 2.4273 - acc: 0.3387 - val_loss: 2.4137 - val_acc: 0.3537
Epoch 3/8
8083/8083 [==============================] - 18s - loss: 2.4222 - acc: 0.3437 - val_loss: 2.4279 - val_acc: 0.3537
Epoch 4/8
8083/8083 [==============================] - 17s - loss: 2.4201 - acc: 0.3450 - val_loss: 2.4142 - val_acc: 0.3537
Epoch 5/8
8083/8083 [==============================] - 16s - loss: 2.4211 - acc: 0.3444 - val_loss: 2.4239 - val_acc: 0.3537
Epoch 6/8
8083/8083 [==============================] - 17s - loss: 2.4232 - acc: 0.3463 - val_loss: 2.4401 - val_acc: 0.3537
Epoch 7/8
8064/8083 [============================>.] - ETA: 0s - loss: 2.4179 - acc: 0.3454_______Stopping after 120 seconds.
8083/8083 [==============================] - 17s - loss: 2.4180 - acc: 0.3455 - val_loss: 2.4190 - val_acc: 0.3537
2176/2246 [============================>.] - ETA: 0sin the else
^^^verbose output of n1e1p1b2_clade.grow_models()
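The "_______Stopping after 120 seconds." lines show max_train_time from the config being enforced inside fit(). A sketch of a Keras callback that would produce this behavior, assuming that is how the clade implements the budget:

```python
import time
from keras.callbacks import Callback

class TimeLimit(Callback):
    """Halt training once the wall-clock budget is spent."""
    def __init__(self, max_seconds):
        super(TimeLimit, self).__init__()
        self.max_seconds = max_seconds

    def on_train_begin(self, logs=None):
        self.start = time.time()

    def on_batch_end(self, batch, logs=None):
        if time.time() - self.start > self.max_seconds:
            print('_______Stopping after %d seconds.' % self.max_seconds)
            self.model.stop_training = True  # training ends after the current epoch
```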
In [7]:
n1e1p1b2_clade.phenotypes
Out[7]:
|   | gene_name | misclassed | test_accuracy | test_loss | time | train_accuracy | train_loss |
|---|---|---|---|---|---|---|---|
| 0 | lab3000_n1e1p1b2+Gen0+gene0 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 11, 23, ... | 0.421193 | 1.921113 | 122.650144 | 0.449462 | 1.708227 |
| 0 | lab3000_n1e1p1b2+Gen0+gene1 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.427144 | 126.230092 | 0.351478 | 2.412989 |
| 0 | lab3000_n1e1p1b2+Gen0+gene2 | {'true_class': [10, 4, 4, 3, 5, 23, 8, 20, 1, ... | 0.722173 | 1.754980 | 64.744479 | 0.954349 | 0.213664 |
| 0 | lab3000_n1e1p1b2+Gen0+gene3 | {'true_class': [10, 1, 5, 1, 1, 11, 23, 19, 8,... | 0.545414 | 1.920586 | 54.632695 | 0.551033 | 1.875691 |
| 0 | lab3000_n1e1p1b2+Gen0+gene4 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.453830 | 35.217261 | 0.351478 | 2.447145 |
| 0 | lab3000_n1e1p1b2+Gen0+gene5 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.589548 | 156.549994 | 0.351478 | 2.583639 |
| 0 | lab3000_n1e1p1b2+Gen0+gene6 | {'true_class': [4, 5, 23, 20, 1, 40, 15, 1, 21... | 0.780944 | 1.239399 | 31.594541 | 0.962266 | 0.149377 |
| 0 | lab3000_n1e1p1b2+Gen0+gene7 | {'true_class': [3, 10, 1, 3, 3, 3, 3, 3, 5, 1,... | 0.211042 | 2.452458 | 133.506259 | 0.216751 | 2.435554 |
| 0 | lab3000_n1e1p1b2+Gen0+gene8 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.422574 | 147.820026 | 0.351478 | 2.408828 |
| 0 | lab3000_n1e1p1b2+Gen0+gene9 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.420465 | 118.986796 | 0.351478 | 2.406220 |
| 0 | lab3000_n1e1p1b2+Gen0+gene10 | {'true_class': [10, 4, 3, 5, 23, 8, 9, 6, 10, ... | 0.626002 | 1.637140 | 113.158686 | 0.745268 | 0.925260 |
| 0 | lab3000_n1e1p1b2+Gen0+gene11 | {'true_class': [10, 4, 4, 5, 23, 8, 6, 20, 1, ... | 0.723063 | 1.521470 | 26.861886 | 0.932327 | 0.261401 |
| 0 | lab3000_n1e1p1b2+Gen0+gene12 | {'true_class': [10, 1, 4, 5, 1, 1, 23, 8, 9, 6... | 0.638914 | 1.407381 | 50.740100 | 0.695163 | 1.101717 |
| 0 | lab3000_n1e1p1b2+Gen0+gene13 | {'true_class': [4, 4, 5, 23, 20, 1, 40, 15, 1,... | 0.803651 | 0.883051 | 47.459044 | 0.965607 | 0.120601 |
| 0 | lab3000_n1e1p1b2+Gen0+gene14 | {'true_class': [10, 1, 4, 5, 1, 1, 11, 23, 8, ... | 0.589938 | 1.617656 | 124.633040 | 0.604602 | 1.546149 |
| 0 | lab3000_n1e1p1b2+Gen0+gene15 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.414036 | 130.925534 | 0.351478 | 2.401978 |
| 0 | lab3000_n1e1p1b2+Gen0+gene16 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.941463 | 127.376373 | 0.351478 | 2.933976 |
| 0 | lab3000_n1e1p1b2+Gen0+gene17 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.495039 | 93.818096 | 0.351478 | 2.475288 |
| 0 | lab3000_n1e1p1b2+Gen0+gene18 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.422885 | 115.749587 | 0.351478 | 2.410771 |
| 0 | lab3000_n1e1p1b2+Gen0+gene19 | {'true_class': [10, 1, 3, 5, 1, 1, 11, 23, 19,... | 0.525378 | 2.063859 | 154.142115 | 0.530249 | 2.038407 |
| 0 | lab3000_n1e1p1b2+Gen0+gene20 | {'true_class': [10, 1, 4, 5, 1, 1, 11, 23, 8, ... | 0.605076 | 1.672505 | 97.117181 | 0.646790 | 1.401081 |
| 0 | lab3000_n1e1p1b2+Gen0+gene21 | {'true_class': [10, 1, 4, 5, 1, 1, 11, 23, 8, ... | 0.596171 | 1.630955 | 123.401309 | 0.628727 | 1.460353 |
| 0 | lab3000_n1e1p1b2+Gen0+gene22 | {'true_class': [10, 1, 4, 5, 1, 1, 11, 23, 8, ... | 0.618433 | 1.540090 | 35.467959 | 0.650377 | 1.354569 |
| 0 | lab3000_n1e1p1b2+Gen0+gene23 | {'true_class': [4, 5, 23, 8, 9, 6, 10, 20, 1, ... | 0.712378 | 1.204043 | 127.953699 | 0.777063 | 0.941307 |
| 0 | lab3000_n1e1p1b2+Gen0+gene24 | {'true_class': [4, 4, 5, 23, 10, 20, 1, 40, 15... | 0.752449 | 1.030251 | 194.952801 | 0.827910 | 0.682031 |
| 0 | lab3000_n1e1p1b2+Gen0+gene25 | {'true_class': [4, 4, 5, 23, 8, 3, 6, 10, 20, ... | 0.743544 | 1.087753 | 141.736290 | 0.838550 | 0.620869 |
| 0 | lab3000_n1e1p1b2+Gen0+gene26 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.425202 | 85.081392 | 0.351478 | 2.409010 |
| 0 | lab3000_n1e1p1b2+Gen0+gene27 | {'true_class': [10, 1, 4, 5, 11, 23, 8, 3, 9, ... | 0.643366 | 1.408406 | 55.569928 | 0.672523 | 1.237195 |
| 0 | lab3000_n1e1p1b2+Gen0+gene28 | {'true_class': [10, 1, 5, 1, 1, 11, 23, 19, 8,... | 0.540071 | 1.946962 | 121.436508 | 0.556105 | 1.867724 |
| 0 | lab3000_n1e1p1b2+Gen0+gene29 | {'true_class': [10, 1, 4, 4, 5, 4, 1, 1, 11, 2... | 0.361977 | 2.418423 | 124.918322 | 0.351478 | 2.407060 |
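Eleven genes sit at exactly test_accuracy 0.361977 / train_accuracy 0.351478, the majority-class baseline that also appears as val_acc 0.3537 in the training logs: those models never learned anything beyond predicting the most common class. Since phenotypes prints as a pandas DataFrame, ranking the generation by fitness is a one-liner (an inspection sketch, not part of the clade API):

```python
# rank this generation by test accuracy, highest first
n1e1p1b2_clade.phenotypes.sort_values('test_accuracy', ascending=False)[
    ['gene_name', 'test_accuracy', 'test_loss']]
```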
In [8]:
n1e1p1b2_clade.select_parents()
In [9]:
n1e1p1b2_clade.parent_genes
Out[9]:
|   | LR | activations | batch_size | epochs | gene_name | layer_units | loss | model_name | nb_layers | optimizer |
|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.053918 | [softplus, hard_sigmoid, softplus] | 32 | 19 | lab3000_n1e1p1b2+Gen0+gene2 | [14, 392, 25] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene2+model.h5 | 3 | RMSProp |
| 0 | 0.002698 | [linear, softplus, relu] | 512 | 9 | lab3000_n1e1p1b2+Gen0+gene6 | [139, 158, 491] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene6+model.h5 | 3 | Adam |
| 0 | 0.013870 | [linear, hard_sigmoid] | 128 | 16 | lab3000_n1e1p1b2+Gen0+gene11 | [4, 204] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene11+model.h5 | 2 | Nadam |
| 0 | 0.002635 | [softsign] | 128 | 5 | lab3000_n1e1p1b2+Gen0+gene13 | [287] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene13+model.h5 | 1 | Adam |
| 0 | 0.354097 | [softplus, hard_sigmoid, elu] | 8 | 9 | lab3000_n1e1p1b2+Gen0+gene24 | [416, 89, 497] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene24+model.h5 | 3 | Adamax |
| 0 | 0.252698 | [softsign, relu, softmax, softsign, elu] | 8 | 9 | lab3000_n1e1p1b2+Gen0+gene25 | [96, 345, 345, 198, 276] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene25+model.h5 | 5 | Adam |
| 0 | 0.091625 | [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta... | 512 | 16 | lab3000_n1e1p1b2+Gen0+gene0 | [494, 283, 25, 33, 308, 95, 59, 186, 500] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene0+model.h5 | 9 | RMSProp |
| 0 | 0.053448 | [tanh, hard_sigmoid, sigmoid, softsign, linear... | 8 | 5 | lab3000_n1e1p1b2+Gen0+gene5 | [149, 159, 2, 125, 155, 351, 99, 384, 351, 263] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene5+model.h5 | 10 | Adagrad |
| 0 | 0.119733 | [tanh, softsign, softsign, softmax, sigmoid, r... | 8 | 5 | lab3000_n1e1p1b2+Gen0+gene8 | [252, 481, 165, 512, 323, 85, 25, 415, 351, 123] | categorical_crossentropy | lab3000_n1e1p1b2+Gen0+gene8+model.h5 | 10 | sgd |
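Comparing parent_genes with the phenotypes above: the six best genes by test accuracy (genes 13, 6, 24, 25, 11, and 2) are all present, joined by three low scorers (genes 0, 5, and 8). That pattern looks like elitist selection plus random picks to preserve diversity; a hypothetical sketch of such a rule (the actual select_parents() may differ):

```python
import random

def select_parents(genotypes, phenotypes, n_elite=6, n_lottery=3):
    # keep the fittest genes, then add random "lottery" picks for diversity
    ranked = phenotypes.sort_values('test_accuracy', ascending=False)
    elite = list(ranked['gene_name'].head(n_elite))
    pool = [g for g in genotypes['gene_name'] if g not in elite]
    lottery = random.sample(pool, n_lottery)
    return genotypes[genotypes['gene_name'].isin(elite + lottery)]
```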
In [10]:
n1e1p1b2_clade.breed()
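breed() assembles Generation 1 by crossing fields between pairs of parents, as the Out[13] table below shows: Gen1+gene0, for example, combines gene0's learning rate and activations with gene25's layer_units and optimizer, and layer lists shorter than the inherited nb_layers are padded (widths with the minimum of 2, activations by repeating the last entry). A few values appear in neither parent (e.g. the LR of 0.011777 in gene18), which suggests a mutation step as well. A hypothetical crossover sketch consistent with that output (the real breed() may differ):

```python
import random

def crossover(mom, dad):
    # inherit each field independently from a randomly chosen parent
    child = {f: random.choice((mom, dad))[f]
             for f in ('LR', 'activations', 'batch_size', 'epochs',
                       'layer_units', 'nb_layers', 'optimizer')}
    n = child['nb_layers']
    # reconcile the per-layer lists with the inherited depth: pad widths
    # with the minimum unit count (2), activations by repeating the last
    # one, then truncate both to n layers
    child['layer_units'] = (child['layer_units'] + [2] * n)[:n]
    child['activations'] = (child['activations'] +
                            [child['activations'][-1]] * n)[:n]
    return child
```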
In [12]:
n1e1p1b2_clade.current_generation
Out[12]:
1
In [13]:
n1e1p1b2_clade.genotypes
Out[13]:
LR | activations | batch_size | epochs | gene_name | layer_units | model_name | nb_layers | optimizer
0 | 0.091625 | [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta... | 512 | 16 | lab3000_n1e1p1b2+Gen1+gene0 | [96, 345, 345, 198, 276, 2, 2, 2, 2] | lab3000_n1e1p1b2+Gen1+gene0+model.h5 | 9 | Adam
1 | 0.252698 | [softsign, softmax, elu] | 512 | 9 | lab3000_n1e1p1b2+Gen1+gene1 | [139, 158, 491] | lab3000_n1e1p1b2+Gen1+gene1+model.h5 | 3 | Adam
2 | 0.091625 | [softplus, hard_sigmoid, elu] | 8 | 9 | lab3000_n1e1p1b2+Gen1+gene2 | [416, 89, 497] | lab3000_n1e1p1b2+Gen1+gene2+model.h5 | 3 | Adamax
3 | 0.252698 | [softsign, relu, softmax, softsign, elu, elu, ... | 8 | 5 | lab3000_n1e1p1b2+Gen1+gene3 | [252, 481, 165, 512, 323, 85, 25, 415, 351, 123] | lab3000_n1e1p1b2+Gen1+gene3+model.h5 | 10 | sgd
4 | 0.053918 | [softplus, hard_sigmoid, softplus] | 32 | 19 | lab3000_n1e1p1b2+Gen1+gene4 | [14, 392, 25] | lab3000_n1e1p1b2+Gen1+gene4+model.h5 | 3 | Nadam
5 | 0.119733 | [softsign, relu, softmax, softsign, elu, relu,... | 8 | 5 | lab3000_n1e1p1b2+Gen1+gene5 | [252, 481, 165, 512, 323, 85, 25, 415, 351, 123] | lab3000_n1e1p1b2+Gen1+gene5+model.h5 | 10 | sgd
6 | 0.002635 | [tanh, hard_sigmoid, sigmoid, softsign, linear... | 128 | 5 | lab3000_n1e1p1b2+Gen1+gene6 | [149, 159, 2, 125, 155, 351, 99, 384, 351, 263] | lab3000_n1e1p1b2+Gen1+gene6+model.h5 | 10 | Adagrad
7 | 0.354097 | [softplus, hard_sigmoid, elu, elu, elu, elu, e... | 8 | 9 | lab3000_n1e1p1b2+Gen1+gene7 | [416, 89, 497, 2, 2, 2, 2, 2, 2, 2] | lab3000_n1e1p1b2+Gen1+gene7+model.h5 | 10 | Adagrad
8 | 0.002635 | [softsign] | 8 | 5 | lab3000_n1e1p1b2+Gen1+gene8 | [384] | lab3000_n1e1p1b2+Gen1+gene8+model.h5 | 1 | Adam
9 | 0.053918 | [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta... | 32 | 16 | lab3000_n1e1p1b2+Gen1+gene9 | [14, 392, 25, 2, 2, 2, 2, 2, 2] | lab3000_n1e1p1b2+Gen1+gene9+model.h5 | 9 | RMSProp
10 | 0.013870 | [softplus, hard_sigmoid, elu] | 128 | 16 | lab3000_n1e1p1b2+Gen1+gene10 | [416, 89, 497] | lab3000_n1e1p1b2+Gen1+gene10+model.h5 | 3 | Adamax
11 | 0.002698 | [softplus, hard_sigmoid, elu] | 512 | 9 | lab3000_n1e1p1b2+Gen1+gene11 | [416, 89, 497] | lab3000_n1e1p1b2+Gen1+gene11+model.h5 | 3 | sgd
12 | 0.013870 | [linear, hard_sigmoid, hard_sigmoid, hard_sigm... | 128 | 16 | lab3000_n1e1p1b2+Gen1+gene12 | [149, 159, 2, 125, 155, 351, 99, 384, 351, 263] | lab3000_n1e1p1b2+Gen1+gene12+model.h5 | 10 | Adagrad
13 | 0.053448 | [softplus, hard_sigmoid, softplus, softplus, s... | 256 | 5 | lab3000_n1e1p1b2+Gen1+gene13 | [14, 392, 25, 2, 2, 2, 2, 2, 2, 2] | lab3000_n1e1p1b2+Gen1+gene13+model.h5 | 10 | Adagrad
14 | 0.252698 | [sigmoid, sigmoid, hard_sigmoid, relu, softplus] | 512 | 16 | lab3000_n1e1p1b2+Gen1+gene14 | [494, 283, 95, 59, 186] | lab3000_n1e1p1b2+Gen1+gene14+model.h5 | 5 | RMSProp
15 | 0.002635 | [softsign] | 8 | 5 | lab3000_n1e1p1b2+Gen1+gene15 | [99] | lab3000_n1e1p1b2+Gen1+gene15+model.h5 | 1 | Adam
16 | 0.252698 | [tanh, softsign, softsign, softmax, sigmoid, r... | 8 | 5 | lab3000_n1e1p1b2+Gen1+gene16 | [252, 481, 165, 512, 323, 85, 25, 415, 351, 123] | lab3000_n1e1p1b2+Gen1+gene16+model.h5 | 10 | sgd
17 | 0.252698 | [softmax, softsign, elu] | 512 | 9 | lab3000_n1e1p1b2+Gen1+gene17 | [139, 158, 491] | lab3000_n1e1p1b2+Gen1+gene17+model.h5 | 3 | Adam
18 | 0.011777 | [tanh, softsign, softsign, softmax, sigmoid, r... | 512 | 5 | lab3000_n1e1p1b2+Gen1+gene18 | [252, 481, 512, 323, 85, 25, 415, 351, 123] | lab3000_n1e1p1b2+Gen1+gene18+model.h5 | 9 | RMSProp
19 | 0.002635 | [softsign, softsign, softsign] | 32 | 5 | lab3000_n1e1p1b2+Gen1+gene19 | [287, 2, 2] | lab3000_n1e1p1b2+Gen1+gene19+model.h5 | 3 | RMSProp
20 | 0.013870 | [softplus, hard_sigmoid, elu] | 128 | 16 | lab3000_n1e1p1b2+Gen1+gene20 | [416, 89, 497] | lab3000_n1e1p1b2+Gen1+gene20+model.h5 | 3 | Adamax
21 | 0.013870 | [linear, hard_sigmoid, hard_sigmoid] | 128 | 16 | lab3000_n1e1p1b2+Gen1+gene21 | [14, 392, 25] | lab3000_n1e1p1b2+Gen1+gene21+model.h5 | 3 | Nadam
22 | 0.252698 | [tanh, softsign, softsign, softmax, sigmoid, r... | 8 | 9 | lab3000_n1e1p1b2+Gen1+gene22 | [252, 481, 165, 512, 323, 85, 25, 415, 351, 123] | lab3000_n1e1p1b2+Gen1+gene22+model.h5 | 10 | sgd
23 | 0.002682 | [tanh, hard_sigmoid, sigmoid, softsign, linear... | 8 | 5 | lab3000_n1e1p1b2+Gen1+gene23 | [252, 481, 165, 512, 323, 85, 25, 415, 351, 123] | lab3000_n1e1p1b2+Gen1+gene23+model.h5 | 10 | sgd
24 | 0.252698 | [linear, softplus, relu, relu, softplus] | 512 | 9 | lab3000_n1e1p1b2+Gen1+gene24 | [96, 345, 345, 198, 276] | lab3000_n1e1p1b2+Gen1+gene24+model.h5 | 5 | Adam
25 | 0.013870 | [linear, hard_sigmoid] | 8 | 5 | lab3000_n1e1p1b2+Gen1+gene25 | [4, 204] | lab3000_n1e1p1b2+Gen1+gene25+model.h5 | 2 | Nadam
26 | 0.002635 | [softsign, relu, softmax, softsign, elu] | 8 | 5 | lab3000_n1e1p1b2+Gen1+gene26 | [96, 345, 345, 198, 276] | lab3000_n1e1p1b2+Gen1+gene26+model.h5 | 5 | Adam
27 | 0.053918 | [softplus, hard_sigmoid, softplus] | 32 | 19 | lab3000_n1e1p1b2+Gen1+gene27 | [345, 345, 276] | lab3000_n1e1p1b2+Gen1+gene27+model.h5 | 3 | RMSProp
28 | 0.091625 | [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta... | 8 | 4 | lab3000_n1e1p1b2+Gen1+gene28 | [252, 481, 165, 512, 85, 25, 415, 351, 123] | lab3000_n1e1p1b2+Gen1+gene28+model.h5 | 9 | sgd
29 | 0.002635 | [softplus] | 128 | 5 | lab3000_n1e1p1b2+Gen1+gene29 | [287] | lab3000_n1e1p1b2+Gen1+gene29+model.h5 | 1 | Adam
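The Generation 1 table makes the breeding mechanics visible: Gen1 gene0, for example, keeps gene0's learning rate, batch size, and epoch count but inherits gene25's layer_units, padded with 2-unit layers to fill the inherited nb_layers of 9, while Gen1 gene7 pads its activations by repeating elu. A hedged sketch of a crossover with that shape (an illustration, not the actual clades.breed() implementation):

import random

# Hypothetical gene fields, mirroring the genotype columns above.
FIELDS = ['LR', 'activations', 'batch_size', 'epochs',
          'layer_units', 'nb_layers', 'optimizer']

def crossover(mother, father):
    # Inherit each field independently from one of the two parents.
    child = {f: random.choice([mother[f], father[f]]) for f in FIELDS}
    # Reconcile list-valued fields with the inherited layer count:
    # truncate if too long, pad if too short (2-unit layers, repeated
    # final activation), matching the padded rows seen above.
    for field, pad in (('layer_units', 2), ('activations', None)):
        vals = list(child[field])[:child['nb_layers']]
        filler = pad if pad is not None else vals[-1]
        child[field] = vals + [filler] * (child['nb_layers'] - len(vals))
    return child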
In [14]:
n1e1p1b2_clade.seed_models()
In [15]:
n1e1p1b2_clade.grow_models()
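grow_models() trains each Generation 1 genotype in turn and then scores it on the 2,246-sample test set; the max_train_time of 120 seconds from the config surfaces below as the "_______Stopping after 120 seconds." interrupts. A minimal sketch of such a wall-clock cap as a Keras callback (an assumed mechanism, not necessarily the clade's own code):

import time
from keras.callbacks import Callback

class TimedStopping(Callback):
    """Halt training once a wall-clock budget is exhausted."""
    def __init__(self, seconds=120):
        super(TimedStopping, self).__init__()
        self.seconds = seconds

    def on_train_begin(self, logs=None):
        self.start = time.time()

    def on_batch_end(self, batch, logs=None):
        # Checked after every batch, so slow configurations are cut
        # off mid-epoch rather than running to completion.
        if time.time() - self.start > self.seconds:
            self.model.stop_training = True

Passed to model.fit(..., callbacks=[TimedStopping(120)]), this yields the same pattern seen in the log: genes with batch size 8 and ten-layer stacks get only one or two epochs, while small fast networks run their full schedule.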
this is the index: 0
and this is the gene: LR 0.0916252
activations [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta...
batch_size 512
epochs 16
gene_name lab3000_n1e1p1b2+Gen1+gene0
layer_units [96, 345, 345, 198, 276, 2, 2, 2, 2]
model_name lab3000_n1e1p1b2+Gen1+gene0+model.h5
nb_layers 9
optimizer Adam
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 9s - loss: 3.7528 - acc: 0.0049 - val_loss: 3.7210 - val_acc: 0.0089
Epoch 2/16
8083/8083 [==============================] - 6s - loss: 3.6889 - acc: 0.0049 - val_loss: 3.6560 - val_acc: 0.0089
Epoch 3/16
8083/8083 [==============================] - 4s - loss: 3.6211 - acc: 0.0049 - val_loss: 3.5864 - val_acc: 0.0089
Epoch 4/16
8083/8083 [==============================] - 3s - loss: 3.5485 - acc: 0.0238 - val_loss: 3.5107 - val_acc: 0.3537
Epoch 5/16
8083/8083 [==============================] - 3s - loss: 3.4687 - acc: 0.3515 - val_loss: 3.4281 - val_acc: 0.3537
Epoch 6/16
8083/8083 [==============================] - 3s - loss: 3.3817 - acc: 0.3515 - val_loss: 3.3379 - val_acc: 0.3537
Epoch 7/16
8083/8083 [==============================] - 3s - loss: 3.2870 - acc: 0.3515 - val_loss: 3.2401 - val_acc: 0.3537
Epoch 8/16
8083/8083 [==============================] - 3s - loss: 3.1853 - acc: 0.3515 - val_loss: 3.1350 - val_acc: 0.3537
Epoch 9/16
8083/8083 [==============================] - 3s - loss: 3.0766 - acc: 0.3515 - val_loss: 3.0254 - val_acc: 0.3537
Epoch 10/16
8083/8083 [==============================] - 3s - loss: 2.9649 - acc: 0.3515 - val_loss: 2.9130 - val_acc: 0.3537
Epoch 11/16
8083/8083 [==============================] - 3s - loss: 2.8530 - acc: 0.3515 - val_loss: 2.8026 - val_acc: 0.3537
Epoch 12/16
8083/8083 [==============================] - 3s - loss: 2.7466 - acc: 0.3515 - val_loss: 2.7012 - val_acc: 0.3537
Epoch 13/16
8083/8083 [==============================] - 3s - loss: 2.6528 - acc: 0.3515 - val_loss: 2.6158 - val_acc: 0.3537
Epoch 14/16
8083/8083 [==============================] - 3s - loss: 2.5781 - acc: 0.3515 - val_loss: 2.5513 - val_acc: 0.3537
Epoch 15/16
8083/8083 [==============================] - 3s - loss: 2.5241 - acc: 0.3515 - val_loss: 2.5071 - val_acc: 0.3537
Epoch 16/16
8083/8083 [==============================] - 3s - loss: 2.4887 - acc: 0.3515 - val_loss: 2.4784 - val_acc: 0.3537
2112/2246 [===========================>..] - ETA: 0s
this is the index: 1
and this is the gene: LR 0.252698
activations [softsign, softmax, elu]
batch_size 512
epochs 9
gene_name lab3000_n1e1p1b2+Gen1+gene1
layer_units [139, 158, 491]
model_name lab3000_n1e1p1b2+Gen1+gene1+model.h5
nb_layers 3
optimizer Adam
Name: 1, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 6s - loss: 3.6146 - acc: 0.3272 - val_loss: 3.2724 - val_acc: 0.3537
Epoch 2/9
8083/8083 [==============================] - 3s - loss: 2.8706 - acc: 0.3515 - val_loss: 2.4954 - val_acc: 0.3537
Epoch 3/9
8083/8083 [==============================] - 3s - loss: 2.3513 - acc: 0.3541 - val_loss: 2.2272 - val_acc: 0.4327
Epoch 4/9
8083/8083 [==============================] - 3s - loss: 2.0822 - acc: 0.5113 - val_loss: 1.9510 - val_acc: 0.5261
Epoch 5/9
8083/8083 [==============================] - 3s - loss: 1.8026 - acc: 0.5465 - val_loss: 1.6838 - val_acc: 0.5873
Epoch 6/9
8083/8083 [==============================] - 3s - loss: 1.5483 - acc: 0.6048 - val_loss: 1.4949 - val_acc: 0.6196
Epoch 7/9
8083/8083 [==============================] - 3s - loss: 1.3403 - acc: 0.6402 - val_loss: 1.3610 - val_acc: 0.6352
Epoch 8/9
8083/8083 [==============================] - 3s - loss: 1.1772 - acc: 0.6736 - val_loss: 1.2860 - val_acc: 0.6785
Epoch 9/9
8083/8083 [==============================] - 3s - loss: 1.0768 - acc: 0.7064 - val_loss: 1.2596 - val_acc: 0.6763
2112/2246 [===========================>..] - ETA: 0s
in the else
this is the index: 2
and this is the gene: LR 0.0916252
activations [softplus, hard_sigmoid, elu]
batch_size 8
epochs 9
gene_name lab3000_n1e1p1b2+Gen1+gene2
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen1+gene2+model.h5
nb_layers 3
optimizer Adamax
Name: 2, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 89s - loss: 1.4737 - acc: 0.6454 - val_loss: 1.1576 - val_acc: 0.7208
Epoch 2/9
8080/8083 [============================>.] - ETA: 0s - loss: 0.9238 - acc: 0.7792
_______Stopping after 120 seconds.
8083/8083 [==============================] - 87s - loss: 0.9236 - acc: 0.7793 - val_loss: 1.0282 - val_acc: 0.7608
2176/2246 [============================>.] - ETA: 0s
in the else
this is the index: 3
and this is the gene: LR 0.252698
activations [softsign, relu, softmax, softsign, elu, elu, ...
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene3
layer_units [252, 481, 165, 512, 323, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen1+gene3+model.h5
nb_layers 10
optimizer sgd
Name: 3, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 49s - loss: 2.5628 - acc: 0.3502 - val_loss: 2.4197 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 46s - loss: 2.4137 - acc: 0.3500 - val_loss: 2.4117 - val_acc: 0.3537
Epoch 3/5
8080/8083 [============================>.] - ETA: 0s - loss: 2.4101 - acc: 0.3511
_______Stopping after 120 seconds.
8083/8083 [==============================] - 44s - loss: 2.4108 - acc: 0.3510 - val_loss: 2.4213 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 4
and this is the gene: LR 0.0539181
activations [softplus, hard_sigmoid, softplus]
batch_size 32
epochs 19
gene_name lab3000_n1e1p1b2+Gen1+gene4
layer_units [14, 392, 25]
model_name lab3000_n1e1p1b2+Gen1+gene4+model.h5
nb_layers 3
optimizer Nadam
Name: 4, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 6s - loss: 1.8898 - acc: 0.5295 - val_loss: 1.4947 - val_acc: 0.6485
Epoch 2/19
8083/8083 [==============================] - 4s - loss: 1.2199 - acc: 0.7112 - val_loss: 1.2765 - val_acc: 0.6897
Epoch 3/19
8083/8083 [==============================] - 4s - loss: 0.8783 - acc: 0.7882 - val_loss: 1.2140 - val_acc: 0.7419
Epoch 4/19
8083/8083 [==============================] - 4s - loss: 0.6271 - acc: 0.8497 - val_loss: 1.2153 - val_acc: 0.7442
Epoch 5/19
8083/8083 [==============================] - 4s - loss: 0.4564 - acc: 0.8898 - val_loss: 1.3228 - val_acc: 0.7442
Epoch 6/19
8083/8083 [==============================] - 4s - loss: 0.3504 - acc: 0.9156 - val_loss: 1.3067 - val_acc: 0.7508
Epoch 7/19
8083/8083 [==============================] - 4s - loss: 0.2708 - acc: 0.9329 - val_loss: 1.3883 - val_acc: 0.7419
Epoch 8/19
8083/8083 [==============================] - 3s - loss: 0.2236 - acc: 0.9452 - val_loss: 1.4379 - val_acc: 0.7475
Epoch 9/19
8083/8083 [==============================] - 3s - loss: 0.1913 - acc: 0.9521 - val_loss: 1.3994 - val_acc: 0.7519
Epoch 10/19
8083/8083 [==============================] - 3s - loss: 0.1685 - acc: 0.9535 - val_loss: 1.4604 - val_acc: 0.7275
Epoch 11/19
8083/8083 [==============================] - 3s - loss: 0.1571 - acc: 0.9563 - val_loss: 1.5303 - val_acc: 0.7375
Epoch 12/19
8083/8083 [==============================] - 3s - loss: 0.1446 - acc: 0.9574 - val_loss: 1.4873 - val_acc: 0.7453
Epoch 13/19
8083/8083 [==============================] - 3s - loss: 0.1412 - acc: 0.9550 - val_loss: 1.5374 - val_acc: 0.7408
Epoch 14/19
8083/8083 [==============================] - 3s - loss: 0.1294 - acc: 0.9581 - val_loss: 1.5575 - val_acc: 0.7419
Epoch 15/19
8083/8083 [==============================] - 3s - loss: 0.1244 - acc: 0.9600 - val_loss: 1.5377 - val_acc: 0.7341
Epoch 16/19
8083/8083 [==============================] - 3s - loss: 0.1203 - acc: 0.9581 - val_loss: 1.5436 - val_acc: 0.7286
Epoch 17/19
8083/8083 [==============================] - 3s - loss: 0.1174 - acc: 0.9600 - val_loss: 1.6000 - val_acc: 0.7341
Epoch 18/19
8083/8083 [==============================] - 3s - loss: 0.1189 - acc: 0.9548 - val_loss: 1.5404 - val_acc: 0.7497
Epoch 19/19
8083/8083 [==============================] - 3s - loss: 0.1095 - acc: 0.9583 - val_loss: 1.6486 - val_acc: 0.7341
2080/2246 [==========================>...] - ETA: 0s
in the else
this is the index: 5
and this is the gene: LR 0.119733
activations [softsign, relu, softmax, softsign, elu, relu,...
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene5
layer_units [252, 481, 165, 512, 323, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen1+gene5+model.h5
nb_layers 10
optimizer sgd
Name: 5, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 47s - loss: 2.5914 - acc: 0.3507 - val_loss: 2.4286 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 46s - loss: 2.4157 - acc: 0.3515 - val_loss: 2.4155 - val_acc: 0.3537
Epoch 3/5
8080/8083 [============================>.] - ETA: 0s - loss: 2.4093 - acc: 0.3514
_______Stopping after 120 seconds.
8083/8083 [==============================] - 1600s - loss: 2.4093 - acc: 0.3515 - val_loss: 2.4105 - val_acc: 0.3537
2246/2246 [==============================] - 4s
in the else
this is the index: 6
and this is the gene: LR 0.00263484
activations [tanh, hard_sigmoid, sigmoid, softsign, linear...
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene6
layer_units [149, 159, 2, 125, 155, 351, 99, 384, 351, 263]
model_name lab3000_n1e1p1b2+Gen1+gene6+model.h5
nb_layers 10
optimizer Adagrad
Name: 6, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 12s - loss: 3.5224 - acc: 0.3194 - val_loss: 3.4021 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 6s - loss: 3.3310 - acc: 0.3515 - val_loss: 3.2742 - val_acc: 0.3537
Epoch 3/5
8083/8083 [==============================] - 6s - loss: 3.2232 - acc: 0.3515 - val_loss: 3.1838 - val_acc: 0.3537
Epoch 4/5
8083/8083 [==============================] - 6s - loss: 3.1425 - acc: 0.3515 - val_loss: 3.1130 - val_acc: 0.3537
Epoch 5/5
8083/8083 [==============================] - 6s - loss: 3.0777 - acc: 0.3515 - val_loss: 3.0547 - val_acc: 0.3537
2144/2246 [===========================>..] - ETA: 0s
in the else
this is the index: 7
and this is the gene: LR 0.354097
activations [softplus, hard_sigmoid, elu, elu, elu, elu, e...
batch_size 8
epochs 9
gene_name lab3000_n1e1p1b2+Gen1+gene7
layer_units [416, 89, 497, 2, 2, 2, 2, 2, 2, 2]
model_name lab3000_n1e1p1b2+Gen1+gene7+model.h5
nb_layers 10
optimizer Adagrad
Name: 7, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 96s - loss: 2.9682 - acc: 0.3146 - val_loss: 2.6668 - val_acc: 0.3537
Epoch 2/9
8080/8083 [============================>.] - ETA: 0s - loss: 2.5680 - acc: 0.3516
_______Stopping after 120 seconds.
8083/8083 [==============================] - 96s - loss: 2.5686 - acc: 0.3515 - val_loss: 2.5140 - val_acc: 0.3537
2246/2246 [==============================] - 3s
in the else
this is the index: 8
and this is the gene: LR 0.00263484
activations [softsign]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene8
layer_units [384]
model_name lab3000_n1e1p1b2+Gen1+gene8+model.h5
nb_layers 1
optimizer Adam
Name: 8, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 113s - loss: 0.9869 - acc: 0.7803 - val_loss: 0.7745 - val_acc: 0.8387
Epoch 2/5
8080/8083 [============================>.] - ETA: 0s - loss: 0.3193 - acc: 0.9262
_______Stopping after 120 seconds.
8083/8083 [==============================] - 111s - loss: 0.3192 - acc: 0.9263 - val_loss: 0.8530 - val_acc: 0.8154
2246/2246 [==============================] - 3s
in the else
this is the index: 9
and this is the gene: LR 0.0539181
activations [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta...
batch_size 32
epochs 16
gene_name lab3000_n1e1p1b2+Gen1+gene9
layer_units [14, 392, 25, 2, 2, 2, 2, 2, 2]
model_name lab3000_n1e1p1b2+Gen1+gene9+model.h5
nb_layers 9
optimizer RMSProp
Name: 9, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 8s - loss: 3.3849 - acc: 0.1947 - val_loss: 3.0737 - val_acc: 0.2191
Epoch 2/16
8083/8083 [==============================] - 4s - loss: 2.8178 - acc: 0.2168 - val_loss: 2.6092 - val_acc: 0.2191
Epoch 3/16
8083/8083 [==============================] - 5s - loss: 2.4975 - acc: 0.3011 - val_loss: 2.4325 - val_acc: 0.3537
Epoch 4/16
8083/8083 [==============================] - 5s - loss: 2.4142 - acc: 0.3515 - val_loss: 2.4128 - val_acc: 0.3537
Epoch 5/16
8083/8083 [==============================] - 4s - loss: 2.4053 - acc: 0.3515 - val_loss: 2.4107 - val_acc: 0.3537
Epoch 6/16
8083/8083 [==============================] - 4s - loss: 2.4036 - acc: 0.3515 - val_loss: 2.4103 - val_acc: 0.3537
Epoch 7/16
8083/8083 [==============================] - 4s - loss: 2.4030 - acc: 0.3515 - val_loss: 2.4101 - val_acc: 0.3537
Epoch 8/16
8083/8083 [==============================] - 4s - loss: 2.4031 - acc: 0.3515 - val_loss: 2.4096 - val_acc: 0.3537
Epoch 9/16
8083/8083 [==============================] - 4s - loss: 2.4029 - acc: 0.3515 - val_loss: 2.4095 - val_acc: 0.3537
Epoch 10/16
8083/8083 [==============================] - 4s - loss: 2.4032 - acc: 0.3515 - val_loss: 2.4099 - val_acc: 0.3537
Epoch 11/16
8083/8083 [==============================] - 4s - loss: 2.4036 - acc: 0.3515 - val_loss: 2.4102 - val_acc: 0.3537
Epoch 12/16
8083/8083 [==============================] - 4s - loss: 2.4036 - acc: 0.3515 - val_loss: 2.4102 - val_acc: 0.3537
Epoch 13/16
8083/8083 [==============================] - 4s - loss: 2.4036 - acc: 0.3515 - val_loss: 2.4107 - val_acc: 0.3537
Epoch 14/16
8083/8083 [==============================] - 4s - loss: 2.4039 - acc: 0.3515 - val_loss: 2.4109 - val_acc: 0.3537
Epoch 15/16
8083/8083 [==============================] - 4s - loss: 2.4040 - acc: 0.3515 - val_loss: 2.4110 - val_acc: 0.3537
Epoch 16/16
8083/8083 [==============================] - 4s - loss: 2.4041 - acc: 0.3515 - val_loss: 2.4106 - val_acc: 0.3537
2176/2246 [============================>.] - ETA: 0s
in the else
this is the index: 10
and this is the gene: LR 0.0138704
activations [softplus, hard_sigmoid, elu]
batch_size 128
epochs 16
gene_name lab3000_n1e1p1b2+Gen1+gene10
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen1+gene10+model.h5
nb_layers 3
optimizer Adamax
Name: 10, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 15s - loss: 1.9282 - acc: 0.5253 - val_loss: 1.4679 - val_acc: 0.6574
Epoch 2/16
8083/8083 [==============================] - 12s - loss: 1.2221 - acc: 0.7153 - val_loss: 1.1870 - val_acc: 0.7175
Epoch 3/16
8083/8083 [==============================] - 12s - loss: 0.9486 - acc: 0.7778 - val_loss: 1.0596 - val_acc: 0.7497
Epoch 4/16
8083/8083 [==============================] - 12s - loss: 0.7557 - acc: 0.8221 - val_loss: 0.9617 - val_acc: 0.7831
Epoch 5/16
8083/8083 [==============================] - 12s - loss: 0.5966 - acc: 0.8571 - val_loss: 0.9196 - val_acc: 0.7931
Epoch 6/16
8083/8083 [==============================] - 12s - loss: 0.4782 - acc: 0.8863 - val_loss: 0.9049 - val_acc: 0.7976
Epoch 7/16
8083/8083 [==============================] - 11s - loss: 0.3752 - acc: 0.9140 - val_loss: 0.9020 - val_acc: 0.8087
Epoch 8/16
8083/8083 [==============================] - 12s - loss: 0.3033 - acc: 0.9273 - val_loss: 0.8974 - val_acc: 0.8131
Epoch 9/16
8083/8083 [==============================] - 13s - loss: 0.2448 - acc: 0.9385 - val_loss: 0.9385 - val_acc: 0.8176
Epoch 10/16
8064/8083 [============================>.] - ETA: 0s - loss: 0.2071 - acc: 0.9435
_______Stopping after 120 seconds.
8083/8083 [==============================] - 13s - loss: 0.2069 - acc: 0.9435 - val_loss: 0.9586 - val_acc: 0.8098
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 11
and this is the gene: LR 0.00269799
activations [softplus, hard_sigmoid, elu]
batch_size 512
epochs 9
gene_name lab3000_n1e1p1b2+Gen1+gene11
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen1+gene11+model.h5
nb_layers 3
optimizer sgd
Name: 11, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 10s - loss: 3.2532 - acc: 0.2468 - val_loss: 2.6642 - val_acc: 0.3537
Epoch 2/9
8083/8083 [==============================] - 6s - loss: 2.5804 - acc: 0.3515 - val_loss: 2.5275 - val_acc: 0.3537
Epoch 3/9
8083/8083 [==============================] - 8s - loss: 2.5007 - acc: 0.3515 - val_loss: 2.4839 - val_acc: 0.3537
Epoch 4/9
8083/8083 [==============================] - 7s - loss: 2.4644 - acc: 0.3515 - val_loss: 2.4591 - val_acc: 0.3537
Epoch 5/9
8083/8083 [==============================] - 6s - loss: 2.4434 - acc: 0.3515 - val_loss: 2.4446 - val_acc: 0.3537
Epoch 6/9
8083/8083 [==============================] - 6s - loss: 2.4302 - acc: 0.3515 - val_loss: 2.4350 - val_acc: 0.3537
Epoch 7/9
8083/8083 [==============================] - 6s - loss: 2.4215 - acc: 0.3515 - val_loss: 2.4278 - val_acc: 0.3537
Epoch 8/9
8083/8083 [==============================] - 7s - loss: 2.4148 - acc: 0.3515 - val_loss: 2.4222 - val_acc: 0.3537
Epoch 9/9
8083/8083 [==============================] - 6s - loss: 2.4101 - acc: 0.3515 - val_loss: 2.4175 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 12
and this is the gene: LR 0.0138704
activations [linear, hard_sigmoid, hard_sigmoid, hard_sigm...
batch_size 128
epochs 16
gene_name lab3000_n1e1p1b2+Gen1+gene12
layer_units [149, 159, 2, 125, 155, 351, 99, 384, 351, 263]
model_name lab3000_n1e1p1b2+Gen1+gene12+model.h5
nb_layers 10
optimizer Adagrad
Name: 12, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 10s - loss: 2.5197 - acc: 0.3384 - val_loss: 2.4272 - val_acc: 0.3537
Epoch 2/16
8083/8083 [==============================] - 7s - loss: 2.4162 - acc: 0.3515 - val_loss: 2.4199 - val_acc: 0.3537
Epoch 3/16
8083/8083 [==============================] - 7s - loss: 2.4122 - acc: 0.3515 - val_loss: 2.4161 - val_acc: 0.3537
Epoch 4/16
8083/8083 [==============================] - 7s - loss: 2.4102 - acc: 0.3515 - val_loss: 2.4370 - val_acc: 0.2191
Epoch 5/16
8083/8083 [==============================] - 7s - loss: 2.4104 - acc: 0.3479 - val_loss: 2.4130 - val_acc: 0.3537
Epoch 6/16
8083/8083 [==============================] - 7s - loss: 2.4099 - acc: 0.3515 - val_loss: 2.4221 - val_acc: 0.3537
Epoch 7/16
8083/8083 [==============================] - 7s - loss: 2.4091 - acc: 0.3515 - val_loss: 2.4181 - val_acc: 0.3537
Epoch 8/16
8083/8083 [==============================] - 7s - loss: 2.4080 - acc: 0.3515 - val_loss: 2.4119 - val_acc: 0.3537
Epoch 9/16
8083/8083 [==============================] - 6s - loss: 2.4083 - acc: 0.3515 - val_loss: 2.4150 - val_acc: 0.3537
Epoch 10/16
8083/8083 [==============================] - 6s - loss: 2.4076 - acc: 0.3515 - val_loss: 2.4175 - val_acc: 0.3537
Epoch 11/16
8083/8083 [==============================] - 6s - loss: 2.4078 - acc: 0.3515 - val_loss: 2.4135 - val_acc: 0.3537
Epoch 12/16
8083/8083 [==============================] - 7s - loss: 2.4072 - acc: 0.3515 - val_loss: 2.4132 - val_acc: 0.3537
Epoch 13/16
8083/8083 [==============================] - 7s - loss: 2.4076 - acc: 0.3515 - val_loss: 2.4137 - val_acc: 0.3537
Epoch 14/16
8083/8083 [==============================] - 7s - loss: 2.4068 - acc: 0.3515 - val_loss: 2.4177 - val_acc: 0.3537
Epoch 15/16
8083/8083 [==============================] - 6s - loss: 2.4062 - acc: 0.3515 - val_loss: 2.4098 - val_acc: 0.3537
Epoch 16/16
8083/8083 [==============================] - 7s - loss: 2.4058 - acc: 0.3515 - val_loss: 2.4251 - val_acc: 0.3537
2144/2246 [===========================>..] - ETA: 0s
in the else
this is the index: 13
and this is the gene: LR 0.053448
activations [softplus, hard_sigmoid, softplus, softplus, s...
batch_size 256
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene13
layer_units [14, 392, 25, 2, 2, 2, 2, 2, 2, 2]
model_name lab3000_n1e1p1b2+Gen1+gene13+model.h5
nb_layers 10
optimizer Adagrad
Name: 13, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 5s - loss: 3.6704 - acc: 0.1852 - val_loss: 3.5216 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 1s - loss: 3.4045 - acc: 0.3515 - val_loss: 3.3020 - val_acc: 0.3537
Epoch 3/5
8083/8083 [==============================] - 1s - loss: 3.1994 - acc: 0.3515 - val_loss: 3.1163 - val_acc: 0.3537
Epoch 4/5
8083/8083 [==============================] - 1s - loss: 3.0252 - acc: 0.3515 - val_loss: 2.9585 - val_acc: 0.3537
Epoch 5/5
8083/8083 [==============================] - 1s - loss: 2.8798 - acc: 0.3515 - val_loss: 2.8276 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 14
and this is the gene: LR 0.252698
activations [sigmoid, sigmoid, hard_sigmoid, relu, softplus]
batch_size 512
epochs 16
gene_name lab3000_n1e1p1b2+Gen1+gene14
layer_units [494, 283, 95, 59, 186]
model_name lab3000_n1e1p1b2+Gen1+gene14+model.h5
nb_layers 5
optimizer RMSProp
Name: 14, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 11s - loss: 2.6461 - acc: 0.3066 - val_loss: 2.3032 - val_acc: 0.3582
Epoch 2/16
8083/8083 [==============================] - 8s - loss: 2.1568 - acc: 0.4057 - val_loss: 1.9804 - val_acc: 0.4661
Epoch 3/16
8083/8083 [==============================] - 8s - loss: 1.8666 - acc: 0.5023 - val_loss: 1.7534 - val_acc: 0.5517
Epoch 4/16
8083/8083 [==============================] - 8s - loss: 1.6650 - acc: 0.5740 - val_loss: 1.6367 - val_acc: 0.5840
Epoch 5/16
8083/8083 [==============================] - 8s - loss: 1.5782 - acc: 0.5935 - val_loss: 1.5911 - val_acc: 0.6007
Epoch 6/16
8083/8083 [==============================] - 8s - loss: 1.5297 - acc: 0.6139 - val_loss: 1.5809 - val_acc: 0.6140
Epoch 7/16
8083/8083 [==============================] - 8s - loss: 1.4687 - acc: 0.6323 - val_loss: 1.5270 - val_acc: 0.6151
Epoch 8/16
8083/8083 [==============================] - 8s - loss: 1.3799 - acc: 0.6637 - val_loss: 1.4549 - val_acc: 0.6496
Epoch 9/16
8083/8083 [==============================] - 8s - loss: 1.3070 - acc: 0.6825 - val_loss: 1.4666 - val_acc: 0.6307
Epoch 10/16
8083/8083 [==============================] - 8s - loss: 1.1955 - acc: 0.7143 - val_loss: 1.4007 - val_acc: 0.6596
Epoch 11/16
8083/8083 [==============================] - 8s - loss: 1.1808 - acc: 0.7148 - val_loss: 1.4119 - val_acc: 0.6641
Epoch 12/16
8083/8083 [==============================] - 8s - loss: 1.1220 - acc: 0.7298 - val_loss: 1.4723 - val_acc: 0.6318
Epoch 13/16
8083/8083 [==============================] - 8s - loss: 1.0746 - acc: 0.7375 - val_loss: 1.5463 - val_acc: 0.6263
Epoch 14/16
8083/8083 [==============================] - 7s - loss: 1.0333 - acc: 0.7516 - val_loss: 1.5009 - val_acc: 0.6541
Epoch 15/16
7680/8083 [===========================>..] - ETA: 0s - loss: 1.0129 - acc: 0.7527
_______Stopping after 120 seconds.
8083/8083 [==============================] - 8s - loss: 1.0170 - acc: 0.7515 - val_loss: 1.3646 - val_acc: 0.6696
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 15
and this is the gene: LR 0.00263484
activations [softsign]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene15
layer_units [99]
model_name lab3000_n1e1p1b2+Gen1+gene15+model.h5
nb_layers 1
optimizer Adam
Name: 15, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 42s - loss: 1.1151 - acc: 0.7601 - val_loss: 0.7862 - val_acc: 0.8287
Epoch 2/5
8083/8083 [==============================] - 38s - loss: 0.3820 - acc: 0.9176 - val_loss: 0.7615 - val_acc: 0.8309
Epoch 3/5
8080/8083 [============================>.] - ETA: 0s - loss: 0.2220 - acc: 0.9469
_______Stopping after 120 seconds.
8083/8083 [==============================] - 40s - loss: 0.2219 - acc: 0.9469 - val_loss: 0.8293 - val_acc: 0.8220
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 16
and this is the gene: LR 0.252698
activations [tanh, softsign, softsign, softmax, sigmoid, r...
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene16
layer_units [252, 481, 165, 512, 323, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen1+gene16+model.h5
nb_layers 10
optimizer sgd
Name: 16, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 57s - loss: 2.7507 - acc: 0.3507 - val_loss: 2.4646 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 54s - loss: 2.4330 - acc: 0.3515 - val_loss: 2.4282 - val_acc: 0.3537
Epoch 3/5
8080/8083 [============================>.] - ETA: 0s - loss: 2.4153 - acc: 0.3514
_______Stopping after 120 seconds.
8083/8083 [==============================] - 55s - loss: 2.4148 - acc: 0.3515 - val_loss: 2.4182 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 17
and this is the gene: LR 0.252698
activations [softmax, softsign, elu]
batch_size 512
epochs 9
gene_name lab3000_n1e1p1b2+Gen1+gene17
layer_units [139, 158, 491]
model_name lab3000_n1e1p1b2+Gen1+gene17+model.h5
nb_layers 3
optimizer Adam
Name: 17, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 7s - loss: 3.3771 - acc: 0.3254 - val_loss: 2.5791 - val_acc: 0.3537
Epoch 2/9
8083/8083 [==============================] - 3s - loss: 2.4549 - acc: 0.3736 - val_loss: 2.3635 - val_acc: 0.3971
Epoch 3/9
8083/8083 [==============================] - 3s - loss: 2.2525 - acc: 0.3715 - val_loss: 2.1104 - val_acc: 0.3993
Epoch 4/9
8083/8083 [==============================] - 3s - loss: 1.9574 - acc: 0.4784 - val_loss: 1.8062 - val_acc: 0.5306
Epoch 5/9
8083/8083 [==============================] - 3s - loss: 1.6566 - acc: 0.5931 - val_loss: 1.5930 - val_acc: 0.6107
Epoch 6/9
8083/8083 [==============================] - 3s - loss: 1.4599 - acc: 0.6482 - val_loss: 1.4734 - val_acc: 0.6418
Epoch 7/9
8083/8083 [==============================] - 4s - loss: 1.3115 - acc: 0.6629 - val_loss: 1.3863 - val_acc: 0.6585
Epoch 8/9
8083/8083 [==============================] - 4s - loss: 1.1816 - acc: 0.6922 - val_loss: 1.3264 - val_acc: 0.6652
Epoch 9/9
8083/8083 [==============================] - 4s - loss: 1.0571 - acc: 0.7117 - val_loss: 1.2812 - val_acc: 0.6785
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 18
and this is the gene: LR 0.0117768
activations [tanh, softsign, softsign, softmax, sigmoid, r...
batch_size 512
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene18
layer_units [252, 481, 512, 323, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen1+gene18+model.h5
nb_layers 9
optimizer RMSProp
Name: 18, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 10s - loss: 2.6437 - acc: 0.2895 - val_loss: 2.3710 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 7s - loss: 2.3448 - acc: 0.3255 - val_loss: 2.2221 - val_acc: 0.2158
Epoch 3/5
8083/8083 [==============================] - 6s - loss: 2.1937 - acc: 0.3468 - val_loss: 2.0198 - val_acc: 0.3960
Epoch 4/5
8083/8083 [==============================] - 6s - loss: 1.9428 - acc: 0.3880 - val_loss: 2.5812 - val_acc: 0.3537
Epoch 5/5
8083/8083 [==============================] - 6s - loss: 1.9398 - acc: 0.3771 - val_loss: 1.9179 - val_acc: 0.3971
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 19
and this is the gene: LR 0.00263484
activations [softsign, softsign, softsign]
batch_size 32
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene19
layer_units [287, 2, 2]
model_name lab3000_n1e1p1b2+Gen1+gene19+model.h5
nb_layers 3
optimizer RMSProp
Name: 19, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 26s - loss: 3.5529 - acc: 0.4965 - val_loss: 3.3051 - val_acc: 0.5428
Epoch 2/5
8083/8083 [==============================] - 23s - loss: 3.0525 - acc: 0.5440 - val_loss: 2.8270 - val_acc: 0.5439
Epoch 3/5
8083/8083 [==============================] - 22s - loss: 2.5954 - acc: 0.5478 - val_loss: 2.4214 - val_acc: 0.5428
Epoch 4/5
8083/8083 [==============================] - 23s - loss: 2.2302 - acc: 0.5512 - val_loss: 2.1333 - val_acc: 0.5439
Epoch 5/5
8064/8083 [============================>.] - ETA: 0s - loss: 1.9813 - acc: 0.5526
_______Stopping after 120 seconds.
8083/8083 [==============================] - 23s - loss: 1.9810 - acc: 0.5528 - val_loss: 1.9501 - val_acc: 0.5439
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 20
and this is the gene: LR 0.0138704
activations [softplus, hard_sigmoid, elu]
batch_size 128
epochs 16
gene_name lab3000_n1e1p1b2+Gen1+gene20
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen1+gene20+model.h5
nb_layers 3
optimizer Adamax
Name: 20, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 15s - loss: 1.9916 - acc: 0.4891 - val_loss: 1.5464 - val_acc: 0.6051
Epoch 2/16
8083/8083 [==============================] - 11s - loss: 1.2699 - acc: 0.7023 - val_loss: 1.1978 - val_acc: 0.7086
Epoch 3/16
8083/8083 [==============================] - 12s - loss: 0.9546 - acc: 0.7808 - val_loss: 1.0391 - val_acc: 0.7642
Epoch 4/16
8083/8083 [==============================] - 11s - loss: 0.7447 - acc: 0.8273 - val_loss: 0.9653 - val_acc: 0.7853
Epoch 5/16
8083/8083 [==============================] - 11s - loss: 0.5888 - acc: 0.8588 - val_loss: 0.9137 - val_acc: 0.7942
Epoch 6/16
8083/8083 [==============================] - 11s - loss: 0.4665 - acc: 0.8909 - val_loss: 0.8967 - val_acc: 0.8020
Epoch 7/16
8083/8083 [==============================] - 12s - loss: 0.3738 - acc: 0.9138 - val_loss: 0.8877 - val_acc: 0.8065
Epoch 8/16
8083/8083 [==============================] - 12s - loss: 0.3027 - acc: 0.9268 - val_loss: 0.9134 - val_acc: 0.8031
Epoch 9/16
8083/8083 [==============================] - 12s - loss: 0.2426 - acc: 0.9390 - val_loss: 0.9213 - val_acc: 0.8065
Epoch 10/16
8064/8083 [============================>.] - ETA: 0s - loss: 0.2082 - acc: 0.9456
_______Stopping after 120 seconds.
8083/8083 [==============================] - 12s - loss: 0.2079 - acc: 0.9457 - val_loss: 0.9374 - val_acc: 0.8087
2176/2246 [============================>.] - ETA: 0s
in the else
this is the index: 21
and this is the gene: LR 0.0138704
activations [linear, hard_sigmoid, hard_sigmoid]
batch_size 128
epochs 16
gene_name lab3000_n1e1p1b2+Gen1+gene21
layer_units [14, 392, 25]
model_name lab3000_n1e1p1b2+Gen1+gene21+model.h5
nb_layers 3
optimizer Nadam
Name: 21, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 5s - loss: 2.7984 - acc: 0.3471 - val_loss: 2.3574 - val_acc: 0.5039
Epoch 2/16
8083/8083 [==============================] - 2s - loss: 2.2183 - acc: 0.5363 - val_loss: 2.1512 - val_acc: 0.5417
Epoch 3/16
8083/8083 [==============================] - 2s - loss: 2.0453 - acc: 0.5491 - val_loss: 1.9764 - val_acc: 0.5417
Epoch 4/16
8083/8083 [==============================] - 2s - loss: 1.7766 - acc: 0.5807 - val_loss: 1.7446 - val_acc: 0.5806
Epoch 5/16
8083/8083 [==============================] - 2s - loss: 1.6005 - acc: 0.6145 - val_loss: 1.6669 - val_acc: 0.5884
Epoch 6/16
8083/8083 [==============================] - 2s - loss: 1.5022 - acc: 0.6493 - val_loss: 1.5939 - val_acc: 0.6140
Epoch 7/16
8083/8083 [==============================] - 2s - loss: 1.4297 - acc: 0.6562 - val_loss: 1.5566 - val_acc: 0.6140
Epoch 8/16
8083/8083 [==============================] - 2s - loss: 1.3733 - acc: 0.6588 - val_loss: 1.5248 - val_acc: 0.6218
Epoch 9/16
8083/8083 [==============================] - 2s - loss: 1.3282 - acc: 0.6569 - val_loss: 1.4968 - val_acc: 0.6196
Epoch 10/16
8083/8083 [==============================] - 2s - loss: 1.2902 - acc: 0.6602 - val_loss: 1.4852 - val_acc: 0.6196
Epoch 11/16
8083/8083 [==============================] - 2s - loss: 1.2537 - acc: 0.6678 - val_loss: 1.4614 - val_acc: 0.6240
Epoch 12/16
8083/8083 [==============================] - 2s - loss: 1.1888 - acc: 0.6726 - val_loss: 1.4445 - val_acc: 0.6274
Epoch 13/16
8083/8083 [==============================] - 2s - loss: 1.1442 - acc: 0.6849 - val_loss: 1.4293 - val_acc: 0.6329
Epoch 14/16
8083/8083 [==============================] - 2s - loss: 1.1114 - acc: 0.6919 - val_loss: 1.4313 - val_acc: 0.6263
Epoch 15/16
8083/8083 [==============================] - 2s - loss: 1.0864 - acc: 0.6963 - val_loss: 1.4169 - val_acc: 0.6285
Epoch 16/16
8083/8083 [==============================] - 2s - loss: 1.0611 - acc: 0.6978 - val_loss: 1.4075 - val_acc: 0.6407
2080/2246 [==========================>...] - ETA: 0s
in the else
this is the index: 22
and this is the gene: LR 0.252698
activations [tanh, softsign, softsign, softmax, sigmoid, r...
batch_size 8
epochs 9
gene_name lab3000_n1e1p1b2+Gen1+gene22
layer_units [252, 481, 165, 512, 323, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen1+gene22+model.h5
nb_layers 10
optimizer sgd
Name: 22, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 72s - loss: 2.7654 - acc: 0.3509 - val_loss: 2.4712 - val_acc: 0.3537
Epoch 2/9
8080/8083 [============================>.] - ETA: 0s - loss: 2.4375 - acc: 0.3515
_______Stopping after 120 seconds.
8083/8083 [==============================] - 56s - loss: 2.4377 - acc: 0.3515 - val_loss: 2.4277 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 23
and this is the gene: LR 0.00268188
activations [tanh, hard_sigmoid, sigmoid, softsign, linear...
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene23
layer_units [252, 481, 165, 512, 323, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen1+gene23+model.h5
nb_layers 10
optimizer sgd
Name: 23, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 59s - loss: 3.1920 - acc: 0.3476 - val_loss: 2.7653 - val_acc: 0.3537
Epoch 2/5
8080/8083 [============================>.] - ETA: 0s - loss: 2.6479 - acc: 0.3516
_______Stopping after 120 seconds.
8083/8083 [==============================] - 62s - loss: 2.6482 - acc: 0.3515 - val_loss: 2.5705 - val_acc: 0.3537
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 24
and this is the gene: LR 0.252698
activations [linear, softplus, relu, relu, softplus]
batch_size 512
epochs 9
gene_name lab3000_n1e1p1b2+Gen1+gene24
layer_units [96, 345, 345, 198, 276]
model_name lab3000_n1e1p1b2+Gen1+gene24+model.h5
nb_layers 5
optimizer Adam
Name: 24, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 10s - loss: 2.5326 - acc: 0.3692 - val_loss: 2.0397 - val_acc: 0.4828
Epoch 2/9
8083/8083 [==============================] - 5s - loss: 1.8191 - acc: 0.5512 - val_loss: 1.6327 - val_acc: 0.6018
Epoch 3/9
8083/8083 [==============================] - 4s - loss: 1.4818 - acc: 0.6740 - val_loss: 1.4385 - val_acc: 0.6774
Epoch 4/9
8083/8083 [==============================] - 3s - loss: 1.1659 - acc: 0.7301 - val_loss: 1.2639 - val_acc: 0.7075
Epoch 5/9
8083/8083 [==============================] - 3s - loss: 0.9109 - acc: 0.7742 - val_loss: 1.2190 - val_acc: 0.7286
Epoch 6/9
8083/8083 [==============================] - 3s - loss: 0.6968 - acc: 0.8231 - val_loss: 1.2339 - val_acc: 0.7508
Epoch 7/9
8083/8083 [==============================] - 3s - loss: 0.5317 - acc: 0.8637 - val_loss: 1.3956 - val_acc: 0.7319
Epoch 8/9
8083/8083 [==============================] - 4s - loss: 0.4178 - acc: 0.8922 - val_loss: 1.4544 - val_acc: 0.7308
Epoch 9/9
8083/8083 [==============================] - 4s - loss: 0.3376 - acc: 0.9179 - val_loss: 1.4546 - val_acc: 0.7430
2144/2246 [===========================>..] - ETA: 0s
in the else
this is the index: 25
and this is the gene: LR 0.0138704
activations [linear, hard_sigmoid]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene25
layer_units [4, 204]
model_name lab3000_n1e1p1b2+Gen1+gene25+model.h5
nb_layers 2
optimizer Nadam
Name: 25, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 15s - loss: 1.5349 - acc: 0.6387 - val_loss: 1.1704 - val_acc: 0.7075
Epoch 2/5
8083/8083 [==============================] - 13s - loss: 0.9469 - acc: 0.7705 - val_loss: 1.0628 - val_acc: 0.7453
Epoch 3/5
8083/8083 [==============================] - 11s - loss: 0.6754 - acc: 0.8353 - val_loss: 1.1228 - val_acc: 0.7408
Epoch 4/5
8083/8083 [==============================] - 12s - loss: 0.5226 - acc: 0.8713 - val_loss: 1.1578 - val_acc: 0.7531
Epoch 5/5
8083/8083 [==============================] - 12s - loss: 0.4212 - acc: 0.8999 - val_loss: 1.2023 - val_acc: 0.7586
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 26
and this is the gene: LR 0.00263484
activations [softsign, relu, softmax, softsign, elu]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene26
layer_units [96, 345, 345, 198, 276]
model_name lab3000_n1e1p1b2+Gen1+gene26+model.h5
nb_layers 5
optimizer Adam
Name: 26, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 56s - loss: 1.7009 - acc: 0.5719 - val_loss: 1.4266 - val_acc: 0.6174
Epoch 2/5
8083/8083 [==============================] - 53s - loss: 1.2286 - acc: 0.6754 - val_loss: 1.2572 - val_acc: 0.6863
Epoch 3/5
8080/8083 [============================>.] - ETA: 0s - loss: 0.9013 - acc: 0.7682
_______Stopping after 120 seconds.
8083/8083 [==============================] - 51s - loss: 0.9010 - acc: 0.7683 - val_loss: 1.0827 - val_acc: 0.7430
2246/2246 [==============================] - 5s
in the else
this is the index: 27
and this is the gene: LR 0.0539181
activations [softplus, hard_sigmoid, softplus]
batch_size 32
epochs 19
gene_name lab3000_n1e1p1b2+Gen1+gene27
layer_units [345, 345, 276]
model_name lab3000_n1e1p1b2+Gen1+gene27+model.h5
nb_layers 3
optimizer RMSProp
Name: 27, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 37s - loss: 1.6374 - acc: 0.6097 - val_loss: 1.2608 - val_acc: 0.6986
Epoch 2/19
8083/8083 [==============================] - 28s - loss: 0.9946 - acc: 0.7688 - val_loss: 1.0036 - val_acc: 0.7720
Epoch 3/19
8083/8083 [==============================] - 27s - loss: 0.6930 - acc: 0.8351 - val_loss: 0.9286 - val_acc: 0.8009
Epoch 4/19
8064/8083 [============================>.] - ETA: 0s - loss: 0.5053 - acc: 0.8772
_______Stopping after 120 seconds.
8083/8083 [==============================] - 41s - loss: 0.5061 - acc: 0.8771 - val_loss: 0.9812 - val_acc: 0.7820
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 28
and this is the gene: LR 0.0916252
activations [relu, sigmoid, elu, sigmoid, hard_sigmoid, ta...
batch_size 8
epochs 4
gene_name lab3000_n1e1p1b2+Gen1+gene28
layer_units [252, 481, 165, 512, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen1+gene28+model.h5
nb_layers 9
optimizer sgd
Name: 28, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/4
8083/8083 [==============================] - 62s - loss: 2.4472 - acc: 0.3442 - val_loss: 2.4191 - val_acc: 0.3537
Epoch 2/4
8080/8083 [============================>.] - ETA: 0s - loss: 2.4198 - acc: 0.3426
_______Stopping after 120 seconds.
8083/8083 [==============================] - 59s - loss: 2.4200 - acc: 0.3426 - val_loss: 2.4168 - val_acc: 0.3537
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 29
and this is the gene: LR 0.00263484
activations [softplus]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen1+gene29
layer_units [287]
model_name lab3000_n1e1p1b2+Gen1+gene29+model.h5
nb_layers 1
optimizer Adam
Name: 29, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 15s - loss: 1.6924 - acc: 0.6214 - val_loss: 1.1677 - val_acc: 0.7275
Epoch 2/5
8083/8083 [==============================] - 9s - loss: 0.7760 - acc: 0.8265 - val_loss: 0.9350 - val_acc: 0.8120
Epoch 3/5
8083/8083 [==============================] - 9s - loss: 0.4672 - acc: 0.8982 - val_loss: 0.8632 - val_acc: 0.8287
Epoch 4/5
8083/8083 [==============================] - 9s - loss: 0.3173 - acc: 0.9307 - val_loss: 0.8687 - val_acc: 0.8220
Epoch 5/5
8083/8083 [==============================] - 9s - loss: 0.2459 - acc: 0.9438 - val_loss: 0.8818 - val_acc: 0.8198
2208/2246 [============================>.] - ETA: 0s
in the else
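One generation of budget-constrained training already separates the population: the shallow softsign/softplus genes (8, 15, 25, 29) reach roughly 0.75-0.84 validation accuracy inside the 120-second cap, while the deep sgd stacks plateau at a validation accuracy of 0.3537, which is presumably what a constant majority-class guess earns on this split. A quick baseline check, assuming y_val holds the one-hot validation labels built by load_data() (a hypothetical variable name):

import numpy as np

# Accuracy of always predicting the most frequent class: the fraction
# of validation samples belonging to that class.
majority_rate = np.bincount(y_val.argmax(axis=1)).max() / float(len(y_val))
print(majority_rate)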
In [16]:
n1e1p1b2_clade.select_parents()
In [17]:
n1e1p1b2_clade.breed()
In [18]:
n1e1p1b2_clade.current_generation
Out[18]:
2
In [19]:
n1e1p1b2_clade.seed_models()
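seed_models() writes a fresh +model.h5 file for every Generation 2 gene before training begins. A sketch of how a gene row plausibly maps onto a compiled Keras network (an assumed reconstruction from the genotype fields above, not the actual GAFC1 builder; applying the gene's LR would additionally require instantiating the optimizer class with lr=gene['LR']):

from keras.models import Sequential
from keras.layers import Dense

def build_from_gene(gene, input_dim=10000, nb_classes=46):
    # One Dense layer per layer_units entry with its matching activation,
    # then a 46-way softmax head for the Reuters topics.
    model = Sequential()
    model.add(Dense(gene['layer_units'][0], activation=gene['activations'][0],
                    input_shape=(input_dim,)))
    for units, act in zip(gene['layer_units'][1:], gene['activations'][1:]):
        model.add(Dense(units, activation=act))
    model.add(Dense(nb_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=gene['optimizer'].lower(),
                  metrics=['accuracy'])
    model.save(gene['model_name'])  # e.g. lab3000_n1e1p1b2+Gen2+gene0+model.h5
    return model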
In [20]:
n1e1p1b2_clade.grow_models()
this is the index: 0
and this is the gene: LR 0.0539181
activations [relu, tanh, tanh]
batch_size 32
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene0
layer_units [481, 415, 123]
model_name lab3000_n1e1p1b2+Gen2+gene0+model.h5
nb_layers 3
optimizer RMSProp
Name: 0, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 41s - loss: 1.1038 - acc: 0.7470 - val_loss: 0.8049 - val_acc: 0.8220
Epoch 2/5
8083/8083 [==============================] - 34s - loss: 0.4563 - acc: 0.8988 - val_loss: 0.7969 - val_acc: 0.8198
Epoch 3/5
8083/8083 [==============================] - 36s - loss: 0.2643 - acc: 0.9400 - val_loss: 0.8117 - val_acc: 0.8276
Epoch 4/5
8064/8083 [============================>.] - ETA: 0s - loss: 0.1961 - acc: 0.9492
_______Stopping after 120 seconds.
8083/8083 [==============================] - 40s - loss: 0.1962 - acc: 0.9492 - val_loss: 0.9015 - val_acc: 0.8231
2208/2246 [============================>.] - ETA: 0s
this is the index: 1
and this is the gene: LR 0.0539181
activations [softplus, hard_sigmoid, elu]
batch_size 32
epochs 19
gene_name lab3000_n1e1p1b2+Gen2+gene1
layer_units [345, 345, 276]
model_name lab3000_n1e1p1b2+Gen2+gene1+model.h5
nb_layers 3
optimizer Adamax
Name: 1, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 30s - loss: 1.6536 - acc: 0.6091 - val_loss: 1.2209 - val_acc: 0.7130
Epoch 2/19
8083/8083 [==============================] - 24s - loss: 0.9777 - acc: 0.7734 - val_loss: 1.0095 - val_acc: 0.7764
Epoch 3/19
8083/8083 [==============================] - 24s - loss: 0.7036 - acc: 0.8374 - val_loss: 0.9843 - val_acc: 0.7786
Epoch 4/19
8083/8083 [==============================] - 25s - loss: 0.5002 - acc: 0.8820 - val_loss: 0.8916 - val_acc: 0.8198
Epoch 5/19
8064/8083 [============================>.] - ETA: 0s - loss: 0.3773 - acc: 0.9077
_______Stopping after 120 seconds.
8083/8083 [==============================] - 24s - loss: 0.3775 - acc: 0.9076 - val_loss: 0.8908 - val_acc: 0.8187
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 2
and this is the gene: LR 0.00263484
activations [softsign]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene2
layer_units [99]
model_name lab3000_n1e1p1b2+Gen2+gene2+model.h5
nb_layers 1
optimizer Adam
Name: 2, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 9s - loss: 1.9349 - acc: 0.6073 - val_loss: 1.2945 - val_acc: 0.7019
Epoch 2/5
8083/8083 [==============================] - 5s - loss: 0.9581 - acc: 0.7991 - val_loss: 0.9828 - val_acc: 0.8042
Epoch 3/5
8083/8083 [==============================] - 5s - loss: 0.6145 - acc: 0.8847 - val_loss: 0.8588 - val_acc: 0.8298
Epoch 4/5
8083/8083 [==============================] - 4s - loss: 0.4204 - acc: 0.9218 - val_loss: 0.8001 - val_acc: 0.8343
Epoch 5/5
8083/8083 [==============================] - 4s - loss: 0.3071 - acc: 0.9396 - val_loss: 0.7809 - val_acc: 0.8343
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 3
and this is the gene: LR 0.00263484
activations [softplus]
batch_size 128
epochs 19
gene_name lab3000_n1e1p1b2+Gen2+gene3
layer_units [287]
model_name lab3000_n1e1p1b2+Gen2+gene3+model.h5
nb_layers 1
optimizer RMSProp
Name: 3, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 11s - loss: 1.4673 - acc: 0.6719 - val_loss: 1.1238 - val_acc: 0.7442
Epoch 2/19
8083/8083 [==============================] - 7s - loss: 0.7469 - acc: 0.8378 - val_loss: 0.9343 - val_acc: 0.8053
Epoch 3/19
8083/8083 [==============================] - 7s - loss: 0.4669 - acc: 0.9004 - val_loss: 0.9148 - val_acc: 0.8042
Epoch 4/19
8083/8083 [==============================] - 7s - loss: 0.3281 - acc: 0.9285 - val_loss: 0.9273 - val_acc: 0.8109
Epoch 5/19
8083/8083 [==============================] - 7s - loss: 0.2608 - acc: 0.9406 - val_loss: 0.9466 - val_acc: 0.8109
Epoch 6/19
8083/8083 [==============================] - 7s - loss: 0.2128 - acc: 0.9463 - val_loss: 0.9653 - val_acc: 0.8231
Epoch 7/19
8083/8083 [==============================] - 8s - loss: 0.1891 - acc: 0.9504 - val_loss: 1.0031 - val_acc: 0.8120
Epoch 8/19
8083/8083 [==============================] - 8s - loss: 0.1701 - acc: 0.9526 - val_loss: 1.1011 - val_acc: 0.7998
Epoch 9/19
8083/8083 [==============================] - 9s - loss: 0.1628 - acc: 0.9546 - val_loss: 1.0805 - val_acc: 0.8020
Epoch 10/19
8083/8083 [==============================] - 7s - loss: 0.1509 - acc: 0.9572 - val_loss: 1.1452 - val_acc: 0.7887
Epoch 11/19
8083/8083 [==============================] - 8s - loss: 0.1532 - acc: 0.9547 - val_loss: 1.1138 - val_acc: 0.7964
Epoch 12/19
8083/8083 [==============================] - 7s - loss: 0.1421 - acc: 0.9557 - val_loss: 1.1281 - val_acc: 0.7976
Epoch 13/19
8083/8083 [==============================] - 8s - loss: 0.1357 - acc: 0.9563 - val_loss: 1.2889 - val_acc: 0.7809
Epoch 14/19
8083/8083 [==============================] - 8s - loss: 0.1333 - acc: 0.9566 - val_loss: 1.1556 - val_acc: 0.7953
Epoch 15/19
8064/8083 [============================>.] - ETA: 0s - loss: 0.1296 - acc: 0.9552
_______Stopping after 120 seconds.
8083/8083 [==============================] - 8s - loss: 0.1295 - acc: 0.9552 - val_loss: 1.1522 - val_acc: 0.7998
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 4
and this is the gene: LR 0.0138704
activations [elu]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene4
layer_units [99]
model_name lab3000_n1e1p1b2+Gen2+gene4+model.h5
nb_layers 1
optimizer Adamax
Name: 4, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 7s - loss: 1.7656 - acc: 0.6359 - val_loss: 1.2242 - val_acc: 0.7353
Epoch 2/5
8083/8083 [==============================] - 4s - loss: 0.9228 - acc: 0.8069 - val_loss: 1.0071 - val_acc: 0.7931
Epoch 3/5
8083/8083 [==============================] - 4s - loss: 0.6625 - acc: 0.8660 - val_loss: 0.9130 - val_acc: 0.8131
Epoch 4/5
8083/8083 [==============================] - 3s - loss: 0.5054 - acc: 0.9036 - val_loss: 0.8606 - val_acc: 0.8198
Epoch 5/5
8083/8083 [==============================] - 3s - loss: 0.4029 - acc: 0.9201 - val_loss: 0.8334 - val_acc: 0.8287
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 5
and this is the gene: LR 0.0138704
activations [softsign, softsign, softsign]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene5
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen2+gene5+model.h5
nb_layers 3
optimizer Adamax
Name: 5, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 16s - loss: 1.5917 - acc: 0.6402 - val_loss: 1.1317 - val_acc: 0.7197
Epoch 2/5
8083/8083 [==============================] - 11s - loss: 0.8086 - acc: 0.8180 - val_loss: 0.8989 - val_acc: 0.7953
Epoch 3/5
8083/8083 [==============================] - 11s - loss: 0.4998 - acc: 0.8940 - val_loss: 0.8458 - val_acc: 0.8165
Epoch 4/5
8083/8083 [==============================] - 11s - loss: 0.3316 - acc: 0.9302 - val_loss: 0.8377 - val_acc: 0.8209
Epoch 5/5
8083/8083 [==============================] - 13s - loss: 0.2332 - acc: 0.9443 - val_loss: 0.8667 - val_acc: 0.8076
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 6
and this is the gene: LR 0.0539181
activations [softplus]
batch_size 128
epochs 19
gene_name lab3000_n1e1p1b2+Gen2+gene6
layer_units [276]
model_name lab3000_n1e1p1b2+Gen2+gene6+model.h5
nb_layers 1
optimizer Adam
Name: 6, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 13s - loss: 1.6921 - acc: 0.6155 - val_loss: 1.1593 - val_acc: 0.7308
Epoch 2/19
8083/8083 [==============================] - 8s - loss: 0.7880 - acc: 0.8243 - val_loss: 0.9476 - val_acc: 0.8076
Epoch 3/19
8083/8083 [==============================] - 8s - loss: 0.4710 - acc: 0.8999 - val_loss: 0.8564 - val_acc: 0.8209
Epoch 4/19
8083/8083 [==============================] - 8s - loss: 0.3236 - acc: 0.9302 - val_loss: 0.8818 - val_acc: 0.8220
Epoch 5/19
8083/8083 [==============================] - 11s - loss: 0.2445 - acc: 0.9428 - val_loss: 0.9054 - val_acc: 0.8154
Epoch 6/19
8083/8083 [==============================] - 10s - loss: 0.2015 - acc: 0.9501 - val_loss: 0.9264 - val_acc: 0.8131
Epoch 7/19
8083/8083 [==============================] - 9s - loss: 0.1792 - acc: 0.9537 - val_loss: 0.9350 - val_acc: 0.8154
Epoch 8/19
8083/8083 [==============================] - 9s - loss: 0.1614 - acc: 0.9560 - val_loss: 0.9480 - val_acc: 0.8142
Epoch 9/19
8083/8083 [==============================] - 8s - loss: 0.1529 - acc: 0.9548 - val_loss: 1.0022 - val_acc: 0.8087
Epoch 10/19
8083/8083 [==============================] - 9s - loss: 0.1454 - acc: 0.9569 - val_loss: 0.9674 - val_acc: 0.8154
Epoch 11/19
8083/8083 [==============================] - 8s - loss: 0.1396 - acc: 0.9579 - val_loss: 0.9752 - val_acc: 0.8187
Epoch 12/19
8083/8083 [==============================] - 8s - loss: 0.1336 - acc: 0.9571 - val_loss: 0.9783 - val_acc: 0.8053
Epoch 13/19
8064/8083 [============================>.] - ETA: 0s - loss: 0.1286 - acc: 0.9594
_______Stopping after 120 seconds.
8083/8083 [==============================] - 9s - loss: 0.1285 - acc: 0.9594 - val_loss: 0.9841 - val_acc: 0.8098
2176/2246 [============================>.] - ETA: 0s
in the else
this is the index: 7
and this is the gene: LR 0.0539181
activations [softplus, hard_sigmoid, softplus, softplus, h...
batch_size 32
epochs 19
gene_name lab3000_n1e1p1b2+Gen2+gene7
layer_units [252, 481, 512, 323, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen2+gene7+model.h5
nb_layers 9
optimizer RMSProp
Name: 7, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 29s - loss: 2.4875 - acc: 0.3292 - val_loss: 2.4339 - val_acc: 0.3537
Epoch 2/19
8083/8083 [==============================] - 25s - loss: 2.4255 - acc: 0.3478 - val_loss: 2.4296 - val_acc: 0.3537
Epoch 3/19
8083/8083 [==============================] - 26s - loss: 2.4223 - acc: 0.3515 - val_loss: 2.4167 - val_acc: 0.3537
Epoch 4/19
8083/8083 [==============================] - 26s - loss: 2.4167 - acc: 0.3515 - val_loss: 2.4254 - val_acc: 0.3537
Epoch 5/19
8064/8083 [============================>.] - ETA: 0s - loss: 2.4163 - acc: 0.3512
_______Stopping after 120 seconds.
8083/8083 [==============================] - 24s - loss: 2.4157 - acc: 0.3515 - val_loss: 2.4146 - val_acc: 0.3537
2246/2246 [==============================] - 4s
in the else
this is the index: 8
and this is the gene: LR 0.00263484
activations [softsign]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene8
layer_units [287]
model_name lab3000_n1e1p1b2+Gen2+gene8+model.h5
nb_layers 1
optimizer Adam
Name: 8, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 12s - loss: 1.5243 - acc: 0.6802 - val_loss: 0.9816 - val_acc: 0.7909
Epoch 2/5
8083/8083 [==============================] - 8s - loss: 0.5939 - acc: 0.8786 - val_loss: 0.8040 - val_acc: 0.8343
Epoch 3/5
8083/8083 [==============================] - 8s - loss: 0.3257 - acc: 0.9334 - val_loss: 0.7925 - val_acc: 0.8298
Epoch 4/5
8083/8083 [==============================] - 8s - loss: 0.2212 - acc: 0.9478 - val_loss: 0.7943 - val_acc: 0.8331
Epoch 5/5
8083/8083 [==============================] - 8s - loss: 0.1695 - acc: 0.9524 - val_loss: 0.8144 - val_acc: 0.8220
2176/2246 [============================>.] - ETA: 0s
in the else
this is the index: 9
and this is the gene: LR 0.00263484
activations [softplus, hard_sigmoid, softplus]
batch_size 32
epochs 19
gene_name lab3000_n1e1p1b2+Gen2+gene9
layer_units [345, 345, 276]
model_name lab3000_n1e1p1b2+Gen2+gene9+model.h5
nb_layers 3
optimizer Adam
Name: 9, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/19
8083/8083 [==============================] - 33s - loss: 1.6400 - acc: 0.6152 - val_loss: 1.2161 - val_acc: 0.7119
Epoch 2/19
8083/8083 [==============================] - 29s - loss: 0.8828 - acc: 0.7997 - val_loss: 0.9815 - val_acc: 0.7798
Epoch 3/19
8083/8083 [==============================] - 29s - loss: 0.5148 - acc: 0.8750 - val_loss: 0.9855 - val_acc: 0.7976
Epoch 4/19
8064/8083 [============================>.] - ETA: 0s - loss: 0.3207 - acc: 0.9184
Stopping after 120 seconds.
8083/8083 [==============================] - 29s - loss: 0.3209 - acc: 0.9183 - val_loss: 1.0003 - val_acc: 0.7931
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 10
and this is the gene: LR 0.0539181
activations [softplus, hard_sigmoid, softplus]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene10
layer_units [287, 2, 2]
model_name lab3000_n1e1p1b2+Gen2+gene10+model.h5
nb_layers 3
optimizer Adam
Name: 10, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 12s - loss: 3.6922 - acc: 0.0294 - val_loss: 3.5443 - val_acc: 0.0478
Epoch 2/5
8083/8083 [==============================] - 8s - loss: 3.3996 - acc: 0.2135 - val_loss: 3.2630 - val_acc: 0.2191
Epoch 3/5
8083/8083 [==============================] - 8s - loss: 3.1136 - acc: 0.2168 - val_loss: 2.9783 - val_acc: 0.2191
Epoch 4/5
8083/8083 [==============================] - 9s - loss: 2.8317 - acc: 0.2168 - val_loss: 2.7130 - val_acc: 0.2191
Epoch 5/5
8083/8083 [==============================] - 8s - loss: 2.5888 - acc: 0.2889 - val_loss: 2.5052 - val_acc: 0.3537
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 11
and this is the gene: LR 0.0138704
activations [softplus, hard_sigmoid, elu]
batch_size 128
epochs 16
gene_name lab3000_n1e1p1b2+Gen2+gene11
layer_units [99, 2, 2]
model_name lab3000_n1e1p1b2+Gen2+gene11+model.h5
nb_layers 3
optimizer Adamax
Name: 11, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/16
8083/8083 [==============================] - 9s - loss: 3.6122 - acc: 0.0428 - val_loss: 3.4416 - val_acc: 0.0445
Epoch 2/16
8083/8083 [==============================] - 4s - loss: 3.2848 - acc: 0.2060 - val_loss: 3.1209 - val_acc: 0.3537
Epoch 3/16
8083/8083 [==============================] - 4s - loss: 2.9685 - acc: 0.3515 - val_loss: 2.8235 - val_acc: 0.3537
Epoch 4/16
8083/8083 [==============================] - 4s - loss: 2.6918 - acc: 0.3515 - val_loss: 2.5802 - val_acc: 0.3537
Epoch 5/16
8083/8083 [==============================] - 4s - loss: 2.4763 - acc: 0.3515 - val_loss: 2.4024 - val_acc: 0.3537
Epoch 6/16
8083/8083 [==============================] - 4s - loss: 2.3286 - acc: 0.3515 - val_loss: 2.2872 - val_acc: 0.3537
Epoch 7/16
8083/8083 [==============================] - 3s - loss: 2.2362 - acc: 0.3515 - val_loss: 2.2127 - val_acc: 0.3537
Epoch 8/16
8083/8083 [==============================] - 3s - loss: 2.1782 - acc: 0.3515 - val_loss: 2.1657 - val_acc: 0.3537
Epoch 9/16
8083/8083 [==============================] - 3s - loss: 2.1382 - acc: 0.3515 - val_loss: 2.1324 - val_acc: 0.3537
Epoch 10/16
8083/8083 [==============================] - 3s - loss: 2.1084 - acc: 0.3515 - val_loss: 2.1090 - val_acc: 0.3537
Epoch 11/16
8083/8083 [==============================] - 3s - loss: 2.0839 - acc: 0.3515 - val_loss: 2.0903 - val_acc: 0.3537
Epoch 12/16
8083/8083 [==============================] - 4s - loss: 2.0625 - acc: 0.3515 - val_loss: 2.0747 - val_acc: 0.3537
Epoch 13/16
8083/8083 [==============================] - 4s - loss: 2.0431 - acc: 0.3515 - val_loss: 2.0590 - val_acc: 0.3537
Epoch 14/16
8083/8083 [==============================] - 4s - loss: 2.0255 - acc: 0.3515 - val_loss: 2.0476 - val_acc: 0.3537
Epoch 15/16
8083/8083 [==============================] - 4s - loss: 2.0099 - acc: 0.3515 - val_loss: 2.0374 - val_acc: 0.3537
Epoch 16/16
8083/8083 [==============================] - 4s - loss: 1.9948 - acc: 0.3515 - val_loss: 2.0218 - val_acc: 0.3537
2176/2246 [============================>.] - ETA: 0s
in the else
this is the index: 12
and this is the gene: LR 0.00263484
activations [softsign]
batch_size 64
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene12
layer_units [99]
model_name lab3000_n1e1p1b2+Gen2+gene12+model.h5
nb_layers 1
optimizer Nadam
Name: 12, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 11s - loss: 1.3440 - acc: 0.7184 - val_loss: 0.8778 - val_acc: 0.8187
Epoch 2/5
8083/8083 [==============================] - 7s - loss: 0.4829 - acc: 0.9025 - val_loss: 0.7850 - val_acc: 0.8309
Epoch 3/5
8083/8083 [==============================] - 7s - loss: 0.2523 - acc: 0.9405 - val_loss: 0.7859 - val_acc: 0.8298
Epoch 4/5
8083/8083 [==============================] - 7s - loss: 0.1746 - acc: 0.9493 - val_loss: 0.8293 - val_acc: 0.8176
Epoch 5/5
8083/8083 [==============================] - 7s - loss: 0.1391 - acc: 0.9529 - val_loss: 0.8555 - val_acc: 0.8142
2112/2246 [===========================>..] - ETA: 0s
in the else
this is the index: 13
and this is the gene: LR 0.00263484
activations [softplus]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene13
layer_units [287]
model_name lab3000_n1e1p1b2+Gen2+gene13+model.h5
nb_layers 1
optimizer Adam
Name: 13, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 95s - loss: 1.1380 - acc: 0.7506 - val_loss: 0.7769 - val_acc: 0.8387
Epoch 2/5
8080/8083 [============================>.] - ETA: 0s - loss: 0.3751 - acc: 0.9132
Stopping after 120 seconds.
8083/8083 [==============================] - 96s - loss: 0.3751 - acc: 0.9132 - val_loss: 0.8755 - val_acc: 0.8109
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 14
and this is the gene: LR 0.00269799
activations [elu]
batch_size 512
epochs 9
gene_name lab3000_n1e1p1b2+Gen2+gene14
layer_units [497]
model_name lab3000_n1e1p1b2+Gen2+gene14+model.h5
nb_layers 1
optimizer Adam
Name: 14, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 14s - loss: 2.0266 - acc: 0.5993 - val_loss: 1.2484 - val_acc: 0.7175
Epoch 2/9
8083/8083 [==============================] - 8s - loss: 0.8208 - acc: 0.8261 - val_loss: 0.9652 - val_acc: 0.8120
Epoch 3/9
8083/8083 [==============================] - 8s - loss: 0.4794 - acc: 0.9047 - val_loss: 0.8548 - val_acc: 0.8198
Epoch 4/9
8083/8083 [==============================] - 7s - loss: 0.3160 - acc: 0.9339 - val_loss: 0.8366 - val_acc: 0.8287
Epoch 5/9
8083/8083 [==============================] - 7s - loss: 0.2258 - acc: 0.9475 - val_loss: 0.8390 - val_acc: 0.8242
Epoch 6/9
8083/8083 [==============================] - 8s - loss: 0.1800 - acc: 0.9530 - val_loss: 0.8615 - val_acc: 0.8187
Epoch 7/9
8083/8083 [==============================] - 7s - loss: 0.1455 - acc: 0.9565 - val_loss: 0.8796 - val_acc: 0.8220
Epoch 8/9
8083/8083 [==============================] - 7s - loss: 0.1319 - acc: 0.9582 - val_loss: 0.8983 - val_acc: 0.8131
Epoch 9/9
8083/8083 [==============================] - 7s - loss: 0.1163 - acc: 0.9576 - val_loss: 0.9212 - val_acc: 0.8098
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 15
and this is the gene: LR 0.00263484
activations [softplus]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene15
layer_units [287]
model_name lab3000_n1e1p1b2+Gen2+gene15+model.h5
nb_layers 1
optimizer Adam
Name: 15, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 119s - loss: 1.1341 - acc: 0.7532 - val_loss: 0.8329 - val_acc: 0.8198
Epoch 2/5
8080/8083 [============================>.] - ETA: 0s - loss: 0.3706 - acc: 0.9156
Stopping after 120 seconds.
8083/8083 [==============================] - 102s - loss: 0.3705 - acc: 0.9156 - val_loss: 0.9110 - val_acc: 0.8098
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 16
and this is the gene: LR 0.00263484
activations [softplus]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene16
layer_units [287]
model_name lab3000_n1e1p1b2+Gen2+gene16+model.h5
nb_layers 1
optimizer Adam
Name: 16, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 15s - loss: 1.5780 - acc: 0.6369 - val_loss: 1.1447 - val_acc: 0.7453
Epoch 2/5
8083/8083 [==============================] - 11s - loss: 0.7545 - acc: 0.8332 - val_loss: 0.9442 - val_acc: 0.8042
Epoch 3/5
8083/8083 [==============================] - 9s - loss: 0.4459 - acc: 0.9050 - val_loss: 0.8648 - val_acc: 0.8198
Epoch 4/5
8083/8083 [==============================] - 11s - loss: 0.3032 - acc: 0.9349 - val_loss: 0.8659 - val_acc: 0.8287
Epoch 5/5
8083/8083 [==============================] - 10s - loss: 0.2432 - acc: 0.9443 - val_loss: 0.8801 - val_acc: 0.8209
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 17
and this is the gene: LR 0.0117768
activations [hard_sigmoid, relu, tanh]
batch_size 512
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene17
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen2+gene17+model.h5
nb_layers 3
optimizer sgd
Name: 17, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 11s - loss: 3.2259 - acc: 0.2391 - val_loss: 2.7129 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 6s - loss: 2.6230 - acc: 0.3515 - val_loss: 2.5545 - val_acc: 0.3537
Epoch 3/5
8083/8083 [==============================] - 6s - loss: 2.5151 - acc: 0.3515 - val_loss: 2.4850 - val_acc: 0.3537
Epoch 4/5
8083/8083 [==============================] - 7s - loss: 2.4595 - acc: 0.3515 - val_loss: 2.4507 - val_acc: 0.3537
Epoch 5/5
8083/8083 [==============================] - 6s - loss: 2.4303 - acc: 0.3515 - val_loss: 2.4312 - val_acc: 0.3537
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 18
and this is the gene: LR 0.0117768
activations [tanh, softsign, softsign, softmax, sigmoid, r...
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene18
layer_units [287, 2, 2, 2, 2, 2, 2, 2, 2]
model_name lab3000_n1e1p1b2+Gen2+gene18+model.h5
nb_layers 9
optimizer RMSProp
Name: 18, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 12s - loss: 3.7409 - acc: 0.0432 - val_loss: 3.6465 - val_acc: 0.0578
Epoch 2/5
8083/8083 [==============================] - 8s - loss: 3.5397 - acc: 0.0615 - val_loss: 3.4277 - val_acc: 0.0578
Epoch 3/5
8083/8083 [==============================] - 8s - loss: 3.3135 - acc: 0.1339 - val_loss: 3.2055 - val_acc: 0.2191
Epoch 4/5
8083/8083 [==============================] - 8s - loss: 3.1030 - acc: 0.2168 - val_loss: 3.0131 - val_acc: 0.2191
Epoch 5/5
8083/8083 [==============================] - 9s - loss: 2.9255 - acc: 0.2168 - val_loss: 2.8545 - val_acc: 0.2191
2176/2246 [============================>.] - ETA: 0s
in the else
this is the index: 19
and this is the gene: LR 0.0117768
activations [softplus, hard_sigmoid, elu, softplus, softpl...
batch_size 512
epochs 9
gene_name lab3000_n1e1p1b2+Gen2+gene19
layer_units [416, 89, 497, 2, 2, 2, 2, 2, 2]
model_name lab3000_n1e1p1b2+Gen2+gene19+model.h5
nb_layers 9
optimizer sgd
Name: 19, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 11s - loss: 3.7853 - acc: 0.0108 - val_loss: 3.7711 - val_acc: 0.0056
Epoch 2/9
8083/8083 [==============================] - 7s - loss: 3.7508 - acc: 0.0108 - val_loss: 3.7366 - val_acc: 0.0056
Epoch 3/9
8083/8083 [==============================] - 7s - loss: 3.7166 - acc: 0.0580 - val_loss: 3.7024 - val_acc: 0.2191
Epoch 4/9
8083/8083 [==============================] - 7s - loss: 3.6826 - acc: 0.2168 - val_loss: 3.6683 - val_acc: 0.2191
Epoch 5/9
8083/8083 [==============================] - 8s - loss: 3.6488 - acc: 0.2236 - val_loss: 3.6345 - val_acc: 0.3537
Epoch 6/9
8083/8083 [==============================] - 6s - loss: 3.6152 - acc: 0.3515 - val_loss: 3.6010 - val_acc: 0.3537
Epoch 7/9
8083/8083 [==============================] - 6s - loss: 3.5819 - acc: 0.3515 - val_loss: 3.5677 - val_acc: 0.3537
Epoch 8/9
8083/8083 [==============================] - 7s - loss: 3.5489 - acc: 0.3515 - val_loss: 3.5346 - val_acc: 0.3537
Epoch 9/9
8083/8083 [==============================] - 5s - loss: 3.5160 - acc: 0.3515 - val_loss: 3.5017 - val_acc: 0.3537
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 20
and this is the gene: LR 0.0117768
activations [softsign, linear, tanh]
batch_size 512
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene20
layer_units [512, 323, 25]
model_name lab3000_n1e1p1b2+Gen2+gene20+model.h5
nb_layers 3
optimizer RMSProp
Name: 20, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 15s - loss: 2.1213 - acc: 0.6051 - val_loss: 1.5789 - val_acc: 0.7175
Epoch 2/5
8083/8083 [==============================] - 11s - loss: 1.2720 - acc: 0.8035 - val_loss: 1.2838 - val_acc: 0.7764
Epoch 3/5
8083/8083 [==============================] - 9s - loss: 0.9466 - acc: 0.8702 - val_loss: 1.1357 - val_acc: 0.8020
Epoch 4/5
8083/8083 [==============================] - 8s - loss: 0.7367 - acc: 0.9072 - val_loss: 1.0918 - val_acc: 0.8087
Epoch 5/5
8083/8083 [==============================] - 8s - loss: 0.5930 - acc: 0.9258 - val_loss: 1.0373 - val_acc: 0.7931
2246/2246 [==============================] - 6s
in the else
this is the index: 21
and this is the gene: LR 0.00269799
activations [softplus, hard_sigmoid, elu]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene21
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen2+gene21+model.h5
nb_layers 3
optimizer sgd
Name: 21, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 13s - loss: 2.6645 - acc: 0.3396 - val_loss: 2.4641 - val_acc: 0.3537
Epoch 2/5
8083/8083 [==============================] - 9s - loss: 2.4270 - acc: 0.3515 - val_loss: 2.4235 - val_acc: 0.3537
Epoch 3/5
8083/8083 [==============================] - 11s - loss: 2.4036 - acc: 0.3515 - val_loss: 2.4108 - val_acc: 0.3537
Epoch 4/5
8083/8083 [==============================] - 9s - loss: 2.3939 - acc: 0.3515 - val_loss: 2.4093 - val_acc: 0.3537
Epoch 5/5
8083/8083 [==============================] - 11s - loss: 2.3845 - acc: 0.3515 - val_loss: 2.3851 - val_acc: 0.3537
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 22
and this is the gene: LR 0.0138704
activations [softplus, hard_sigmoid, elu]
batch_size 128
epochs 6
gene_name lab3000_n1e1p1b2+Gen2+gene22
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen2+gene22+model.h5
nb_layers 3
optimizer Adamax
Name: 22, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/6
8083/8083 [==============================] - 18s - loss: 1.9079 - acc: 0.5239 - val_loss: 1.4802 - val_acc: 0.6618
Epoch 2/6
8083/8083 [==============================] - 13s - loss: 1.2298 - acc: 0.7161 - val_loss: 1.1752 - val_acc: 0.7097
Epoch 3/6
8083/8083 [==============================] - 13s - loss: 0.9431 - acc: 0.7783 - val_loss: 1.0453 - val_acc: 0.7642
Epoch 4/6
8083/8083 [==============================] - 13s - loss: 0.7455 - acc: 0.8253 - val_loss: 0.9559 - val_acc: 0.7909
Epoch 5/6
8083/8083 [==============================] - 12s - loss: 0.5912 - acc: 0.8591 - val_loss: 0.9294 - val_acc: 0.8042
Epoch 6/6
8083/8083 [==============================] - 12s - loss: 0.4732 - acc: 0.8863 - val_loss: 0.9039 - val_acc: 0.8031
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 23
and this is the gene: LR 0.00263484
activations [softplus, hard_sigmoid, softplus]
batch_size 32
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene23
layer_units [384, 2, 2]
model_name lab3000_n1e1p1b2+Gen2+gene23+model.h5
nb_layers 3
optimizer Adam
Name: 23, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 36s - loss: 3.4825 - acc: 0.0933 - val_loss: 3.1649 - val_acc: 0.2191
Epoch 2/5
8083/8083 [==============================] - 34s - loss: 2.8076 - acc: 0.2166 - val_loss: 2.4854 - val_acc: 0.2180
Epoch 3/5
8083/8083 [==============================] - 33s - loss: 2.3255 - acc: 0.3390 - val_loss: 2.1985 - val_acc: 0.3537
Epoch 4/5
8064/8083 [============================>.] - ETA: 0s - loss: 2.1331 - acc: 0.3516
Stopping after 120 seconds.
8083/8083 [==============================] - 33s - loss: 2.1328 - acc: 0.3515 - val_loss: 2.0917 - val_acc: 0.3537
2246/2246 [==============================] - 5s
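Gene 23's layer_units of [384, 2, 2] illustrates the other recurring failure mode in this generation (see also genes 10, 11, 18, 19, and 24): mutation has shrunk hidden layers down to width 2, so the representation must squeeze through a 2-unit bottleneck before the 46-way softmax, which caps these networks at or below the majority-class plateau noted earlier. The notebook does not show how genes become models, but from the fields printed above a plausible builder, sketched here with made-up names rather than the actual deeplearngene API, would look like:

from keras.models import Sequential
from keras.layers import Dense

def build_from_gene(gene, input_dim=10000, nb_classes=46):
    # Hypothetical sketch: one Dense layer per (units, activation) pair.
    model = Sequential()
    for i, (units, act) in enumerate(zip(gene['layer_units'], gene['activations'])):
        if i == 0:
            model.add(Dense(units, activation=act, input_dim=input_dim))
        else:
            model.add(Dense(units, activation=act))  # a width-2 layer here is a hard bottleneck
    model.add(Dense(nb_classes, activation='softmax'))
    # gene['LR'] would have to be folded into an optimizer object; passing
    # the bare optimizer string as below uses Keras defaults instead.
    model.compile(loss='categorical_crossentropy', optimizer=gene['optimizer'],
                  metrics=['accuracy'])
    return model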
in the else
this is the index: 24
and this is the gene: LR 0.0138704
activations [softsign, softsign, softsign]
batch_size 128
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene24
layer_units [99, 2, 2]
model_name lab3000_n1e1p1b2+Gen2+gene24+model.h5
nb_layers 3
optimizer Adam
Name: 24, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 10s - loss: 3.6907 - acc: 0.3599 - val_loss: 3.6126 - val_acc: 0.4360
Epoch 2/5
8083/8083 [==============================] - 5s - loss: 3.5261 - acc: 0.4532 - val_loss: 3.4506 - val_acc: 0.4149
Epoch 3/5
8083/8083 [==============================] - 5s - loss: 3.3503 - acc: 0.4208 - val_loss: 3.2747 - val_acc: 0.4171
Epoch 4/5
8083/8083 [==============================] - 4s - loss: 3.1663 - acc: 0.4277 - val_loss: 3.0994 - val_acc: 0.4216
Epoch 5/5
8083/8083 [==============================] - 4s - loss: 2.9871 - acc: 0.4389 - val_loss: 2.9318 - val_acc: 0.4305
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 25
and this is the gene: LR 0.00263484
activations [softsign]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene25
layer_units [99]
model_name lab3000_n1e1p1b2+Gen2+gene25+model.h5
nb_layers 1
optimizer Adam
Name: 25, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 46s - loss: 1.1243 - acc: 0.7565 - val_loss: 0.7822 - val_acc: 0.8320
Epoch 2/5
8083/8083 [==============================] - 41s - loss: 0.3832 - acc: 0.9164 - val_loss: 0.7750 - val_acc: 0.8354
Epoch 3/5
8080/8083 [============================>.] - ETA: 0s - loss: 0.2235 - acc: 0.9467
Stopping after 120 seconds.
8083/8083 [==============================] - 40s - loss: 0.2235 - acc: 0.9467 - val_loss: 0.8182 - val_acc: 0.8209
2208/2246 [============================>.] - ETA: 0s
in the else
this is the index: 26
and this is the gene: LR 0.0117768
activations [softsign]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene26
layer_units [99]
model_name lab3000_n1e1p1b2+Gen2+gene26+model.h5
nb_layers 1
optimizer RMSProp
Name: 26, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8083/8083 [==============================] - 52s - loss: 1.1741 - acc: 0.7433 - val_loss: 0.9036 - val_acc: 0.8131
Epoch 2/5
8083/8083 [==============================] - 45s - loss: 0.6300 - acc: 0.8656 - val_loss: 0.8040 - val_acc: 0.8331
Epoch 3/5
8080/8083 [============================>.] - ETA: 0s - loss: 0.4637 - acc: 0.9004
Stopping after 120 seconds.
8083/8083 [==============================] - 44s - loss: 0.4637 - acc: 0.9003 - val_loss: 0.8054 - val_acc: 0.8242
2176/2246 [============================>.] - ETA: 0s
in the else
this is the index: 27
and this is the gene: LR 0.00263484
activations [softsign]
batch_size 8
epochs 9
gene_name lab3000_n1e1p1b2+Gen2+gene27
layer_units [99]
model_name lab3000_n1e1p1b2+Gen2+gene27+model.h5
nb_layers 1
optimizer sgd
Name: 27, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 39s - loss: 1.9690 - acc: 0.5473 - val_loss: 1.6048 - val_acc: 0.6307
Epoch 2/9
8083/8083 [==============================] - 28s - loss: 1.4281 - acc: 0.6871 - val_loss: 1.3734 - val_acc: 0.6952
Epoch 3/9
8083/8083 [==============================] - 29s - loss: 1.2287 - acc: 0.7255 - val_loss: 1.2431 - val_acc: 0.7130
Epoch 4/9
8080/8083 [============================>.] - ETA: 0s - loss: 1.0948 - acc: 0.7541
Stopping after 120 seconds.
8083/8083 [==============================] - 29s - loss: 1.0950 - acc: 0.7541 - val_loss: 1.1617 - val_acc: 0.7319
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 28
and this is the gene: LR 0.0138704
activations [softsign, softsign, linear]
batch_size 8
epochs 5
gene_name lab3000_n1e1p1b2+Gen2+gene28
layer_units [416, 89, 497]
model_name lab3000_n1e1p1b2+Gen2+gene28+model.h5
nb_layers 3
optimizer Adam
Name: 28, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/5
8080/8083 [============================>.] - ETA: 0s - loss: 1.0818 - acc: 0.7512
Stopping after 120 seconds.
8083/8083 [==============================] - 148s - loss: 1.0819 - acc: 0.7511 - val_loss: 0.8437 - val_acc: 0.8009
2240/2246 [============================>.] - ETA: 0s
in the else
this is the index: 29
and this is the gene: LR 0.00269799
activations [tanh, softsign, softsign, softmax, sigmoid, r...
batch_size 512
epochs 9
gene_name lab3000_n1e1p1b2+Gen2+gene29
layer_units [252, 481, 512, 323, 85, 25, 415, 351, 123]
model_name lab3000_n1e1p1b2+Gen2+gene29+model.h5
nb_layers 9
optimizer sgd
Name: 29, dtype: object
Train on 8083 samples, validate on 899 samples
Epoch 1/9
8083/8083 [==============================] - 12s - loss: 3.2584 - acc: 0.2794 - val_loss: 2.7795 - val_acc: 0.3537
Epoch 2/9
8083/8083 [==============================] - 7s - loss: 2.6770 - acc: 0.3515 - val_loss: 2.6095 - val_acc: 0.3537
Epoch 3/9
8083/8083 [==============================] - 8s - loss: 2.5802 - acc: 0.3515 - val_loss: 2.5534 - val_acc: 0.3537
Epoch 4/9
8083/8083 [==============================] - 6s - loss: 2.5299 - acc: 0.3515 - val_loss: 2.5172 - val_acc: 0.3537
Epoch 5/9
8083/8083 [==============================] - 6s - loss: 2.4966 - acc: 0.3515 - val_loss: 2.4925 - val_acc: 0.3537
Epoch 6/9
8083/8083 [==============================] - 6s - loss: 2.4735 - acc: 0.3515 - val_loss: 2.4748 - val_acc: 0.3537
Epoch 7/9
8083/8083 [==============================] - 6s - loss: 2.4577 - acc: 0.3515 - val_loss: 2.4622 - val_acc: 0.3537
Epoch 8/9
8083/8083 [==============================] - 6s - loss: 2.4459 - acc: 0.3515 - val_loss: 2.4530 - val_acc: 0.3537
Epoch 9/9
8083/8083 [==============================] - 6s - loss: 2.4379 - acc: 0.3515 - val_loss: 2.4455 - val_acc: 0.3537
2246/2246 [==============================] - 5s
in the else
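That closes out the Generation 2 training pass. Each "2246/2246" bar interleaved above is a gene's evaluation over the 2246-sample test split, and every trained network was saved to the .h5 file named in its model_name field, so any gene can be reloaded and re-scored after the run. A sketch, assuming the vectorized x_test/y_test arrays and an illustrative path (the real files live in the output folder that load_data() created):

from keras.models import load_model

# Reload one gene's trained weights and re-score it on the test split.
model = load_model('lab3000_n1e1p1b2+Gen2+gene8+model.h5')  # illustrative path
loss, acc = model.evaluate(x_test, y_test, batch_size=128)
print('test accuracy: %.4f' % acc)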
In [ ]:
Content source: lab3000/deeplearngene