In [1]:
#Loading useful packages
import numpy as np
import pandas as pd
import random
import matplotlib.pyplot as plt
import os.path
import sys
import argparse
import warnings
warnings.filterwarnings('ignore')
#General-purpose machine learning packages
from sklearn.model_selection import train_test_split, KFold
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from sklearn.metrics import confusion_matrix, mean_squared_error
from sklearn.model_selection import ParameterGrid
from sklearn.gaussian_process import GaussianProcessRegressor  #GaussianProcess was removed in scikit-learn 0.20
#Keras packages
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, ActivityRegularization
from keras.callbacks import Callback, ModelCheckpoint, EarlyStopping
from keras.optimizers import RMSprop
from keras import regularizers
Using TensorFlow backend.
In [2]:
############## LOSSHISTORY CALLBACK CLASS ######################################
class LossHistory(Callback):
    """Keras callback recording training and validation loss at the end of each epoch."""
    def on_train_begin(self, logs={}):
        self.losses = []
        self.val_losses = []
    def on_epoch_end(self, epoch, logs={}):
        self.losses.append(logs.get('loss'))
        self.val_losses.append(logs.get('val_loss'))
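A minimal usage sketch of the callback (`model`, `X`, and `Y` below are placeholders, not notebook variables): it is passed to fit() and its recorded histories can be plotted afterwards.
history_cb = LossHistory()
#Placeholder model/data: any compiled Keras regressor and its training arrays
model.fit(X, Y, validation_split=0.2, epochs=50, callbacks=[history_cb])
plt.plot(history_cb.losses, label='training loss')
plt.plot(history_cb.val_losses, label='validation loss')
plt.legend()
plt.show()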
In [3]:
DATAFILE = os.path.join('data','data.csv')
TARGETFILE = os.path.join('data','target.csv')
OUTDIR = os.path.join('results')
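One caveat worth guarding against (a small sketch, not in the original notebook): ModelCheckpoint does not create missing directories, so the output folder should exist before training starts.
os.makedirs(OUTDIR, exist_ok=True)  #saving weights fails if the 'results' directory is missing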
In [4]:
def train_nn(X_tr_val, Y_tr_val):
    """5-fold cross-validated training; returns the last fold's model and its best validation loss.

    Note: the original signature took (X_tr, Y_tr, X_val, Y_val) but the body actually
    cross-validated over the global X_tr_val/Y_tr_val, so the signature is fixed here.
    """
    verbose = 1
    #Model callbacks
    filepath = os.path.join('results', 'weights.best.hdf5')
    mdlcheck = ModelCheckpoint(filepath, verbose=0, save_best_only=True)
    mdllosses = LossHistory()
    mdlstop = EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=0, mode='auto')
    #Model fit
    n_epochs = 5000
    n_batch = 68
    kf = KFold(n_splits=5)
    performance_cv = []
    models = []
    i = 1
    for tr_idx, val_idx in kf.split(X_tr_val):
        #Hyperparameters from a previous search:
        #'activation_1': 0, 'activation_2': 1, 'activation_3': 1, 'dropout_1': 0.08813572098580352,
        #'dropout_2': 0.03155693545556867, 'fit_n_batch': 68.0, 'n_nodes_1': 436.0, 'n_nodes_2': 969.0, 'n_nodes_3': 373.0
        #Build NN
        model = Sequential()
        model.add(Dense(units=436, input_dim=np.shape(X_tr_val)[1], activity_regularizer=regularizers.l2(0)))
        model.add(Activation('relu'))
        model.add(Dropout(0.0881357))
        model.add(Dense(units=969, activity_regularizer=regularizers.l2(0)))
        model.add(Activation('sigmoid'))
        model.add(Dropout(0.0315569))
        model.add(Dense(units=373, activity_regularizer=regularizers.l2(0)))
        model.add(Activation('sigmoid'))
        model.add(Dense(units=1))
        opt = RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0)
        model.compile(loss='mse', optimizer=opt)
        print("Fold:", i, "of 5")
        i = i + 1
        X_train, X_valid = X_tr_val[tr_idx], X_tr_val[val_idx]
        Y_train, Y_valid = Y_tr_val[tr_idx], Y_tr_val[val_idx]
        #Standardize features using statistics of the current training fold only
        scaler = StandardScaler().fit(X_train)
        X_train = scaler.transform(X_train)
        X_valid = scaler.transform(X_valid)
        history = model.fit(X_train, Y_train, validation_data=(X_valid, Y_valid),
                            epochs=n_epochs, batch_size=n_batch,
                            callbacks=[mdlstop, mdlcheck, mdllosses], verbose=verbose)
        #Recall the best weights and append this fold's best validation loss
        model.load_weights(filepath)
        models.append(model)
        performance_cv.append(min(mdllosses.val_losses))
    #Cross-validation mean and standard deviation of the best validation losses
    loss_std = np.std(performance_cv)
    print('Obtained loss:', np.mean(performance_cv), '(', loss_std, ')')
    #Only the model trained on the last fold is returned
    return model, min(mdllosses.val_losses)
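The hard-coded layer sizes, dropout rates, and batch size above (see the commented dictionary in the loop) appear to come from an earlier hyperparameter search. A minimal sketch of how such a search could be run with the already-imported ParameterGrid, using entirely hypothetical candidate values and a placeholder scoring routine:
param_grid = {
    'n_nodes_1': [128, 256, 436],   #hypothetical candidates
    'dropout_1': [0.0, 0.05, 0.1],
    'fit_n_batch': [32, 68, 128],
}
best_loss, best_params = np.inf, None
for params in ParameterGrid(param_grid):
    #build_and_score is a placeholder for a routine that, like train_nn,
    #builds a network from `params` and returns its cross-validated loss
    loss = build_and_score(params)
    if loss < best_loss:
        best_loss, best_params = loss, params
print('Best:', best_params, best_loss)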
In [ ]:
############## PREPARING DATA ##################################################
dataset_trans = pd.read_csv(os.path.join('data', 'dataset_trans.csv'))
target = np.asarray(dataset_trans['Y'])
pazienti = np.asarray(dataset_trans['subj'])
del dataset_trans['Y']
del dataset_trans['min_risk']
train = np.asarray(dataset_trans)
train_val_size = 0.8  #80% training+validation set and 20% test set
train_size = 0.7      #70% training set and 30% validation set
X_tr_val, X_te, Y_tr_val, Y_te = train_test_split(train, target, train_size=train_val_size, random_state=1)
X_tr, X_val, Y_tr, Y_val = train_test_split(X_tr_val, Y_tr_val, train_size=train_size, random_state=1)
#Column 0 holds the subject id; columns 1-13 are the features
paz_tr_val = X_tr_val[:, 0]
paz_tr = X_tr[:, 0]
paz_val = X_val[:, 0]
paz_te = X_te[:, 0]
X_tr_val = X_tr_val[:, 1:14]
X_tr = X_tr[:, 1:14]
X_val = X_val[:, 1:14]
X_te = X_te[:, 1:14]
#Caveat: X_te is standardized with a scaler fit on X_tr, whereas train_nn re-standardizes per fold
scaler = StandardScaler().fit(X_tr)
X_tr = scaler.transform(X_tr)
X_val = scaler.transform(X_val)
X_te = scaler.transform(X_te)
model, score = train_nn(X_tr_val, Y_tr_val)
Fold: 1 of 5
Train on 6376 samples, validate on 1594 samples
Epoch 1/5000
6376/6376 [==============================] - 2s - loss: 7138.2390 - val_loss: 6357.9258
Epoch 2/5000
6376/6376 [==============================] - 2s - loss: 6260.5433 - val_loss: 5853.2121
Epoch 3/5000
6376/6376 [==============================] - 2s - loss: 5898.4973 - val_loss: 5576.7515
[... epochs 4-191 omitted: losses decrease steadily until early stopping ...]
Epoch 192/5000
6376/6376 [==============================] - 2s - loss: 165.1806 - val_loss: 321.8971
Fold: 2 of 5
Train on 6376 samples, validate on 1594 samples
Epoch 1/5000
6376/6376 [==============================] - 2s - loss: 7012.7772 - val_loss: 6716.2876
Epoch 2/5000
6376/6376 [==============================] - 2s - loss: 6198.1992 - val_loss: 6260.5673
[... epochs 3-173 omitted ...]
Epoch 174/5000
6376/6376 [==============================] - 2s - loss: 205.2185 - val_loss: 362.5105
[remaining output truncated in the source]
In [ ]:
############## EVALUATING RESULTS #############################################
Y_te = np.squeeze(Y_te)
Y_NN = np.squeeze(model.predict(X_te))
#MSE on the test set
print('\n Score NN: ', mean_squared_error(Y_te, Y_NN))
#Boxplot of the residuals (actual minus predicted)
data_to_plot = [Y_te - Y_NN]
plt.boxplot(data_to_plot)
plt.show()
#Histogram of the residuals
plt.hist(data_to_plot, bins=40)
plt.show()
#Actual values vs. estimates
plt.plot(Y_te, marker='^')
plt.plot(Y_NN, marker='o')
plt.show()
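Since MSE is in squared units of the target, reporting RMSE and MAE as well can make the error easier to read (a small addition, not in the original notebook; mean_absolute_error comes from sklearn.metrics):
from sklearn.metrics import mean_absolute_error
rmse = np.sqrt(mean_squared_error(Y_te, Y_NN))  #error in the target's own units
mae = mean_absolute_error(Y_te, Y_NN)
print('RMSE:', rmse, ' MAE:', mae)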
In [ ]:
np.save('Y_NN',Y_NN)
np.save('Y_te',Y_te)
np.save('paz_te',paz_te)
np.save('X_te',X_te)
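The saved arrays can later be reloaded with np.load; note that np.save appends the .npy extension (a usage note, assuming the files are read back from the same working directory):
Y_NN = np.load('Y_NN.npy')
Y_te = np.load('Y_te.npy')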
In [ ]: