In [1]:
%matplotlib inline
import math
import random

import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.datasets import load_boston  # NOTE(review): not used anywhere in this notebook — safe to drop?

import numpy as np
import tensorflow as tf  # NOTE(review): unused here; Keras runs on the Theano backend per the cell-4 output

# seaborn styling applied to all subsequent matplotlib figures
sns.set(style="ticks", color_codes=True)

In [2]:
# Load the NYC dataset; columns are numeric feature codes, the last column is the target.
data = pd.read_csv("./data/NYCnumber.csv", sep=',')
# End the cell with the frame itself: `print data.head()` is Python-2-only syntax,
# and a bare last expression gets the notebook's rich DataFrame display instead of plain text.
data.head()


        1       3       4    17    19    24    27    28           29        31
0  1282.0  2835.0  3463.0   7.3  31.5  16.2  52.2   7.7  103446100.0  410400.0
1  1491.0  2015.0  1262.0   5.3  17.4   2.2  61.1   6.1  150492800.0  354100.0
2   920.0  2455.0  2569.0   5.3   8.1  10.9  45.7   9.2   93610300.0  418300.0
3  1126.0  3167.0  1676.0   2.7  38.0  20.1  52.2  19.4   36375100.0  360000.0
4   601.0  3961.0  3578.0  34.0  24.7   4.6  42.5  12.6  110563800.0  370800.0

In [3]:
# Convert the housing data to a float numpy array.
# `.values` replaces `as_matrix()`, which was deprecated in pandas 0.23 and removed in 1.0.
data_array = data.values.astype(float)
# split data into feature and target sets (target is the last column)
X = data_array[:, :-1]
y = data_array[:, -1]
# normalize the data per feature by dividing by the maximum value in each column
X = X / X.max(axis=0)
# scale the target down (raw values are ~400,000 per the data head above)
y = y / 1000

In [4]:
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.regularizers import l2
from keras.callbacks import ModelCheckpoint

# model hyperparameters
batch_size = 256   # samples per gradient update
nb_epoch = 500     # training epochs (Keras 1.x name; `epochs` in Keras 2)

# three equally-sized hidden layers, each followed by dropout (see the model cell below)
num_hidden_1 = 1024
num_hidden_2 = 1024
num_hidden_3 = 1024
dropout = 0.15     # dropout fraction applied after each hidden layer


Using Theano backend.

In [5]:
# Build a feed-forward regressor: three tanh hidden layers, dropout after each,
# and L2 weight decay on every hidden Dense layer.
model = Sequential()

model.add(Dense(output_dim=num_hidden_1, input_dim=X.shape[1], W_regularizer=l2(0.0005)))
model.add(Activation("tanh"))
model.add(Dropout(dropout))
for hidden_units in (num_hidden_2, num_hidden_3):
    model.add(Dense(hidden_units, W_regularizer=l2(0.0005)))
    model.add(Activation("tanh"))
    model.add(Dropout(dropout))
model.add(Dense(1))  # one linear output neuron: this is a regression problem

# Checkpoint callback: write the model to disk whenever validation loss improves.
checkpoint_name = "-model_keras-regression.hdf5"
checkpointer = ModelCheckpoint(checkpoint_name, verbose=0, save_best_only=True)

# Mean squared logarithmic error suits this positive-valued regression target.
model.compile(loss='mean_squared_logarithmic_error', optimizer='adam')

# Fit with a 25% validation split (Keras splits the data into train/validation itself).
# NOTE: `nb_epoch` / `W_regularizer` / `output_dim` are Keras 1.x names.
history = model.fit(X, y, validation_split=0.25, batch_size=batch_size, nb_epoch=nb_epoch,
                    verbose=2, callbacks=[checkpointer])


Train on 1283 samples, validate on 428 samples
Epoch 1/500
0s - loss: 21.0868 - val_loss: 6.5639
Epoch 2/500
0s - loss: 7.6496 - val_loss: 5.1981
Epoch 3/500
0s - loss: 6.8303 - val_loss: 4.7373
Epoch 4/500
0s - loss: 6.3792 - val_loss: 4.3590
Epoch 5/500
0s - loss: 5.9978 - val_loss: 4.0428
Epoch 6/500
0s - loss: 5.6678 - val_loss: 3.7767
Epoch 7/500
0s - loss: 5.3811 - val_loss: 3.5470
Epoch 8/500
0s - loss: 5.1254 - val_loss: 3.3448
Epoch 9/500
0s - loss: 4.8983 - val_loss: 3.1678
Epoch 10/500
0s - loss: 4.6941 - val_loss: 3.0112
Epoch 11/500
0s - loss: 4.5181 - val_loss: 2.8746
Epoch 12/500
0s - loss: 4.3481 - val_loss: 2.7482
Epoch 13/500
0s - loss: 4.1972 - val_loss: 2.6324
Epoch 14/500
0s - loss: 4.0532 - val_loss: 2.5258
Epoch 15/500
0s - loss: 3.9207 - val_loss: 2.4273
Epoch 16/500
0s - loss: 3.7962 - val_loss: 2.3365
Epoch 17/500
0s - loss: 3.6813 - val_loss: 2.2527
Epoch 18/500
0s - loss: 3.5689 - val_loss: 2.1753
Epoch 19/500
0s - loss: 3.4727 - val_loss: 2.1034
Epoch 20/500
0s - loss: 3.3749 - val_loss: 2.0361
Epoch 21/500
0s - loss: 3.2859 - val_loss: 1.9719
Epoch 22/500
0s - loss: 3.1984 - val_loss: 1.9123
Epoch 23/500
0s - loss: 3.1161 - val_loss: 1.8552
Epoch 24/500
0s - loss: 3.0381 - val_loss: 1.8015
Epoch 25/500
0s - loss: 2.9635 - val_loss: 1.7504
Epoch 26/500
0s - loss: 2.8915 - val_loss: 1.7012
Epoch 27/500
0s - loss: 2.8220 - val_loss: 1.6545
Epoch 28/500
0s - loss: 2.7542 - val_loss: 1.6102
Epoch 29/500
0s - loss: 2.6911 - val_loss: 1.5681
Epoch 30/500
0s - loss: 2.6322 - val_loss: 1.5278
Epoch 31/500
0s - loss: 2.5731 - val_loss: 1.4890
Epoch 32/500
0s - loss: 2.5156 - val_loss: 1.4513
Epoch 33/500
0s - loss: 2.4593 - val_loss: 1.4148
Epoch 34/500
0s - loss: 2.4081 - val_loss: 1.3803
Epoch 35/500
1s - loss: 2.3571 - val_loss: 1.3476
Epoch 36/500
0s - loss: 2.3047 - val_loss: 1.3161
Epoch 37/500
0s - loss: 2.2573 - val_loss: 1.2858
Epoch 38/500
0s - loss: 2.2126 - val_loss: 1.2565
Epoch 39/500
0s - loss: 2.1690 - val_loss: 1.2289
Epoch 40/500
0s - loss: 2.1249 - val_loss: 1.2022
Epoch 41/500
0s - loss: 2.0839 - val_loss: 1.1765
Epoch 42/500
1s - loss: 2.0429 - val_loss: 1.1515
Epoch 43/500
1s - loss: 2.0047 - val_loss: 1.1273
Epoch 44/500
1s - loss: 1.9671 - val_loss: 1.1039
Epoch 45/500
1s - loss: 1.9310 - val_loss: 1.0813
Epoch 46/500
1s - loss: 1.8958 - val_loss: 1.0600
Epoch 47/500
1s - loss: 1.8634 - val_loss: 1.0395
Epoch 48/500
1s - loss: 1.8284 - val_loss: 1.0196
Epoch 49/500
1s - loss: 1.7989 - val_loss: 1.0001
Epoch 50/500
1s - loss: 1.7660 - val_loss: 0.9809
Epoch 51/500
1s - loss: 1.7339 - val_loss: 0.9625
Epoch 52/500
1s - loss: 1.7064 - val_loss: 0.9443
Epoch 53/500
1s - loss: 1.6763 - val_loss: 0.9266
Epoch 54/500
1s - loss: 1.6477 - val_loss: 0.9093
Epoch 55/500
1s - loss: 1.6182 - val_loss: 0.8930
Epoch 56/500
1s - loss: 1.5931 - val_loss: 0.8771
Epoch 57/500
1s - loss: 1.5683 - val_loss: 0.8619
Epoch 58/500
1s - loss: 1.5423 - val_loss: 0.8471
Epoch 59/500
1s - loss: 1.5173 - val_loss: 0.8324
Epoch 60/500
1s - loss: 1.4940 - val_loss: 0.8180
Epoch 61/500
1s - loss: 1.4712 - val_loss: 0.8043
Epoch 62/500
1s - loss: 1.4468 - val_loss: 0.7908
Epoch 63/500
1s - loss: 1.4248 - val_loss: 0.7778
Epoch 64/500
1s - loss: 1.4032 - val_loss: 0.7653
Epoch 65/500
1s - loss: 1.3846 - val_loss: 0.7531
Epoch 66/500
1s - loss: 1.3622 - val_loss: 0.7408
Epoch 67/500
1s - loss: 1.3424 - val_loss: 0.7287
Epoch 68/500
1s - loss: 1.3222 - val_loss: 0.7169
Epoch 69/500
1s - loss: 1.3021 - val_loss: 0.7055
Epoch 70/500
1s - loss: 1.2844 - val_loss: 0.6943
Epoch 71/500
1s - loss: 1.2652 - val_loss: 0.6833
Epoch 72/500
1s - loss: 1.2479 - val_loss: 0.6729
Epoch 73/500
1s - loss: 1.2302 - val_loss: 0.6635
Epoch 74/500
1s - loss: 1.2143 - val_loss: 0.6535
Epoch 75/500
1s - loss: 1.1956 - val_loss: 0.6433
Epoch 76/500
1s - loss: 1.1799 - val_loss: 0.6335
Epoch 77/500
1s - loss: 1.1630 - val_loss: 0.6242
Epoch 78/500
1s - loss: 1.1473 - val_loss: 0.6150
Epoch 79/500
1s - loss: 1.1315 - val_loss: 0.6057
Epoch 80/500
1s - loss: 1.1165 - val_loss: 0.5967
Epoch 81/500
1s - loss: 1.1012 - val_loss: 0.5880
Epoch 82/500
1s - loss: 1.0866 - val_loss: 0.5795
Epoch 83/500
1s - loss: 1.0720 - val_loss: 0.5711
Epoch 84/500
1s - loss: 1.0594 - val_loss: 0.5630
Epoch 85/500
1s - loss: 1.0452 - val_loss: 0.5551
Epoch 86/500
1s - loss: 1.0322 - val_loss: 0.5474
Epoch 87/500
1s - loss: 1.0192 - val_loss: 0.5397
Epoch 88/500
1s - loss: 1.0062 - val_loss: 0.5320
Epoch 89/500
1s - loss: 0.9939 - val_loss: 0.5245
Epoch 90/500
1s - loss: 0.9808 - val_loss: 0.5171
Epoch 91/500
1s - loss: 0.9689 - val_loss: 0.5101
Epoch 92/500
1s - loss: 0.9562 - val_loss: 0.5029
Epoch 93/500
2s - loss: 0.9446 - val_loss: 0.4960
Epoch 94/500
1s - loss: 0.9332 - val_loss: 0.4893
Epoch 95/500
1s - loss: 0.9220 - val_loss: 0.4828
Epoch 96/500
2s - loss: 0.9116 - val_loss: 0.4766
Epoch 97/500
2s - loss: 0.9010 - val_loss: 0.4702
Epoch 98/500
1s - loss: 0.8902 - val_loss: 0.4638
Epoch 99/500
2s - loss: 0.8788 - val_loss: 0.4578
Epoch 100/500
1s - loss: 0.8692 - val_loss: 0.4520
Epoch 101/500
1s - loss: 0.8592 - val_loss: 0.4462
Epoch 102/500
1s - loss: 0.8506 - val_loss: 0.4403
Epoch 103/500
2s - loss: 0.8404 - val_loss: 0.4347
Epoch 104/500
2s - loss: 0.8310 - val_loss: 0.4298
Epoch 105/500
2s - loss: 0.8209 - val_loss: 0.4245
Epoch 106/500
2s - loss: 0.8126 - val_loss: 0.4191
Epoch 107/500
2s - loss: 0.8041 - val_loss: 0.4140
Epoch 108/500
2s - loss: 0.7944 - val_loss: 0.4090
Epoch 109/500
1s - loss: 0.7880 - val_loss: 0.4042
Epoch 110/500
1s - loss: 0.7784 - val_loss: 0.3994
Epoch 111/500
1s - loss: 0.7702 - val_loss: 0.3946
Epoch 112/500
1s - loss: 0.7625 - val_loss: 0.3902
Epoch 113/500
1s - loss: 0.7532 - val_loss: 0.3856
Epoch 114/500
1s - loss: 0.7467 - val_loss: 0.3810
Epoch 115/500
1s - loss: 0.7387 - val_loss: 0.3768
Epoch 116/500
1s - loss: 0.7320 - val_loss: 0.3724
Epoch 117/500
1s - loss: 0.7245 - val_loss: 0.3680
Epoch 118/500
1s - loss: 0.7177 - val_loss: 0.3634
Epoch 119/500
1s - loss: 0.7096 - val_loss: 0.3591
Epoch 120/500
1s - loss: 0.7019 - val_loss: 0.3550
Epoch 121/500
1s - loss: 0.6955 - val_loss: 0.3512
Epoch 122/500
1s - loss: 0.6888 - val_loss: 0.3476
Epoch 123/500
1s - loss: 0.6827 - val_loss: 0.3439
Epoch 124/500
1s - loss: 0.6760 - val_loss: 0.3402
Epoch 125/500
1s - loss: 0.6695 - val_loss: 0.3363
Epoch 126/500
1s - loss: 0.6638 - val_loss: 0.3324
Epoch 127/500
1s - loss: 0.6577 - val_loss: 0.3287
Epoch 128/500
1s - loss: 0.6512 - val_loss: 0.3251
Epoch 129/500
0s - loss: 0.6451 - val_loss: 0.3216
Epoch 130/500
1s - loss: 0.6392 - val_loss: 0.3181
Epoch 131/500
1s - loss: 0.6324 - val_loss: 0.3147
Epoch 132/500
0s - loss: 0.6268 - val_loss: 0.3116
Epoch 133/500
1s - loss: 0.6212 - val_loss: 0.3084
Epoch 134/500
1s - loss: 0.6161 - val_loss: 0.3051
Epoch 135/500
1s - loss: 0.6105 - val_loss: 0.3018
Epoch 136/500
1s - loss: 0.6062 - val_loss: 0.2987
Epoch 137/500
1s - loss: 0.6004 - val_loss: 0.2956
Epoch 138/500
1s - loss: 0.5938 - val_loss: 0.2924
Epoch 139/500
1s - loss: 0.5894 - val_loss: 0.2893
Epoch 140/500
1s - loss: 0.5836 - val_loss: 0.2862
Epoch 141/500
1s - loss: 0.5792 - val_loss: 0.2833
Epoch 142/500
1s - loss: 0.5742 - val_loss: 0.2804
Epoch 143/500
1s - loss: 0.5691 - val_loss: 0.2777
Epoch 144/500
1s - loss: 0.5647 - val_loss: 0.2750
Epoch 145/500
1s - loss: 0.5600 - val_loss: 0.2723
Epoch 146/500
0s - loss: 0.5559 - val_loss: 0.2697
Epoch 147/500
1s - loss: 0.5504 - val_loss: 0.2671
Epoch 148/500
0s - loss: 0.5467 - val_loss: 0.2645
Epoch 149/500
1s - loss: 0.5428 - val_loss: 0.2619
Epoch 150/500
1s - loss: 0.5369 - val_loss: 0.2597
Epoch 151/500
0s - loss: 0.5346 - val_loss: 0.2581
Epoch 152/500
1s - loss: 0.5300 - val_loss: 0.2559
Epoch 153/500
1s - loss: 0.5252 - val_loss: 0.2533
Epoch 154/500
0s - loss: 0.5211 - val_loss: 0.2507
Epoch 155/500
0s - loss: 0.5171 - val_loss: 0.2481
Epoch 156/500
0s - loss: 0.5140 - val_loss: 0.2456
Epoch 157/500
1s - loss: 0.5102 - val_loss: 0.2433
Epoch 158/500
1s - loss: 0.5062 - val_loss: 0.2410
Epoch 159/500
1s - loss: 0.5027 - val_loss: 0.2388
Epoch 160/500
1s - loss: 0.4977 - val_loss: 0.2368
Epoch 161/500
1s - loss: 0.4950 - val_loss: 0.2346
Epoch 162/500
1s - loss: 0.4902 - val_loss: 0.2324
Epoch 163/500
1s - loss: 0.4859 - val_loss: 0.2302
Epoch 164/500
1s - loss: 0.4829 - val_loss: 0.2282
Epoch 165/500
1s - loss: 0.4796 - val_loss: 0.2260
Epoch 166/500
1s - loss: 0.4764 - val_loss: 0.2240
Epoch 167/500
1s - loss: 0.4738 - val_loss: 0.2223
Epoch 168/500
0s - loss: 0.4699 - val_loss: 0.2203
Epoch 169/500
1s - loss: 0.4657 - val_loss: 0.2185
Epoch 170/500
0s - loss: 0.4639 - val_loss: 0.2167
Epoch 171/500
0s - loss: 0.4595 - val_loss: 0.2149
Epoch 172/500
1s - loss: 0.4562 - val_loss: 0.2132
Epoch 173/500
1s - loss: 0.4533 - val_loss: 0.2113
Epoch 174/500
0s - loss: 0.4505 - val_loss: 0.2094
Epoch 175/500
1s - loss: 0.4477 - val_loss: 0.2077
Epoch 176/500
0s - loss: 0.4449 - val_loss: 0.2059
Epoch 177/500
0s - loss: 0.4420 - val_loss: 0.2042
Epoch 178/500
0s - loss: 0.4389 - val_loss: 0.2025
Epoch 179/500
1s - loss: 0.4366 - val_loss: 0.2007
Epoch 180/500
1s - loss: 0.4328 - val_loss: 0.1991
Epoch 181/500
1s - loss: 0.4300 - val_loss: 0.1977
Epoch 182/500
0s - loss: 0.4280 - val_loss: 0.1964
Epoch 183/500
0s - loss: 0.4250 - val_loss: 0.1949
Epoch 184/500
1s - loss: 0.4222 - val_loss: 0.1933
Epoch 185/500
1s - loss: 0.4192 - val_loss: 0.1919
Epoch 186/500
1s - loss: 0.4163 - val_loss: 0.1908
Epoch 187/500
1s - loss: 0.4136 - val_loss: 0.1894
Epoch 188/500
1s - loss: 0.4115 - val_loss: 0.1880
Epoch 189/500
0s - loss: 0.4083 - val_loss: 0.1865
Epoch 190/500
0s - loss: 0.4067 - val_loss: 0.1850
Epoch 191/500
1s - loss: 0.4049 - val_loss: 0.1836
Epoch 192/500
0s - loss: 0.4018 - val_loss: 0.1824
Epoch 193/500
0s - loss: 0.3994 - val_loss: 0.1814
Epoch 194/500
1s - loss: 0.3974 - val_loss: 0.1800
Epoch 195/500
1s - loss: 0.3957 - val_loss: 0.1786
Epoch 196/500
1s - loss: 0.3932 - val_loss: 0.1772
Epoch 197/500
1s - loss: 0.3907 - val_loss: 0.1758
Epoch 198/500
1s - loss: 0.3886 - val_loss: 0.1745
Epoch 199/500
0s - loss: 0.3865 - val_loss: 0.1731
Epoch 200/500
0s - loss: 0.3839 - val_loss: 0.1719
Epoch 201/500
1s - loss: 0.3818 - val_loss: 0.1706
Epoch 202/500
0s - loss: 0.3789 - val_loss: 0.1695
Epoch 203/500
1s - loss: 0.3777 - val_loss: 0.1685
Epoch 204/500
1s - loss: 0.3748 - val_loss: 0.1676
Epoch 205/500
1s - loss: 0.3740 - val_loss: 0.1666
Epoch 206/500
1s - loss: 0.3721 - val_loss: 0.1655
Epoch 207/500
1s - loss: 0.3698 - val_loss: 0.1643
Epoch 208/500
1s - loss: 0.3677 - val_loss: 0.1632
Epoch 209/500
1s - loss: 0.3661 - val_loss: 0.1620
Epoch 210/500
0s - loss: 0.3626 - val_loss: 0.1610
Epoch 211/500
1s - loss: 0.3622 - val_loss: 0.1604
Epoch 212/500
0s - loss: 0.3605 - val_loss: 0.1605
Epoch 213/500
0s - loss: 0.3581 - val_loss: 0.1597
Epoch 214/500
0s - loss: 0.3568 - val_loss: 0.1584
Epoch 215/500
0s - loss: 0.3544 - val_loss: 0.1572
Epoch 216/500
0s - loss: 0.3531 - val_loss: 0.1560
Epoch 217/500
0s - loss: 0.3520 - val_loss: 0.1551
Epoch 218/500
0s - loss: 0.3499 - val_loss: 0.1553
Epoch 219/500
1s - loss: 0.3485 - val_loss: 0.1542
Epoch 220/500
1s - loss: 0.3464 - val_loss: 0.1530
Epoch 221/500
0s - loss: 0.3452 - val_loss: 0.1517
Epoch 222/500
1s - loss: 0.3428 - val_loss: 0.1507
Epoch 223/500
0s - loss: 0.3414 - val_loss: 0.1497
Epoch 224/500
2s - loss: 0.3402 - val_loss: 0.1489
Epoch 225/500
0s - loss: 0.3389 - val_loss: 0.1484
Epoch 226/500
1s - loss: 0.3373 - val_loss: 0.1476
Epoch 227/500
0s - loss: 0.3353 - val_loss: 0.1466
Epoch 228/500
1s - loss: 0.3340 - val_loss: 0.1456
Epoch 229/500
1s - loss: 0.3323 - val_loss: 0.1448
Epoch 230/500
1s - loss: 0.3309 - val_loss: 0.1439
Epoch 231/500
0s - loss: 0.3290 - val_loss: 0.1431
Epoch 232/500
1s - loss: 0.3283 - val_loss: 0.1424
Epoch 233/500
1s - loss: 0.3265 - val_loss: 0.1418
Epoch 234/500
1s - loss: 0.3246 - val_loss: 0.1410
Epoch 235/500
1s - loss: 0.3231 - val_loss: 0.1403
Epoch 236/500
1s - loss: 0.3219 - val_loss: 0.1395
Epoch 237/500
1s - loss: 0.3201 - val_loss: 0.1387
Epoch 238/500
1s - loss: 0.3188 - val_loss: 0.1383
Epoch 239/500
1s - loss: 0.3169 - val_loss: 0.1377
Epoch 240/500
1s - loss: 0.3161 - val_loss: 0.1371
Epoch 241/500
1s - loss: 0.3150 - val_loss: 0.1364
Epoch 242/500
1s - loss: 0.3134 - val_loss: 0.1358
Epoch 243/500
1s - loss: 0.3128 - val_loss: 0.1351
Epoch 244/500
1s - loss: 0.3115 - val_loss: 0.1346
Epoch 245/500
1s - loss: 0.3100 - val_loss: 0.1342
Epoch 246/500
1s - loss: 0.3091 - val_loss: 0.1336
Epoch 247/500
1s - loss: 0.3077 - val_loss: 0.1329
Epoch 248/500
1s - loss: 0.3058 - val_loss: 0.1323
Epoch 249/500
0s - loss: 0.3050 - val_loss: 0.1316
Epoch 250/500
1s - loss: 0.3040 - val_loss: 0.1308
Epoch 251/500
1s - loss: 0.3021 - val_loss: 0.1301
Epoch 252/500
1s - loss: 0.3011 - val_loss: 0.1294
Epoch 253/500
1s - loss: 0.2991 - val_loss: 0.1288
Epoch 254/500
1s - loss: 0.2985 - val_loss: 0.1283
Epoch 255/500
1s - loss: 0.2978 - val_loss: 0.1280
Epoch 256/500
1s - loss: 0.2961 - val_loss: 0.1280
Epoch 257/500
1s - loss: 0.2952 - val_loss: 0.1273
Epoch 258/500
1s - loss: 0.2942 - val_loss: 0.1266
Epoch 259/500
1s - loss: 0.2930 - val_loss: 0.1259
Epoch 260/500
1s - loss: 0.2923 - val_loss: 0.1254
Epoch 261/500
1s - loss: 0.2901 - val_loss: 0.1250
Epoch 262/500
1s - loss: 0.2897 - val_loss: 0.1246
Epoch 263/500
1s - loss: 0.2886 - val_loss: 0.1242
Epoch 264/500
1s - loss: 0.2873 - val_loss: 0.1237
Epoch 265/500
1s - loss: 0.2866 - val_loss: 0.1235
Epoch 266/500
1s - loss: 0.2853 - val_loss: 0.1231
Epoch 267/500
1s - loss: 0.2847 - val_loss: 0.1227
Epoch 268/500
1s - loss: 0.2835 - val_loss: 0.1220
Epoch 269/500
1s - loss: 0.2832 - val_loss: 0.1215
Epoch 270/500
1s - loss: 0.2820 - val_loss: 0.1213
Epoch 271/500
1s - loss: 0.2815 - val_loss: 0.1209
Epoch 272/500
1s - loss: 0.2801 - val_loss: 0.1206
Epoch 273/500
1s - loss: 0.2797 - val_loss: 0.1204
Epoch 274/500
1s - loss: 0.2786 - val_loss: 0.1200
Epoch 275/500
1s - loss: 0.2771 - val_loss: 0.1199
Epoch 276/500
1s - loss: 0.2770 - val_loss: 0.1193
Epoch 277/500
1s - loss: 0.2759 - val_loss: 0.1188
Epoch 278/500
1s - loss: 0.2744 - val_loss: 0.1185
Epoch 279/500
1s - loss: 0.2744 - val_loss: 0.1181
Epoch 280/500
1s - loss: 0.2734 - val_loss: 0.1176
Epoch 281/500
1s - loss: 0.2729 - val_loss: 0.1172
Epoch 282/500
1s - loss: 0.2711 - val_loss: 0.1170
Epoch 283/500
1s - loss: 0.2703 - val_loss: 0.1167
Epoch 284/500
1s - loss: 0.2700 - val_loss: 0.1165
Epoch 285/500
1s - loss: 0.2689 - val_loss: 0.1165
Epoch 286/500
1s - loss: 0.2690 - val_loss: 0.1160
Epoch 287/500
1s - loss: 0.2677 - val_loss: 0.1153
Epoch 288/500
1s - loss: 0.2667 - val_loss: 0.1150
Epoch 289/500
1s - loss: 0.2659 - val_loss: 0.1147
Epoch 290/500
1s - loss: 0.2651 - val_loss: 0.1145
Epoch 291/500
1s - loss: 0.2637 - val_loss: 0.1142
Epoch 292/500
1s - loss: 0.2626 - val_loss: 0.1137
Epoch 293/500
1s - loss: 0.2627 - val_loss: 0.1132
Epoch 294/500
1s - loss: 0.2621 - val_loss: 0.1129
Epoch 295/500
1s - loss: 0.2612 - val_loss: 0.1128
Epoch 296/500
1s - loss: 0.2606 - val_loss: 0.1126
Epoch 297/500
0s - loss: 0.2594 - val_loss: 0.1128
Epoch 298/500
0s - loss: 0.2589 - val_loss: 0.1128
Epoch 299/500
0s - loss: 0.2588 - val_loss: 0.1127
Epoch 300/500
0s - loss: 0.2592 - val_loss: 0.1144
Epoch 301/500
0s - loss: 0.2590 - val_loss: 0.1129
Epoch 302/500
1s - loss: 0.2579 - val_loss: 0.1117
Epoch 303/500
1s - loss: 0.2565 - val_loss: 0.1116
Epoch 304/500
1s - loss: 0.2557 - val_loss: 0.1114
Epoch 305/500
1s - loss: 0.2551 - val_loss: 0.1111
Epoch 306/500
1s - loss: 0.2540 - val_loss: 0.1109
Epoch 307/500
1s - loss: 0.2532 - val_loss: 0.1108
Epoch 308/500
1s - loss: 0.2528 - val_loss: 0.1106
Epoch 309/500
1s - loss: 0.2528 - val_loss: 0.1104
Epoch 310/500
1s - loss: 0.2513 - val_loss: 0.1101
Epoch 311/500
1s - loss: 0.2512 - val_loss: 0.1099
Epoch 312/500
1s - loss: 0.2513 - val_loss: 0.1096
Epoch 313/500
1s - loss: 0.2496 - val_loss: 0.1093
Epoch 314/500
1s - loss: 0.2487 - val_loss: 0.1092
Epoch 315/500
1s - loss: 0.2487 - val_loss: 0.1092
Epoch 316/500
1s - loss: 0.2484 - val_loss: 0.1090
Epoch 317/500
1s - loss: 0.2474 - val_loss: 0.1086
Epoch 318/500
1s - loss: 0.2471 - val_loss: 0.1084
Epoch 319/500
1s - loss: 0.2464 - val_loss: 0.1083
Epoch 320/500
1s - loss: 0.2463 - val_loss: 0.1082
Epoch 321/500
1s - loss: 0.2453 - val_loss: 0.1080
Epoch 322/500
1s - loss: 0.2445 - val_loss: 0.1079
Epoch 323/500
1s - loss: 0.2443 - val_loss: 0.1078
Epoch 324/500
1s - loss: 0.2431 - val_loss: 0.1076
Epoch 325/500
1s - loss: 0.2421 - val_loss: 0.1074
Epoch 326/500
1s - loss: 0.2420 - val_loss: 0.1072
Epoch 327/500
1s - loss: 0.2422 - val_loss: 0.1071
Epoch 328/500
1s - loss: 0.2414 - val_loss: 0.1070
Epoch 329/500
1s - loss: 0.2405 - val_loss: 0.1068
Epoch 330/500
1s - loss: 0.2409 - val_loss: 0.1068
Epoch 331/500
1s - loss: 0.2398 - val_loss: 0.1066
Epoch 332/500
1s - loss: 0.2395 - val_loss: 0.1065
Epoch 333/500
1s - loss: 0.2383 - val_loss: 0.1063
Epoch 334/500
1s - loss: 0.2389 - val_loss: 0.1062
Epoch 335/500
1s - loss: 0.2379 - val_loss: 0.1061
Epoch 336/500
1s - loss: 0.2374 - val_loss: 0.1060
Epoch 337/500
1s - loss: 0.2367 - val_loss: 0.1059
Epoch 338/500
1s - loss: 0.2355 - val_loss: 0.1058
Epoch 339/500
0s - loss: 0.2352 - val_loss: 0.1059
Epoch 340/500
0s - loss: 0.2364 - val_loss: 0.1059
Epoch 341/500
1s - loss: 0.2352 - val_loss: 0.1056
Epoch 342/500
1s - loss: 0.2346 - val_loss: 0.1054
Epoch 343/500
1s - loss: 0.2346 - val_loss: 0.1054
Epoch 344/500
1s - loss: 0.2336 - val_loss: 0.1053
Epoch 345/500
1s - loss: 0.2336 - val_loss: 0.1052
Epoch 346/500
1s - loss: 0.2327 - val_loss: 0.1051
Epoch 347/500
1s - loss: 0.2326 - val_loss: 0.1050
Epoch 348/500
0s - loss: 0.2320 - val_loss: 0.1051
Epoch 349/500
0s - loss: 0.2346 - val_loss: 0.1063
Epoch 350/500
1s - loss: 0.2348 - val_loss: 0.1050
Epoch 351/500
1s - loss: 0.2323 - val_loss: 0.1048
Epoch 352/500
1s - loss: 0.2322 - val_loss: 0.1048
Epoch 353/500
1s - loss: 0.2313 - val_loss: 0.1047
Epoch 354/500
1s - loss: 0.2302 - val_loss: 0.1045
Epoch 355/500
1s - loss: 0.2293 - val_loss: 0.1044
Epoch 356/500
1s - loss: 0.2294 - val_loss: 0.1043
Epoch 357/500
0s - loss: 0.2297 - val_loss: 0.1044
Epoch 358/500
1s - loss: 0.2286 - val_loss: 0.1042
Epoch 359/500
0s - loss: 0.2288 - val_loss: 0.1043
Epoch 360/500
0s - loss: 0.2287 - val_loss: 0.1044
Epoch 361/500
0s - loss: 0.2277 - val_loss: 0.1042
Epoch 362/500
1s - loss: 0.2272 - val_loss: 0.1040
Epoch 363/500
1s - loss: 0.2265 - val_loss: 0.1040
Epoch 364/500
1s - loss: 0.2262 - val_loss: 0.1039
Epoch 365/500
1s - loss: 0.2255 - val_loss: 0.1037
Epoch 366/500
0s - loss: 0.2259 - val_loss: 0.1037
Epoch 367/500
0s - loss: 0.2253 - val_loss: 0.1037
Epoch 368/500
0s - loss: 0.2252 - val_loss: 0.1037
Epoch 369/500
0s - loss: 0.2246 - val_loss: 0.1038
Epoch 370/500
1s - loss: 0.2242 - val_loss: 0.1035
Epoch 371/500
1s - loss: 0.2233 - val_loss: 0.1032
Epoch 372/500
1s - loss: 0.2230 - val_loss: 0.1031
Epoch 373/500
0s - loss: 0.2241 - val_loss: 0.1039
Epoch 374/500
1s - loss: 0.2263 - val_loss: 0.1030
Epoch 375/500
0s - loss: 0.2229 - val_loss: 0.1034
Epoch 376/500
0s - loss: 0.2222 - val_loss: 0.1038
Epoch 377/500
0s - loss: 0.2231 - val_loss: 0.1037
Epoch 378/500
0s - loss: 0.2223 - val_loss: 0.1033
Epoch 379/500
0s - loss: 0.2214 - val_loss: 0.1031
Epoch 380/500
0s - loss: 0.2212 - val_loss: 0.1031
Epoch 381/500
1s - loss: 0.2210 - val_loss: 0.1029
Epoch 382/500
1s - loss: 0.2203 - val_loss: 0.1027
Epoch 383/500
0s - loss: 0.2213 - val_loss: 0.1033
Epoch 384/500
1s - loss: 0.2270 - val_loss: 0.1027
Epoch 385/500
0s - loss: 0.2221 - val_loss: 0.1027
Epoch 386/500
0s - loss: 0.2209 - val_loss: 0.1034
Epoch 387/500
0s - loss: 0.2198 - val_loss: 0.1033
Epoch 388/500
1s - loss: 0.2199 - val_loss: 0.1030
Epoch 389/500
0s - loss: 0.2190 - val_loss: 0.1029
Epoch 390/500
0s - loss: 0.2178 - val_loss: 0.1028
Epoch 391/500
0s - loss: 0.2173 - val_loss: 0.1029
Epoch 392/500
0s - loss: 0.2178 - val_loss: 0.1029
Epoch 393/500
0s - loss: 0.2171 - val_loss: 0.1029
Epoch 394/500
1s - loss: 0.2165 - val_loss: 0.1028
Epoch 395/500
1s - loss: 0.2177 - val_loss: 0.1024
Epoch 396/500
1s - loss: 0.2170 - val_loss: 0.1016
Epoch 397/500
1s - loss: 0.2171 - val_loss: 0.1015
Epoch 398/500
0s - loss: 0.2170 - val_loss: 0.1021
Epoch 399/500
1s - loss: 0.2157 - val_loss: 0.1021
Epoch 400/500
1s - loss: 0.2158 - val_loss: 0.1022
Epoch 401/500
1s - loss: 0.2154 - val_loss: 0.1020
Epoch 402/500
1s - loss: 0.2147 - val_loss: 0.1015
Epoch 403/500
1s - loss: 0.2147 - val_loss: 0.1020
Epoch 404/500
1s - loss: 0.2151 - val_loss: 0.1043
Epoch 405/500
0s - loss: 0.2162 - val_loss: 0.1046
Epoch 406/500
0s - loss: 0.2159 - val_loss: 0.1036
Epoch 407/500
1s - loss: 0.2143 - val_loss: 0.1028
Epoch 408/500
0s - loss: 0.2132 - val_loss: 0.1028
Epoch 409/500
1s - loss: 0.2133 - val_loss: 0.1027
Epoch 410/500
1s - loss: 0.2126 - val_loss: 0.1019
Epoch 411/500
0s - loss: 0.2120 - val_loss: 0.1020
Epoch 412/500
0s - loss: 0.2118 - val_loss: 0.1025
Epoch 413/500
0s - loss: 0.2123 - val_loss: 0.1025
Epoch 414/500
1s - loss: 0.2115 - val_loss: 0.1011
Epoch 415/500
1s - loss: 0.2124 - val_loss: 0.1006
Epoch 416/500
0s - loss: 0.2120 - val_loss: 0.1025
Epoch 417/500
0s - loss: 0.2117 - val_loss: 0.1035
Epoch 418/500
0s - loss: 0.2116 - val_loss: 0.1029
Epoch 419/500
0s - loss: 0.2110 - val_loss: 0.1013
Epoch 420/500
0s - loss: 0.2098 - val_loss: 0.1008
Epoch 421/500
0s - loss: 0.2099 - val_loss: 0.1011
Epoch 422/500
0s - loss: 0.2100 - val_loss: 0.1028
Epoch 423/500
0s - loss: 0.2098 - val_loss: 0.1023
Epoch 424/500
0s - loss: 0.2099 - val_loss: 0.1020
Epoch 425/500
0s - loss: 0.2087 - val_loss: 0.1023
Epoch 426/500
1s - loss: 0.2087 - val_loss: 0.1013
Epoch 427/500
1s - loss: 0.2093 - val_loss: 0.0998
Epoch 428/500
0s - loss: 0.2084 - val_loss: 0.1009
Epoch 429/500
0s - loss: 0.2086 - val_loss: 0.1014
Epoch 430/500
0s - loss: 0.2082 - val_loss: 0.1028
Epoch 431/500
0s - loss: 0.2081 - val_loss: 0.1021
Epoch 432/500
0s - loss: 0.2079 - val_loss: 0.1007
Epoch 433/500
0s - loss: 0.2070 - val_loss: 0.1023
Epoch 434/500
0s - loss: 0.2074 - val_loss: 0.1021
Epoch 435/500
0s - loss: 0.2065 - val_loss: 0.1006
Epoch 436/500
0s - loss: 0.2071 - val_loss: 0.1019
Epoch 437/500
0s - loss: 0.2060 - val_loss: 0.1029
Epoch 438/500
0s - loss: 0.2061 - val_loss: 0.1017
Epoch 439/500
0s - loss: 0.2056 - val_loss: 0.1000
Epoch 440/500
1s - loss: 0.2073 - val_loss: 0.0988
Epoch 441/500
0s - loss: 0.2058 - val_loss: 0.1007
Epoch 442/500
0s - loss: 0.2050 - val_loss: 0.1029
Epoch 443/500
0s - loss: 0.2052 - val_loss: 0.1007
Epoch 444/500
0s - loss: 0.2043 - val_loss: 0.1004
Epoch 445/500
0s - loss: 0.2041 - val_loss: 0.0988
Epoch 446/500
0s - loss: 0.2047 - val_loss: 0.0995
Epoch 447/500
0s - loss: 0.2044 - val_loss: 0.1015
Epoch 448/500
0s - loss: 0.2037 - val_loss: 0.1003
Epoch 449/500
1s - loss: 0.2033 - val_loss: 0.0982
Epoch 450/500
0s - loss: 0.2056 - val_loss: 0.0991
Epoch 451/500
0s - loss: 0.2036 - val_loss: 0.1035
Epoch 452/500
1s - loss: 0.2048 - val_loss: 0.1034
Epoch 453/500
1s - loss: 0.2042 - val_loss: 0.1009
Epoch 454/500
1s - loss: 0.2027 - val_loss: 0.0987
Epoch 455/500
0s - loss: 0.2023 - val_loss: 0.0992
Epoch 456/500
0s - loss: 0.2014 - val_loss: 0.0991
Epoch 457/500
1s - loss: 0.2025 - val_loss: 0.0976
Epoch 458/500
0s - loss: 0.2029 - val_loss: 0.0990
Epoch 459/500
0s - loss: 0.2021 - val_loss: 0.1036
Epoch 460/500
0s - loss: 0.2029 - val_loss: 0.1000
Epoch 461/500
1s - loss: 0.2023 - val_loss: 0.0973
Epoch 462/500
0s - loss: 0.2048 - val_loss: 0.0975
Epoch 463/500
0s - loss: 0.2018 - val_loss: 0.0996
Epoch 464/500
0s - loss: 0.2012 - val_loss: 0.1009
Epoch 465/500
0s - loss: 0.2006 - val_loss: 0.0973
Epoch 466/500
1s - loss: 0.2030 - val_loss: 0.0973
Epoch 467/500
0s - loss: 0.2019 - val_loss: 0.1016
Epoch 468/500
0s - loss: 0.2027 - val_loss: 0.1036
Epoch 469/500
0s - loss: 0.2018 - val_loss: 0.1003
Epoch 470/500
1s - loss: 0.1991 - val_loss: 0.0976
Epoch 471/500
1s - loss: 0.1992 - val_loss: 0.0984
Epoch 472/500
1s - loss: 0.1983 - val_loss: 0.1024
Epoch 473/500
1s - loss: 0.2008 - val_loss: 0.1013
Epoch 474/500
1s - loss: 0.1993 - val_loss: 0.0990
Epoch 475/500
1s - loss: 0.1976 - val_loss: 0.0973
Epoch 476/500
1s - loss: 0.1980 - val_loss: 0.0988
Epoch 477/500
1s - loss: 0.1983 - val_loss: 0.1043
Epoch 478/500
1s - loss: 0.2011 - val_loss: 0.1043
Epoch 479/500
1s - loss: 0.1984 - val_loss: 0.0976
Epoch 480/500
1s - loss: 0.1982 - val_loss: 0.0961
Epoch 481/500
0s - loss: 0.1973 - val_loss: 0.0964
Epoch 482/500
0s - loss: 0.1967 - val_loss: 0.0964
Epoch 483/500
0s - loss: 0.1968 - val_loss: 0.0972
Epoch 484/500
0s - loss: 0.1961 - val_loss: 0.0981
Epoch 485/500
1s - loss: 0.1955 - val_loss: 0.0955
Epoch 486/500
0s - loss: 0.1995 - val_loss: 0.0966
Epoch 487/500
0s - loss: 0.1978 - val_loss: 0.1052
Epoch 488/500
0s - loss: 0.2024 - val_loss: 0.1056
Epoch 489/500
0s - loss: 0.2005 - val_loss: 0.1000
Epoch 490/500
1s - loss: 0.1961 - val_loss: 0.0953
Epoch 491/500
1s - loss: 0.2022 - val_loss: 0.0950
Epoch 492/500
0s - loss: 0.1958 - val_loss: 0.0986
Epoch 493/500
0s - loss: 0.1968 - val_loss: 0.1006
Epoch 494/500
0s - loss: 0.1959 - val_loss: 0.0966
Epoch 495/500
0s - loss: 0.1932 - val_loss: 0.0956
Epoch 496/500
0s - loss: 0.1935 - val_loss: 0.0966
Epoch 497/500
1s - loss: 0.1930 - val_loss: 0.0944
Epoch 498/500
0s - loss: 0.1920 - val_loss: 0.0979
Epoch 499/500
0s - loss: 0.1940 - val_loss: 0.1006
Epoch 500/500
0s - loss: 0.1933 - val_loss: 0.0934

In [6]:
# Plot the training and validation loss curves recorded by model.fit().
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')  # was mislabeled 'accuracy' — these curves are MSLE loss values
plt.xlabel('epoch')
# the second curve comes from the 25% validation split, not a held-out test set
plt.legend(['train', 'validation'], loc='upper left')
plt.show()



In [7]:
# List every metric recorded in the training history.
# Only 'loss' and 'val_loss' appear because compile() was given no extra metrics.
print(history.history.keys())


['loss', 'val_loss']

In [8]:
# Overlay histograms of the true targets (red) and the model's predictions (blue)
# to eyeball how well the predicted distribution matches reality.
predictions = model.predict(X)[:, 0]
plot = plt.hist([y, predictions], color=['r', 'b'], alpha=0.5)


[ 446.64718628  425.23141479  407.80392456 ...,  437.40924072  381.09912109
  420.9815979 ]