In [61]:
%matplotlib inline
import math
import random
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.regularizers import l2
from keras.callbacks import ModelCheckpoint
from keras.utils import np_utils
sns.set(style="ticks", color_codes=True)
In [62]:
data = pd.read_csv("./data/NYCnumber.csv", sep=',')
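A quick sanity check on what was loaded (a minimal sketch; the original run goes straight to binning):
In [ ]:
# number of rows and columns read from NYCnumber.csv
print(data.shape)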
In [81]:
# bin the continuous target (column '31') into num_classes equal-width
# buckets so the regression values can be treated as classes
num_classes = 10
b = data['31'].max() / num_classes
data['target'] = [int(min(num_classes - 1, math.floor(x / b))) for x in data['31']]
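Equal-width bins over a skewed price column can leave some classes nearly empty, so it is worth checking the resulting class balance (a minimal check, assuming the binning cell above has run):
In [ ]:
# count how many rows fall into each of the 10 price buckets
print(data['target'].value_counts().sort_index())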
In [82]:
print(data.head())
        1       3       4     17    19    24    27    28           29  \
0  1282.0  2835.0  3463.0    7.3  31.5  16.2  52.2   7.7  103446100.0
1  1491.0  2015.0  1262.0    5.3  17.4   2.2  61.1   6.1  150492800.0
2   920.0  2455.0  2569.0    5.3   8.1  10.9  45.7   9.2   93610300.0
3  1126.0  3167.0  1676.0    2.7  38.0  20.1  52.2  19.4   36375100.0
4   601.0  3961.0  3578.0   34.0  24.7   4.6  42.5  12.6  110563800.0

         31  target
0  410400.0       4
1  354100.0       3
2  418300.0       4
3  360000.0       3
4  370800.0       3
In [83]:
# convert housing data to numpy format
data_array = data.values.astype(float)
# split data into feature and target sets
# (note: the raw '31' column that the target was binned from remains
# among the features)
X = data_array[:, :-1]
y = data_array[:, -1]
# normalize the data per feature by dividing by the maximum value in each column
X = X / X.max(axis=0)
# one-hot encode the integer class labels for the softmax output
y = np_utils.to_categorical(y, num_classes)
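to_categorical turns each integer class label into a one-hot row vector, which is the form the softmax output layer expects; a small illustration with made-up labels:
In [ ]:
# class 2 of 4 becomes [0, 0, 1, 0]
print(np_utils.to_categorical(np.array([0, 2, 3]), 4))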
In [84]:
# model hyperparameters
batch_size = 256
nb_epoch = 200
num_hidden_1 = 512
num_hidden_2 = 512
num_hidden_3 = 512
dropout = 0.25
In [85]:
model = Sequential()
# three fully-connected hidden layers with ReLU activations and dropout
model.add(Dense(output_dim=num_hidden_1, input_dim=X.shape[1]))
model.add(Activation("relu"))
model.add(Dropout(dropout))
model.add(Dense(num_hidden_2))
model.add(Activation("relu"))
model.add(Dropout(dropout))
model.add(Dense(num_hidden_3))
model.add(Activation("relu"))
model.add(Dropout(dropout))
# softmax output layer gives a probability distribution over the 10 classes
model.add(Dense(num_classes))
model.add(Activation('softmax'))
# save out the model weights each time validation loss improves on previous epochs
# (ModelCheckpoint with save_best_only=True monitors val_loss by default)
checkpoint_name = "-model_keras-classification.hdf5"
checkpointer = ModelCheckpoint(checkpoint_name, verbose=0, save_best_only=True)
# categorical crossentropy error for classification problem
model.compile(loss='categorical_crossentropy', optimizer='adadelta', metrics=['accuracy'])
# fit model using a 25% validation split (keras holds out the last 25% of the samples for validation)
history = model.fit(X, y, validation_split=0.25, batch_size=batch_size, nb_epoch=nb_epoch,
verbose=2, callbacks=[checkpointer])
Train on 1283 samples, validate on 428 samples
Epoch 1/200
0s - loss: 2.2361 - acc: 0.1972 - val_loss: 2.1122 - val_acc: 0.2944
Epoch 2/200
0s - loss: 2.1011 - acc: 0.2440 - val_loss: 1.9834 - val_acc: 0.2944
Epoch 3/200
0s - loss: 2.0335 - acc: 0.2455 - val_loss: 1.9776 - val_acc: 0.3061
Epoch 4/200
0s - loss: 1.9970 - acc: 0.2860 - val_loss: 1.8622 - val_acc: 0.2944
Epoch 5/200
0s - loss: 1.9540 - acc: 0.2783 - val_loss: 1.9111 - val_acc: 0.2804
Epoch 6/200
0s - loss: 1.9103 - acc: 0.3336 - val_loss: 1.8319 - val_acc: 0.3014
Epoch 7/200
0s - loss: 1.8440 - acc: 0.3203 - val_loss: 1.7449 - val_acc: 0.2991
Epoch 8/200
0s - loss: 1.7978 - acc: 0.3422 - val_loss: 1.7179 - val_acc: 0.2991
Epoch 9/200
0s - loss: 1.7532 - acc: 0.3429 - val_loss: 1.6790 - val_acc: 0.3575
Epoch 10/200
0s - loss: 1.6946 - acc: 0.3601 - val_loss: 1.6910 - val_acc: 0.2967
Epoch 11/200
0s - loss: 1.6530 - acc: 0.3687 - val_loss: 1.8019 - val_acc: 0.2173
Epoch 12/200
0s - loss: 1.6246 - acc: 0.3835 - val_loss: 1.7664 - val_acc: 0.2336
Epoch 13/200
0s - loss: 1.5805 - acc: 0.4131 - val_loss: 1.6989 - val_acc: 0.2593
Epoch 14/200
0s - loss: 1.5558 - acc: 0.4186 - val_loss: 1.6303 - val_acc: 0.3131
Epoch 15/200
0s - loss: 1.5236 - acc: 0.4022 - val_loss: 1.5387 - val_acc: 0.3388
Epoch 16/200
0s - loss: 1.5037 - acc: 0.4147 - val_loss: 1.5209 - val_acc: 0.3435
Epoch 17/200
0s - loss: 1.4930 - acc: 0.4092 - val_loss: 1.5321 - val_acc: 0.3738
Epoch 18/200
0s - loss: 1.4692 - acc: 0.4318 - val_loss: 1.5174 - val_acc: 0.3201
Epoch 19/200
0s - loss: 1.4365 - acc: 0.4209 - val_loss: 1.5370 - val_acc: 0.3294
Epoch 20/200
0s - loss: 1.4081 - acc: 0.4443 - val_loss: 1.4403 - val_acc: 0.3458
Epoch 21/200
0s - loss: 1.3883 - acc: 0.4248 - val_loss: 1.6245 - val_acc: 0.2593
Epoch 22/200
0s - loss: 1.3935 - acc: 0.4474 - val_loss: 1.3487 - val_acc: 0.4673
Epoch 23/200
0s - loss: 1.3524 - acc: 0.4708 - val_loss: 1.3834 - val_acc: 0.3902
Epoch 24/200
0s - loss: 1.3379 - acc: 0.4638 - val_loss: 1.3859 - val_acc: 0.4112
Epoch 25/200
0s - loss: 1.3245 - acc: 0.4669 - val_loss: 1.4443 - val_acc: 0.3668
Epoch 26/200
0s - loss: 1.3103 - acc: 0.4583 - val_loss: 1.2965 - val_acc: 0.4206
Epoch 27/200
0s - loss: 1.2837 - acc: 0.4754 - val_loss: 1.2820 - val_acc: 0.4322
Epoch 28/200
0s - loss: 1.2565 - acc: 0.5058 - val_loss: 1.2677 - val_acc: 0.4393
Epoch 29/200
0s - loss: 1.2383 - acc: 0.4895 - val_loss: 1.2999 - val_acc: 0.4579
Epoch 30/200
0s - loss: 1.2554 - acc: 0.4973 - val_loss: 1.2280 - val_acc: 0.4416
Epoch 31/200
0s - loss: 1.2435 - acc: 0.4801 - val_loss: 1.2351 - val_acc: 0.4042
Epoch 32/200
0s - loss: 1.2055 - acc: 0.4957 - val_loss: 1.2503 - val_acc: 0.3808
Epoch 33/200
0s - loss: 1.2053 - acc: 0.4981 - val_loss: 1.1871 - val_acc: 0.4556
Epoch 34/200
0s - loss: 1.1738 - acc: 0.5082 - val_loss: 1.2432 - val_acc: 0.4299
Epoch 35/200
0s - loss: 1.1763 - acc: 0.5121 - val_loss: 1.1635 - val_acc: 0.4182
Epoch 36/200
0s - loss: 1.1565 - acc: 0.5090 - val_loss: 1.1796 - val_acc: 0.4883
Epoch 37/200
0s - loss: 1.1597 - acc: 0.5004 - val_loss: 1.0937 - val_acc: 0.5000
Epoch 38/200
0s - loss: 1.1221 - acc: 0.5370 - val_loss: 1.1297 - val_acc: 0.5210
Epoch 39/200
0s - loss: 1.1205 - acc: 0.5362 - val_loss: 1.2640 - val_acc: 0.3949
Epoch 40/200
0s - loss: 1.1381 - acc: 0.5472 - val_loss: 1.0856 - val_acc: 0.5280
Epoch 41/200
0s - loss: 1.0903 - acc: 0.5612 - val_loss: 1.1302 - val_acc: 0.5164
Epoch 42/200
0s - loss: 1.0858 - acc: 0.5464 - val_loss: 1.2201 - val_acc: 0.4696
Epoch 43/200
0s - loss: 1.1188 - acc: 0.5269 - val_loss: 1.3310 - val_acc: 0.3224
Epoch 44/200
0s - loss: 1.0991 - acc: 0.5440 - val_loss: 1.1365 - val_acc: 0.4696
Epoch 45/200
0s - loss: 1.0804 - acc: 0.5440 - val_loss: 1.0499 - val_acc: 0.5093
Epoch 46/200
0s - loss: 1.0403 - acc: 0.5877 - val_loss: 1.1977 - val_acc: 0.3902
Epoch 47/200
0s - loss: 1.0746 - acc: 0.5472 - val_loss: 1.0008 - val_acc: 0.5631
Epoch 48/200
0s - loss: 1.0237 - acc: 0.5846 - val_loss: 1.1022 - val_acc: 0.5678
Epoch 49/200
0s - loss: 1.0470 - acc: 0.5620 - val_loss: 0.9747 - val_acc: 0.6005
Epoch 50/200
0s - loss: 0.9951 - acc: 0.5744 - val_loss: 0.9809 - val_acc: 0.5584
Epoch 51/200
0s - loss: 1.0053 - acc: 0.5799 - val_loss: 1.0824 - val_acc: 0.4579
Epoch 52/200
0s - loss: 0.9883 - acc: 0.5939 - val_loss: 0.9798 - val_acc: 0.5421
Epoch 53/200
0s - loss: 0.9862 - acc: 0.5737 - val_loss: 1.0500 - val_acc: 0.4930
Epoch 54/200
0s - loss: 1.0027 - acc: 0.5861 - val_loss: 0.9655 - val_acc: 0.6379
Epoch 55/200
0s - loss: 0.9742 - acc: 0.6157 - val_loss: 0.9305 - val_acc: 0.6075
Epoch 56/200
0s - loss: 0.9594 - acc: 0.6235 - val_loss: 1.0282 - val_acc: 0.5981
Epoch 57/200
0s - loss: 0.9753 - acc: 0.6056 - val_loss: 0.9751 - val_acc: 0.5537
Epoch 58/200
0s - loss: 0.9514 - acc: 0.5970 - val_loss: 0.9583 - val_acc: 0.5981
Epoch 59/200
0s - loss: 0.9638 - acc: 0.5861 - val_loss: 0.9755 - val_acc: 0.5607
Epoch 60/200
0s - loss: 0.9461 - acc: 0.6228 - val_loss: 0.9877 - val_acc: 0.5958
Epoch 61/200
0s - loss: 0.9711 - acc: 0.6048 - val_loss: 0.8891 - val_acc: 0.6636
Epoch 62/200
0s - loss: 0.9312 - acc: 0.6337 - val_loss: 0.8814 - val_acc: 0.6308
Epoch 63/200
0s - loss: 0.9275 - acc: 0.6267 - val_loss: 0.8548 - val_acc: 0.6752
Epoch 64/200
0s - loss: 0.9055 - acc: 0.6454 - val_loss: 0.9179 - val_acc: 0.6005
Epoch 65/200
0s - loss: 0.9498 - acc: 0.5963 - val_loss: 0.9231 - val_acc: 0.5981
Epoch 66/200
0s - loss: 0.9440 - acc: 0.5955 - val_loss: 0.8460 - val_acc: 0.6659
Epoch 67/200
0s - loss: 0.9047 - acc: 0.6321 - val_loss: 0.8497 - val_acc: 0.6495
Epoch 68/200
0s - loss: 0.9031 - acc: 0.6220 - val_loss: 0.9360 - val_acc: 0.5397
Epoch 69/200
0s - loss: 0.9003 - acc: 0.6267 - val_loss: 0.9654 - val_acc: 0.6075
Epoch 70/200
0s - loss: 0.9288 - acc: 0.6173 - val_loss: 0.8297 - val_acc: 0.6916
Epoch 71/200
0s - loss: 0.8705 - acc: 0.6274 - val_loss: 0.8356 - val_acc: 0.5911
Epoch 72/200
0s - loss: 0.9032 - acc: 0.6002 - val_loss: 0.8210 - val_acc: 0.6519
Epoch 73/200
0s - loss: 0.8686 - acc: 0.6212 - val_loss: 0.8475 - val_acc: 0.5981
Epoch 74/200
0s - loss: 0.8763 - acc: 0.6345 - val_loss: 0.8309 - val_acc: 0.6519
Epoch 75/200
0s - loss: 0.8521 - acc: 0.6500 - val_loss: 0.8022 - val_acc: 0.6729
Epoch 76/200
0s - loss: 0.8559 - acc: 0.6469 - val_loss: 0.8114 - val_acc: 0.6822
Epoch 77/200
0s - loss: 0.8531 - acc: 0.6313 - val_loss: 0.8289 - val_acc: 0.6589
Epoch 78/200
0s - loss: 0.8800 - acc: 0.6173 - val_loss: 0.7597 - val_acc: 0.7196
Epoch 79/200
0s - loss: 0.8529 - acc: 0.6508 - val_loss: 0.7350 - val_acc: 0.7220
Epoch 80/200
0s - loss: 0.8535 - acc: 0.6422 - val_loss: 0.9880 - val_acc: 0.5047
Epoch 81/200
0s - loss: 0.8584 - acc: 0.6329 - val_loss: 0.7291 - val_acc: 0.7196
Epoch 82/200
0s - loss: 0.8332 - acc: 0.6446 - val_loss: 0.8383 - val_acc: 0.6636
Epoch 83/200
0s - loss: 0.8543 - acc: 0.6422 - val_loss: 0.7409 - val_acc: 0.7593
Epoch 84/200
0s - loss: 0.8123 - acc: 0.6633 - val_loss: 0.7185 - val_acc: 0.7687
Epoch 85/200
0s - loss: 0.8289 - acc: 0.6563 - val_loss: 0.8019 - val_acc: 0.6846
Epoch 86/200
0s - loss: 0.8264 - acc: 0.6493 - val_loss: 0.8156 - val_acc: 0.6659
Epoch 87/200
0s - loss: 0.8483 - acc: 0.6399 - val_loss: 0.7817 - val_acc: 0.6542
Epoch 88/200
0s - loss: 0.8220 - acc: 0.6532 - val_loss: 0.7244 - val_acc: 0.7383
Epoch 89/200
0s - loss: 0.8105 - acc: 0.6656 - val_loss: 0.7407 - val_acc: 0.7243
Epoch 90/200
0s - loss: 0.7967 - acc: 0.6672 - val_loss: 0.7393 - val_acc: 0.6869
Epoch 91/200
0s - loss: 0.8284 - acc: 0.6376 - val_loss: 0.6876 - val_acc: 0.7874
Epoch 92/200
0s - loss: 0.8092 - acc: 0.6477 - val_loss: 0.7751 - val_acc: 0.6776
Epoch 93/200
0s - loss: 0.8273 - acc: 0.6664 - val_loss: 0.6779 - val_acc: 0.7500
Epoch 94/200
0s - loss: 0.8104 - acc: 0.6532 - val_loss: 0.6720 - val_acc: 0.7664
Epoch 95/200
0s - loss: 0.7720 - acc: 0.6797 - val_loss: 0.7673 - val_acc: 0.6565
Epoch 96/200
0s - loss: 0.8062 - acc: 0.6648 - val_loss: 0.6955 - val_acc: 0.6986
Epoch 97/200
0s - loss: 0.7934 - acc: 0.6586 - val_loss: 0.8267 - val_acc: 0.5911
Epoch 98/200
0s - loss: 0.8078 - acc: 0.6524 - val_loss: 0.6841 - val_acc: 0.7336
Epoch 99/200
0s - loss: 0.7860 - acc: 0.6625 - val_loss: 0.6735 - val_acc: 0.7523
Epoch 100/200
0s - loss: 0.7872 - acc: 0.6586 - val_loss: 0.6890 - val_acc: 0.7313
Epoch 101/200
0s - loss: 0.7772 - acc: 0.6773 - val_loss: 0.7194 - val_acc: 0.6846
Epoch 102/200
0s - loss: 0.7711 - acc: 0.6765 - val_loss: 0.6556 - val_acc: 0.7827
Epoch 103/200
0s - loss: 0.7581 - acc: 0.6875 - val_loss: 0.7012 - val_acc: 0.7079
Epoch 104/200
0s - loss: 0.7712 - acc: 0.6703 - val_loss: 0.6908 - val_acc: 0.7336
Epoch 105/200
0s - loss: 0.7800 - acc: 0.6758 - val_loss: 0.6131 - val_acc: 0.8131
Epoch 106/200
0s - loss: 0.7388 - acc: 0.6960 - val_loss: 0.8868 - val_acc: 0.5467
Epoch 107/200
0s - loss: 0.7766 - acc: 0.6602 - val_loss: 0.6475 - val_acc: 0.7290
Epoch 108/200
0s - loss: 0.7604 - acc: 0.6687 - val_loss: 0.6318 - val_acc: 0.7874
Epoch 109/200
0s - loss: 0.7304 - acc: 0.6937 - val_loss: 0.6089 - val_acc: 0.7640
Epoch 110/200
0s - loss: 0.7542 - acc: 0.6906 - val_loss: 0.6125 - val_acc: 0.7734
Epoch 111/200
0s - loss: 0.7539 - acc: 0.6765 - val_loss: 0.6378 - val_acc: 0.7383
Epoch 112/200
0s - loss: 0.7411 - acc: 0.6906 - val_loss: 0.6463 - val_acc: 0.7243
Epoch 113/200
0s - loss: 0.7278 - acc: 0.6859 - val_loss: 0.7463 - val_acc: 0.6682
Epoch 114/200
0s - loss: 0.7652 - acc: 0.6726 - val_loss: 0.7278 - val_acc: 0.6729
Epoch 115/200
0s - loss: 0.7442 - acc: 0.6828 - val_loss: 0.5966 - val_acc: 0.8178
Epoch 116/200
0s - loss: 0.7065 - acc: 0.7101 - val_loss: 0.6902 - val_acc: 0.6893
Epoch 117/200
0s - loss: 0.7328 - acc: 0.6859 - val_loss: 0.6034 - val_acc: 0.7991
Epoch 118/200
0s - loss: 0.7208 - acc: 0.6999 - val_loss: 0.5848 - val_acc: 0.8061
Epoch 119/200
0s - loss: 0.7279 - acc: 0.6882 - val_loss: 0.5945 - val_acc: 0.8037
Epoch 120/200
0s - loss: 0.7349 - acc: 0.6836 - val_loss: 0.6791 - val_acc: 0.7500
Epoch 121/200
0s - loss: 0.7262 - acc: 0.6859 - val_loss: 0.5655 - val_acc: 0.8107
Epoch 122/200
0s - loss: 0.7224 - acc: 0.6859 - val_loss: 0.5988 - val_acc: 0.7827
Epoch 123/200
0s - loss: 0.7380 - acc: 0.6765 - val_loss: 0.7404 - val_acc: 0.6706
Epoch 124/200
0s - loss: 0.7128 - acc: 0.7038 - val_loss: 0.5784 - val_acc: 0.8178
Epoch 125/200
0s - loss: 0.7211 - acc: 0.6836 - val_loss: 0.5699 - val_acc: 0.8061
Epoch 126/200
0s - loss: 0.6934 - acc: 0.7140 - val_loss: 0.6445 - val_acc: 0.7313
Epoch 127/200
0s - loss: 0.7181 - acc: 0.6765 - val_loss: 0.5655 - val_acc: 0.8014
Epoch 128/200
0s - loss: 0.7033 - acc: 0.6976 - val_loss: 0.5914 - val_acc: 0.8014
Epoch 129/200
0s - loss: 0.6984 - acc: 0.6984 - val_loss: 0.5676 - val_acc: 0.8178
Epoch 130/200
0s - loss: 0.6960 - acc: 0.7007 - val_loss: 0.5675 - val_acc: 0.7967
Epoch 131/200
0s - loss: 0.6944 - acc: 0.6999 - val_loss: 0.5544 - val_acc: 0.8201
Epoch 132/200
0s - loss: 0.6828 - acc: 0.6952 - val_loss: 0.5870 - val_acc: 0.7593
Epoch 133/200
0s - loss: 0.7004 - acc: 0.7046 - val_loss: 0.5417 - val_acc: 0.8224
Epoch 134/200
0s - loss: 0.6955 - acc: 0.6952 - val_loss: 0.6188 - val_acc: 0.7757
Epoch 135/200
0s - loss: 0.6929 - acc: 0.7077 - val_loss: 0.7608 - val_acc: 0.6752
Epoch 136/200
0s - loss: 0.7230 - acc: 0.6929 - val_loss: 0.5344 - val_acc: 0.8388
Epoch 137/200
0s - loss: 0.6869 - acc: 0.7023 - val_loss: 0.5365 - val_acc: 0.8061
Epoch 138/200
0s - loss: 0.6732 - acc: 0.7140 - val_loss: 0.5192 - val_acc: 0.8201
Epoch 139/200
0s - loss: 0.6871 - acc: 0.7062 - val_loss: 0.7348 - val_acc: 0.6799
Epoch 140/200
0s - loss: 0.7097 - acc: 0.6882 - val_loss: 0.6458 - val_acc: 0.7336
Epoch 141/200
0s - loss: 0.6990 - acc: 0.7038 - val_loss: 0.7031 - val_acc: 0.6659
Epoch 142/200
0s - loss: 0.6984 - acc: 0.6976 - val_loss: 0.6972 - val_acc: 0.6846
Epoch 143/200
0s - loss: 0.7153 - acc: 0.6913 - val_loss: 0.5034 - val_acc: 0.8388
Epoch 144/200
0s - loss: 0.6911 - acc: 0.6984 - val_loss: 0.7389 - val_acc: 0.6285
Epoch 145/200
0s - loss: 0.6887 - acc: 0.6984 - val_loss: 0.7733 - val_acc: 0.6098
Epoch 146/200
0s - loss: 0.6965 - acc: 0.6921 - val_loss: 0.5902 - val_acc: 0.7804
Epoch 147/200
0s - loss: 0.6743 - acc: 0.7116 - val_loss: 0.6160 - val_acc: 0.7477
Epoch 148/200
0s - loss: 0.6987 - acc: 0.6952 - val_loss: 0.6577 - val_acc: 0.7150
Epoch 149/200
0s - loss: 0.6751 - acc: 0.7108 - val_loss: 0.4949 - val_acc: 0.8411
Epoch 150/200
0s - loss: 0.6684 - acc: 0.7163 - val_loss: 0.6038 - val_acc: 0.7967
Epoch 151/200
0s - loss: 0.6621 - acc: 0.7147 - val_loss: 0.5925 - val_acc: 0.7687
Epoch 152/200
0s - loss: 0.6571 - acc: 0.7171 - val_loss: 0.5140 - val_acc: 0.8435
Epoch 153/200
0s - loss: 0.6784 - acc: 0.7085 - val_loss: 0.5035 - val_acc: 0.8481
Epoch 154/200
0s - loss: 0.6665 - acc: 0.7116 - val_loss: 0.6746 - val_acc: 0.7009
Epoch 155/200
0s - loss: 0.7048 - acc: 0.6952 - val_loss: 0.5940 - val_acc: 0.7547
Epoch 156/200
0s - loss: 0.6725 - acc: 0.7093 - val_loss: 0.6361 - val_acc: 0.7383
Epoch 157/200
0s - loss: 0.6696 - acc: 0.7233 - val_loss: 0.4885 - val_acc: 0.8621
Epoch 158/200
0s - loss: 0.6546 - acc: 0.7155 - val_loss: 0.5057 - val_acc: 0.8084
Epoch 159/200
1s - loss: 0.6340 - acc: 0.7210 - val_loss: 0.4877 - val_acc: 0.8318
Epoch 160/200
0s - loss: 0.6347 - acc: 0.7373 - val_loss: 0.6030 - val_acc: 0.7266
Epoch 161/200
0s - loss: 0.6797 - acc: 0.6913 - val_loss: 0.5926 - val_acc: 0.7710
Epoch 162/200
0s - loss: 0.6662 - acc: 0.7023 - val_loss: 0.4762 - val_acc: 0.8598
Epoch 163/200
0s - loss: 0.6475 - acc: 0.7194 - val_loss: 0.4828 - val_acc: 0.8528
Epoch 164/200
0s - loss: 0.6332 - acc: 0.7249 - val_loss: 0.5176 - val_acc: 0.8107
Epoch 165/200
0s - loss: 0.6518 - acc: 0.7210 - val_loss: 0.7659 - val_acc: 0.6379
Epoch 166/200
0s - loss: 0.6728 - acc: 0.7069 - val_loss: 0.6354 - val_acc: 0.7290
Epoch 167/200
0s - loss: 0.6577 - acc: 0.7062 - val_loss: 0.4746 - val_acc: 0.8458
Epoch 168/200
0s - loss: 0.6216 - acc: 0.7506 - val_loss: 0.5999 - val_acc: 0.7617
Epoch 169/200
0s - loss: 0.6553 - acc: 0.6976 - val_loss: 0.5061 - val_acc: 0.8271
Epoch 170/200
0s - loss: 0.6465 - acc: 0.7342 - val_loss: 0.4910 - val_acc: 0.8341
Epoch 171/200
0s - loss: 0.6366 - acc: 0.7288 - val_loss: 0.6263 - val_acc: 0.7243
Epoch 172/200
0s - loss: 0.6630 - acc: 0.7249 - val_loss: 0.4563 - val_acc: 0.8598
Epoch 173/200
0s - loss: 0.6348 - acc: 0.7373 - val_loss: 0.4488 - val_acc: 0.8785
Epoch 174/200
0s - loss: 0.6235 - acc: 0.7366 - val_loss: 0.4812 - val_acc: 0.8738
Epoch 175/200
0s - loss: 0.6082 - acc: 0.7498 - val_loss: 0.5630 - val_acc: 0.7804
Epoch 176/200
0s - loss: 0.6301 - acc: 0.7280 - val_loss: 0.4602 - val_acc: 0.8435
Epoch 177/200
0s - loss: 0.6091 - acc: 0.7319 - val_loss: 0.5798 - val_acc: 0.7710
Epoch 178/200
0s - loss: 0.6267 - acc: 0.7210 - val_loss: 0.4422 - val_acc: 0.8645
Epoch 179/200
0s - loss: 0.6072 - acc: 0.7381 - val_loss: 0.5281 - val_acc: 0.7804
Epoch 180/200
0s - loss: 0.6258 - acc: 0.7147 - val_loss: 0.4252 - val_acc: 0.8808
Epoch 181/200
0s - loss: 0.6488 - acc: 0.7163 - val_loss: 0.6695 - val_acc: 0.7079
Epoch 182/200
0s - loss: 0.6699 - acc: 0.7015 - val_loss: 0.4769 - val_acc: 0.8318
Epoch 183/200
0s - loss: 0.6156 - acc: 0.7412 - val_loss: 0.5912 - val_acc: 0.7243
Epoch 184/200
0s - loss: 0.6245 - acc: 0.7412 - val_loss: 0.4778 - val_acc: 0.8364
Epoch 185/200
0s - loss: 0.6194 - acc: 0.7319 - val_loss: 0.8364 - val_acc: 0.5864
Epoch 186/200
0s - loss: 0.6709 - acc: 0.7069 - val_loss: 0.5893 - val_acc: 0.7290
Epoch 187/200
0s - loss: 0.6273 - acc: 0.7451 - val_loss: 0.6154 - val_acc: 0.7150
Epoch 188/200
0s - loss: 0.6375 - acc: 0.7171 - val_loss: 0.4704 - val_acc: 0.8201
Epoch 189/200
0s - loss: 0.6133 - acc: 0.7225 - val_loss: 0.5969 - val_acc: 0.7290
Epoch 190/200
0s - loss: 0.6204 - acc: 0.7436 - val_loss: 0.4625 - val_acc: 0.8318
Epoch 191/200
0s - loss: 0.5965 - acc: 0.7475 - val_loss: 0.5800 - val_acc: 0.7593
Epoch 192/200
0s - loss: 0.6444 - acc: 0.7217 - val_loss: 0.7426 - val_acc: 0.6402
Epoch 193/200
0s - loss: 0.6452 - acc: 0.7108 - val_loss: 0.4435 - val_acc: 0.8668
Epoch 194/200
0s - loss: 0.6078 - acc: 0.7194 - val_loss: 0.4393 - val_acc: 0.8762
Epoch 195/200
0s - loss: 0.5742 - acc: 0.7506 - val_loss: 0.4802 - val_acc: 0.8388
Epoch 196/200
0s - loss: 0.6054 - acc: 0.7311 - val_loss: 0.4661 - val_acc: 0.8435
Epoch 197/200
0s - loss: 0.5900 - acc: 0.7506 - val_loss: 0.4803 - val_acc: 0.8481
Epoch 198/200
0s - loss: 0.5870 - acc: 0.7498 - val_loss: 0.4361 - val_acc: 0.8855
Epoch 199/200
0s - loss: 0.5991 - acc: 0.7459 - val_loss: 0.4452 - val_acc: 0.8528
Epoch 200/200
0s - loss: 0.5922 - acc: 0.7506 - val_loss: 0.5052 - val_acc: 0.8061
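The checkpointer saved the weights from the epoch with the best validation loss (about 0.43, around epoch 180), not from the final epoch. A minimal sketch for reloading and re-scoring those weights (assuming the checkpoint file was written as configured above):
In [ ]:
# restore the best-validation-loss weights and evaluate on the full data
model.load_weights(checkpoint_name)
loss_and_acc = model.evaluate(X, y, verbose=0)
print(loss_and_acc)  # [categorical crossentropy, accuracy]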
In [86]:
# list all data in history
print(history.history.keys())
['acc', 'loss', 'val_acc', 'val_loss']
In [87]:
# plot history of accuracy on the training and validation data
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
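The history object also holds the loss curves, so the loss can be plotted the same way:
In [ ]:
# plot history of loss on the training and validation data
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()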
In [88]:
# predict classes for all samples and compare the distribution of
# true (red) vs. predicted (blue) labels
_y = model.predict(X)
plot = plt.hist([np.argmax(y, axis=1), np.argmax(_y, axis=1)], color=['r', 'b'], alpha=0.5)
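For a per-class view of where the predictions land, a confusion matrix complements the overlaid histograms (a minimal sketch, assuming scikit-learn is available):
In [ ]:
from sklearn.metrics import confusion_matrix
# rows are true price classes, columns are predicted classes
print(confusion_matrix(np.argmax(y, axis=1), np.argmax(_y, axis=1)))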