Keras MNIST v1

Loading dependencies


In [1]:
from __future__ import print_function
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.optimizers import SGD
from keras.utils import np_utils
np.random.seed(1671) # for reproducibility


Using TensorFlow backend.

Defining the network and training constants


In [2]:
# network and training
NB_EPOCH = 200
BATCH_SIZE = 128
VERBOSE = 1
NB_CLASSES = 10 # number of outputs = number of digits
OPTIMIZER = SGD() # SGD optimizer
N_HIDDEN = 128
VALIDATION_SPLIT = 0.2 # how much TRAIN is reserved for VALIDATION
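
SGD() relies on its defaults here; in the Keras release used for this notebook those are assumed to be a fixed learning rate of 0.01 with no momentum, so the line above should be equivalent to the explicit form below (worth checking against your installed version):

from keras.optimizers import SGD

# explicit equivalent of SGD() under the assumed defaults of this Keras version
OPTIMIZER = SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False)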

Loading data


In [3]:
# data shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
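
mnist.load_data returns the images as 28x28 arrays of uint8 pixel values; a quick shape check confirms the 60,000/10,000 train/test split:

print(X_train.shape, y_train.shape)  # (60000, 28, 28) (60000,)
print(X_test.shape, y_test.shape)    # (10000, 28, 28) (10000,)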

Preprocessing data


In [4]:
# X_train is 60000 rows of 28x28 values --> reshaped into 60000 x 784
RESHAPE = 784  # 28 * 28 pixels per image

X_train = X_train.reshape(60000, RESHAPE)
X_test = X_test.reshape(10000, RESHAPE)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')

# normalize pixel values from [0, 255] to [0, 1]

X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# convert class vectors to binary class matrices (one-hot encoding)
y_train = np_utils.to_categorical(y_train, NB_CLASSES)
y_test = np_utils.to_categorical(y_test, NB_CLASSES)


60000 train samples
10000 test samples
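
To make the one-hot encoding concrete: each digit label becomes a length-10 vector with a single 1 at the label's index. A minimal check using the same np_utils helper:

# the digit 5 becomes [0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]
print(np_utils.to_categorical([5], NB_CLASSES))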

Designing the neural network


In [5]:
# 10 outputs
# final stage is softmax
model = Sequential()
model.add(Dense(NB_CLASSES, input_shape=(RESHAPE,)))
model.add(Activation('softmax'))
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 10)                7850      
_________________________________________________________________
activation_1 (Activation)    (None, 10)                0         
=================================================================
Total params: 7,850
Trainable params: 7,850
Non-trainable params: 0
_________________________________________________________________
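
The 7,850 parameters reported by summary() are easy to verify by hand: each of the 10 output neurons carries one weight per input pixel plus a bias term.

# 784 weights per output neuron, plus one bias each
print(RESHAPE * NB_CLASSES + NB_CLASSES)  # 7850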

Compiling the model


In [6]:
model.compile(loss='categorical_crossentropy', optimizer=OPTIMIZER, metrics=['accuracy'])
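
Categorical cross-entropy compares the softmax output with the one-hot target; for a single sample it reduces to the negative log of the probability assigned to the true class. A minimal NumPy sketch for intuition (not how Keras computes it internally):

def categorical_crossentropy(y_true, y_pred):
    # y_true is one-hot, y_pred holds softmax probabilities
    return -np.sum(y_true * np.log(y_pred))

# a confident, correct prediction yields a small loss: -log(0.9) ~= 0.105
print(categorical_crossentropy(np.array([0, 1, 0]), np.array([0.05, 0.90, 0.05])))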

Training the model


In [7]:
history = model.fit(X_train, y_train,
                    batch_size=BATCH_SIZE, epochs=NB_EPOCH,
                    verbose=VERBOSE, validation_split=VALIDATION_SPLIT)


Train on 48000 samples, validate on 12000 samples
Epoch 1/200
48000/48000 [==============================] - 2s - loss: 1.3633 - acc: 0.6796 - val_loss: 0.8904 - val_acc: 0.8246
Epoch 2/200
48000/48000 [==============================] - 1s - loss: 0.7913 - acc: 0.8272 - val_loss: 0.6572 - val_acc: 0.8546
Epoch 3/200
48000/48000 [==============================] - 1s - loss: 0.6436 - acc: 0.8497 - val_loss: 0.5625 - val_acc: 0.8681
... (epochs 4-197 omitted; loss and val_loss decrease steadily toward the values below) ...
Epoch 198/200
48000/48000 [==============================] - 1s - loss: 0.2763 - acc: 0.9231 - val_loss: 0.2758 - val_acc: 0.9236
Epoch 199/200
48000/48000 [==============================] - 1s - loss: 0.2762 - acc: 0.9229 - val_loss: 0.2757 - val_acc: 0.9241
Epoch 200/200
48000/48000 [==============================] - 1s - loss: 0.2761 - acc: 0.9230 - val_loss: 0.2756 - val_acc: 0.9241
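
The 48,000/12,000 split reported above follows from VALIDATION_SPLIT: Keras holds out the last 20% of the training arrays for validation and trains on the rest.

# 60000 training samples, 20% held out for validation
print(int(60000 * (1 - VALIDATION_SPLIT)), int(60000 * VALIDATION_SPLIT))  # 48000 12000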

Evaluating the model


In [8]:
score = model.evaluate(X_test, y_test, verbose=VERBOSE)
print("Test score: ", score[0])
print("Test accuracy: ", score[1])


Test score:  0.27738585037
Test accuracy:  0.9227
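
Since fit returned a History object, the learning curves can be plotted without retraining. A minimal sketch, assuming matplotlib is installed and that this Keras version logs accuracy under the 'acc' and 'val_acc' keys:

import matplotlib.pyplot as plt

plt.plot(history.history['acc'], label='train accuracy')
plt.plot(history.history['val_acc'], label='validation accuracy')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()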
