In [1]:
import argparse
import pickle
import gzip
from collections import Counter, defaultdict
import keras
from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import Dense
from keras.layers import MaxPool2D
from keras.layers import Dropout
from keras.layers import Flatten
from keras.layers.core import Reshape
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import OneHotEncoder
import matplotlib.pyplot as plt
from collections import defaultdict
%matplotlib inline
import json
plt.style.use('ggplot')
Using TensorFlow backend.
In [2]:
class Numbers:
"""
Class to store MNIST data
"""
def __init__(self, location):
# Load the dataset
with gzip.open(location, 'rb') as f:
train_set, valid_set, test_set = pickle.load(f)
self.train_x, self.train_y = train_set
self.test_x, self.test_y = valid_set
In [15]:
class CNN:
'''
CNN classifier
'''
def __init__(self, train_x, train_y, test_x, test_y, history, epochs = 15, batch_size=128, activation="relu" ):
'''
initialize CNN classifier
'''
self.batch_size = batch_size
self.epochs = epochs
print (len(train_x))
print (len([elem for elem in train_x]))
# TODO: reshape train_x and test_x
# reshape our data from (n, length) to (n, width, height, 1) which width*height = length
#self.train_x = np.array(np.array([train_x[i:i + 28] for i in range(0, len(train_x), 28)]))
#self.train_x = np.array([[elem[i:i + 28] for i in range(0, len(elem), 28)] for elem in train_x])
#self.test_x = np.array([[elem[i:i + 28] for i in range(0, len(elem), 28)] for elem in test_x])
self.train_y = np.array(train_y)
self.test_y = np.array(test_y)
# input image dimensions
img_x, img_y = 28, 28
input_shape = (img_x, img_y, 1)
# TODO: reshape train_x and test_x
self.train_x = train_x.reshape(train_x.shape[0], img_x, img_y, 1)
self.test_x = test_x.reshape(test_x.shape[0], img_x, img_y, 1)
print (self.train_x.shape, self.test_x.shape, self.train_y.shape, self.test_y.shape)
#print (self.train_x[0], self.test_x[0], self.train_y[0], self.test_y[0])
# normalize data to range [0, 1]
#self.train_x /= 255
#self.test_x /= 255
#print (self.train_x[0], self.test_x[0], self.train_y[0], self.test_y[0])
# TODO: one hot encoding for train_y and test_y
num_classes = len(set(train_y))
one_hot_train_y = [ [0 if elem != idx else 1 for idx in range(num_classes) ] for elem in train_y]
one_hot_test_y = [ [0 if elem != idx else 1 for idx in range(num_classes) ] for elem in test_y]
self.train_y = one_hot_train_y
self.test_y = one_hot_test_y
# TODO: build you CNN model
model = Sequential()
model.add(Conv2D(32, kernel_size=(5, 5), strides=(1, 1),
activation=activation,
input_shape=input_shape))
model.add(MaxPool2D(pool_size=(2, 2), strides=(2, 2)))
model.add(Dropout(0.5))
model.add(Flatten())
model.add(Dense(1000, activation=activation))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.SGD(lr=0.01),
metrics=['accuracy'])
self.model = model
def train(self):
'''
train CNN classifier with training data
:param x: training data input
:param y: training label input
:return:
'''
# TODO: fit in training data
self.model.fit(self.train_x, self.train_y,
batch_size=self.batch_size,
epochs=self.epochs,
verbose=1,
validation_data=(self.test_x, self.test_y),
callbacks=[history])
def evaluate(self):
'''
test CNN classifier and get accuracy
:return: accuracy
'''
acc = self.model.evaluate(self.test_x, self.test_y)
return acc
In [4]:
class AccuracyHistory(keras.callbacks.Callback):
def on_train_begin(self, logs={}):
self.acc = []
def on_epoch_end(self, batch, logs={}):
self.acc.append(logs.get('acc'))
In [5]:
#parser = argparse.ArgumentParser(description='CNN classifier options')
#parser.add_argument('--limit', type=int, default=-1,help='Restrict training to this many examples')
#args = parser.parse_args()
data = Numbers("../data/mnist.pkl.gz")
print ( data.train_x.shape, data.test_x.shape, data.train_y.shape, data.test_y.shape )
limit = 50000
history = AccuracyHistory()
epochs = 100
cnn = CNN(data.train_x[:limit], data.train_y[:limit], data.test_x, data.test_y, history, epochs = epochs, batch_size=128)
cnn.train()
acc = cnn.evaluate()
print(acc)
(50000, 784) (10000, 784) (50000,) (10000,)
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/100
50000/50000 [==============================] - 162s - loss: 1.2840 - acc: 0.5941 - val_loss: 0.4058 - val_acc: 0.8813
Epoch 2/100
50000/50000 [==============================] - 159s - loss: 0.5552 - acc: 0.8248 - val_loss: 0.3056 - val_acc: 0.9105
Epoch 3/100
50000/50000 [==============================] - 155s - loss: 0.4545 - acc: 0.8591 - val_loss: 0.2704 - val_acc: 0.9195
Epoch 4/100
50000/50000 [==============================] - 156s - loss: 0.3948 - acc: 0.8791 - val_loss: 0.2372 - val_acc: 0.9320
Epoch 5/100
50000/50000 [==============================] - 150s - loss: 0.3559 - acc: 0.8914 - val_loss: 0.2166 - val_acc: 0.9377
Epoch 6/100
50000/50000 [==============================] - 152s - loss: 0.3203 - acc: 0.9024 - val_loss: 0.1945 - val_acc: 0.9456
Epoch 7/100
50000/50000 [==============================] - 149s - loss: 0.2949 - acc: 0.9112 - val_loss: 0.1794 - val_acc: 0.9496
Epoch 8/100
50000/50000 [==============================] - 149s - loss: 0.2719 - acc: 0.9187 - val_loss: 0.1632 - val_acc: 0.9553
Epoch 9/100
50000/50000 [==============================] - 153s - loss: 0.2550 - acc: 0.9225 - val_loss: 0.1506 - val_acc: 0.9587
Epoch 10/100
50000/50000 [==============================] - 140s - loss: 0.2379 - acc: 0.9292 - val_loss: 0.1420 - val_acc: 0.9616
Epoch 11/100
50000/50000 [==============================] - 141s - loss: 0.2222 - acc: 0.9323 - val_loss: 0.1328 - val_acc: 0.9642
Epoch 12/100
50000/50000 [==============================] - 136s - loss: 0.2071 - acc: 0.9380 - val_loss: 0.1261 - val_acc: 0.9658
Epoch 13/100
50000/50000 [==============================] - 138s - loss: 0.1961 - acc: 0.9399 - val_loss: 0.1188 - val_acc: 0.9676
Epoch 14/100
50000/50000 [==============================] - 141s - loss: 0.1903 - acc: 0.9424 - val_loss: 0.1149 - val_acc: 0.9689
Epoch 15/100
50000/50000 [==============================] - 140s - loss: 0.1796 - acc: 0.9458 - val_loss: 0.1070 - val_acc: 0.9706
Epoch 16/100
50000/50000 [==============================] - 140s - loss: 0.1705 - acc: 0.9485 - val_loss: 0.1055 - val_acc: 0.9707
Epoch 17/100
50000/50000 [==============================] - 139s - loss: 0.1668 - acc: 0.9487 - val_loss: 0.1011 - val_acc: 0.9725
Epoch 18/100
50000/50000 [==============================] - 138s - loss: 0.1607 - acc: 0.9511 - val_loss: 0.0954 - val_acc: 0.9738
Epoch 19/100
50000/50000 [==============================] - 139s - loss: 0.1523 - acc: 0.9533 - val_loss: 0.0936 - val_acc: 0.9737
Epoch 20/100
50000/50000 [==============================] - 139s - loss: 0.1486 - acc: 0.9546 - val_loss: 0.0915 - val_acc: 0.9744
Epoch 21/100
50000/50000 [==============================] - 138s - loss: 0.1412 - acc: 0.9563 - val_loss: 0.0876 - val_acc: 0.9753
Epoch 22/100
50000/50000 [==============================] - 137s - loss: 0.1393 - acc: 0.9577 - val_loss: 0.0854 - val_acc: 0.9766
Epoch 23/100
50000/50000 [==============================] - 137s - loss: 0.1365 - acc: 0.9576 - val_loss: 0.0826 - val_acc: 0.9769
Epoch 24/100
50000/50000 [==============================] - 137s - loss: 0.1292 - acc: 0.9613 - val_loss: 0.0810 - val_acc: 0.9778
Epoch 25/100
50000/50000 [==============================] - 137s - loss: 0.1286 - acc: 0.9605 - val_loss: 0.0787 - val_acc: 0.9786
Epoch 26/100
50000/50000 [==============================] - 136s - loss: 0.1251 - acc: 0.9609 - val_loss: 0.0776 - val_acc: 0.9782
Epoch 27/100
50000/50000 [==============================] - 137s - loss: 0.1225 - acc: 0.9625 - val_loss: 0.0759 - val_acc: 0.9793
Epoch 28/100
50000/50000 [==============================] - 137s - loss: 0.1183 - acc: 0.9642 - val_loss: 0.0746 - val_acc: 0.9785
Epoch 29/100
50000/50000 [==============================] - 137s - loss: 0.1151 - acc: 0.9644 - val_loss: 0.0727 - val_acc: 0.9795
Epoch 30/100
50000/50000 [==============================] - 139s - loss: 0.1139 - acc: 0.9654 - val_loss: 0.0710 - val_acc: 0.9801
Epoch 31/100
50000/50000 [==============================] - 140s - loss: 0.1116 - acc: 0.9656 - val_loss: 0.0699 - val_acc: 0.9804
Epoch 32/100
50000/50000 [==============================] - 144s - loss: 0.1079 - acc: 0.9669 - val_loss: 0.0685 - val_acc: 0.9807
Epoch 33/100
50000/50000 [==============================] - 141s - loss: 0.1086 - acc: 0.9664 - val_loss: 0.0682 - val_acc: 0.9814
Epoch 34/100
50000/50000 [==============================] - 138s - loss: 0.1043 - acc: 0.9676 - val_loss: 0.0666 - val_acc: 0.9810
Epoch 35/100
50000/50000 [==============================] - 137s - loss: 0.1035 - acc: 0.9683 - val_loss: 0.0655 - val_acc: 0.9815
Epoch 36/100
50000/50000 [==============================] - 139s - loss: 0.0979 - acc: 0.9699 - val_loss: 0.0650 - val_acc: 0.9815
Epoch 37/100
50000/50000 [==============================] - 141s - loss: 0.0968 - acc: 0.9702 - val_loss: 0.0631 - val_acc: 0.9823
Epoch 38/100
50000/50000 [==============================] - 139s - loss: 0.0993 - acc: 0.9699 - val_loss: 0.0636 - val_acc: 0.9820
Epoch 39/100
50000/50000 [==============================] - 141s - loss: 0.0959 - acc: 0.9704 - val_loss: 0.0621 - val_acc: 0.9826
Epoch 40/100
50000/50000 [==============================] - 139s - loss: 0.0937 - acc: 0.9710 - val_loss: 0.0617 - val_acc: 0.9830
Epoch 41/100
50000/50000 [==============================] - 143s - loss: 0.0910 - acc: 0.9719 - val_loss: 0.0602 - val_acc: 0.9831
Epoch 42/100
50000/50000 [==============================] - 142s - loss: 0.0897 - acc: 0.9724 - val_loss: 0.0603 - val_acc: 0.9829
Epoch 43/100
50000/50000 [==============================] - 140s - loss: 0.0907 - acc: 0.9715 - val_loss: 0.0592 - val_acc: 0.9832
Epoch 44/100
50000/50000 [==============================] - 142s - loss: 0.0891 - acc: 0.9720 - val_loss: 0.0582 - val_acc: 0.9838
Epoch 45/100
50000/50000 [==============================] - 142s - loss: 0.0869 - acc: 0.9729 - val_loss: 0.0572 - val_acc: 0.9835
Epoch 46/100
50000/50000 [==============================] - 143s - loss: 0.0857 - acc: 0.9737 - val_loss: 0.0570 - val_acc: 0.9838
Epoch 47/100
50000/50000 [==============================] - 141s - loss: 0.0832 - acc: 0.9735 - val_loss: 0.0568 - val_acc: 0.9842
Epoch 48/100
50000/50000 [==============================] - 140s - loss: 0.0844 - acc: 0.9745 - val_loss: 0.0562 - val_acc: 0.9841
Epoch 49/100
50000/50000 [==============================] - 139s - loss: 0.0837 - acc: 0.9738 - val_loss: 0.0553 - val_acc: 0.9838
Epoch 50/100
50000/50000 [==============================] - 146s - loss: 0.0805 - acc: 0.9752 - val_loss: 0.0554 - val_acc: 0.9838
Epoch 51/100
50000/50000 [==============================] - 139s - loss: 0.0813 - acc: 0.9751 - val_loss: 0.0544 - val_acc: 0.9845
Epoch 52/100
50000/50000 [==============================] - 140s - loss: 0.0792 - acc: 0.9752 - val_loss: 0.0540 - val_acc: 0.9845
Epoch 53/100
50000/50000 [==============================] - 120s - loss: 0.0795 - acc: 0.9760 - val_loss: 0.0534 - val_acc: 0.9848
Epoch 54/100
50000/50000 [==============================] - 101s - loss: 0.0797 - acc: 0.9762 - val_loss: 0.0527 - val_acc: 0.9855
Epoch 55/100
50000/50000 [==============================] - 103s - loss: 0.0767 - acc: 0.9764 - val_loss: 0.0521 - val_acc: 0.9855
Epoch 56/100
50000/50000 [==============================] - 101s - loss: 0.0763 - acc: 0.9761 - val_loss: 0.0525 - val_acc: 0.9851
Epoch 57/100
50000/50000 [==============================] - 102s - loss: 0.0752 - acc: 0.9762 - val_loss: 0.0517 - val_acc: 0.9849
Epoch 58/100
50000/50000 [==============================] - 103s - loss: 0.0748 - acc: 0.9769 - val_loss: 0.0514 - val_acc: 0.9848
Epoch 59/100
50000/50000 [==============================] - 102s - loss: 0.0727 - acc: 0.9777 - val_loss: 0.0509 - val_acc: 0.9853
Epoch 60/100
50000/50000 [==============================] - 100s - loss: 0.0722 - acc: 0.9774 - val_loss: 0.0511 - val_acc: 0.9853
Epoch 61/100
50000/50000 [==============================] - 100s - loss: 0.0722 - acc: 0.9779 - val_loss: 0.0504 - val_acc: 0.9853
Epoch 62/100
50000/50000 [==============================] - 101s - loss: 0.0708 - acc: 0.9777 - val_loss: 0.0494 - val_acc: 0.9856
Epoch 63/100
50000/50000 [==============================] - 98s - loss: 0.0707 - acc: 0.9776 - val_loss: 0.0490 - val_acc: 0.9855
Epoch 64/100
50000/50000 [==============================] - 101s - loss: 0.0688 - acc: 0.9790 - val_loss: 0.0493 - val_acc: 0.9855
Epoch 65/100
50000/50000 [==============================] - 100s - loss: 0.0701 - acc: 0.9782 - val_loss: 0.0487 - val_acc: 0.9856
Epoch 66/100
50000/50000 [==============================] - 100s - loss: 0.0683 - acc: 0.9785 - val_loss: 0.0483 - val_acc: 0.9856
Epoch 67/100
50000/50000 [==============================] - 101s - loss: 0.0660 - acc: 0.9790 - val_loss: 0.0479 - val_acc: 0.9861
Epoch 68/100
50000/50000 [==============================] - 99s - loss: 0.0671 - acc: 0.9793 - val_loss: 0.0477 - val_acc: 0.9857
Epoch 69/100
50000/50000 [==============================] - 100s - loss: 0.0650 - acc: 0.9795 - val_loss: 0.0470 - val_acc: 0.9861
Epoch 70/100
50000/50000 [==============================] - 101s - loss: 0.0657 - acc: 0.9801 - val_loss: 0.0472 - val_acc: 0.9859
Epoch 71/100
50000/50000 [==============================] - 100s - loss: 0.0660 - acc: 0.9790 - val_loss: 0.0468 - val_acc: 0.9857
Epoch 72/100
50000/50000 [==============================] - 100s - loss: 0.0661 - acc: 0.9793 - val_loss: 0.0469 - val_acc: 0.9865
Epoch 73/100
50000/50000 [==============================] - 101s - loss: 0.0634 - acc: 0.9807 - val_loss: 0.0459 - val_acc: 0.9864
Epoch 74/100
50000/50000 [==============================] - 101s - loss: 0.0625 - acc: 0.9808 - val_loss: 0.0460 - val_acc: 0.9862
Epoch 75/100
50000/50000 [==============================] - 99s - loss: 0.0609 - acc: 0.9802 - val_loss: 0.0457 - val_acc: 0.9862
Epoch 76/100
50000/50000 [==============================] - 100s - loss: 0.0624 - acc: 0.9811 - val_loss: 0.0459 - val_acc: 0.9871
Epoch 77/100
50000/50000 [==============================] - 99s - loss: 0.0619 - acc: 0.9801 - val_loss: 0.0455 - val_acc: 0.9864
Epoch 78/100
50000/50000 [==============================] - 99s - loss: 0.0610 - acc: 0.9809 - val_loss: 0.0448 - val_acc: 0.9867
Epoch 79/100
50000/50000 [==============================] - 101s - loss: 0.0605 - acc: 0.9806 - val_loss: 0.0451 - val_acc: 0.9866
Epoch 80/100
50000/50000 [==============================] - 100s - loss: 0.0603 - acc: 0.9809 - val_loss: 0.0445 - val_acc: 0.9866
Epoch 81/100
50000/50000 [==============================] - 100s - loss: 0.0596 - acc: 0.9808 - val_loss: 0.0447 - val_acc: 0.9867
Epoch 82/100
50000/50000 [==============================] - 100s - loss: 0.0592 - acc: 0.9812 - val_loss: 0.0446 - val_acc: 0.9871
Epoch 83/100
50000/50000 [==============================] - 100s - loss: 0.0570 - acc: 0.9818 - val_loss: 0.0442 - val_acc: 0.9869
Epoch 84/100
50000/50000 [==============================] - 101s - loss: 0.0582 - acc: 0.9818 - val_loss: 0.0441 - val_acc: 0.9871
Epoch 85/100
50000/50000 [==============================] - 100s - loss: 0.0566 - acc: 0.9824 - val_loss: 0.0441 - val_acc: 0.9873
Epoch 86/100
50000/50000 [==============================] - 101s - loss: 0.0579 - acc: 0.9820 - val_loss: 0.0430 - val_acc: 0.9868
Epoch 87/100
50000/50000 [==============================] - 100s - loss: 0.0565 - acc: 0.9822 - val_loss: 0.0434 - val_acc: 0.9875
Epoch 88/100
50000/50000 [==============================] - 104s - loss: 0.0562 - acc: 0.9831 - val_loss: 0.0433 - val_acc: 0.9872
Epoch 89/100
50000/50000 [==============================] - 100s - loss: 0.0558 - acc: 0.9822 - val_loss: 0.0437 - val_acc: 0.9875
Epoch 90/100
50000/50000 [==============================] - 101s - loss: 0.0541 - acc: 0.9829 - val_loss: 0.0425 - val_acc: 0.9878
Epoch 91/100
50000/50000 [==============================] - 100s - loss: 0.0548 - acc: 0.9827 - val_loss: 0.0424 - val_acc: 0.9878
Epoch 92/100
50000/50000 [==============================] - 99s - loss: 0.0523 - acc: 0.9834 - val_loss: 0.0425 - val_acc: 0.9879
Epoch 93/100
50000/50000 [==============================] - 98s - loss: 0.0532 - acc: 0.9828 - val_loss: 0.0419 - val_acc: 0.9880
Epoch 94/100
50000/50000 [==============================] - 100s - loss: 0.0539 - acc: 0.9828 - val_loss: 0.0422 - val_acc: 0.9872
Epoch 95/100
50000/50000 [==============================] - 104s - loss: 0.0532 - acc: 0.9828 - val_loss: 0.0419 - val_acc: 0.9877
Epoch 96/100
50000/50000 [==============================] - 113s - loss: 0.0510 - acc: 0.9836 - val_loss: 0.0426 - val_acc: 0.9880
Epoch 97/100
50000/50000 [==============================] - 110s - loss: 0.0541 - acc: 0.9833 - val_loss: 0.0417 - val_acc: 0.9879
Epoch 98/100
50000/50000 [==============================] - 103s - loss: 0.0526 - acc: 0.9834 - val_loss: 0.0415 - val_acc: 0.9882
Epoch 99/100
50000/50000 [==============================] - 101s - loss: 0.0507 - acc: 0.9839 - val_loss: 0.0414 - val_acc: 0.9883
Epoch 100/100
50000/50000 [==============================] - 99s - loss: 0.0496 - acc: 0.9846 - val_loss: 0.0413 - val_acc: 0.9882
9952/10000 [============================>.] - ETA: 0s[0.04126097159641795, 0.98819999999999997]
In [6]:
plt.plot(range(1,epochs+1), history.acc)
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.title('CNN with one convolutional layer of 32 nodes ')
plt.show()
In [7]:
print(cnn.model.summary())
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_1 (Conv2D) (None, 24, 24, 32) 832
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 12, 12, 32) 0
_________________________________________________________________
dropout_1 (Dropout) (None, 12, 12, 32) 0
_________________________________________________________________
flatten_1 (Flatten) (None, 4608) 0
_________________________________________________________________
dense_1 (Dense) (None, 1000) 4609000
_________________________________________________________________
dropout_2 (Dropout) (None, 1000) 0
_________________________________________________________________
dense_2 (Dense) (None, 10) 10010
=================================================================
Total params: 4,619,842
Trainable params: 4,619,842
Non-trainable params: 0
_________________________________________________________________
None
In [17]:
activation_results = []
for act_fcn in [ 'tanh', 'sigmoid', 'softmax', 'linear', 'relu' ]:
history = AccuracyHistory()
epochs = 50
history = AccuracyHistory()
cnn = CNN(data.train_x[:limit], data.train_y[:limit],\
data.test_x, data.test_y, history, epochs = epochs, batch_size=128, activation=act_fcn)
cnn.train()
acc = cnn.evaluate()
print(acc)
activation_results.append( (act_fcn, acc, history))
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/50
50000/50000 [==============================] - 144s - loss: 1.0266 - acc: 0.6983 - val_loss: 0.3987 - val_acc: 0.8914
Epoch 2/50
50000/50000 [==============================] - 122s - loss: 0.4672 - acc: 0.8610 - val_loss: 0.3164 - val_acc: 0.9073
Epoch 3/50
50000/50000 [==============================] - 124s - loss: 0.3982 - acc: 0.8800 - val_loss: 0.2866 - val_acc: 0.9163
Epoch 4/50
50000/50000 [==============================] - 150s - loss: 0.3629 - acc: 0.8903 - val_loss: 0.2661 - val_acc: 0.9216
Epoch 5/50
50000/50000 [==============================] - 135s - loss: 0.3367 - acc: 0.8980 - val_loss: 0.2496 - val_acc: 0.9287
Epoch 6/50
50000/50000 [==============================] - 140s - loss: 0.3191 - acc: 0.9043 - val_loss: 0.2348 - val_acc: 0.9338
Epoch 7/50
50000/50000 [==============================] - 149s - loss: 0.3006 - acc: 0.9103 - val_loss: 0.2224 - val_acc: 0.9359
Epoch 8/50
50000/50000 [==============================] - 147s - loss: 0.2846 - acc: 0.9161 - val_loss: 0.2095 - val_acc: 0.9418
Epoch 9/50
50000/50000 [==============================] - 133s - loss: 0.2704 - acc: 0.9198 - val_loss: 0.1976 - val_acc: 0.9438
Epoch 10/50
50000/50000 [==============================] - 133s - loss: 0.2556 - acc: 0.9234 - val_loss: 0.1871 - val_acc: 0.9480
Epoch 11/50
50000/50000 [==============================] - 128s - loss: 0.2447 - acc: 0.9265 - val_loss: 0.1775 - val_acc: 0.9507
Epoch 12/50
50000/50000 [==============================] - 122s - loss: 0.2341 - acc: 0.9306 - val_loss: 0.1702 - val_acc: 0.9534
Epoch 13/50
50000/50000 [==============================] - 142s - loss: 0.2237 - acc: 0.9320 - val_loss: 0.1599 - val_acc: 0.9563
Epoch 14/50
50000/50000 [==============================] - 135s - loss: 0.2154 - acc: 0.9354 - val_loss: 0.1532 - val_acc: 0.9593
Epoch 15/50
50000/50000 [==============================] - 130s - loss: 0.2073 - acc: 0.9384 - val_loss: 0.1463 - val_acc: 0.9614
Epoch 16/50
50000/50000 [==============================] - 125s - loss: 0.1995 - acc: 0.9406 - val_loss: 0.1404 - val_acc: 0.9651
Epoch 17/50
50000/50000 [==============================] - 128s - loss: 0.1928 - acc: 0.9410 - val_loss: 0.1334 - val_acc: 0.9653
Epoch 18/50
50000/50000 [==============================] - 130s - loss: 0.1838 - acc: 0.9455 - val_loss: 0.1292 - val_acc: 0.9675
Epoch 19/50
50000/50000 [==============================] - 121s - loss: 0.1799 - acc: 0.9460 - val_loss: 0.1236 - val_acc: 0.9682
Epoch 20/50
50000/50000 [==============================] - 125s - loss: 0.1737 - acc: 0.9478 - val_loss: 0.1191 - val_acc: 0.9698
Epoch 21/50
50000/50000 [==============================] - 145s - loss: 0.1665 - acc: 0.9496 - val_loss: 0.1157 - val_acc: 0.9700
Epoch 22/50
50000/50000 [==============================] - 130s - loss: 0.1656 - acc: 0.9499 - val_loss: 0.1118 - val_acc: 0.9707
Epoch 23/50
50000/50000 [==============================] - 197s - loss: 0.1594 - acc: 0.9513 - val_loss: 0.1077 - val_acc: 0.9723
Epoch 24/50
50000/50000 [==============================] - 915s - loss: 0.1559 - acc: 0.9531 - val_loss: 0.1049 - val_acc: 0.9735
Epoch 25/50
50000/50000 [==============================] - 157s - loss: 0.1525 - acc: 0.9537 - val_loss: 0.1022 - val_acc: 0.9741
Epoch 26/50
50000/50000 [==============================] - 118s - loss: 0.1482 - acc: 0.9542 - val_loss: 0.0996 - val_acc: 0.9732
Epoch 27/50
50000/50000 [==============================] - 116s - loss: 0.1434 - acc: 0.9563 - val_loss: 0.0970 - val_acc: 0.9738
Epoch 28/50
50000/50000 [==============================] - 134s - loss: 0.1422 - acc: 0.9567 - val_loss: 0.0964 - val_acc: 0.9751
Epoch 29/50
50000/50000 [==============================] - 114s - loss: 0.1384 - acc: 0.9578 - val_loss: 0.0927 - val_acc: 0.9758
Epoch 30/50
50000/50000 [==============================] - 107s - loss: 0.1364 - acc: 0.9578 - val_loss: 0.0915 - val_acc: 0.9763
Epoch 31/50
50000/50000 [==============================] - 104s - loss: 0.1345 - acc: 0.9591 - val_loss: 0.0885 - val_acc: 0.9767
Epoch 32/50
50000/50000 [==============================] - 105s - loss: 0.1302 - acc: 0.9602 - val_loss: 0.0869 - val_acc: 0.9771
Epoch 33/50
50000/50000 [==============================] - 114s - loss: 0.1308 - acc: 0.9601 - val_loss: 0.0857 - val_acc: 0.9772
Epoch 34/50
50000/50000 [==============================] - 112s - loss: 0.1250 - acc: 0.9622 - val_loss: 0.0840 - val_acc: 0.9778
Epoch 35/50
50000/50000 [==============================] - 112s - loss: 0.1257 - acc: 0.9611 - val_loss: 0.0823 - val_acc: 0.9784
Epoch 36/50
50000/50000 [==============================] - 115s - loss: 0.1213 - acc: 0.9628 - val_loss: 0.0812 - val_acc: 0.9783
Epoch 37/50
50000/50000 [==============================] - 199s - loss: 0.1214 - acc: 0.9628 - val_loss: 0.0799 - val_acc: 0.9791
Epoch 38/50
50000/50000 [==============================] - 110s - loss: 0.1172 - acc: 0.9644 - val_loss: 0.0791 - val_acc: 0.9789
Epoch 39/50
50000/50000 [==============================] - 107s - loss: 0.1176 - acc: 0.9639 - val_loss: 0.0776 - val_acc: 0.9788
Epoch 40/50
50000/50000 [==============================] - 110s - loss: 0.1179 - acc: 0.9647 - val_loss: 0.0764 - val_acc: 0.9800
Epoch 41/50
50000/50000 [==============================] - 110s - loss: 0.1142 - acc: 0.9644 - val_loss: 0.0768 - val_acc: 0.9795
Epoch 42/50
50000/50000 [==============================] - 105s - loss: 0.1123 - acc: 0.9652 - val_loss: 0.0741 - val_acc: 0.9800
Epoch 43/50
50000/50000 [==============================] - 104s - loss: 0.1113 - acc: 0.9655 - val_loss: 0.0733 - val_acc: 0.9802
Epoch 44/50
50000/50000 [==============================] - 104s - loss: 0.1096 - acc: 0.9656 - val_loss: 0.0733 - val_acc: 0.9797
Epoch 45/50
50000/50000 [==============================] - 104s - loss: 0.1092 - acc: 0.9664 - val_loss: 0.0722 - val_acc: 0.9800
Epoch 46/50
50000/50000 [==============================] - 116s - loss: 0.1060 - acc: 0.9672 - val_loss: 0.0723 - val_acc: 0.9804
Epoch 47/50
50000/50000 [==============================] - 112s - loss: 0.1042 - acc: 0.9676 - val_loss: 0.0723 - val_acc: 0.9805
Epoch 48/50
50000/50000 [==============================] - 111s - loss: 0.1067 - acc: 0.9673 - val_loss: 0.0704 - val_acc: 0.9810
Epoch 49/50
50000/50000 [==============================] - 112s - loss: 0.1046 - acc: 0.9675 - val_loss: 0.0691 - val_acc: 0.9812
Epoch 50/50
50000/50000 [==============================] - 112s - loss: 0.1033 - acc: 0.9676 - val_loss: 0.0691 - val_acc: 0.9815
9984/10000 [============================>.] - ETA: 0s[0.069144260631129148, 0.98150000000000004]
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/50
50000/50000 [==============================] - 108s - loss: 2.4850 - acc: 0.1010 - val_loss: 2.2899 - val_acc: 0.1373
Epoch 2/50
50000/50000 [==============================] - 107s - loss: 2.3725 - acc: 0.1110 - val_loss: 2.2704 - val_acc: 0.3268
Epoch 3/50
50000/50000 [==============================] - 106s - loss: 2.3182 - acc: 0.1256 - val_loss: 2.2468 - val_acc: 0.1611
Epoch 4/50
50000/50000 [==============================] - 105s - loss: 2.2768 - acc: 0.1485 - val_loss: 2.2052 - val_acc: 0.3560
Epoch 5/50
50000/50000 [==============================] - 112s - loss: 2.2216 - acc: 0.1911 - val_loss: 2.1324 - val_acc: 0.4783
Epoch 6/50
50000/50000 [==============================] - 109s - loss: 2.1323 - acc: 0.2545 - val_loss: 1.9869 - val_acc: 0.6267
Epoch 7/50
50000/50000 [==============================] - 113s - loss: 1.9727 - acc: 0.3424 - val_loss: 1.7347 - val_acc: 0.6713
Epoch 8/50
50000/50000 [==============================] - 102s - loss: 1.7421 - acc: 0.4383 - val_loss: 1.4128 - val_acc: 0.7354
Epoch 9/50
50000/50000 [==============================] - 102s - loss: 1.4853 - acc: 0.5274 - val_loss: 1.1159 - val_acc: 0.7829
Epoch 10/50
50000/50000 [==============================] - 103s - loss: 1.2646 - acc: 0.5979 - val_loss: 0.9066 - val_acc: 0.7783
Epoch 11/50
50000/50000 [==============================] - 103s - loss: 1.0967 - acc: 0.6494 - val_loss: 0.7627 - val_acc: 0.8197
Epoch 12/50
50000/50000 [==============================] - 103s - loss: 0.9689 - acc: 0.6864 - val_loss: 0.6645 - val_acc: 0.8387
Epoch 13/50
50000/50000 [==============================] - 103s - loss: 0.8829 - acc: 0.7172 - val_loss: 0.5970 - val_acc: 0.8484
Epoch 14/50
50000/50000 [==============================] - 104s - loss: 0.8174 - acc: 0.7358 - val_loss: 0.5460 - val_acc: 0.8602
Epoch 15/50
50000/50000 [==============================] - 109s - loss: 0.7605 - acc: 0.7566 - val_loss: 0.5104 - val_acc: 0.8642
Epoch 16/50
50000/50000 [==============================] - 109s - loss: 0.7182 - acc: 0.7685 - val_loss: 0.4831 - val_acc: 0.8705
Epoch 17/50
50000/50000 [==============================] - 111s - loss: 0.6848 - acc: 0.7785 - val_loss: 0.4584 - val_acc: 0.8757
Epoch 18/50
50000/50000 [==============================] - 111s - loss: 0.6566 - acc: 0.7912 - val_loss: 0.4395 - val_acc: 0.8795
Epoch 19/50
50000/50000 [==============================] - 112s - loss: 0.6378 - acc: 0.7953 - val_loss: 0.4233 - val_acc: 0.8840
Epoch 20/50
50000/50000 [==============================] - 114s - loss: 0.6104 - acc: 0.8052 - val_loss: 0.4120 - val_acc: 0.8851
Epoch 21/50
50000/50000 [==============================] - 111s - loss: 0.5953 - acc: 0.8094 - val_loss: 0.3972 - val_acc: 0.8896
Epoch 22/50
50000/50000 [==============================] - 106s - loss: 0.5838 - acc: 0.8145 - val_loss: 0.3885 - val_acc: 0.8912
Epoch 23/50
50000/50000 [==============================] - 109s - loss: 0.5627 - acc: 0.8216 - val_loss: 0.3798 - val_acc: 0.8920
Epoch 24/50
50000/50000 [==============================] - 111s - loss: 0.5511 - acc: 0.8270 - val_loss: 0.3726 - val_acc: 0.8962
Epoch 25/50
50000/50000 [==============================] - 110s - loss: 0.5416 - acc: 0.8286 - val_loss: 0.3642 - val_acc: 0.8976
Epoch 26/50
50000/50000 [==============================] - 105s - loss: 0.5324 - acc: 0.8323 - val_loss: 0.3590 - val_acc: 0.8987
Epoch 27/50
50000/50000 [==============================] - 117s - loss: 0.5223 - acc: 0.8357 - val_loss: 0.3528 - val_acc: 0.8972
Epoch 28/50
50000/50000 [==============================] - 115s - loss: 0.5148 - acc: 0.8378 - val_loss: 0.3480 - val_acc: 0.9006
Epoch 29/50
50000/50000 [==============================] - 109s - loss: 0.5033 - acc: 0.8424 - val_loss: 0.3434 - val_acc: 0.9014
Epoch 30/50
50000/50000 [==============================] - 100s - loss: 0.4995 - acc: 0.8423 - val_loss: 0.3379 - val_acc: 0.9004
Epoch 31/50
50000/50000 [==============================] - 114s - loss: 0.4898 - acc: 0.8472 - val_loss: 0.3346 - val_acc: 0.9027
Epoch 32/50
50000/50000 [==============================] - 113s - loss: 0.4864 - acc: 0.8465 - val_loss: 0.3337 - val_acc: 0.9028
Epoch 33/50
50000/50000 [==============================] - 897s - loss: 0.4757 - acc: 0.8515 - val_loss: 0.3258 - val_acc: 0.9048
Epoch 34/50
50000/50000 [==============================] - 113s - loss: 0.4735 - acc: 0.8523 - val_loss: 0.3240 - val_acc: 0.9048
Epoch 35/50
50000/50000 [==============================] - 96s - loss: 0.4634 - acc: 0.8564 - val_loss: 0.3191 - val_acc: 0.9076
Epoch 36/50
50000/50000 [==============================] - 105s - loss: 0.4631 - acc: 0.8553 - val_loss: 0.3150 - val_acc: 0.9080
Epoch 37/50
50000/50000 [==============================] - 118s - loss: 0.4548 - acc: 0.8595 - val_loss: 0.3124 - val_acc: 0.9084
Epoch 38/50
50000/50000 [==============================] - 117s - loss: 0.4489 - acc: 0.8607 - val_loss: 0.3095 - val_acc: 0.9104
Epoch 39/50
50000/50000 [==============================] - 100s - loss: 0.4434 - acc: 0.8629 - val_loss: 0.3069 - val_acc: 0.9106
Epoch 40/50
50000/50000 [==============================] - 110s - loss: 0.4435 - acc: 0.8624 - val_loss: 0.3047 - val_acc: 0.9118
Epoch 41/50
50000/50000 [==============================] - 125s - loss: 0.4347 - acc: 0.8651 - val_loss: 0.3009 - val_acc: 0.9119
Epoch 42/50
50000/50000 [==============================] - 133s - loss: 0.4320 - acc: 0.8659 - val_loss: 0.2980 - val_acc: 0.9127
Epoch 43/50
50000/50000 [==============================] - 135s - loss: 0.4281 - acc: 0.8676 - val_loss: 0.2963 - val_acc: 0.9136
Epoch 44/50
50000/50000 [==============================] - 119s - loss: 0.4213 - acc: 0.8692 - val_loss: 0.2937 - val_acc: 0.9145
Epoch 45/50
50000/50000 [==============================] - 117s - loss: 0.4191 - acc: 0.8698 - val_loss: 0.2923 - val_acc: 0.9143
Epoch 46/50
50000/50000 [==============================] - 124s - loss: 0.4172 - acc: 0.8708 - val_loss: 0.2884 - val_acc: 0.9152
Epoch 47/50
50000/50000 [==============================] - 133s - loss: 0.4119 - acc: 0.8725 - val_loss: 0.2859 - val_acc: 0.9159
Epoch 48/50
50000/50000 [==============================] - 788s - loss: 0.4047 - acc: 0.8739 - val_loss: 0.2847 - val_acc: 0.9156
Epoch 49/50
50000/50000 [==============================] - 135s - loss: 0.4039 - acc: 0.8761 - val_loss: 0.2819 - val_acc: 0.9175
Epoch 50/50
50000/50000 [==============================] - 107s - loss: 0.4014 - acc: 0.8746 - val_loss: 0.2804 - val_acc: 0.9179
9952/10000 [============================>.] - ETA: 0s[0.28042341040372848, 0.91790000000000005]
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/50
50000/50000 [==============================] - 360s - loss: 2.3021 - acc: 0.1129 - val_loss: 2.3021 - val_acc: 0.1064
Epoch 2/50
50000/50000 [==============================] - 317s - loss: 2.3015 - acc: 0.1136 - val_loss: 2.3019 - val_acc: 0.1064
Epoch 3/50
50000/50000 [==============================] - 343s - loss: 2.3013 - acc: 0.1136 - val_loss: 2.3019 - val_acc: 0.1064
Epoch 4/50
50000/50000 [==============================] - 323s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3019 - val_acc: 0.1064
Epoch 5/50
50000/50000 [==============================] - 290s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3019 - val_acc: 0.1064
Epoch 6/50
50000/50000 [==============================] - 304s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3019 - val_acc: 0.1064
Epoch 7/50
50000/50000 [==============================] - 302s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3019 - val_acc: 0.1064
Epoch 8/50
50000/50000 [==============================] - 1110s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 9/50
50000/50000 [==============================] - 656s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 10/50
50000/50000 [==============================] - 607s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 11/50
50000/50000 [==============================] - 320s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 12/50
50000/50000 [==============================] - 299s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 13/50
50000/50000 [==============================] - 285s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 14/50
50000/50000 [==============================] - 301s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 15/50
50000/50000 [==============================] - 311s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 16/50
50000/50000 [==============================] - 290s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 17/50
50000/50000 [==============================] - 299s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 18/50
50000/50000 [==============================] - 295s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 19/50
50000/50000 [==============================] - 307s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 20/50
50000/50000 [==============================] - 327s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 21/50
50000/50000 [==============================] - 1051s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 22/50
50000/50000 [==============================] - 2924s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 23/50
50000/50000 [==============================] - 351s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 24/50
50000/50000 [==============================] - 326s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 25/50
50000/50000 [==============================] - 315s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 26/50
50000/50000 [==============================] - 304s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 27/50
50000/50000 [==============================] - 288s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 28/50
50000/50000 [==============================] - 266s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 29/50
50000/50000 [==============================] - 298s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 30/50
50000/50000 [==============================] - 305s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 31/50
50000/50000 [==============================] - 322s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 32/50
50000/50000 [==============================] - 323s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 33/50
50000/50000 [==============================] - 315s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 34/50
50000/50000 [==============================] - 337s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 35/50
50000/50000 [==============================] - 392s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 36/50
50000/50000 [==============================] - 368s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 37/50
50000/50000 [==============================] - 355s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 38/50
50000/50000 [==============================] - 352s - loss: 2.3011 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 39/50
50000/50000 [==============================] - 322s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 40/50
50000/50000 [==============================] - 312s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 41/50
50000/50000 [==============================] - 315s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 42/50
50000/50000 [==============================] - 329s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 43/50
50000/50000 [==============================] - 318s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 44/50
50000/50000 [==============================] - 322s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 45/50
50000/50000 [==============================] - 320s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 46/50
50000/50000 [==============================] - 362s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 47/50
50000/50000 [==============================] - 313s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 48/50
50000/50000 [==============================] - 321s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 49/50
50000/50000 [==============================] - 350s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
Epoch 50/50
50000/50000 [==============================] - 369s - loss: 2.3010 - acc: 0.1136 - val_loss: 2.3020 - val_acc: 0.1064
9984/10000 [============================>.] - ETA: 0s[2.3019887622833251, 0.10639999999999999]
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/50
50000/50000 [==============================] - 115s - loss: 0.9172 - acc: 0.7179 - val_loss: 0.3614 - val_acc: 0.9005
Epoch 2/50
50000/50000 [==============================] - 123s - loss: 0.4496 - acc: 0.8642 - val_loss: 0.3033 - val_acc: 0.9141
Epoch 3/50
50000/50000 [==============================] - 107s - loss: 0.3844 - acc: 0.8837 - val_loss: 0.2762 - val_acc: 0.9221
Epoch 4/50
50000/50000 [==============================] - 100s - loss: 0.3523 - acc: 0.8943 - val_loss: 0.2547 - val_acc: 0.9278
Epoch 5/50
50000/50000 [==============================] - 107s - loss: 0.3240 - acc: 0.9034 - val_loss: 0.2387 - val_acc: 0.9330
Epoch 6/50
50000/50000 [==============================] - 107s - loss: 0.2998 - acc: 0.9117 - val_loss: 0.2208 - val_acc: 0.9394
Epoch 7/50
50000/50000 [==============================] - 105s - loss: 0.2811 - acc: 0.9165 - val_loss: 0.2074 - val_acc: 0.9426
Epoch 8/50
50000/50000 [==============================] - 110s - loss: 0.2630 - acc: 0.9218 - val_loss: 0.1935 - val_acc: 0.9479
Epoch 9/50
50000/50000 [==============================] - 108s - loss: 0.2514 - acc: 0.9253 - val_loss: 0.1821 - val_acc: 0.9505
Epoch 10/50
50000/50000 [==============================] - 104s - loss: 0.2372 - acc: 0.9296 - val_loss: 0.1727 - val_acc: 0.9536
Epoch 11/50
50000/50000 [==============================] - 105s - loss: 0.2262 - acc: 0.9329 - val_loss: 0.1642 - val_acc: 0.9567
Epoch 12/50
50000/50000 [==============================] - 126s - loss: 0.2162 - acc: 0.9361 - val_loss: 0.1557 - val_acc: 0.9594
Epoch 13/50
50000/50000 [==============================] - 108s - loss: 0.2059 - acc: 0.9386 - val_loss: 0.1468 - val_acc: 0.9632
Epoch 14/50
50000/50000 [==============================] - 102s - loss: 0.1956 - acc: 0.9417 - val_loss: 0.1402 - val_acc: 0.9639
Epoch 15/50
50000/50000 [==============================] - 116s - loss: 0.1905 - acc: 0.9431 - val_loss: 0.1350 - val_acc: 0.9653
Epoch 16/50
50000/50000 [==============================] - 120s - loss: 0.1845 - acc: 0.9449 - val_loss: 0.1298 - val_acc: 0.9671
Epoch 17/50
50000/50000 [==============================] - 114s - loss: 0.1781 - acc: 0.9468 - val_loss: 0.1237 - val_acc: 0.9680
Epoch 18/50
50000/50000 [==============================] - 108s - loss: 0.1733 - acc: 0.9470 - val_loss: 0.1189 - val_acc: 0.9702
Epoch 19/50
50000/50000 [==============================] - 105s - loss: 0.1679 - acc: 0.9496 - val_loss: 0.1165 - val_acc: 0.9703
Epoch 20/50
50000/50000 [==============================] - 107s - loss: 0.1622 - acc: 0.9515 - val_loss: 0.1119 - val_acc: 0.9722
Epoch 21/50
50000/50000 [==============================] - 116s - loss: 0.1569 - acc: 0.9519 - val_loss: 0.1087 - val_acc: 0.9721
Epoch 22/50
50000/50000 [==============================] - 113s - loss: 0.1546 - acc: 0.9539 - val_loss: 0.1053 - val_acc: 0.9734
Epoch 23/50
50000/50000 [==============================] - 104s - loss: 0.1510 - acc: 0.9541 - val_loss: 0.1027 - val_acc: 0.9737
Epoch 24/50
50000/50000 [==============================] - 102s - loss: 0.1464 - acc: 0.9560 - val_loss: 0.1002 - val_acc: 0.9745
Epoch 25/50
50000/50000 [==============================] - 102s - loss: 0.1426 - acc: 0.9570 - val_loss: 0.0980 - val_acc: 0.9751
Epoch 26/50
50000/50000 [==============================] - 105s - loss: 0.1379 - acc: 0.9580 - val_loss: 0.0961 - val_acc: 0.9762
Epoch 27/50
50000/50000 [==============================] - 110s - loss: 0.1370 - acc: 0.9581 - val_loss: 0.0936 - val_acc: 0.9765
Epoch 28/50
50000/50000 [==============================] - 103s - loss: 0.1339 - acc: 0.9598 - val_loss: 0.0913 - val_acc: 0.9770
Epoch 29/50
50000/50000 [==============================] - 102s - loss: 0.1302 - acc: 0.9595 - val_loss: 0.0902 - val_acc: 0.9770
Epoch 30/50
50000/50000 [==============================] - 106s - loss: 0.1284 - acc: 0.9603 - val_loss: 0.0890 - val_acc: 0.9774
Epoch 31/50
50000/50000 [==============================] - 105s - loss: 0.1266 - acc: 0.9606 - val_loss: 0.0870 - val_acc: 0.9774
Epoch 32/50
50000/50000 [==============================] - 107s - loss: 0.1256 - acc: 0.9613 - val_loss: 0.0851 - val_acc: 0.9786
Epoch 33/50
50000/50000 [==============================] - 107s - loss: 0.1232 - acc: 0.9618 - val_loss: 0.0850 - val_acc: 0.9783
Epoch 34/50
50000/50000 [==============================] - 106s - loss: 0.1222 - acc: 0.9626 - val_loss: 0.0829 - val_acc: 0.9786
Epoch 35/50
50000/50000 [==============================] - 99s - loss: 0.1192 - acc: 0.9633 - val_loss: 0.0823 - val_acc: 0.9791
Epoch 36/50
50000/50000 [==============================] - 95s - loss: 0.1188 - acc: 0.9634 - val_loss: 0.0805 - val_acc: 0.9801
Epoch 37/50
50000/50000 [==============================] - 91s - loss: 0.1171 - acc: 0.9638 - val_loss: 0.0790 - val_acc: 0.9799
Epoch 38/50
50000/50000 [==============================] - 92s - loss: 0.1156 - acc: 0.9643 - val_loss: 0.0795 - val_acc: 0.9796
Epoch 39/50
50000/50000 [==============================] - 92s - loss: 0.1133 - acc: 0.9650 - val_loss: 0.0771 - val_acc: 0.9806
Epoch 40/50
50000/50000 [==============================] - 92s - loss: 0.1128 - acc: 0.9653 - val_loss: 0.0769 - val_acc: 0.9804
Epoch 41/50
50000/50000 [==============================] - 91s - loss: 0.1107 - acc: 0.9648 - val_loss: 0.0767 - val_acc: 0.9802
Epoch 42/50
50000/50000 [==============================] - 94s - loss: 0.1106 - acc: 0.9663 - val_loss: 0.0754 - val_acc: 0.9806
Epoch 43/50
50000/50000 [==============================] - 94s - loss: 0.1095 - acc: 0.9666 - val_loss: 0.0753 - val_acc: 0.9808
Epoch 44/50
50000/50000 [==============================] - 94s - loss: 0.1067 - acc: 0.9676 - val_loss: 0.0746 - val_acc: 0.9799
Epoch 45/50
50000/50000 [==============================] - 91s - loss: 0.1052 - acc: 0.9674 - val_loss: 0.0729 - val_acc: 0.9815
Epoch 46/50
50000/50000 [==============================] - 91s - loss: 0.1056 - acc: 0.9672 - val_loss: 0.0737 - val_acc: 0.9808
Epoch 47/50
50000/50000 [==============================] - 91s - loss: 0.1043 - acc: 0.9673 - val_loss: 0.0729 - val_acc: 0.9805
Epoch 48/50
50000/50000 [==============================] - 92s - loss: 0.1036 - acc: 0.9676 - val_loss: 0.0723 - val_acc: 0.9812
Epoch 49/50
50000/50000 [==============================] - 93s - loss: 0.1030 - acc: 0.9679 - val_loss: 0.0718 - val_acc: 0.9812
Epoch 50/50
50000/50000 [==============================] - 97s - loss: 0.0993 - acc: 0.9695 - val_loss: 0.0739 - val_acc: 0.9797
9984/10000 [============================>.] - ETA: 0s[0.073935902985930446, 0.97970000000000002]
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/50
50000/50000 [==============================] - 103s - loss: 1.2449 - acc: 0.6062 - val_loss: 0.4019 - val_acc: 0.8877
Epoch 2/50
50000/50000 [==============================] - 104s - loss: 0.5551 - acc: 0.8263 - val_loss: 0.3002 - val_acc: 0.9167
Epoch 3/50
50000/50000 [==============================] - 122s - loss: 0.4497 - acc: 0.8613 - val_loss: 0.2583 - val_acc: 0.9285
Epoch 4/50
50000/50000 [==============================] - 193s - loss: 0.3858 - acc: 0.8830 - val_loss: 0.2286 - val_acc: 0.9375
Epoch 5/50
50000/50000 [==============================] - 136s - loss: 0.3435 - acc: 0.8959 - val_loss: 0.2106 - val_acc: 0.9411
Epoch 6/50
50000/50000 [==============================] - 119s - loss: 0.3076 - acc: 0.9076 - val_loss: 0.1855 - val_acc: 0.9499
Epoch 7/50
50000/50000 [==============================] - 114s - loss: 0.2839 - acc: 0.9139 - val_loss: 0.1714 - val_acc: 0.9529
Epoch 8/50
50000/50000 [==============================] - 113s - loss: 0.2605 - acc: 0.9210 - val_loss: 0.1582 - val_acc: 0.9572
Epoch 9/50
50000/50000 [==============================] - 117s - loss: 0.2421 - acc: 0.9269 - val_loss: 0.1474 - val_acc: 0.9595
Epoch 10/50
50000/50000 [==============================] - 123s - loss: 0.2233 - acc: 0.9330 - val_loss: 0.1379 - val_acc: 0.9622
Epoch 11/50
50000/50000 [==============================] - 115s - loss: 0.2116 - acc: 0.9374 - val_loss: 0.1274 - val_acc: 0.9658
Epoch 12/50
50000/50000 [==============================] - 115s - loss: 0.1987 - acc: 0.9410 - val_loss: 0.1208 - val_acc: 0.9673
Epoch 13/50
50000/50000 [==============================] - 114s - loss: 0.1880 - acc: 0.9429 - val_loss: 0.1141 - val_acc: 0.9690
Epoch 14/50
50000/50000 [==============================] - 112s - loss: 0.1797 - acc: 0.9458 - val_loss: 0.1083 - val_acc: 0.9708
Epoch 15/50
50000/50000 [==============================] - 118s - loss: 0.1719 - acc: 0.9472 - val_loss: 0.1041 - val_acc: 0.9710
Epoch 16/50
50000/50000 [==============================] - 115s - loss: 0.1625 - acc: 0.9501 - val_loss: 0.0995 - val_acc: 0.9726
Epoch 17/50
50000/50000 [==============================] - 114s - loss: 0.1578 - acc: 0.9518 - val_loss: 0.0959 - val_acc: 0.9734
Epoch 18/50
50000/50000 [==============================] - 107s - loss: 0.1534 - acc: 0.9544 - val_loss: 0.0918 - val_acc: 0.9752
Epoch 19/50
50000/50000 [==============================] - 102s - loss: 0.1478 - acc: 0.9553 - val_loss: 0.0889 - val_acc: 0.9758
Epoch 20/50
50000/50000 [==============================] - 120s - loss: 0.1450 - acc: 0.9553 - val_loss: 0.0872 - val_acc: 0.9762
Epoch 21/50
50000/50000 [==============================] - 131s - loss: 0.1370 - acc: 0.9590 - val_loss: 0.0830 - val_acc: 0.9784
Epoch 22/50
50000/50000 [==============================] - 131s - loss: 0.1331 - acc: 0.9594 - val_loss: 0.0819 - val_acc: 0.9778
Epoch 23/50
50000/50000 [==============================] - 127s - loss: 0.1299 - acc: 0.9608 - val_loss: 0.0787 - val_acc: 0.9791
Epoch 24/50
50000/50000 [==============================] - 126s - loss: 0.1261 - acc: 0.9615 - val_loss: 0.0782 - val_acc: 0.9792
Epoch 25/50
50000/50000 [==============================] - 113s - loss: 0.1227 - acc: 0.9623 - val_loss: 0.0757 - val_acc: 0.9802
Epoch 26/50
50000/50000 [==============================] - 122s - loss: 0.1196 - acc: 0.9639 - val_loss: 0.0734 - val_acc: 0.9807
Epoch 27/50
50000/50000 [==============================] - 111s - loss: 0.1171 - acc: 0.9643 - val_loss: 0.0723 - val_acc: 0.9803
Epoch 28/50
50000/50000 [==============================] - 115s - loss: 0.1145 - acc: 0.9654 - val_loss: 0.0714 - val_acc: 0.9803
Epoch 29/50
50000/50000 [==============================] - 114s - loss: 0.1121 - acc: 0.9659 - val_loss: 0.0692 - val_acc: 0.9814
Epoch 30/50
50000/50000 [==============================] - 135s - loss: 0.1095 - acc: 0.9668 - val_loss: 0.0694 - val_acc: 0.9809
Epoch 31/50
50000/50000 [==============================] - 115s - loss: 0.1074 - acc: 0.9664 - val_loss: 0.0674 - val_acc: 0.9815
Epoch 32/50
50000/50000 [==============================] - 109s - loss: 0.1039 - acc: 0.9678 - val_loss: 0.0662 - val_acc: 0.9814
Epoch 33/50
50000/50000 [==============================] - 107s - loss: 0.1026 - acc: 0.9681 - val_loss: 0.0655 - val_acc: 0.9814
Epoch 34/50
50000/50000 [==============================] - 140s - loss: 0.1022 - acc: 0.9689 - val_loss: 0.0641 - val_acc: 0.9817
Epoch 35/50
50000/50000 [==============================] - 146s - loss: 0.0968 - acc: 0.9701 - val_loss: 0.0636 - val_acc: 0.9815
Epoch 36/50
50000/50000 [==============================] - 120s - loss: 0.0981 - acc: 0.9698 - val_loss: 0.0623 - val_acc: 0.9819
Epoch 37/50
50000/50000 [==============================] - 104s - loss: 0.0948 - acc: 0.9708 - val_loss: 0.0615 - val_acc: 0.9824
Epoch 38/50
50000/50000 [==============================] - 104s - loss: 0.0949 - acc: 0.9703 - val_loss: 0.0601 - val_acc: 0.9830
Epoch 39/50
50000/50000 [==============================] - 104s - loss: 0.0935 - acc: 0.9712 - val_loss: 0.0594 - val_acc: 0.9830
Epoch 40/50
50000/50000 [==============================] - 109s - loss: 0.0916 - acc: 0.9713 - val_loss: 0.0586 - val_acc: 0.9836
Epoch 41/50
50000/50000 [==============================] - 119s - loss: 0.0908 - acc: 0.9721 - val_loss: 0.0593 - val_acc: 0.9829
Epoch 42/50
50000/50000 [==============================] - 171s - loss: 0.0892 - acc: 0.9722 - val_loss: 0.0576 - val_acc: 0.9832
Epoch 43/50
50000/50000 [==============================] - 157s - loss: 0.0859 - acc: 0.9728 - val_loss: 0.0570 - val_acc: 0.9836
Epoch 44/50
50000/50000 [==============================] - 137s - loss: 0.0846 - acc: 0.9741 - val_loss: 0.0563 - val_acc: 0.9834
Epoch 45/50
50000/50000 [==============================] - 138s - loss: 0.0836 - acc: 0.9746 - val_loss: 0.0558 - val_acc: 0.9838
Epoch 46/50
50000/50000 [==============================] - 126s - loss: 0.0842 - acc: 0.9748 - val_loss: 0.0555 - val_acc: 0.9844
Epoch 47/50
50000/50000 [==============================] - 126s - loss: 0.0841 - acc: 0.9735 - val_loss: 0.0544 - val_acc: 0.9842
Epoch 48/50
50000/50000 [==============================] - 115s - loss: 0.0827 - acc: 0.9743 - val_loss: 0.0537 - val_acc: 0.9850
Epoch 49/50
50000/50000 [==============================] - 119s - loss: 0.0801 - acc: 0.9752 - val_loss: 0.0538 - val_acc: 0.9849
Epoch 50/50
50000/50000 [==============================] - 115s - loss: 0.0790 - acc: 0.9754 - val_loss: 0.0528 - val_acc: 0.9850
9984/10000 [============================>.] - ETA: 0s[0.052752453687321392, 0.98499999999999999]
In [18]:
ar_save = activation_results
activation_results = [xx for xx in activation_results if xx[0]!='linear']
In [20]:
print (activation_results)
[('relu', [0.069144260631129148, 0.98150000000000004], <__main__.AccuracyHistory object at 0x12077ada0>), ('relu', [0.28042341040372848, 0.91790000000000005], <__main__.AccuracyHistory object at 0x121eac7f0>), ('relu', [2.3019887622833251, 0.10639999999999999], <__main__.AccuracyHistory object at 0x13c383080>), ('relu', [0.073935902985930446, 0.97970000000000002], <__main__.AccuracyHistory object at 0x13cc45438>), ('relu', [0.052752453687321392, 0.98499999999999999], <__main__.AccuracyHistory object at 0x13c2b7588>)]
In [27]:
activation_results = [xx for xx in activation_results if xx[0]!='linear']
labels = [ 'tanh', 'sigmoid', 'softmax', 'linear', 'relu' ]
for idx, label in enumerate(labels):
activation_results[idx] = list( activation_results[idx])
activation_results[idx][0] = label
In [45]:
activation_results = [xx for xx in activation_results if xx[0]!='linear']
activation_results = [xx for xx in activation_results if xx[0]!='softmax']
In [48]:
activation_results[0][2].acc
Out[48]:
[0.69827999998092649,
0.86102000000000001,
0.87995999996185303,
0.89028000001907348,
0.8979600000190735,
0.90431999998092649,
0.9102800000190735,
0.91607999998092648,
0.9197599999809265,
0.92342000000000002,
0.92653999998092651,
0.93057999998092655,
0.93195999996185308,
0.93544000001907346,
0.93842000001907344,
0.94057999996185304,
0.94098000003814697,
0.94550000001907353,
0.94603999996185306,
0.9478399999809265,
0.94958000003814702,
0.94986000001907345,
0.95131999996185301,
0.9530599999809265,
0.95369999998092647,
0.95421999996185303,
0.95626000001907352,
0.95671999998092649,
0.95776000003814699,
0.95784000001907343,
0.95912000001907349,
0.96021999998092655,
0.96014000003814692,
0.96218000003814697,
0.96106000001907343,
0.96280000001907351,
0.96278000003814701,
0.96442000001907346,
0.9639000000190735,
0.96472000000000002,
0.96436000003814693,
0.96518000003814697,
0.96553999999999995,
0.96558000003814692,
0.96637999996185298,
0.96716000001907354,
0.96760000003814695,
0.96726000000000001,
0.96752000001907346,
0.96762000000000004]
In [52]:
import seaborn as sns
plt.clf()
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
sns.set_style("whitegrid")
x_vals = [xx[0] for xx in activation_results]
x = range(len(x_vals))
y_test_acc = [xx[1][1] for xx in activation_results]
#ax.bar(x, y_train_acc)
ax.bar(x, y_test_acc)
ax.set_xticks(x)
ax.set_xticklabels(x_vals)
ax.set_xlabel('Activation Fucntion')
ax.set_ylabel('Test Accuracy after 50 epochs')
ax.set_ylim(0.50,1.00)
plt.title('HW3 1.2 Accuracy versus activation function')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2)
plt.show()
plt.clf()
plt.close(fig)
/Users/brianmckean/anaconda2/envs/hwenv/lib/python3.6/site-packages/matplotlib/axes/_axes.py:545: UserWarning: No labelled objects found. Use label='...' kwarg on individual plots.
warnings.warn("No labelled objects found. "
<matplotlib.figure.Figure at 0x14953cfd0>
<matplotlib.figure.Figure at 0x120790c18>
In [50]:
### Plot accuacy versus epoch for activation functions
plt.clf()
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
sns.set_style("whitegrid")
x = range(len(activation_results[0][2].acc))
y_vals = [xx[2].acc for xx in activation_results]
y_labels = [xx[0] for xx in activation_results]
line1, = ax.plot(x, y_vals[0] , lw=2, label=y_labels[0])
line2, = ax.plot(x, y_vals[1] , lw=2, label=y_labels[1])
line3, = ax.plot(x, y_vals[2] , lw=2, label=y_labels[2])
ax.set_xlabel('Epochs')
ax.set_ylabel('Test Accuracy')
ax.set_ylim(0.0,1.00)
plt.title('HW3 1.2 Accuracy Versus Epochs for three activation functions')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2)
plt.show()
plt.clf()
plt.close(fig)
<matplotlib.figure.Figure at 0x15367c470>
<matplotlib.figure.Figure at 0x13d3e6400>
In [54]:
activation_results = []
act_fcn = 'relu'
for batch_size in [ 32, 64, 128, 256, 512 ]:
Print ("Batch size", batch_size)
history = AccuracyHistory()
epochs = 20
history = AccuracyHistory()
cnn = CNN(data.train_x[:limit], data.train_y[:limit],\
data.test_x, data.test_y, history, epochs = epochs, batch_size=batch_size, activation=act_fcn)
cnn.train()
acc = cnn.evaluate()
print(acc)
activation_results.append( (act_fcn, acc, history))
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/20
50000/50000 [==============================] - 185s - loss: 0.6954 - acc: 0.7796 - val_loss: 0.2386 - val_acc: 0.9309
Epoch 2/20
50000/50000 [==============================] - 146s - loss: 0.3248 - acc: 0.9008 - val_loss: 0.1687 - val_acc: 0.9529
Epoch 3/20
50000/50000 [==============================] - 151s - loss: 0.2323 - acc: 0.9306 - val_loss: 0.1283 - val_acc: 0.9646
Epoch 4/20
50000/50000 [==============================] - 162s - loss: 0.1840 - acc: 0.9432 - val_loss: 0.1021 - val_acc: 0.9721
Epoch 5/20
50000/50000 [==============================] - 187s - loss: 0.1543 - acc: 0.9532 - val_loss: 0.0899 - val_acc: 0.9757
Epoch 6/20
50000/50000 [==============================] - 209s - loss: 0.1359 - acc: 0.9583 - val_loss: 0.0774 - val_acc: 0.9797
Epoch 7/20
50000/50000 [==============================] - 155s - loss: 0.1215 - acc: 0.9624 - val_loss: 0.0724 - val_acc: 0.9815
Epoch 8/20
50000/50000 [==============================] - 147s - loss: 0.1120 - acc: 0.9652 - val_loss: 0.0653 - val_acc: 0.9834
Epoch 9/20
50000/50000 [==============================] - 156s - loss: 0.1022 - acc: 0.9682 - val_loss: 0.0628 - val_acc: 0.9832
Epoch 10/20
50000/50000 [==============================] - 149s - loss: 0.0937 - acc: 0.9708 - val_loss: 0.0590 - val_acc: 0.9842
Epoch 11/20
50000/50000 [==============================] - 170s - loss: 0.0883 - acc: 0.9735 - val_loss: 0.0565 - val_acc: 0.9841
Epoch 12/20
50000/50000 [==============================] - 168s - loss: 0.0834 - acc: 0.9743 - val_loss: 0.0538 - val_acc: 0.9851
Epoch 13/20
50000/50000 [==============================] - 150s - loss: 0.0781 - acc: 0.9754 - val_loss: 0.0524 - val_acc: 0.9866
Epoch 14/20
50000/50000 [==============================] - 146s - loss: 0.0761 - acc: 0.9757 - val_loss: 0.0506 - val_acc: 0.9862
Epoch 15/20
50000/50000 [==============================] - 147s - loss: 0.0725 - acc: 0.9767 - val_loss: 0.0497 - val_acc: 0.9860
Epoch 16/20
50000/50000 [==============================] - 150s - loss: 0.0690 - acc: 0.9780 - val_loss: 0.0488 - val_acc: 0.9861
Epoch 17/20
50000/50000 [==============================] - 1155s - loss: 0.0681 - acc: 0.9788 - val_loss: 0.0469 - val_acc: 0.9862
Epoch 18/20
50000/50000 [==============================] - 183s - loss: 0.0646 - acc: 0.9801 - val_loss: 0.0454 - val_acc: 0.9873
Epoch 19/20
50000/50000 [==============================] - 196s - loss: 0.0624 - acc: 0.9807 - val_loss: 0.0447 - val_acc: 0.9875
Epoch 20/20
50000/50000 [==============================] - 197s - loss: 0.0609 - acc: 0.9811 - val_loss: 0.0445 - val_acc: 0.9873
9984/10000 [============================>.] - ETA: 0s[0.044518859381461516, 0.98729999999999996]
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/20
50000/50000 [==============================] - 140s - loss: 0.9135 - acc: 0.7136 - val_loss: 0.3047 - val_acc: 0.9110
Epoch 2/20
50000/50000 [==============================] - 128s - loss: 0.4130 - acc: 0.8732 - val_loss: 0.2379 - val_acc: 0.9288
Epoch 3/20
50000/50000 [==============================] - 130s - loss: 0.3149 - acc: 0.9048 - val_loss: 0.1869 - val_acc: 0.9471
Epoch 4/20
50000/50000 [==============================] - 152s - loss: 0.2608 - acc: 0.9217 - val_loss: 0.1559 - val_acc: 0.9566
Epoch 5/20
50000/50000 [==============================] - 115s - loss: 0.2271 - acc: 0.9311 - val_loss: 0.1327 - val_acc: 0.9622
Epoch 6/20
50000/50000 [==============================] - 98s - loss: 0.2015 - acc: 0.9385 - val_loss: 0.1196 - val_acc: 0.9660
Epoch 7/20
50000/50000 [==============================] - 116s - loss: 0.1816 - acc: 0.9446 - val_loss: 0.1079 - val_acc: 0.9701
Epoch 8/20
50000/50000 [==============================] - 124s - loss: 0.1654 - acc: 0.9498 - val_loss: 0.1022 - val_acc: 0.9722
Epoch 9/20
50000/50000 [==============================] - 121s - loss: 0.1567 - acc: 0.9519 - val_loss: 0.0945 - val_acc: 0.9747
Epoch 10/20
50000/50000 [==============================] - 112s - loss: 0.1454 - acc: 0.9552 - val_loss: 0.0889 - val_acc: 0.9755
Epoch 11/20
50000/50000 [==============================] - 162s - loss: 0.1399 - acc: 0.9571 - val_loss: 0.0848 - val_acc: 0.9769
Epoch 12/20
50000/50000 [==============================] - 121s - loss: 0.1290 - acc: 0.9604 - val_loss: 0.0811 - val_acc: 0.9776
Epoch 13/20
50000/50000 [==============================] - 119s - loss: 0.1238 - acc: 0.9614 - val_loss: 0.0761 - val_acc: 0.9796
Epoch 14/20
50000/50000 [==============================] - 109s - loss: 0.1165 - acc: 0.9655 - val_loss: 0.0744 - val_acc: 0.9797
Epoch 15/20
50000/50000 [==============================] - 113s - loss: 0.1148 - acc: 0.9650 - val_loss: 0.0713 - val_acc: 0.9803
Epoch 16/20
50000/50000 [==============================] - 143s - loss: 0.1093 - acc: 0.9670 - val_loss: 0.0690 - val_acc: 0.9805
Epoch 17/20
50000/50000 [==============================] - 117s - loss: 0.1052 - acc: 0.9675 - val_loss: 0.0672 - val_acc: 0.9805
Epoch 18/20
50000/50000 [==============================] - 132s - loss: 0.0995 - acc: 0.9695 - val_loss: 0.0652 - val_acc: 0.9819
Epoch 19/20
50000/50000 [==============================] - 139s - loss: 0.0982 - acc: 0.9700 - val_loss: 0.0629 - val_acc: 0.9820
Epoch 20/20
50000/50000 [==============================] - 148s - loss: 0.0937 - acc: 0.9711 - val_loss: 0.0621 - val_acc: 0.9813
9952/10000 [============================>.] - ETA: 0s[0.062111683936975894, 0.98129999999999995]
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/20
50000/50000 [==============================] - 96s - loss: 1.2812 - acc: 0.5956 - val_loss: 0.3997 - val_acc: 0.8876
Epoch 2/20
50000/50000 [==============================] - 103s - loss: 0.5468 - acc: 0.8299 - val_loss: 0.2935 - val_acc: 0.9154
Epoch 3/20
50000/50000 [==============================] - 101s - loss: 0.4332 - acc: 0.8676 - val_loss: 0.2510 - val_acc: 0.9288
Epoch 4/20
50000/50000 [==============================] - 96s - loss: 0.3739 - acc: 0.8858 - val_loss: 0.2197 - val_acc: 0.9391
Epoch 5/20
50000/50000 [==============================] - 92s - loss: 0.3312 - acc: 0.8990 - val_loss: 0.1967 - val_acc: 0.9459
Epoch 6/20
50000/50000 [==============================] - 114s - loss: 0.2924 - acc: 0.9109 - val_loss: 0.1746 - val_acc: 0.9526
Epoch 7/20
50000/50000 [==============================] - 108s - loss: 0.2694 - acc: 0.9176 - val_loss: 0.1593 - val_acc: 0.9560
Epoch 8/20
50000/50000 [==============================] - 94s - loss: 0.2467 - acc: 0.9248 - val_loss: 0.1474 - val_acc: 0.9601
Epoch 9/20
50000/50000 [==============================] - 94s - loss: 0.2302 - acc: 0.9306 - val_loss: 0.1369 - val_acc: 0.9609
Epoch 10/20
50000/50000 [==============================] - 109s - loss: 0.2154 - acc: 0.9348 - val_loss: 0.1286 - val_acc: 0.9637
Epoch 11/20
50000/50000 [==============================] - 112s - loss: 0.1996 - acc: 0.9399 - val_loss: 0.1229 - val_acc: 0.9657
Epoch 12/20
50000/50000 [==============================] - 122s - loss: 0.1879 - acc: 0.9438 - val_loss: 0.1159 - val_acc: 0.9686
Epoch 13/20
50000/50000 [==============================] - 94s - loss: 0.1810 - acc: 0.9456 - val_loss: 0.1103 - val_acc: 0.9702
Epoch 14/20
50000/50000 [==============================] - 146s - loss: 0.1726 - acc: 0.9474 - val_loss: 0.1049 - val_acc: 0.9718
Epoch 15/20
50000/50000 [==============================] - 154s - loss: 0.1673 - acc: 0.9493 - val_loss: 0.1011 - val_acc: 0.9725
Epoch 16/20
50000/50000 [==============================] - 117s - loss: 0.1595 - acc: 0.9524 - val_loss: 0.0975 - val_acc: 0.9735
Epoch 17/20
50000/50000 [==============================] - 191s - loss: 0.1551 - acc: 0.9527 - val_loss: 0.0945 - val_acc: 0.9740
Epoch 18/20
50000/50000 [==============================] - 178s - loss: 0.1496 - acc: 0.9549 - val_loss: 0.0909 - val_acc: 0.9759
Epoch 19/20
50000/50000 [==============================] - 131s - loss: 0.1430 - acc: 0.9570 - val_loss: 0.0896 - val_acc: 0.9766
Epoch 20/20
50000/50000 [==============================] - 97s - loss: 0.1420 - acc: 0.9569 - val_loss: 0.0866 - val_acc: 0.9772
9984/10000 [============================>.] - ETA: 0s[0.086559842004626991, 0.97719999999999996]
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/20
50000/50000 [==============================] - 94s - loss: 1.7674 - acc: 0.4349 - val_loss: 0.7223 - val_acc: 0.8464
Epoch 2/20
50000/50000 [==============================] - 118s - loss: 0.7761 - acc: 0.7565 - val_loss: 0.3984 - val_acc: 0.8884
Epoch 3/20
50000/50000 [==============================] - 91s - loss: 0.5882 - acc: 0.8158 - val_loss: 0.3412 - val_acc: 0.9006
Epoch 4/20
50000/50000 [==============================] - 102s - loss: 0.5150 - acc: 0.8402 - val_loss: 0.3181 - val_acc: 0.9033
Epoch 5/20
50000/50000 [==============================] - 104s - loss: 0.4717 - acc: 0.8521 - val_loss: 0.2869 - val_acc: 0.9167
Epoch 6/20
50000/50000 [==============================] - 119s - loss: 0.4359 - acc: 0.8651 - val_loss: 0.2702 - val_acc: 0.9223
Epoch 7/20
50000/50000 [==============================] - 97s - loss: 0.4098 - acc: 0.8765 - val_loss: 0.2550 - val_acc: 0.9256
Epoch 8/20
50000/50000 [==============================] - 90s - loss: 0.3867 - acc: 0.8815 - val_loss: 0.2402 - val_acc: 0.9322
Epoch 9/20
50000/50000 [==============================] - 82s - loss: 0.3672 - acc: 0.8886 - val_loss: 0.2269 - val_acc: 0.9375
Epoch 10/20
50000/50000 [==============================] - 90s - loss: 0.3491 - acc: 0.8941 - val_loss: 0.2147 - val_acc: 0.9404
Epoch 11/20
50000/50000 [==============================] - 98s - loss: 0.3331 - acc: 0.8993 - val_loss: 0.2038 - val_acc: 0.9447
Epoch 12/20
50000/50000 [==============================] - 95s - loss: 0.3203 - acc: 0.9016 - val_loss: 0.1975 - val_acc: 0.9462
Epoch 13/20
50000/50000 [==============================] - 93s - loss: 0.3049 - acc: 0.9078 - val_loss: 0.1881 - val_acc: 0.9497
Epoch 14/20
50000/50000 [==============================] - 83s - loss: 0.2900 - acc: 0.9117 - val_loss: 0.1829 - val_acc: 0.9502
Epoch 15/20
50000/50000 [==============================] - 101s - loss: 0.2828 - acc: 0.9139 - val_loss: 0.1743 - val_acc: 0.9533
Epoch 16/20
50000/50000 [==============================] - 86s - loss: 0.2721 - acc: 0.9165 - val_loss: 0.1676 - val_acc: 0.9554
Epoch 17/20
50000/50000 [==============================] - 86s - loss: 0.2616 - acc: 0.9210 - val_loss: 0.1624 - val_acc: 0.9561
Epoch 18/20
50000/50000 [==============================] - 86s - loss: 0.2546 - acc: 0.9234 - val_loss: 0.1558 - val_acc: 0.9581
Epoch 19/20
50000/50000 [==============================] - 87s - loss: 0.2434 - acc: 0.9266 - val_loss: 0.1497 - val_acc: 0.9599
Epoch 20/20
50000/50000 [==============================] - 87s - loss: 0.2404 - acc: 0.9275 - val_loss: 0.1445 - val_acc: 0.9609
9952/10000 [============================>.] - ETA: 0s[0.14449596992731095, 0.96089999999999998]
50000
50000
(50000, 28, 28, 1) (10000, 28, 28, 1) (50000,) (10000,)
Train on 50000 samples, validate on 10000 samples
Epoch 1/20
50000/50000 [==============================] - 84s - loss: 2.1116 - acc: 0.3095 - val_loss: 1.6611 - val_acc: 0.7574
Epoch 2/20
50000/50000 [==============================] - 77s - loss: 1.3314 - acc: 0.6268 - val_loss: 0.6914 - val_acc: 0.8436
Epoch 3/20
50000/50000 [==============================] - 74s - loss: 0.8376 - acc: 0.7384 - val_loss: 0.4613 - val_acc: 0.8754
Epoch 4/20
50000/50000 [==============================] - 76s - loss: 0.6797 - acc: 0.7825 - val_loss: 0.3860 - val_acc: 0.8890
Epoch 5/20
50000/50000 [==============================] - 79s - loss: 0.6023 - acc: 0.8097 - val_loss: 0.3483 - val_acc: 0.8980
Epoch 6/20
50000/50000 [==============================] - 85s - loss: 0.5526 - acc: 0.8255 - val_loss: 0.3206 - val_acc: 0.9072
Epoch 7/20
50000/50000 [==============================] - 93s - loss: 0.5130 - acc: 0.8390 - val_loss: 0.3052 - val_acc: 0.9108
Epoch 8/20
50000/50000 [==============================] - 93s - loss: 0.4875 - acc: 0.8487 - val_loss: 0.2884 - val_acc: 0.9175
Epoch 9/20
50000/50000 [==============================] - 99s - loss: 0.4621 - acc: 0.8559 - val_loss: 0.2786 - val_acc: 0.9204
Epoch 10/20
50000/50000 [==============================] - 81s - loss: 0.4418 - acc: 0.8634 - val_loss: 0.2680 - val_acc: 0.9229
Epoch 11/20
50000/50000 [==============================] - 83s - loss: 0.4233 - acc: 0.8700 - val_loss: 0.2550 - val_acc: 0.9274
Epoch 12/20
50000/50000 [==============================] - 81s - loss: 0.4071 - acc: 0.8749 - val_loss: 0.2488 - val_acc: 0.9288
Epoch 13/20
50000/50000 [==============================] - 75s - loss: 0.3884 - acc: 0.8820 - val_loss: 0.2396 - val_acc: 0.9322
Epoch 14/20
50000/50000 [==============================] - 74s - loss: 0.3810 - acc: 0.8838 - val_loss: 0.2327 - val_acc: 0.9336
Epoch 15/20
50000/50000 [==============================] - 75s - loss: 0.3695 - acc: 0.8868 - val_loss: 0.2270 - val_acc: 0.9359
Epoch 16/20
50000/50000 [==============================] - 74s - loss: 0.3528 - acc: 0.8923 - val_loss: 0.2194 - val_acc: 0.9372
Epoch 17/20
50000/50000 [==============================] - 80s - loss: 0.3452 - acc: 0.8952 - val_loss: 0.2123 - val_acc: 0.9406
Epoch 18/20
50000/50000 [==============================] - 77s - loss: 0.3348 - acc: 0.8987 - val_loss: 0.2056 - val_acc: 0.9412
Epoch 19/20
50000/50000 [==============================] - 81s - loss: 0.3258 - acc: 0.9002 - val_loss: 0.2001 - val_acc: 0.9435
Epoch 20/20
50000/50000 [==============================] - 92s - loss: 0.3183 - acc: 0.9027 - val_loss: 0.1938 - val_acc: 0.9454
9984/10000 [============================>.] - ETA: 0s[0.19382236483693122, 0.94540000000000002]
In [60]:
for idx, label in enumerate([ 32, 64, 128, 256, 512 ]):
activation_results[idx] = list( activation_results[idx])
activation_results[idx][0] = label
In [64]:
plt.clf()
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
sns.set_style("whitegrid")
x_vals = [xx[0] for xx in activation_results]
x = range(len(x_vals))
y_test_acc = [xx[1][1] for xx in activation_results]
y_labels = [xx[0] for xx in activation_results]
#ax.bar(x, y_train_acc)
ax.bar(x, y_test_acc)
ax.set_xticks(x)
ax.set_xticklabels(x_vals)
ax.set_xlabel('Batch Size')
ax.set_ylabel('Test Accuracy after 20 epochs')
ax.set_ylim(0.50,1.00)
plt.title('HW3 1.2 Accuracy versus batch size')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2)
plt.show()
plt.clf()
plt.close(fig)
/Users/brianmckean/anaconda2/envs/hwenv/lib/python3.6/site-packages/matplotlib/axes/_axes.py:545: UserWarning: No labelled objects found. Use label='...' kwarg on individual plots.
warnings.warn("No labelled objects found. "
<matplotlib.figure.Figure at 0x154306e80>
<matplotlib.figure.Figure at 0x12605d860>
In [63]:
### Plot accuacy versus epoch for batch sizes
plt.clf()
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
sns.set_style("whitegrid")
x = range(len(activation_results[0][2].acc))
y_vals = [xx[2].acc for xx in activation_results]
y_labels = [xx[0] for xx in activation_results]
line1, = ax.plot(x, y_vals[0] , lw=2, label=y_labels[0])
line2, = ax.plot(x, y_vals[1] , lw=2, label=y_labels[1])
line3, = ax.plot(x, y_vals[2] , lw=2, label=y_labels[2])
line4, = ax.plot(x, y_vals[3] , lw=2, label=y_labels[3])
line5, = ax.plot(x, y_vals[4] , lw=2, label=y_labels[4])
ax.set_xlabel('Epochs')
ax.set_ylabel('Test Accuracy')
ax.set_ylim(0.0,1.00)
plt.title('HW3 1.2 Accuracy Versus Epochs for different batch sizes')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2)
plt.show()
plt.clf()
plt.close(fig)
<matplotlib.figure.Figure at 0x154300160>
<matplotlib.figure.Figure at 0x13c7399e8>
In [ ]:
Content source: bdmckean/MachineLearning
Similar notebooks: