In [1]:
# Load the pre-processed image dataset (features X, labels y) from a pickle.
# NOTE(review): pickle.load executes arbitrary code if the file is untrusted --
# only load archives you produced yourself.
import pickle

pickle_file = '-images_6.pickle'

with open(pickle_file, 'rb') as f:
    save = pickle.load(f)
    X, y = save['X'], save['y']
    del save  # hint to help gc free up memory
    print('Training set', X.shape, y.shape)


('Training set', (4128, 32, 32), (4128, 1))

In [2]:
# Add additional imports
import numpy as np
np.random.seed(1337)  # for reproducibility

from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras import backend as K
from keras.callbacks import ModelCheckpoint

from keras.datasets import mnist

print "using ordering:", K.image_dim_ordering()


Using Theano backend.
using ordering: th

In [3]:
# number of classes
num_classes = np.max(y) + 1 # number of categories
print "number of classes:", num_classes

# image dimensions
img_rows, img_cols = X.shape[1],  X.shape[2]

if K.image_dim_ordering() == 'th':
    X = X.reshape(X.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    X = X.reshape(X.shape[0], img_rows, img_cols, 1)
    input_shape = (img_rows, img_cols, 1)

y_ohe = np_utils.to_categorical(y, num_classes)

print X.shape
print y_ohe.shape


number of classes: 6
(4128, 1, 32, 32)
(4128, 6)

In [4]:
# Plot to make sure we have imported correctly..
%matplotlib inline
from matplotlib.pyplot import imshow
import matplotlib.pyplot as plt

img_num = 0

if K.image_dim_ordering() == 'th':
    img = X[img_num][0,:,:]
else:
    img = X[img_num][:,:,0]

print img.shape
print y_ohe[img_num]
imshow(img, cmap = plt.get_cmap('gray'), vmin = 0, vmax = 1,  interpolation='nearest')


(32, 32)
[ 0.  1.  0.  0.  0.  0.]
Out[4]:
<matplotlib.image.AxesImage at 0x7f0043281990>

In [5]:
## Construct CNN

# model hyperparameters
batch_size = 32
nb_epoch = 50

# network architecture
patch_size_1 = 3
patch_size_2 = 3
patch_size_3 = 3
patch_size_4 = 3
patch_size_5 = 3

depth_1 = 64
depth_2 = 128
depth_3 = 128
depth_4 = 256
depth_5 = 256

pool_size = 2

num_hidden_1 = 512
num_hidden_2 = 1024

dropout = 0.25


##---------------------------
# create new Keras Sequential model
model = Sequential()

model.add(Convolution2D(depth_1, patch_size_1, patch_size_1,
                        border_mode='same',
                        input_shape=input_shape))
model.add(Activation('relu'))

model.add(MaxPooling2D(pool_size=(pool_size, pool_size)))

model.add(Convolution2D(depth_2, patch_size_2, patch_size_2,
                        border_mode='same'))
model.add(Activation('relu'))
model.add(Convolution2D(depth_3, patch_size_3, patch_size_3,
                        border_mode='same'))
model.add(Activation('relu'))

model.add(MaxPooling2D(pool_size=(pool_size, pool_size)))

model.add(Convolution2D(depth_4, patch_size_4, patch_size_4,
                        border_mode='same'))
model.add(Activation('relu'))
model.add(Convolution2D(depth_5, patch_size_5, patch_size_5,
                        border_mode='same'))
model.add(Activation('relu'))


model.add(Flatten())

model.add(Dense(num_hidden_1))
model.add(Activation('relu'))
model.add(Dropout(dropout))

model.add(Dense(num_hidden_2))
model.add(Activation('relu'))
model.add(Dropout(dropout))

model.add(Dense(num_classes))
model.add(Activation('softmax'))

print "Model Created"

##-------------------------------------------
# compile model
checkpoint_name = "-model.hdf5"
checkpointer = ModelCheckpoint(checkpoint_name, verbose=0, save_best_only=True)

model.compile(loss='categorical_crossentropy', optimizer='adadelta', metrics=['accuracy'])

print "Compiled.."


Model Created
Compiled..

In [6]:
# Train Model
# 75/25 train/validation split; the ModelCheckpoint callback writes the
# best-so-far weights to "-model.hdf5" during training.
# NOTE(review): the logs below show training accuracy reaching ~0.99 while
# val_acc plateaus around 0.42 and val_loss climbs -- severe overfitting.
# Consider stronger dropout, data augmentation, or early stopping.
history = model.fit(X, y_ohe, validation_split=0.25, batch_size=batch_size, nb_epoch=nb_epoch,
          verbose=1, callbacks=[checkpointer])


Train on 3096 samples, validate on 1032 samples
Epoch 1/50
3096/3096 [==============================] - 61s - loss: 1.7928 - acc: 0.1557 - val_loss: 1.7927 - val_acc: 0.1483
Epoch 2/50
3096/3096 [==============================] - 72s - loss: 1.7922 - acc: 0.1618 - val_loss: 1.7944 - val_acc: 0.1483
Epoch 3/50
3096/3096 [==============================] - 83s - loss: 1.7924 - acc: 0.1696 - val_loss: 1.7928 - val_acc: 0.1483
Epoch 4/50
3096/3096 [==============================] - 91s - loss: 1.7924 - acc: 0.1631 - val_loss: 1.7935 - val_acc: 0.1483
Epoch 5/50
3096/3096 [==============================] - 86s - loss: 1.7921 - acc: 0.1686 - val_loss: 1.7932 - val_acc: 0.1560
Epoch 6/50
3096/3096 [==============================] - 83s - loss: 1.7920 - acc: 0.1741 - val_loss: 1.7944 - val_acc: 0.1483
Epoch 7/50
3096/3096 [==============================] - 85s - loss: 1.7915 - acc: 0.1705 - val_loss: 1.7982 - val_acc: 0.1657
Epoch 8/50
3096/3096 [==============================] - 91s - loss: 1.7866 - acc: 0.1902 - val_loss: 1.7996 - val_acc: 0.1589
Epoch 9/50
3096/3096 [==============================] - 88s - loss: 1.7631 - acc: 0.2261 - val_loss: 1.7564 - val_acc: 0.2006
Epoch 10/50
3096/3096 [==============================] - 85s - loss: 1.7373 - acc: 0.2464 - val_loss: 1.7535 - val_acc: 0.2054
Epoch 11/50
3096/3096 [==============================] - 84s - loss: 1.7031 - acc: 0.2636 - val_loss: 1.7287 - val_acc: 0.2393
Epoch 12/50
3096/3096 [==============================] - 86s - loss: 1.6581 - acc: 0.2994 - val_loss: 1.6782 - val_acc: 0.2742
Epoch 13/50
3096/3096 [==============================] - 84s - loss: 1.6206 - acc: 0.3039 - val_loss: 1.6598 - val_acc: 0.2975
Epoch 14/50
3096/3096 [==============================] - 83s - loss: 1.5620 - acc: 0.3327 - val_loss: 1.6452 - val_acc: 0.3101
Epoch 15/50
3096/3096 [==============================] - 84s - loss: 1.4983 - acc: 0.3773 - val_loss: 1.6158 - val_acc: 0.3004
Epoch 16/50
3096/3096 [==============================] - 86s - loss: 1.3956 - acc: 0.4115 - val_loss: 1.5766 - val_acc: 0.3227
Epoch 17/50
3096/3096 [==============================] - 86s - loss: 1.3041 - acc: 0.4574 - val_loss: 1.5901 - val_acc: 0.3459
Epoch 18/50
3096/3096 [==============================] - 83s - loss: 1.2045 - acc: 0.5136 - val_loss: 1.6065 - val_acc: 0.3672
Epoch 19/50
3096/3096 [==============================] - 83s - loss: 1.0344 - acc: 0.5882 - val_loss: 1.7094 - val_acc: 0.3537
Epoch 20/50
3096/3096 [==============================] - 82s - loss: 0.8852 - acc: 0.6515 - val_loss: 1.7084 - val_acc: 0.3953
Epoch 21/50
3096/3096 [==============================] - 84s - loss: 0.6500 - acc: 0.7565 - val_loss: 2.0010 - val_acc: 0.3808
Epoch 22/50
3096/3096 [==============================] - 84s - loss: 0.4447 - acc: 0.8411 - val_loss: 2.2295 - val_acc: 0.4215
Epoch 23/50
3096/3096 [==============================] - 83s - loss: 0.2678 - acc: 0.9173 - val_loss: 2.5814 - val_acc: 0.4138
Epoch 24/50
3096/3096 [==============================] - 85s - loss: 0.1538 - acc: 0.9522 - val_loss: 2.9761 - val_acc: 0.4273
Epoch 25/50
3096/3096 [==============================] - 85s - loss: 0.1039 - acc: 0.9742 - val_loss: 3.0978 - val_acc: 0.4254
Epoch 26/50
3096/3096 [==============================] - 79s - loss: 0.0717 - acc: 0.9822 - val_loss: 3.3791 - val_acc: 0.4283
Epoch 27/50
3096/3096 [==============================] - 79s - loss: 0.0755 - acc: 0.9790 - val_loss: 3.1796 - val_acc: 0.4215
Epoch 28/50
3096/3096 [==============================] - 81s - loss: 0.0459 - acc: 0.9887 - val_loss: 3.8096 - val_acc: 0.4070
Epoch 29/50
3096/3096 [==============================] - 79s - loss: 0.0407 - acc: 0.9906 - val_loss: 3.6368 - val_acc: 0.4060
Epoch 30/50
3096/3096 [==============================] - 79s - loss: 0.0404 - acc: 0.9897 - val_loss: 3.6453 - val_acc: 0.4186
Epoch 31/50
3096/3096 [==============================] - 87s - loss: 0.0340 - acc: 0.9922 - val_loss: 3.7137 - val_acc: 0.4215
Epoch 32/50
3096/3096 [==============================] - 77s - loss: 0.0327 - acc: 0.9929 - val_loss: 3.6074 - val_acc: 0.4176
Epoch 33/50
3096/3096 [==============================] - 80s - loss: 0.0278 - acc: 0.9942 - val_loss: 3.5590 - val_acc: 0.4264
Epoch 34/50
3096/3096 [==============================] - 81s - loss: 0.0261 - acc: 0.9935 - val_loss: 3.5766 - val_acc: 0.4273
Epoch 35/50
3096/3096 [==============================] - 81s - loss: 0.0229 - acc: 0.9929 - val_loss: 3.6868 - val_acc: 0.4283
Epoch 36/50
3096/3096 [==============================] - 81s - loss: 0.0232 - acc: 0.9935 - val_loss: 4.0080 - val_acc: 0.4186
Epoch 37/50
3096/3096 [==============================] - 80s - loss: 0.0304 - acc: 0.9897 - val_loss: 3.4965 - val_acc: 0.4147
Epoch 38/50
3096/3096 [==============================] - 83s - loss: 0.0173 - acc: 0.9939 - val_loss: 3.7504 - val_acc: 0.4322
Epoch 39/50
3096/3096 [==============================] - 82s - loss: 0.0219 - acc: 0.9942 - val_loss: 3.6112 - val_acc: 0.4293
Epoch 40/50
3096/3096 [==============================] - 80s - loss: 0.0200 - acc: 0.9939 - val_loss: 3.4088 - val_acc: 0.4234
Epoch 41/50
3096/3096 [==============================] - 73s - loss: 0.0185 - acc: 0.9942 - val_loss: 3.6401 - val_acc: 0.4118
Epoch 42/50
3096/3096 [==============================] - 69s - loss: 0.0173 - acc: 0.9939 - val_loss: 3.5107 - val_acc: 0.4176
Epoch 43/50
3096/3096 [==============================] - 67s - loss: 0.0155 - acc: 0.9922 - val_loss: 3.9209 - val_acc: 0.4244
Epoch 44/50
3096/3096 [==============================] - 66s - loss: 0.0181 - acc: 0.9939 - val_loss: 3.8068 - val_acc: 0.4079
Epoch 45/50
3096/3096 [==============================] - 65s - loss: 0.0152 - acc: 0.9948 - val_loss: 3.8315 - val_acc: 0.4225
Epoch 46/50
3096/3096 [==============================] - 66s - loss: 0.0131 - acc: 0.9942 - val_loss: 3.7502 - val_acc: 0.4205
Epoch 47/50
3096/3096 [==============================] - 65s - loss: 0.0146 - acc: 0.9929 - val_loss: 4.0414 - val_acc: 0.4322
Epoch 48/50
3096/3096 [==============================] - 64s - loss: 0.0111 - acc: 0.9952 - val_loss: 4.0078 - val_acc: 0.4273
Epoch 49/50
3096/3096 [==============================] - 66s - loss: 0.0125 - acc: 0.9948 - val_loss: 3.6609 - val_acc: 0.4128
Epoch 50/50
3096/3096 [==============================] - 65s - loss: 0.0121 - acc: 0.9945 - val_loss: 3.9542 - val_acc: 0.4167