In [1]:
import numpy as np
np.random.seed(1337)  # for reproducibility

from keras.datasets import mnist  # NOTE(review): appears unused in this notebook — data comes from a pickle below
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras import backend as K
from keras.callbacks import ModelCheckpoint

import pickle

# Report the backend's image dimension ordering ('th' = channels-first,
# 'tf' = channels-last); the reshape logic in a later cell branches on this.
print "using ordering:", K.image_dim_ordering()


Using Theano backend.
using ordering: th

In [2]:
# load data from memory

import pickle  # NOTE(review): re-import (already imported in the first cell); harmless, lets this cell run standalone

pickle_file = '-data.pickle'

# NOTE(review): pickle.load can execute arbitrary code — only load pickle
# files produced by a trusted source (here, presumably an earlier
# preprocessing notebook; confirm provenance).
with open(pickle_file, 'rb') as f:
    save = pickle.load(f)
    X = save['X']  # image array — presumably (num_samples, rows, cols, 3); verified by the preview cell below
    y = save['y']  # integer class labels; one-hot encoded in the next cell
    del save  # hint to help gc free up memory

In [3]:
# number of classes
num_classes = 2

# image dimensions (taken from axes 1 and 2, i.e. assumes X is stored
# channels-last as (num_samples, rows, cols, 3) — TODO confirm)
img_rows, img_cols = X.shape[1], X.shape[2]

# NOTE(review): for the 'th' (channels-first) branch, reshape() changes the
# shape metadata but NOT the memory layout — it does not move the channel
# axis. If X really is channels-last, each "channel" slab after this reshape
# is a contiguous third of the flattened image, not an R/G/B plane;
# np.transpose(X, (0, 3, 1, 2)) would preserve pixel layout instead. The
# preview cell below undoes this with a matching reshape, so the round-trip
# displays correctly either way, and the recorded training run was done with
# this reshape — confirm intended storage order before changing.
if K.image_dim_ordering() == 'th':
    X = X.reshape(X.shape[0], 3, img_rows, img_cols)
    input_shape = (3, img_rows, img_cols)
else:
    X = X.reshape(X.shape[0], img_rows, img_cols, 3)
    input_shape = (img_rows, img_cols, 3)

# one-hot encode the integer labels into (num_samples, num_classes)
y = np_utils.to_categorical(y, num_classes)

print X.shape
print y.shape


(5726, 3, 32, 32)
(5726, 2)

In [4]:
# preview one sample from the reloaded X dataset to make sure nothing happened along the way

%matplotlib inline
from matplotlib.pyplot import imshow
import matplotlib.pyplot as plt

img_num = 0

if K.image_dim_ordering() == 'th':
    img = X[img_num].reshape(img_rows, img_cols, 3)

print img.shape
imshow(img, cmap = plt.get_cmap('gray'), vmin = 0, vmax = 1,  interpolation='nearest')


(32, 32, 3)
Out[4]:
<matplotlib.image.AxesImage at 0x7fd63cb52650>

In [5]:
# model hyperparameters
batch_size = 32  # samples per gradient update
nb_epoch = 10    # full passes over the training data

# network architecture
# side length of the (square) convolution kernel for each conv stage
patch_size_1 = 3
patch_size_2 = 3
patch_size_3 = 3

# number of filters in each conv stage
depth_1 = 32
depth_2 = 64
depth_3 = 128

# side length of the (square) max-pooling window
pool_size = 2

# units in the two fully-connected layers
num_hidden_1 = 256
num_hidden_2 = 512

# rate passed to the Dropout layers after each dense layer
dropout = 0.5

In [6]:
model = Sequential()

# Three conv -> relu -> max-pool stages. The first stage also declares the
# network's input shape; the later ones infer theirs from the previous layer.
conv_stages = [
    (depth_1, patch_size_1, {'border_mode': 'valid', 'input_shape': input_shape}),
    (depth_2, patch_size_2, {'border_mode': 'valid'}),
    (depth_3, patch_size_3, {'border_mode': 'valid'}),
]
for depth, patch, conv_kwargs in conv_stages:
    model.add(Convolution2D(depth, patch, patch, **conv_kwargs))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(pool_size, pool_size)))

# Flatten the final feature maps into a vector for the dense layers.
model.add(Flatten())

# Two fully-connected relu layers, each followed by dropout.
for num_hidden in (num_hidden_1, num_hidden_2):
    model.add(Dense(num_hidden))
    model.add(Activation('relu'))
    model.add(Dropout(dropout))

# Softmax classifier head over the output classes.
model.add(Dense(num_classes))
model.add(Activation('softmax'))

In [7]:
# checkpoint the model weights to disk during training
checkpoint_name = "-model.hdf5"
# save_best_only overwrites the file only when the monitored validation
# quantity improves (Keras monitors val_loss by default — TODO confirm for
# this Keras version)
checkpointer = ModelCheckpoint(checkpoint_name, verbose=0, save_best_only=True)

# categorical crossentropy matches the one-hot labels; adadelta adapts the
# learning rate per parameter, so no explicit learning rate is set
model.compile(loss='categorical_crossentropy', optimizer='adadelta', metrics=['accuracy'])

In [8]:
# Train with 25% of the data held out for validation; the checkpoint callback
# writes -model.hdf5 whenever validation improves.
# NOTE(review): validation_split takes the tail of X/y without shuffling
# first — confirm the samples are not ordered by class, or the split is biased.
history = model.fit(X, y, validation_split=0.25, batch_size=batch_size, nb_epoch=nb_epoch,
          verbose=1, callbacks=[checkpointer])


Train on 4294 samples, validate on 1432 samples
Epoch 1/10
4294/4294 [==============================] - 38s - loss: 0.5731 - acc: 0.6910 - val_loss: 0.2825 - val_acc: 0.9085
Epoch 2/10
4294/4294 [==============================] - 29s - loss: 0.2332 - acc: 0.9206 - val_loss: 0.0887 - val_acc: 0.9707
Epoch 3/10
4294/4294 [==============================] - 30s - loss: 0.0597 - acc: 0.9797 - val_loss: 1.1273 - val_acc: 0.5901
Epoch 4/10
4294/4294 [==============================] - 30s - loss: 0.0411 - acc: 0.9874 - val_loss: 0.0382 - val_acc: 0.9874
Epoch 5/10
4294/4294 [==============================] - 27s - loss: 0.0277 - acc: 0.9918 - val_loss: 0.0336 - val_acc: 0.9895
Epoch 6/10
4294/4294 [==============================] - 27s - loss: 0.0251 - acc: 0.9930 - val_loss: 0.0211 - val_acc: 0.9944
Epoch 7/10
4294/4294 [==============================] - 16s - loss: 0.0238 - acc: 0.9930 - val_loss: 0.0277 - val_acc: 0.9895
Epoch 8/10
4294/4294 [==============================] - 24s - loss: 0.0249 - acc: 0.9928 - val_loss: 0.0170 - val_acc: 0.9937
Epoch 9/10
4294/4294 [==============================] - 26s - loss: 0.0228 - acc: 0.9935 - val_loss: 0.0189 - val_acc: 0.9951
Epoch 10/10
4294/4294 [==============================] - 26s - loss: 0.0226 - acc: 0.9937 - val_loss: 0.0156 - val_acc: 0.9958

In [ ]: