In [1]:
%matplotlib inline
from matplotlib import pyplot as plt
from keras.models import Sequential
from keras.utils import np_utils
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.optimizers import SGD
# sklearn.cross_validation was the module name at the time
# (renamed sklearn.model_selection in later releases)
from sklearn.cross_validation import train_test_split
from sklearn.metrics import classification_report
from sklearn.externals import joblib
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
import numpy as np
import pandas as pd
from time import time

In [2]:
np.set_printoptions(precision=4)
np.set_printoptions(suppress=True)

In [3]:
nb_classes = 9
batch_size = 64
nb_epoch = 50
np.random.seed(1337) # for reproducibility

In [4]:
# Create the model: 3 convolutional layers followed by
# 2 fully connected layers and a softmax output layer

# Sequential wrapper model
model = Sequential()

# first convolutional layer: 32 filters, 1 input channel, 2x2 kernels
# (this Keras version takes nb_filter, stack_size, nb_row, nb_col)
model.add(Convolution2D(32, 1, 2, 2))
model.add(Activation('relu'))

# second convolutional layer
model.add(Convolution2D(48, 32, 2, 2))
model.add(Activation('relu')) 
model.add(MaxPooling2D(poolsize=(2,2)))

# third convolutional layer
model.add(Convolution2D(32, 48, 2, 2))
model.add(Activation('relu'))
model.add(MaxPooling2D(poolsize=(2,2)))

# flatten the convolutional feature maps so they can be fed to
# the fully connected layers
model.add(Flatten())
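
# shape check (hand-worked, assuming this Keras version's default 'valid'
# border mode for the 2x2 convolutions on a 1x28x28 input):
#   28 -> 27 (conv) -> 26 (conv) -> 13 (pool) -> 12 (conv) -> 6 (pool)
# so Flatten() emits 32 maps of 6x6 = 32*6*6 = 1152 values per sample,
# which is the input size of the first Dense layer below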

# first fully connected layer
model.add(Dense(32*6*6, 128, init='lecun_uniform'))
model.add(Activation('relu'))
model.add(Dropout(0.5))

# second fully connected layer
model.add(Dense(128, 128, init='lecun_uniform'))
model.add(Activation('relu'))
model.add(Dropout(0.5))

# final fully connected layer, which outputs the class scores
model.add(Dense(128, 9, init='lecun_uniform'))
model.add(Activation('softmax'))

# setting sgd optimizer parameters
sgd = SGD(lr=0.05, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd)
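# note: categorical_crossentropy expects one-hot targets, which is why the
# integer labels are converted with np_utils.to_categorical further down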

features = joblib.load("./mldata/features_1200.mat")
labels = joblib.load("./mldata/lables_1200.mat")

features = np.array(features, 'int16')
labels = np.array(labels, 'int')

def scale(X, eps=0.001):
    # scale the data points s.t. the columns of the feature space
    # (i.e. the predictors) are within the range [0, 1]
    return (X - np.min(X, axis=0)) / (np.max(X, axis=0) + eps)
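# a hand-worked check of scale(): a pixel column containing {0, 128, 255}
# maps to roughly {0.0, 0.502, 1.0}. The denominator is max + eps rather
# than (max - min) + eps, so the [0, 1] range relies on each column's
# minimum being 0 (true for raw pixel intensities).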

features = features.astype("float32")
features = scale(features)

# construct the train / validation / test splits
# (the features were already scaled to [0, 1] above)
(trainX, testX, trainY, testY) = train_test_split(features, labels, test_size=0.8)
(valX, testX, valY, testY) = train_test_split(testX, testY, test_size=0.5)
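# with the 6000 samples loaded above, test_size=0.8 keeps 1200 for training;
# the remaining 4800 are halved into 2400 validation and 2400 test samples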

# reshape for convolutions
trainX = trainX.reshape((trainX.shape[0], 1, 28, 28))
testX = testX.reshape((testX.shape[0], 1, 28, 28))
valX = valX.reshape((valX.shape[0], 1, 28, 28))

# convert class vectors to binary class matrices
trainY = np_utils.to_categorical(trainY, nb_classes)
testY = np_utils.to_categorical(testY, nb_classes)
valY = np_utils.to_categorical(valY, nb_classes)
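# e.g. the integer label 2 becomes the one-hot row [0, 0, 1, 0, 0, 0, 0, 0, 0]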

print "the shape of train set %s rows, %s columns" %(trainX.shape[0], trainX.shape[1])
print "the shape of test set %s rows, %s columns" %(testX.shape[0], testX.shape[1])
print "the shape of validation set %s rows, %s columns" %(valX.shape[0], valX.shape[1])

# in this early Keras, fit() returns the per-epoch history
# (indexed below by its 'epoch' and 'acc' entries for plotting)
mm = model.fit(trainX, trainY,
               batch_size=batch_size,
               nb_epoch=nb_epoch,
               show_accuracy=True,
               verbose=1,
               validation_data=(testX, testY))

# final evaluation on the held-out split (valX), never seen during training
score = model.evaluate(valX, valY, show_accuracy=True, verbose=0, batch_size=batch_size)
print 'Test score : %s' % score[0]
print 'Test accuracy : %s' % score[1]

df = pd.DataFrame(mm)
df.index = df['epoch']
df['acc'].plot()
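# if this Keras version also records validation accuracy under 'val_acc',
# df[['acc', 'val_acc']].plot() would chart train and validation together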


train set shape: (1200, 1, 28, 28)
test set shape: (2400, 1, 28, 28)
validation set shape: (2400, 1, 28, 28)
Train on 1200 samples, validate on 2400 samples
Epoch 0
1200/1200 [==============================] - 72s - loss: 2.1896 - acc.: 0.1258 - val. loss: 2.1806 - val. acc.: 0.1488
Epoch 1
1200/1200 [==============================] - 71s - loss: 2.1738 - acc.: 0.1475 - val. loss: 2.1772 - val. acc.: 0.1488
Epoch 2
1200/1200 [==============================] - 71s - loss: 2.1697 - acc.: 0.1475 - val. loss: 2.1757 - val. acc.: 0.1488
Epoch 3
1200/1200 [==============================] - 74s - loss: 2.1641 - acc.: 0.1583 - val. loss: 2.1598 - val. acc.: 0.2286
Epoch 4
1200/1200 [==============================] - 78s - loss: 2.0644 - acc.: 0.2592 - val. loss: 1.9367 - val. acc.: 0.2936
Epoch 5
1200/1200 [==============================] - 71s - loss: 1.7413 - acc.: 0.3492 - val. loss: 1.2673 - val. acc.: 0.5452
Epoch 6
1200/1200 [==============================] - 81s - loss: 1.5537 - acc.: 0.4483 - val. loss: 0.8332 - val. acc.: 0.7677
Epoch 7
1200/1200 [==============================] - 68s - loss: 0.7499 - acc.: 0.7708 - val. loss: 0.4864 - val. acc.: 0.9071
Epoch 8
1200/1200 [==============================] - 68s - loss: 0.5006 - acc.: 0.8600 - val. loss: 0.4167 - val. acc.: 0.9042
Epoch 9
1200/1200 [==============================] - 67s - loss: 0.4309 - acc.: 0.9017 - val. loss: 0.3850 - val. acc.: 0.9062
Epoch 10
1200/1200 [==============================] - 68s - loss: 0.3503 - acc.: 0.9167 - val. loss: 0.3895 - val. acc.: 0.9062
Epoch 11
1200/1200 [==============================] - 78s - loss: 0.3227 - acc.: 0.9275 - val. loss: 0.3675 - val. acc.: 0.9091
Epoch 12
1200/1200 [==============================] - 75s - loss: 0.3130 - acc.: 0.9333 - val. loss: 0.3726 - val. acc.: 0.9083
Epoch 13
1200/1200 [==============================] - 74s - loss: 0.3092 - acc.: 0.9250 - val. loss: 0.3778 - val. acc.: 0.9124
Epoch 14
1200/1200 [==============================] - 68s - loss: 0.2682 - acc.: 0.9342 - val. loss: 0.3851 - val. acc.: 0.9116
Epoch 15
1200/1200 [==============================] - 67s - loss: 0.2606 - acc.: 0.9408 - val. loss: 0.4001 - val. acc.: 0.9137
Epoch 16
1200/1200 [==============================] - 68s - loss: 0.3060 - acc.: 0.9308 - val. loss: 0.3595 - val. acc.: 0.9120
Epoch 17
1200/1200 [==============================] - 67s - loss: 0.2205 - acc.: 0.9425 - val. loss: 0.3831 - val. acc.: 0.9194
Epoch 18
1200/1200 [==============================] - 67s - loss: 0.2301 - acc.: 0.9417 - val. loss: 0.3744 - val. acc.: 0.9198
Epoch 19
1200/1200 [==============================] - 67s - loss: 0.1975 - acc.: 0.9467 - val. loss: 0.4686 - val. acc.: 0.9174
Epoch 20
1200/1200 [==============================] - 69s - loss: 0.2040 - acc.: 0.9408 - val. loss: 0.4215 - val. acc.: 0.9206
Epoch 21
1200/1200 [==============================] - 68s - loss: 0.2058 - acc.: 0.9475 - val. loss: 0.3931 - val. acc.: 0.9219
Epoch 22
1200/1200 [==============================] - 68s - loss: 0.1900 - acc.: 0.9425 - val. loss: 0.4485 - val. acc.: 0.9215
Epoch 23
1200/1200 [==============================] - 71s - loss: 0.1991 - acc.: 0.9533 - val. loss: 0.4151 - val. acc.: 0.9178
Epoch 24
1200/1200 [==============================] - 71s - loss: 0.1968 - acc.: 0.9525 - val. loss: 0.4220 - val. acc.: 0.9186
Epoch 25
1200/1200 [==============================] - 72s - loss: 0.1896 - acc.: 0.9492 - val. loss: 0.4519 - val. acc.: 0.9182
Epoch 26
1200/1200 [==============================] - 67s - loss: 0.1814 - acc.: 0.9533 - val. loss: 0.4669 - val. acc.: 0.9186
Epoch 27
1200/1200 [==============================] - 80s - loss: 0.1718 - acc.: 0.9583 - val. loss: 0.4472 - val. acc.: 0.9211
Epoch 28
1200/1200 [==============================] - 77s - loss: 0.1612 - acc.: 0.9483 - val. loss: 0.4569 - val. acc.: 0.9215
Epoch 29
1200/1200 [==============================] - 70s - loss: 0.1822 - acc.: 0.9508 - val. loss: 0.4475 - val. acc.: 0.9161
Epoch 30
1200/1200 [==============================] - 68s - loss: 0.1841 - acc.: 0.9542 - val. loss: 0.4402 - val. acc.: 0.9174
Epoch 31
1200/1200 [==============================] - 68s - loss: 0.1702 - acc.: 0.9592 - val. loss: 0.4706 - val. acc.: 0.9161
Epoch 32
1200/1200 [==============================] - 69s - loss: 0.1705 - acc.: 0.9533 - val. loss: 0.4992 - val. acc.: 0.9194
Epoch 33
1200/1200 [==============================] - 68s - loss: 0.1651 - acc.: 0.9583 - val. loss: 0.4516 - val. acc.: 0.9198
Epoch 34
1200/1200 [==============================] - 78s - loss: 0.1565 - acc.: 0.9583 - val. loss: 0.4355 - val. acc.: 0.9211
Epoch 35
1200/1200 [==============================] - 77s - loss: 0.1526 - acc.: 0.9583 - val. loss: 0.4748 - val. acc.: 0.9194
Epoch 36
1200/1200 [==============================] - 71s - loss: 0.1688 - acc.: 0.9583 - val. loss: 0.4790 - val. acc.: 0.9178
Epoch 37
1200/1200 [==============================] - 73s - loss: 0.1490 - acc.: 0.9625 - val. loss: 0.5161 - val. acc.: 0.9202
Epoch 38
1200/1200 [==============================] - 72s - loss: 0.1327 - acc.: 0.9642 - val. loss: 0.5444 - val. acc.: 0.9190
Epoch 39
1200/1200 [==============================] - 71s - loss: 0.1641 - acc.: 0.9575 - val. loss: 0.4800 - val. acc.: 0.9190
Epoch 40
1200/1200 [==============================] - 67s - loss: 0.1469 - acc.: 0.9625 - val. loss: 0.5221 - val. acc.: 0.9202
Epoch 41
1200/1200 [==============================] - 71s - loss: 0.1537 - acc.: 0.9583 - val. loss: 0.5261 - val. acc.: 0.9174
Epoch 42
1200/1200 [==============================] - 70s - loss: 0.1440 - acc.: 0.9525 - val. loss: 0.4796 - val. acc.: 0.9169
Epoch 43
1200/1200 [==============================] - 67s - loss: 0.1317 - acc.: 0.9658 - val. loss: 0.5776 - val. acc.: 0.9186
Epoch 44
1200/1200 [==============================] - 67s - loss: 0.1298 - acc.: 0.9617 - val. loss: 0.5543 - val. acc.: 0.9194
Epoch 45
1200/1200 [==============================] - 67s - loss: 0.1433 - acc.: 0.9575 - val. loss: 0.5781 - val. acc.: 0.9186
Epoch 46
1200/1200 [==============================] - 67s - loss: 0.1417 - acc.: 0.9617 - val. loss: 0.5296 - val. acc.: 0.9194
Epoch 47
1200/1200 [==============================] - 73s - loss: 0.1298 - acc.: 0.9650 - val. loss: 0.5829 - val. acc.: 0.9206
Epoch 48
1200/1200 [==============================] - 70s - loss: 0.1289 - acc.: 0.9633 - val. loss: 0.5037 - val. acc.: 0.9215
Epoch 49
1200/1200 [==============================] - 74s - loss: 0.1508 - acc.: 0.9600 - val. loss: 0.4608 - val. acc.: 0.9215
Test score : 0.427060957735
Test accuracy : 0.932565789474
Out[4]:
<matplotlib.axes._subplots.AxesSubplot at 0x10f5464d0>

In [5]:
# Create the model: 3 convolutional layers followed by
# 2 fully connected layers and a softmax output layer

# Sequential wrapper model
model = Sequential()

# first convolutional layer
model.add(Convolution2D(32, 1, 2, 2))
model.add(Activation('relu'))

# second convolutional layer
model.add(Convolution2D(48, 32, 2, 2))
model.add(Activation('relu')) 
model.add(MaxPooling2D(poolsize=(2,2)))

# third convolutional layer
model.add(Convolution2D(32, 48, 2, 2))
model.add(Activation('relu'))
model.add(MaxPooling2D(poolsize=(2,2)))

# flatten the convolutional feature maps so they can be fed to
# the fully connected layers
model.add(Flatten())

# first fully connected layer
model.add(Dense(32*6*6, 128, init='lecun_uniform'))
model.add(Activation('relu'))
model.add(Dropout(0.5))

# second fully connected layer
model.add(Dense(128, 128, init='lecun_uniform'))
model.add(Activation('relu'))
model.add(Dropout(0.5))

# final fully connected layer, which outputs the class scores
model.add(Dense(128, 9, init='lecun_uniform'))
model.add(Activation('softmax'))

# setting sgd optimizer parameters
sgd = SGD(lr=0.05, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd)

features = joblib.load("./mldata/features_1200.mat")
labels = joblib.load("./mldata/lables_1200.mat")

features = np.array(features, 'int16')
labels = np.array(labels, 'int')

def scale(X, eps=0.001):
    # scale the data points s.t. the columns of the feature space
    # (i.e. the predictors) are within the range [0, 1]
    return (X - np.min(X, axis=0)) / (np.max(X, axis=0) + eps)

features = features.astype("float32")
features = scale(features)

# construct the train / validation / test splits
# (the features were already scaled to [0, 1] above)
(trainX, testX, trainY, testY) = train_test_split(features, labels, test_size=0.7)
(valX, testX, valY, testY) = train_test_split(testX, testY, test_size=0.5)

# reshape for convolutions
trainX = trainX.reshape((trainX.shape[0], 1, 28, 28))
testX = testX.reshape((testX.shape[0], 1, 28, 28))
valX = valX.reshape((valX.shape[0], 1, 28, 28))

# convert class vectors to binary class matrices
trainY = np_utils.to_categorical(trainY, nb_classes)
testY = np_utils.to_categorical(testY, nb_classes)
valY = np_utils.to_categorical(valY, nb_classes)

print "the shape of train set %s rows, %s columns" %(trainX.shape[0], trainX.shape[1])
print "the shape of test set %s rows, %s columns" %(testX.shape[0], testX.shape[1])
print "the shape of validation set %s rows, %s columns" %(valX.shape[0], valX.shape[1])

mm = model.fit(trainX, trainY,
               batch_size=batch_size,
               nb_epoch=nb_epoch,
               show_accuracy=True,
               verbose=1,
               validation_data=(testX, testY))

score = model.evaluate(valX, valY, show_accuracy=True, verbose=0, batch_size=batch_size)
print 'Test score : %s' %score[0]
print 'Test accuracy : %s' %score[1]

df = pd.DataFrame(mm)
df.index = df['epoch']
df['acc'].plot()


train set shape: (1800, 1, 28, 28)
test set shape: (2100, 1, 28, 28)
validation set shape: (2100, 1, 28, 28)
Train on 1800 samples, validate on 2100 samples
Epoch 0
1800/1800 [==============================] - 88s - loss: 2.1884 - acc.: 0.1483 - val. loss: 2.1703 - val. acc.: 0.1552
Epoch 1
1800/1800 [==============================] - 89s - loss: 2.1760 - acc.: 0.1522 - val. loss: 2.1655 - val. acc.: 0.1552
Epoch 2
1800/1800 [==============================] - 98s - loss: 2.1627 - acc.: 0.1578 - val. loss: 2.0856 - val. acc.: 0.2747
Epoch 3
1800/1800 [==============================] - 89s - loss: 2.1742 - acc.: 0.2100 - val. loss: 2.1699 - val. acc.: 0.1552
Epoch 4
1800/1800 [==============================] - 92s - loss: 2.1578 - acc.: 0.1617 - val. loss: 2.0579 - val. acc.: 0.2602
Epoch 5
1800/1800 [==============================] - 88s - loss: 1.3818 - acc.: 0.5406 - val. loss: 0.5892 - val. acc.: 0.8086
Epoch 6
1800/1800 [==============================] - 91s - loss: 0.6009 - acc.: 0.8444 - val. loss: 0.3554 - val. acc.: 0.9105
Epoch 7
1800/1800 [==============================] - 94s - loss: 0.4050 - acc.: 0.9061 - val. loss: 0.3316 - val. acc.: 0.9129
Epoch 8
1800/1800 [==============================] - 91s - loss: 0.3689 - acc.: 0.9089 - val. loss: 0.3231 - val. acc.: 0.9193
Epoch 9
1800/1800 [==============================] - 91s - loss: 0.3952 - acc.: 0.8978 - val. loss: 0.3786 - val. acc.: 0.9159
Epoch 10
1800/1800 [==============================] - 93s - loss: 0.3258 - acc.: 0.9133 - val. loss: 0.3084 - val. acc.: 0.9220
Epoch 11
1800/1800 [==============================] - 87s - loss: 0.3158 - acc.: 0.9278 - val. loss: 0.3321 - val. acc.: 0.9244
Epoch 12
1800/1800 [==============================] - 84s - loss: 0.2762 - acc.: 0.9350 - val. loss: 0.3153 - val. acc.: 0.9249
Epoch 13
1800/1800 [==============================] - 85s - loss: 0.2595 - acc.: 0.9367 - val. loss: 0.3814 - val. acc.: 0.9249
Epoch 14
1800/1800 [==============================] - 87s - loss: 0.2643 - acc.: 0.9344 - val. loss: 0.3453 - val. acc.: 0.9277
Epoch 15
1800/1800 [==============================] - 87s - loss: 0.2494 - acc.: 0.9372 - val. loss: 0.3345 - val. acc.: 0.9258
Epoch 16
1800/1800 [==============================] - 91s - loss: 0.2318 - acc.: 0.9444 - val. loss: 0.3561 - val. acc.: 0.9282
Epoch 17
1800/1800 [==============================] - 88s - loss: 0.2450 - acc.: 0.9428 - val. loss: 0.3398 - val. acc.: 0.9277
Epoch 18
1800/1800 [==============================] - 89s - loss: 0.2250 - acc.: 0.9383 - val. loss: 0.3524 - val. acc.: 0.9291
Epoch 19
1800/1800 [==============================] - 89s - loss: 0.2365 - acc.: 0.9417 - val. loss: 0.4050 - val. acc.: 0.9291
Epoch 20
1800/1800 [==============================] - 104s - loss: 0.2188 - acc.: 0.9450 - val. loss: 0.3918 - val. acc.: 0.9301
Epoch 21
1800/1800 [==============================] - 88s - loss: 0.2107 - acc.: 0.9478 - val. loss: 0.3902 - val. acc.: 0.9291
Epoch 22
1800/1800 [==============================] - 84s - loss: 0.2181 - acc.: 0.9467 - val. loss: 0.3876 - val. acc.: 0.9301
Epoch 23
1800/1800 [==============================] - 84s - loss: 0.2174 - acc.: 0.9433 - val. loss: 0.3894 - val. acc.: 0.9296
Epoch 24
1800/1800 [==============================] - 84s - loss: 0.2052 - acc.: 0.9494 - val. loss: 0.3952 - val. acc.: 0.9286
Epoch 25
1800/1800 [==============================] - 84s - loss: 0.2086 - acc.: 0.9467 - val. loss: 0.4171 - val. acc.: 0.9305
Epoch 26
1800/1800 [==============================] - 84s - loss: 0.1856 - acc.: 0.9522 - val. loss: 0.4361 - val. acc.: 0.9296
Epoch 27
1800/1800 [==============================] - 85s - loss: 0.1797 - acc.: 0.9544 - val. loss: 0.3969 - val. acc.: 0.9315
Epoch 28
1800/1800 [==============================] - 100s - loss: 0.1837 - acc.: 0.9572 - val. loss: 0.4184 - val. acc.: 0.9291
Epoch 29
1800/1800 [==============================] - 84s - loss: 0.1822 - acc.: 0.9550 - val. loss: 0.4622 - val. acc.: 0.9286
Epoch 30
1800/1800 [==============================] - 90s - loss: 0.1839 - acc.: 0.9539 - val. loss: 0.4224 - val. acc.: 0.9305
Epoch 31
1800/1800 [==============================] - 86s - loss: 0.1705 - acc.: 0.9544 - val. loss: 0.4770 - val. acc.: 0.9301
Epoch 32
1800/1800 [==============================] - 86s - loss: 0.1652 - acc.: 0.9589 - val. loss: 0.4434 - val. acc.: 0.9296
Epoch 33
1800/1800 [==============================] - 85s - loss: 0.1770 - acc.: 0.9528 - val. loss: 0.4364 - val. acc.: 0.9315
Epoch 34
1800/1800 [==============================] - 84s - loss: 0.1699 - acc.: 0.9528 - val. loss: 0.4461 - val. acc.: 0.9295
Epoch 35
1800/1800 [==============================] - 85s - loss: 0.1510 - acc.: 0.9550 - val. loss: 0.4605 - val. acc.: 0.9291
Epoch 36
1800/1800 [==============================] - 89s - loss: 0.1610 - acc.: 0.9556 - val. loss: 0.4970 - val. acc.: 0.9290
Epoch 37
1800/1800 [==============================] - 86s - loss: 0.1603 - acc.: 0.9589 - val. loss: 0.4545 - val. acc.: 0.9291
Epoch 38
1800/1800 [==============================] - 84s - loss: 0.1547 - acc.: 0.9583 - val. loss: 0.5162 - val. acc.: 0.9291
Epoch 39
1800/1800 [==============================] - 85s - loss: 0.1487 - acc.: 0.9606 - val. loss: 0.5485 - val. acc.: 0.9282
Epoch 40
1800/1800 [==============================] - 93s - loss: 0.1483 - acc.: 0.9606 - val. loss: 0.5049 - val. acc.: 0.9296
Epoch 41
1800/1800 [==============================] - 96s - loss: 0.1561 - acc.: 0.9572 - val. loss: 0.5051 - val. acc.: 0.9286
Epoch 42
1800/1800 [==============================] - 98s - loss: 0.1492 - acc.: 0.9561 - val. loss: 0.5319 - val. acc.: 0.9285
Epoch 43
1800/1800 [==============================] - 86s - loss: 0.1538 - acc.: 0.9572 - val. loss: 0.5404 - val. acc.: 0.9281
Epoch 44
1800/1800 [==============================] - 99s - loss: 0.1475 - acc.: 0.9600 - val. loss: 0.5073 - val. acc.: 0.9277
Epoch 45
1800/1800 [==============================] - 90s - loss: 0.1505 - acc.: 0.9578 - val. loss: 0.5494 - val. acc.: 0.9301
Epoch 46
1800/1800 [==============================] - 85s - loss: 0.1447 - acc.: 0.9650 - val. loss: 0.5792 - val. acc.: 0.9315
Epoch 47
1800/1800 [==============================] - 86s - loss: 0.1448 - acc.: 0.9622 - val. loss: 0.4937 - val. acc.: 0.9291
Epoch 48
1800/1800 [==============================] - 94s - loss: 0.1450 - acc.: 0.9600 - val. loss: 0.5578 - val. acc.: 0.9296
Epoch 49
1800/1800 [==============================] - 85s - loss: 0.1415 - acc.: 0.9622 - val. loss: 0.5306 - val. acc.: 0.9277
Test score : 0.497851541662
Test accuracy : 0.926755536131
Out[5]:
<matplotlib.axes._subplots.AxesSubplot at 0x10e14e390>

In [6]:
# Create the model: 3 convolutional layers followed by
# 2 fully connected layers and a softmax output layer

# Sequential wrapper model
model = Sequential()

# first convolutional layer
model.add(Convolution2D(32, 1, 2, 2))
model.add(Activation('relu'))

# second convolutional layer
model.add(Convolution2D(48, 32, 2, 2))
model.add(Activation('relu')) 
model.add(MaxPooling2D(poolsize=(2,2)))

# third convolutional layer
model.add(Convolution2D(32, 48, 2, 2))
model.add(Activation('relu'))
model.add(MaxPooling2D(poolsize=(2,2)))

# flatten the convolutional feature maps so they can be fed to
# the fully connected layers
model.add(Flatten())

# first fully connected layer
model.add(Dense(32*6*6, 128, init='lecun_uniform'))
model.add(Activation('relu'))
model.add(Dropout(0.5))

# second fully connected layer
model.add(Dense(128, 128, init='lecun_uniform'))
model.add(Activation('relu'))
model.add(Dropout(0.5))

# final fully connected layer, which outputs the class scores
model.add(Dense(128, 9, init='lecun_uniform'))
model.add(Activation('softmax'))

# setting sgd optimizer parameters
sgd = SGD(lr=0.05, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd)

features = joblib.load("./mldata/features_1200.mat")
labels = joblib.load("./mldata/lables_1200.mat")

features = np.array(features, 'int16')
labels = np.array(labels, 'int')

def scale(X, eps=0.001):
    # scale the data points s.t. the columns of the feature space
    # (i.e. the predictors) are within the range [0, 1]
    return (X - np.min(X, axis=0)) / (np.max(X, axis=0) + eps)

features = features.astype("float32")
features = scale(features)

# construct the train / validation / test splits
# (the features were already scaled to [0, 1] above)
(trainX, testX, trainY, testY) = train_test_split(features, labels, test_size=0.5)
(valX, testX, valY, testY) = train_test_split(testX, testY, test_size=0.5)

# reshape for convolutions
trainX = trainX.reshape((trainX.shape[0], 1, 28, 28))
testX = testX.reshape((testX.shape[0], 1, 28, 28))
valX = valX.reshape((valX.shape[0], 1, 28, 28))

# convert class vectors to binary class matrices
trainY = np_utils.to_categorical(trainY, nb_classes)
testY = np_utils.to_categorical(testY, nb_classes)
valY = np_utils.to_categorical(valY, nb_classes)

print "the shape of train set %s rows, %s columns" %(trainX.shape[0], trainX.shape[1])
print "the shape of test set %s rows, %s columns" %(testX.shape[0], testX.shape[1])
print "the shape of validation set %s rows, %s columns" %(valX.shape[0], valX.shape[1])

mm = model.fit(trainX, trainY,
               batch_size=batch_size,
               nb_epoch=nb_epoch,
               show_accuracy=True,
               verbose=1,
               validation_data=(testX, testY))

score = model.evaluate(valX, valY, show_accuracy=True, verbose=0, batch_size=batch_size)
print 'Test score : %s' %score[0]
print 'Test accuracy : %s' %score[1]

df = pd.DataFrame(mm)
df.index = df['epoch']
df['acc'].plot()


train set shape: (3000, 1, 28, 28)
test set shape: (1500, 1, 28, 28)
validation set shape: (1500, 1, 28, 28)
Train on 3000 samples, validate on 1500 samples
Epoch 0
3000/3000 [==============================] - 151s - loss: 2.1777 - acc.: 0.1540 - val. loss: 2.1739 - val. acc.: 0.1425
Epoch 1
3000/3000 [==============================] - 145s - loss: 1.9790 - acc.: 0.2783 - val. loss: 1.6423 - val. acc.: 0.3968
Epoch 2
3000/3000 [==============================] - 162s - loss: 1.2428 - acc.: 0.5650 - val. loss: 0.5846 - val. acc.: 0.8588
Epoch 3
3000/3000 [==============================] - 163s - loss: 0.6615 - acc.: 0.8010 - val. loss: 0.3414 - val. acc.: 0.9165
Epoch 4
3000/3000 [==============================] - 156s - loss: 0.4018 - acc.: 0.9000 - val. loss: 0.3038 - val. acc.: 0.9217
Epoch 5
3000/3000 [==============================] - 133s - loss: 0.3377 - acc.: 0.9150 - val. loss: 0.3096 - val. acc.: 0.9310
Epoch 6
3000/3000 [==============================] - 123s - loss: 0.3107 - acc.: 0.9197 - val. loss: 0.2853 - val. acc.: 0.9321
Epoch 7
3000/3000 [==============================] - 121s - loss: 0.3023 - acc.: 0.9273 - val. loss: 0.2841 - val. acc.: 0.9290
Epoch 8
3000/3000 [==============================] - 120s - loss: 0.2679 - acc.: 0.9323 - val. loss: 0.3051 - val. acc.: 0.9329
Epoch 9
3000/3000 [==============================] - 121s - loss: 0.2648 - acc.: 0.9303 - val. loss: 0.2770 - val. acc.: 0.9315
Epoch 10
3000/3000 [==============================] - 120s - loss: 0.2424 - acc.: 0.9370 - val. loss: 0.2984 - val. acc.: 0.9310
Epoch 11
3000/3000 [==============================] - 121s - loss: 0.2314 - acc.: 0.9410 - val. loss: 0.3454 - val. acc.: 0.9295
Epoch 12
3000/3000 [==============================] - 120s - loss: 0.2342 - acc.: 0.9390 - val. loss: 0.2826 - val. acc.: 0.9321
Epoch 13
3000/3000 [==============================] - 121s - loss: 0.2237 - acc.: 0.9383 - val. loss: 0.2931 - val. acc.: 0.9316
Epoch 14
3000/3000 [==============================] - 120s - loss: 0.2222 - acc.: 0.9407 - val. loss: 0.3144 - val. acc.: 0.9302
Epoch 15
3000/3000 [==============================] - 128s - loss: 0.2069 - acc.: 0.9477 - val. loss: 0.3044 - val. acc.: 0.9321
Epoch 16
3000/3000 [==============================] - 164s - loss: 0.2190 - acc.: 0.9377 - val. loss: 0.3042 - val. acc.: 0.9336
Epoch 17
3000/3000 [==============================] - 146s - loss: 0.1964 - acc.: 0.9480 - val. loss: 0.3099 - val. acc.: 0.9315
Epoch 18
3000/3000 [==============================] - 177s - loss: 0.2009 - acc.: 0.9497 - val. loss: 0.3216 - val. acc.: 0.9310
Epoch 19
3000/3000 [==============================] - 130s - loss: 0.2063 - acc.: 0.9447 - val. loss: 0.3315 - val. acc.: 0.9336
Epoch 20
3000/3000 [==============================] - 151s - loss: 0.1948 - acc.: 0.9460 - val. loss: 0.3138 - val. acc.: 0.9328
Epoch 21
3000/3000 [==============================] - 161s - loss: 0.2021 - acc.: 0.9450 - val. loss: 0.3089 - val. acc.: 0.9302
Epoch 22
3000/3000 [==============================] - 133s - loss: 0.1855 - acc.: 0.9480 - val. loss: 0.3381 - val. acc.: 0.9334
Epoch 23
3000/3000 [==============================] - 122s - loss: 0.1877 - acc.: 0.9527 - val. loss: 0.3509 - val. acc.: 0.9336
Epoch 24
3000/3000 [==============================] - 122s - loss: 0.1773 - acc.: 0.9510 - val. loss: 0.3453 - val. acc.: 0.9336
Epoch 25
3000/3000 [==============================] - 121s - loss: 0.1712 - acc.: 0.9600 - val. loss: 0.2916 - val. acc.: 0.9321
Epoch 26
3000/3000 [==============================] - 122s - loss: 0.1904 - acc.: 0.9493 - val. loss: 0.3135 - val. acc.: 0.9336
Epoch 27
3000/3000 [==============================] - 122s - loss: 0.1863 - acc.: 0.9493 - val. loss: 0.3011 - val. acc.: 0.9321
Epoch 28
3000/3000 [==============================] - 146s - loss: 0.1846 - acc.: 0.9530 - val. loss: 0.3346 - val. acc.: 0.9328
Epoch 29
3000/3000 [==============================] - 151s - loss: 0.1608 - acc.: 0.9533 - val. loss: 0.3427 - val. acc.: 0.9323
Epoch 30
3000/3000 [==============================] - 145s - loss: 0.1693 - acc.: 0.9520 - val. loss: 0.3296 - val. acc.: 0.9341
Epoch 31
3000/3000 [==============================] - 146s - loss: 0.1636 - acc.: 0.9563 - val. loss: 0.3606 - val. acc.: 0.9347
Epoch 32
3000/3000 [==============================] - 142s - loss: 0.1577 - acc.: 0.9543 - val. loss: 0.3829 - val. acc.: 0.9341
Epoch 33
3000/3000 [==============================] - 124s - loss: 0.1534 - acc.: 0.9563 - val. loss: 0.3159 - val. acc.: 0.9302
Epoch 34
3000/3000 [==============================] - 123s - loss: 0.1529 - acc.: 0.9577 - val. loss: 0.3670 - val. acc.: 0.9321
Epoch 35
3000/3000 [==============================] - 121s - loss: 0.1517 - acc.: 0.9573 - val. loss: 0.3411 - val. acc.: 0.9334
Epoch 36
3000/3000 [==============================] - 130s - loss: 0.1513 - acc.: 0.9533 - val. loss: 0.4086 - val. acc.: 0.9321
Epoch 37
3000/3000 [==============================] - 139s - loss: 0.1611 - acc.: 0.9567 - val. loss: 0.3624 - val. acc.: 0.9328
Epoch 38
3000/3000 [==============================] - 141s - loss: 0.1481 - acc.: 0.9583 - val. loss: 0.3773 - val. acc.: 0.9347
Epoch 39
3000/3000 [==============================] - 161s - loss: 0.1457 - acc.: 0.9597 - val. loss: 0.3819 - val. acc.: 0.9347
Epoch 40
3000/3000 [==============================] - 138s - loss: 0.1258 - acc.: 0.9610 - val. loss: 0.4029 - val. acc.: 0.9341
Epoch 41
3000/3000 [==============================] - 128s - loss: 0.1381 - acc.: 0.9580 - val. loss: 0.3737 - val. acc.: 0.9315
Epoch 42
3000/3000 [==============================] - 130s - loss: 0.1404 - acc.: 0.9607 - val. loss: 0.4252 - val. acc.: 0.9303
Epoch 43
3000/3000 [==============================] - 132s - loss: 0.1395 - acc.: 0.9620 - val. loss: 0.3654 - val. acc.: 0.9321
Epoch 44
3000/3000 [==============================] - 145s - loss: 0.1344 - acc.: 0.9577 - val. loss: 0.3967 - val. acc.: 0.9289
Epoch 45
3000/3000 [==============================] - 154s - loss: 0.1354 - acc.: 0.9620 - val. loss: 0.4238 - val. acc.: 0.9295
Epoch 46
3000/3000 [==============================] - 148s - loss: 0.1342 - acc.: 0.9623 - val. loss: 0.3823 - val. acc.: 0.9302
Epoch 47
3000/3000 [==============================] - 129s - loss: 0.1484 - acc.: 0.9567 - val. loss: 0.4135 - val. acc.: 0.9321
Epoch 48
3000/3000 [==============================] - 132s - loss: 0.1379 - acc.: 0.9587 - val. loss: 0.4222 - val. acc.: 0.9334
Epoch 49
3000/3000 [==============================] - 133s - loss: 0.1434 - acc.: 0.9580 - val. loss: 0.3295 - val. acc.: 0.9321
Test score : 0.382670487636
Test accuracy : 0.936662946429
Out[6]:
<matplotlib.axes._subplots.AxesSubplot at 0x111a94e50>

In [7]:
# Create the model: 3 convolutional layers followed by
# 2 fully connected layers and a softmax output layer

# Sequential wrapper model
model = Sequential()

# first convolutional layer
model.add(Convolution2D(32, 1, 2, 2))
model.add(Activation('relu'))

# second convolutional layer
model.add(Convolution2D(48, 32, 2, 2))
model.add(Activation('relu')) 
model.add(MaxPooling2D(poolsize=(2,2)))

# third convolutional layer
model.add(Convolution2D(32, 48, 2, 2))
model.add(Activation('relu'))
model.add(MaxPooling2D(poolsize=(2,2)))

# flatten the convolutional feature maps so they can be fed to
# the fully connected layers
model.add(Flatten())

# first fully connected layer
model.add(Dense(32*6*6, 128, init='lecun_uniform'))
model.add(Activation('relu'))
model.add(Dropout(0.5))

# second fully connected layer
model.add(Dense(128, 128, init='lecun_uniform'))
model.add(Activation('relu'))
model.add(Dropout(0.5))

# final fully connected layer, which outputs the class scores
model.add(Dense(128, 9, init='lecun_uniform'))
model.add(Activation('softmax'))

# setting sgd optimizer parameters
sgd = SGD(lr=0.05, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd)

features = joblib.load("./mldata/features_1200.mat")
labels = joblib.load("./mldata/lables_1200.mat")

features = np.array(features, 'int16')
labels = np.array(labels, 'int')

def scale(X, eps=0.001):
    # scale the data points s.t. the columns of the feature space
    # (i.e. the predictors) are within the range [0, 1]
    return (X - np.min(X, axis=0)) / (np.max(X, axis=0) + eps)

features = features.astype("float32")
features = scale(features)

# construct the train / validation / test splits
# (the features were already scaled to [0, 1] above)
(trainX, testX, trainY, testY) = train_test_split(features, labels, test_size=0.43)
(valX, testX, valY, testY) = train_test_split(testX, testY, test_size=0.5)

# reshape for convolutions
trainX = trainX.reshape((trainX.shape[0], 1, 28, 28))
testX = testX.reshape((testX.shape[0], 1, 28, 28))
valX = valX.reshape((valX.shape[0], 1, 28, 28))

# convert class vectors to binary class matrices
trainY = np_utils.to_categorical(trainY, nb_classes)
testY = np_utils.to_categorical(testY, nb_classes)
valY = np_utils.to_categorical(valY, nb_classes)

print "the shape of train set %s rows, %s columns" %(trainX.shape[0], trainX.shape[1])
print "the shape of test set %s rows, %s columns" %(testX.shape[0], testX.shape[1])
print "the shape of validation set %s rows, %s columns" %(valX.shape[0], valX.shape[1])

mm = model.fit(trainX, trainY,
               batch_size=batch_size,
               nb_epoch=nb_epoch,
               show_accuracy=True,
               verbose=1,
               validation_data=(testX, testY))

score = model.evaluate(valX, valY, show_accuracy=True, verbose=0, batch_size=batch_size)
print 'Test score : %s' %score[0]
print 'Test accuracy : %s' %score[1]

df = pd.DataFrame(mm)
df.index = df['epoch']
df['acc'].plot()


train set shape: (3420, 1, 28, 28)
test set shape: (1290, 1, 28, 28)
validation set shape: (1290, 1, 28, 28)
Train on 3420 samples, validate on 1290 samples
Epoch 0
3420/3420 [==============================] - 150s - loss: 2.1782 - acc.: 0.1550 - val. loss: 2.1755 - val. acc.: 0.1499
Epoch 1
3420/3420 [==============================] - 150s - loss: 2.1425 - acc.: 0.2096 - val. loss: 1.9116 - val. acc.: 0.3308
Epoch 2
3420/3420 [==============================] - 157s - loss: 1.5503 - acc.: 0.4126 - val. loss: 0.8621 - val. acc.: 0.8539
Epoch 3
3420/3420 [==============================] - 141s - loss: 1.0735 - acc.: 0.6120 - val. loss: 0.4641 - val. acc.: 0.8784
Epoch 4
3420/3420 [==============================] - 140s - loss: 0.7830 - acc.: 0.7535 - val. loss: 0.3412 - val. acc.: 0.9219
Epoch 5
3420/3420 [==============================] - 138s - loss: 0.5168 - acc.: 0.8579 - val. loss: 0.2784 - val. acc.: 0.9308
Epoch 6
3420/3420 [==============================] - 140s - loss: 0.4313 - acc.: 0.8898 - val. loss: 0.2965 - val. acc.: 0.9360
Epoch 7
3420/3420 [==============================] - 139s - loss: 0.3774 - acc.: 0.9076 - val. loss: 0.2660 - val. acc.: 0.9330
Epoch 8
3420/3420 [==============================] - 142s - loss: 0.3598 - acc.: 0.9137 - val. loss: 0.2809 - val. acc.: 0.9353
Epoch 9
3420/3420 [==============================] - 141s - loss: 0.3327 - acc.: 0.9132 - val. loss: 0.2513 - val. acc.: 0.9382
Epoch 10
3420/3420 [==============================] - 141s - loss: 0.3320 - acc.: 0.9187 - val. loss: 0.2783 - val. acc.: 0.9353
Epoch 11
3420/3420 [==============================] - 143s - loss: 0.3146 - acc.: 0.9216 - val. loss: 0.2618 - val. acc.: 0.9353
Epoch 12
3420/3420 [==============================] - 136s - loss: 0.2903 - acc.: 0.9243 - val. loss: 0.2577 - val. acc.: 0.9368
Epoch 13
3420/3420 [==============================] - 141s - loss: 0.2878 - acc.: 0.9266 - val. loss: 0.2640 - val. acc.: 0.9368
Epoch 14
3420/3420 [==============================] - 135s - loss: 0.2742 - acc.: 0.9269 - val. loss: 0.2599 - val. acc.: 0.9382
Epoch 15
3420/3420 [==============================] - 133s - loss: 0.2662 - acc.: 0.9342 - val. loss: 0.2385 - val. acc.: 0.9390
Epoch 16
3420/3420 [==============================] - 133s - loss: 0.2637 - acc.: 0.9322 - val. loss: 0.2520 - val. acc.: 0.9382
Epoch 17
3420/3420 [==============================] - 134s - loss: 0.2471 - acc.: 0.9327 - val. loss: 0.2426 - val. acc.: 0.9353
Epoch 18
3420/3420 [==============================] - 133s - loss: 0.2595 - acc.: 0.9354 - val. loss: 0.2664 - val. acc.: 0.9382
Epoch 19
3420/3420 [==============================] - 133s - loss: 0.2686 - acc.: 0.9351 - val. loss: 0.2450 - val. acc.: 0.9368
Epoch 20
3420/3420 [==============================] - 133s - loss: 0.2448 - acc.: 0.9365 - val. loss: 0.2618 - val. acc.: 0.9382
Epoch 21
3420/3420 [==============================] - 133s - loss: 0.2398 - acc.: 0.9395 - val. loss: 0.2579 - val. acc.: 0.9345
Epoch 22
3420/3420 [==============================] - 133s - loss: 0.2423 - acc.: 0.9374 - val. loss: 0.2524 - val. acc.: 0.9390
Epoch 23
3420/3420 [==============================] - 133s - loss: 0.2265 - acc.: 0.9406 - val. loss: 0.2531 - val. acc.: 0.9405
Epoch 24
3420/3420 [==============================] - 133s - loss: 0.2263 - acc.: 0.9383 - val. loss: 0.2432 - val. acc.: 0.9412
Epoch 25
3420/3420 [==============================] - 134s - loss: 0.2133 - acc.: 0.9442 - val. loss: 0.2497 - val. acc.: 0.9405
Epoch 26
3420/3420 [==============================] - 133s - loss: 0.2231 - acc.: 0.9465 - val. loss: 0.2723 - val. acc.: 0.9420
Epoch 27
3420/3420 [==============================] - 133s - loss: 0.2196 - acc.: 0.9406 - val. loss: 0.2710 - val. acc.: 0.9382
Epoch 28
3420/3420 [==============================] - 133s - loss: 0.2246 - acc.: 0.9450 - val. loss: 0.2460 - val. acc.: 0.9397
Epoch 29
3420/3420 [==============================] - 133s - loss: 0.2114 - acc.: 0.9421 - val. loss: 0.2703 - val. acc.: 0.9420
Epoch 30
3420/3420 [==============================] - 133s - loss: 0.2280 - acc.: 0.9409 - val. loss: 0.2451 - val. acc.: 0.9420
Epoch 31
3420/3420 [==============================] - 153s - loss: 0.2029 - acc.: 0.9480 - val. loss: 0.2831 - val. acc.: 0.9435
Epoch 32
3420/3420 [==============================] - 146s - loss: 0.2100 - acc.: 0.9447 - val. loss: 0.2552 - val. acc.: 0.9420
Epoch 33
3420/3420 [==============================] - 147s - loss: 0.1936 - acc.: 0.9488 - val. loss: 0.2612 - val. acc.: 0.9420
Epoch 34
3420/3420 [==============================] - 142s - loss: 0.2002 - acc.: 0.9459 - val. loss: 0.2669 - val. acc.: 0.9420
Epoch 35
3420/3420 [==============================] - 163s - loss: 0.1915 - acc.: 0.9459 - val. loss: 0.2410 - val. acc.: 0.9420
Epoch 36
3420/3420 [==============================] - 219s - loss: 0.2021 - acc.: 0.9468 - val. loss: 0.2470 - val. acc.: 0.9420
Epoch 37
3420/3420 [==============================] - 145s - loss: 0.1762 - acc.: 0.9512 - val. loss: 0.3059 - val. acc.: 0.9435
Epoch 38
3420/3420 [==============================] - 139s - loss: 0.1892 - acc.: 0.9482 - val. loss: 0.2590 - val. acc.: 0.9442
Epoch 39
3420/3420 [==============================] - 141s - loss: 0.1829 - acc.: 0.9515 - val. loss: 0.2465 - val. acc.: 0.9449
Epoch 40
3420/3420 [==============================] - 144s - loss: 0.1817 - acc.: 0.9515 - val. loss: 0.3160 - val. acc.: 0.9420
Epoch 41
3420/3420 [==============================] - 179s - loss: 0.1803 - acc.: 0.9494 - val. loss: 0.2649 - val. acc.: 0.9427
Epoch 42
3420/3420 [==============================] - 146s - loss: 0.1871 - acc.: 0.9497 - val. loss: 0.2528 - val. acc.: 0.9420
Epoch 43
3420/3420 [==============================] - 151s - loss: 0.1774 - acc.: 0.9515 - val. loss: 0.3107 - val. acc.: 0.9420
Epoch 44
3420/3420 [==============================] - 140s - loss: 0.1651 - acc.: 0.9573 - val. loss: 0.2822 - val. acc.: 0.9435
Epoch 45
3420/3420 [==============================] - 188s - loss: 0.1611 - acc.: 0.9526 - val. loss: 0.3485 - val. acc.: 0.9449
Epoch 46
3420/3420 [==============================] - 144s - loss: 0.1910 - acc.: 0.9503 - val. loss: 0.2977 - val. acc.: 0.9435
Epoch 47
3420/3420 [==============================] - 145s - loss: 0.1693 - acc.: 0.9535 - val. loss: 0.2720 - val. acc.: 0.9412
Epoch 48
3420/3420 [==============================] - 138s - loss: 0.1643 - acc.: 0.9553 - val. loss: 0.2655 - val. acc.: 0.9412
Epoch 49
3420/3420 [==============================] - 144s - loss: 0.1569 - acc.: 0.9567 - val. loss: 0.3504 - val. acc.: 0.9435
Test score : 0.422514762865
Test accuracy : 0.930952380952
Out[7]:
<matplotlib.axes._subplots.AxesSubplot at 0x10e1d75d0>