In [65]:
%matplotlib inline
from matplotlib import pyplot as plt
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.optimizers import SGD
from keras.utils import np_utils
from sklearn.cross_validation import train_test_split
from sklearn.metrics import classification_report
from sklearn.externals import joblib
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
import numpy as np
import pandas as pd
from time import time
In [66]:
np.set_printoptions(precision=4)
np.set_printoptions(suppress=True)
plt.rcParams['image.interpolation'] = 'none'
In [67]:
nb_classes = 9
batch_size = 64
nb_epoch = 10
np.random.seed(1337) # for reproducibility
In [68]:
# load the serialized feature matrix and label vector (as saved with joblib)
features = joblib.load("./mldata/features_1200.mat")
labels = joblib.load("./mldata/lables_1200.mat")
features = np.array(features, 'int16')
labels = np.array(labels, 'int')
def scale(X, eps=0.001):
    # scale the data points so that the columns of the feature space
    # (i.e. the predictors) fall within the range [0, 1]; eps avoids division by zero
    return (X - np.min(X, axis=0)) / (np.max(X, axis=0) + eps)
features = features.astype("float32")
features = scale(features)
# construct the training, validation, and testing splits
# (90% train, then split the remainder evenly into validation and test)
(trainX, testX, trainY, testY) = train_test_split(features, labels, test_size=0.1)
(valX, testX, valY, testY) = train_test_split(testX, testY, test_size=0.5)
# convert class vectors to binary class matrices
trainY = np_utils.to_categorical(trainY, nb_classes)
testY = np_utils.to_categorical(testY, nb_classes)
valY = np_utils.to_categorical(valY, nb_classes)
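A quick sanity check on the splits makes it easier to catch shape or label problems before training. The sketch below only uses the arrays defined above; the expected feature width of 784 (28 x 28 images) is an assumption taken from the input_dim of the model further down.

print(trainX.shape)                            # feature matrix for training; width expected to be 784
print(valX.shape)
print(testX.shape)
print(np.bincount(np.argmax(trainY, axis=1)))  # per-class sample counts in the training split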
In [69]:
# three-layer MLP: 784 -> 256 -> 64 -> 9, ReLU hidden activations, softmax output
model = Sequential()
model.add(Dense(input_dim=784, output_dim=256, init="uniform"))
model.add(Activation("relu"))
model.add(Dense(input_dim=256, output_dim=64, init="uniform"))
model.add(Activation("relu"))
model.add(Dense(input_dim=64, output_dim=9, init="uniform"))
model.add(Activation("softmax"))
model.compile(loss='categorical_crossentropy', optimizer='sgd')
model.fit(trainX, trainY,
          batch_size=batch_size,
          nb_epoch=nb_epoch,
          show_accuracy=True,
          verbose=0,
          validation_data=(testX, testY))
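The metrics imported at the top (accuracy_score, classification_report, confusion_matrix) are never applied in this section; a minimal evaluation sketch on the held-out test split could look like the following. It assumes this Keras version provides Sequential.predict_classes, and it recovers integer labels from the one-hot testY with argmax.

testPred = model.predict_classes(testX, verbose=0)  # predicted class indices
testTrue = np.argmax(testY, axis=1)                 # true class indices from the one-hot matrix
print(accuracy_score(testTrue, testPred))
print(classification_report(testTrue, testPred))
print(confusion_matrix(testTrue, testPred))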
In [70]:
model.layers
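The layer list by itself only shows object types; looping over the layers and their weight shapes is a more informative check. This is a small sketch using only get_weights(), which is part of the Keras layer API.

for i, layer in enumerate(model.layers):
    shapes = [w.shape for w in layer.get_weights()]   # Activation layers have no weights
    print((i, type(layer).__name__, shapes))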
In [71]:
# rebuild the first hidden layer (784 -> 256) on its own, reusing the trained
# weights, so its activations can be inspected directly
model1 = Sequential()
model1.add(Dense(input_dim=784, output_dim=256, init="uniform", weights=model.layers[0].get_weights()))
model1.add(Activation('relu'))
model1.compile(loss='categorical_crossentropy', optimizer='sgd')
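As a check that model1 really starts from the trained weights rather than a fresh initialisation, the copied matrix can be compared with the original; np.allclose and get_weights are standard, so nothing beyond the objects defined above is assumed.

w_trained = model.layers[0].get_weights()[0]
w_copied = model1.layers[0].get_weights()[0]
print(np.allclose(w_trained, w_copied))   # expected: True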
In [72]:
print(valX[0].shape)
print(valY[0])
In [73]:
valX_h_1 = model1.predict(valX)
In [74]:
sample_0 = valX_h_1[0].reshape((16, 16))
sample_1 = valX_h_1[1].reshape((16, 16))
sample_2 = valX_h_1[2].reshape((16, 16))
sample_3 = valX_h_1[3].reshape((16, 16))
sample_4 = valX_h_1[4].reshape((16, 16))
fig, (ax0, ax1, ax2, ax3, ax4) = plt.subplots(1, 5)
ax0.imshow(sample_0, cmap='gray')
ax1.imshow(sample_1, cmap='gray')
ax2.imshow(sample_2, cmap='gray')
ax3.imshow(sample_3, cmap='gray')
ax4.imshow(sample_4, cmap='gray')
print(valY[:5])
In [75]:
# second hidden layer (256 -> 64), again with the trained weights copied over
model2 = Sequential()
model2.add(Dense(input_dim=256, output_dim=64, init="uniform", weights=model.layers[2].get_weights()))
model2.add(Activation('relu'))
model2.compile(loss='categorical_crossentropy', optimizer='sgd')
valX_h_2 = model2.predict(valX_h_1)
In [76]:
sample_0 = valX_h_2[0].reshape((8, 8))
sample_1 = valX_h_2[1].reshape((8, 8))
sample_2 = valX_h_2[2].reshape((8, 8))
sample_3 = valX_h_2[3].reshape((8, 8))
sample_4 = valX_h_2[4].reshape((8, 8))
fig, (ax0, ax1, ax2, ax3, ax4) = plt.subplots(1, 5)
ax0.imshow(sample_0, cmap='gray')
ax1.imshow(sample_1, cmap='gray')
ax2.imshow(sample_2, cmap='gray')
ax3.imshow(sample_3, cmap='gray')
ax4.imshow(sample_4, cmap='gray')
print(valY[:5])
In [77]:
# output layer (64 -> 9) with the trained weights; the trained model applies
# softmax here, so the same activation is used for the visualisation
model3 = Sequential()
model3.add(Dense(input_dim=64, output_dim=9, init="uniform", weights=model.layers[4].get_weights()))
model3.add(Activation('softmax'))
model3.compile(loss='categorical_crossentropy', optimizer='sgd')
valX_h_3 = model3.predict(valX_h_2)
In [78]:
sample_0 = valX_h_3[0].reshape((3, 3))
sample_1 = valX_h_3[1].reshape((3, 3))
sample_2 = valX_h_3[2].reshape((3, 3))
sample_3 = valX_h_3[3].reshape((3, 3))
sample_4 = valX_h_3[4].reshape((3, 3))
fig, (ax0, ax1, ax2, ax3, ax4) = plt.subplots(1, 5)
ax0.imshow(sample_0, cmap='gray')
ax1.imshow(sample_1, cmap='gray')
ax2.imshow(sample_2, cmap='gray')
ax3.imshow(sample_3, cmap='gray')
ax4.imshow(sample_4, cmap='gray')
print(valY[:5])
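The three visualisation cells above repeat the same reshape-and-plot pattern for layer widths 256, 64 and 9. A small helper, sketched below, removes the duplication; it assumes the layer width is a perfect square (true for 16 x 16, 8 x 8 and 3 x 3), and the name show_activations is only illustrative.

def show_activations(activations, n_samples=5):
    # reshape each activation vector into a square image and plot the first few side by side
    side = int(np.sqrt(activations.shape[1]))
    fig, axes = plt.subplots(1, n_samples)
    for ax, vec in zip(axes, activations[:n_samples]):
        ax.imshow(vec.reshape((side, side)), cmap='gray')

show_activations(valX_h_1)   # 256 units -> 16 x 16
show_activations(valX_h_2)   # 64 units  -> 8 x 8
show_activations(valX_h_3)   # 9 units   -> 3 x 3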