In [1]:
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from joblib import Parallel, delayed
import scipy.misc  # imrotate (used below) lives in the scipy.misc submodule
import pandas as pd
from numpy import array
from PIL import Image
import matplotlib.pyplot as plt
np.random.seed(1)


Using Theano backend.

In [2]:
#Loading the training and testing data
(X_train, y_train), (X_test, y_test) = mnist.load_data()
print(X_train.shape)

#Reshaping from (60000, 28, 28) to (60000, 784) so the images can be used to train an MLP
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255

print(X_train.shape)


(60000, 28, 28)
(60000, 784)

In [3]:
#Selecting 6000 random examples from the test data (indices drawn with replacement)
test_rows = np.random.randint(0,X_test.shape[0],6000)
X_test = X_test[test_rows]
Y = y_test[test_rows]
#Converting the labels to a binary target (Six = 1, Not Six = 0)
Y_test = Y == 6
Y_test = Y_test.astype(int)

#Selecting the examples where the output is 6
X_six = X_train[y_train == 6]
Y_six = y_train[y_train == 6]
#Selecting the examples where the output is not 6
X_not_six = X_train[y_train != 6]
Y_not_six = y_train[y_train != 6]

#Selecting 1000 random examples each from the six and not-six subsets
random_rows = np.random.randint(0,X_six.shape[0],1000)
X_six = X_six[random_rows]
Y_six = Y_six[random_rows]
random_rows = np.random.randint(0,X_not_six.shape[0],1000)
X_not_six = X_not_six[random_rows]
Y_not_six = Y_not_six[random_rows]

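A small caveat on the cell above: np.random.randint samples indices with replacement, so a few rows can appear more than once. If distinct rows are wanted, a sketch along these lines (np.random.choice with replace=False, reusing the variable names from the cell above) would do it; the results in this notebook were produced with the original sampling.

#Sketch: sample without replacement instead of np.random.randint
test_rows = np.random.choice(X_test.shape[0], size=6000, replace=False)

random_rows = np.random.choice(X_six.shape[0], size=1000, replace=False)
X_six, Y_six = X_six[random_rows], Y_six[random_rows]

random_rows = np.random.choice(X_not_six.shape[0], size=1000, replace=False)
X_not_six, Y_not_six = X_not_six[random_rows], Y_not_six[random_rows]
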
In [4]:
#Concatenating the examples labelled six with the examples labelled not six
X_train = np.append(X_six,X_not_six)
#Reshaping the concatenated data to the expected shape (2000, 784)
X_train = X_train.reshape(X_six.shape[0] + X_not_six.shape[0], 784)
#Concatenating the labels and converting them to binary targets (Six = 1, Not Six = 0)
Y_labels = np.append(Y_six,Y_not_six)
Y_train = Y_labels == 6 
Y_train = Y_train.astype(int)

In [5]:
#Converting the labels to their one-hot categorical form
nb_classes = 2
Y_train = np_utils.to_categorical(Y_train, nb_classes)
Y_test = np_utils.to_categorical(Y_test, nb_classes)

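For reference, np_utils.to_categorical maps each integer label to a one-hot row; a minimal illustration with nb_classes = 2 (the expected result is shown in the comment):

example = np_utils.to_categorical(np.array([0, 1, 1]), 2)
#array([[ 1.,  0.],
#       [ 0.,  1.],
#       [ 0.,  1.]])
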
In [6]:
#Rotation operates on the original 28x28 images, so the flattened (784,) vectors are reshaped back to (28, 28)
#(and then to (1, 28, 28) so the rotated images can be stacked along the first axis)

#Initializing the array which will contain images rotated by 15 degrees anticlockwise
anti_X_train = scipy.misc.imrotate(X_train[0].reshape(28,28), angle = 15)
anti_X_train = anti_X_train.reshape(1, 28,28)

#Initializing the array which will contain images rotated by 15 degrees clockwise
clock_X_train = scipy.misc.imrotate(X_train[0].reshape(28,28), angle = -15)
clock_X_train = clock_X_train.reshape(1, 28,28)

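A portability note: scipy.misc.imrotate was deprecated and removed in SciPy 1.2, so on newer SciPy versions the rotation can be sketched with scipy.ndimage.rotate instead (this is a suggested replacement, not what this notebook ran). Unlike imrotate, ndimage.rotate keeps the float [0, 1] scale, so the later rescale-by-255 step would not be needed; the angle sign convention should be checked against the images plotted below.

from scipy.ndimage import rotate  # available in modern SciPy

img = X_train[0].reshape(28, 28)
#reshape=False keeps the 28x28 frame; order=1 is bilinear interpolation
anti_img = rotate(img, angle=15, reshape=False, order=1)
clock_img = rotate(img, angle=-15, reshape=False, order=1)
anti_X_train_alt = anti_img.reshape(1, 28, 28)    # illustrative name
clock_X_train_alt = clock_img.reshape(1, 28, 28)  # illustrative name
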
In [7]:
#Performing clockwise and anticlockwise rotation for the remaining images; the same reshaping as above
#is needed here for the same reason
for i in range(1,2000):
    
    rotate_anti = scipy.misc.imrotate(X_train[i].reshape(28,28), angle = 15)
    rotate_anti = rotate_anti.reshape(1, 28,28)
    
    rotate_clock = scipy.misc.imrotate(X_train[i].reshape(28,28), angle = -15)
    rotate_clock = rotate_clock.reshape(1, 28,28)
    
    #Appending the rotated images to the respective arrays
    anti_X_train = np.append(anti_X_train,rotate_anti,axis=0)
    clock_X_train = np.append(clock_X_train,rotate_clock,axis=0)

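Calling np.append inside the loop copies the whole array on every iteration, which grows quadratically with the number of images. A pre-allocated version (a sketch producing the same (2000, 28, 28) arrays, assuming the 2000-image reduced training set) avoids that cost:

n = X_train.shape[0]  # 2000 images in the reduced training set
anti_X_train = np.empty((n, 28, 28), dtype='float32')
clock_X_train = np.empty((n, 28, 28), dtype='float32')

for i in range(n):
    img = X_train[i].reshape(28, 28)
    anti_X_train[i] = scipy.misc.imrotate(img, angle=15)   # uint8 result is cast to float32
    clock_X_train[i] = scipy.misc.imrotate(img, angle=-15)
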
In [8]:
#Displaying the original and rotated images
def image_compare(original,clockwise,anticlockwise):
    
    original = original.reshape(28,28)
    
    plt.figure(figsize=(20, 6))
    ax = plt.subplot(1, 3, 1)
    plt.imshow(original)
    plt.xlabel('ORIGINAL')
    plt.gray()
    ax = plt.subplot(1, 3, 2)
    plt.imshow(clockwise)
    plt.xlabel('ROTATED CLOCKWISE')
    plt.gray()
    ax = plt.subplot(1, 3, 3)
    plt.imshow(anticlockwise)
    plt.xlabel('ROTATED ANTI-CLOCKWISE')
    plt.gray()
    plt.show()

In [9]:
image_compare(X_train[0],clock_X_train[0],anti_X_train[0])

In [10]:
image_compare(X_train[1500],clock_X_train[1500],anti_X_train[1500])

In [12]:
#Reshaping the rotated arrays from (2000, 28, 28) back to (2000, 784) so they can be used to train the MLP
anti_X_train = anti_X_train.reshape(anti_X_train.shape[0], 784)
clock_X_train = clock_X_train.reshape(clock_X_train.shape[0], 784)


#imrotate returns uint8 images in [0, 255], so convert back to float32 and rescale to [0, 1] (**)
anti_X_train = anti_X_train.astype('float32')
clock_X_train = clock_X_train.astype('float32')
anti_X_train /= 255
clock_X_train /= 255

In [12]:
#Appending the arrays to build the final augmented training set
rotated_X_train = np.append(X_train, anti_X_train, axis = 0)
rotated_Y_train = np.append(Y_train, Y_train, axis = 0)

#The final array contains the original images as well as both rotated versions
#(2000 original, 2000 rotated 15 degrees anticlockwise, 2000 rotated 15 degrees clockwise)
rotated_X_train = np.append(rotated_X_train, clock_X_train, axis = 0)
rotated_Y_train = np.append(rotated_Y_train, Y_train,axis = 0)

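A quick sanity check on the augmented arrays (shapes only; a sketch):

#2000 original + 2000 anticlockwise + 2000 clockwise = 6000 rows
assert rotated_X_train.shape == (6000, 784)
assert rotated_Y_train.shape == (6000, nb_classes)
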
In [13]:
#Function for constructing the multi-layer perceptron
# 1 Hidden Layer
def build_layer_1(nb_epoch,X_train,Y_train):
    
    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))
    
    model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

    model.fit(X_train, Y_train, batch_size=128, nb_epoch=nb_epoch,verbose=1,
              validation_data=(X_test, Y_test))
          

    score = model.evaluate(X_test, Y_test, verbose=0)
    print('Test score:', score[0])
    print('Test accuracy:', score[1])

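A version note: in Keras 2 the fit argument nb_epoch was renamed to epochs, so on newer Keras the call inside this function would read as below (a sketch; everything else unchanged):

#Keras 2.x spelling of the same call
model.fit(X_train, Y_train, batch_size=128, epochs=nb_epoch, verbose=1,
          validation_data=(X_test, Y_test))
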
In [14]:
%%time

build_layer_1(1,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/1
2000/2000 [==============================] - 0s - loss: 0.2878 - acc: 0.8665 - val_loss: 0.1196 - val_acc: 0.9662
Test score: 0.119554816027
Test accuracy: 0.966166666667
CPU times: user 2.59 s, sys: 108 ms, total: 2.7 s
Wall time: 2.5 s

In [15]:
%%time

build_layer_1(1,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/1
6000/6000 [==============================] - 0s - loss: 0.2019 - acc: 0.9193 - val_loss: 0.1022 - val_acc: 0.9642
Test score: 0.102156283175
Test accuracy: 0.964166666667
CPU times: user 3.09 s, sys: 44 ms, total: 3.14 s
Wall time: 1.84 s

In [16]:
%%time

build_layer_1(5,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/5
2000/2000 [==============================] - 0s - loss: 0.2495 - acc: 0.8840 - val_loss: 0.0814 - val_acc: 0.9760
Epoch 2/5
2000/2000 [==============================] - 0s - loss: 0.1088 - acc: 0.9705 - val_loss: 0.0858 - val_acc: 0.9693
Epoch 3/5
2000/2000 [==============================] - 0s - loss: 0.0754 - acc: 0.9755 - val_loss: 0.0822 - val_acc: 0.9695
Epoch 4/5
2000/2000 [==============================] - 0s - loss: 0.0635 - acc: 0.9805 - val_loss: 0.1181 - val_acc: 0.9580
Epoch 5/5
2000/2000 [==============================] - 0s - loss: 0.0621 - acc: 0.9775 - val_loss: 0.0943 - val_acc: 0.9667
Test score: 0.0942960611358
Test accuracy: 0.966666666667
CPU times: user 5.06 s, sys: 88 ms, total: 5.15 s
Wall time: 2.82 s

In [17]:
build_layer_1(5,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/5
6000/6000 [==============================] - 0s - loss: 0.1871 - acc: 0.9302 - val_loss: 0.1190 - val_acc: 0.9568
Epoch 2/5
6000/6000 [==============================] - 0s - loss: 0.0990 - acc: 0.9658 - val_loss: 0.1101 - val_acc: 0.9603
Epoch 3/5
6000/6000 [==============================] - 0s - loss: 0.0701 - acc: 0.9775 - val_loss: 0.0679 - val_acc: 0.9777
Epoch 4/5
6000/6000 [==============================] - 0s - loss: 0.0529 - acc: 0.9838 - val_loss: 0.0477 - val_acc: 0.9838
Epoch 5/5
6000/6000 [==============================] - 0s - loss: 0.0410 - acc: 0.9855 - val_loss: 0.0627 - val_acc: 0.9765
Test score: 0.0627371336253
Test accuracy: 0.9765

In [18]:
%%time

build_layer_1(10,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/10
2000/2000 [==============================] - 0s - loss: 0.2868 - acc: 0.8755 - val_loss: 0.1076 - val_acc: 0.9705
Epoch 2/10
2000/2000 [==============================] - 0s - loss: 0.0969 - acc: 0.9735 - val_loss: 0.1447 - val_acc: 0.9500
Epoch 3/10
2000/2000 [==============================] - 0s - loss: 0.0799 - acc: 0.9770 - val_loss: 0.1026 - val_acc: 0.9645
Epoch 4/10
2000/2000 [==============================] - 0s - loss: 0.0561 - acc: 0.9815 - val_loss: 0.0527 - val_acc: 0.9800
Epoch 5/10
2000/2000 [==============================] - 0s - loss: 0.0482 - acc: 0.9865 - val_loss: 0.0626 - val_acc: 0.9762
Epoch 6/10
2000/2000 [==============================] - 0s - loss: 0.0431 - acc: 0.9885 - val_loss: 0.0523 - val_acc: 0.9790
Epoch 7/10
2000/2000 [==============================] - 0s - loss: 0.0365 - acc: 0.9890 - val_loss: 0.0842 - val_acc: 0.9698
Epoch 8/10
2000/2000 [==============================] - 0s - loss: 0.0242 - acc: 0.9945 - val_loss: 0.0434 - val_acc: 0.9825
Epoch 9/10
2000/2000 [==============================] - 0s - loss: 0.0217 - acc: 0.9935 - val_loss: 0.1344 - val_acc: 0.9517
Epoch 10/10
2000/2000 [==============================] - 0s - loss: 0.0206 - acc: 0.9950 - val_loss: 0.0392 - val_acc: 0.9860
Test score: 0.0392039663407
Test accuracy: 0.986
CPU times: user 8.97 s, sys: 144 ms, total: 9.12 s
Wall time: 4.88 s

In [19]:
%%time

build_layer_1(10,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/10
6000/6000 [==============================] - 0s - loss: 0.2105 - acc: 0.9227 - val_loss: 0.1218 - val_acc: 0.9563
Epoch 2/10
6000/6000 [==============================] - 0s - loss: 0.0988 - acc: 0.9673 - val_loss: 0.0882 - val_acc: 0.9693
Epoch 3/10
6000/6000 [==============================] - 0s - loss: 0.0713 - acc: 0.9770 - val_loss: 0.0422 - val_acc: 0.9847
Epoch 4/10
6000/6000 [==============================] - 0s - loss: 0.0528 - acc: 0.9835 - val_loss: 0.0589 - val_acc: 0.9798
Epoch 5/10
6000/6000 [==============================] - 0s - loss: 0.0412 - acc: 0.9878 - val_loss: 0.0542 - val_acc: 0.9813
Epoch 6/10
6000/6000 [==============================] - 0s - loss: 0.0331 - acc: 0.9897 - val_loss: 0.0231 - val_acc: 0.9915
Epoch 7/10
6000/6000 [==============================] - 0s - loss: 0.0257 - acc: 0.9920 - val_loss: 0.0551 - val_acc: 0.9797
Epoch 8/10
6000/6000 [==============================] - 0s - loss: 0.0199 - acc: 0.9940 - val_loss: 0.0679 - val_acc: 0.9770
Epoch 9/10
6000/6000 [==============================] - 0s - loss: 0.0188 - acc: 0.9945 - val_loss: 0.0489 - val_acc: 0.9833
Epoch 10/10
6000/6000 [==============================] - 0s - loss: 0.0151 - acc: 0.9955 - val_loss: 0.0447 - val_acc: 0.9838
Test score: 0.0446645175412
Test accuracy: 0.983833333333
CPU times: user 17.2 s, sys: 300 ms, total: 17.5 s
Wall time: 9.25 s

In [20]:
%%time

build_layer_1(30,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/30
2000/2000 [==============================] - 0s - loss: 0.2916 - acc: 0.8775 - val_loss: 0.0778 - val_acc: 0.9790
Epoch 2/30
2000/2000 [==============================] - 0s - loss: 0.1064 - acc: 0.9675 - val_loss: 0.1209 - val_acc: 0.9603
Epoch 3/30
2000/2000 [==============================] - 0s - loss: 0.0752 - acc: 0.9760 - val_loss: 0.1048 - val_acc: 0.9642
Epoch 4/30
2000/2000 [==============================] - 0s - loss: 0.0627 - acc: 0.9810 - val_loss: 0.0983 - val_acc: 0.9633
Epoch 5/30
2000/2000 [==============================] - 0s - loss: 0.0538 - acc: 0.9820 - val_loss: 0.0524 - val_acc: 0.9795
Epoch 6/30
2000/2000 [==============================] - 0s - loss: 0.0416 - acc: 0.9860 - val_loss: 0.0835 - val_acc: 0.9700
Epoch 7/30
2000/2000 [==============================] - 0s - loss: 0.0362 - acc: 0.9895 - val_loss: 0.0452 - val_acc: 0.9832
Epoch 8/30
2000/2000 [==============================] - 0s - loss: 0.0298 - acc: 0.9920 - val_loss: 0.1498 - val_acc: 0.9417
Epoch 9/30
2000/2000 [==============================] - 0s - loss: 0.0259 - acc: 0.9940 - val_loss: 0.0341 - val_acc: 0.9872
Epoch 10/30
2000/2000 [==============================] - 0s - loss: 0.0233 - acc: 0.9940 - val_loss: 0.0510 - val_acc: 0.9800
Epoch 11/30
2000/2000 [==============================] - 0s - loss: 0.0163 - acc: 0.9970 - val_loss: 0.0419 - val_acc: 0.9835
Epoch 12/30
2000/2000 [==============================] - 0s - loss: 0.0120 - acc: 0.9985 - val_loss: 0.0634 - val_acc: 0.9768
Epoch 13/30
2000/2000 [==============================] - 0s - loss: 0.0114 - acc: 0.9980 - val_loss: 0.0565 - val_acc: 0.9790
Epoch 14/30
2000/2000 [==============================] - 0s - loss: 0.0103 - acc: 0.9980 - val_loss: 0.0530 - val_acc: 0.9800
Epoch 15/30
2000/2000 [==============================] - 0s - loss: 0.0080 - acc: 1.0000 - val_loss: 0.0415 - val_acc: 0.9842
Epoch 16/30
2000/2000 [==============================] - 0s - loss: 0.0068 - acc: 0.9995 - val_loss: 0.0466 - val_acc: 0.9827
Epoch 17/30
2000/2000 [==============================] - 0s - loss: 0.0052 - acc: 1.0000 - val_loss: 0.0540 - val_acc: 0.9803
Epoch 18/30
2000/2000 [==============================] - 0s - loss: 0.0043 - acc: 1.0000 - val_loss: 0.0597 - val_acc: 0.9792
Epoch 19/30
2000/2000 [==============================] - 0s - loss: 0.0046 - acc: 1.0000 - val_loss: 0.0563 - val_acc: 0.9805
Epoch 20/30
2000/2000 [==============================] - 0s - loss: 0.0037 - acc: 1.0000 - val_loss: 0.0671 - val_acc: 0.9785
Epoch 21/30
2000/2000 [==============================] - 0s - loss: 0.0034 - acc: 1.0000 - val_loss: 0.0833 - val_acc: 0.9742
Epoch 22/30
2000/2000 [==============================] - 0s - loss: 0.0037 - acc: 1.0000 - val_loss: 0.0470 - val_acc: 0.9838
Epoch 23/30
2000/2000 [==============================] - 0s - loss: 0.0029 - acc: 1.0000 - val_loss: 0.0541 - val_acc: 0.9813
Epoch 24/30
2000/2000 [==============================] - 0s - loss: 0.0023 - acc: 1.0000 - val_loss: 0.0615 - val_acc: 0.9803
Epoch 25/30
2000/2000 [==============================] - 0s - loss: 0.0025 - acc: 1.0000 - val_loss: 0.0597 - val_acc: 0.9805
Epoch 26/30
2000/2000 [==============================] - 0s - loss: 0.0019 - acc: 1.0000 - val_loss: 0.0619 - val_acc: 0.9802
Epoch 27/30
2000/2000 [==============================] - 0s - loss: 0.0018 - acc: 1.0000 - val_loss: 0.0604 - val_acc: 0.9807
Epoch 28/30
2000/2000 [==============================] - 0s - loss: 0.0015 - acc: 1.0000 - val_loss: 0.0594 - val_acc: 0.9808
Epoch 29/30
2000/2000 [==============================] - 0s - loss: 0.0017 - acc: 1.0000 - val_loss: 0.0598 - val_acc: 0.9812
Epoch 30/30
2000/2000 [==============================] - 0s - loss: 0.0016 - acc: 1.0000 - val_loss: 0.0544 - val_acc: 0.9823
Test score: 0.0544348773411
Test accuracy: 0.982333333333
CPU times: user 26.7 s, sys: 348 ms, total: 27.1 s
Wall time: 14 s

In [21]:
%%time

build_layer_1(30,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/30
6000/6000 [==============================] - 0s - loss: 0.1961 - acc: 0.9213 - val_loss: 0.1100 - val_acc: 0.9650
Epoch 2/30
6000/6000 [==============================] - 0s - loss: 0.0969 - acc: 0.9668 - val_loss: 0.0390 - val_acc: 0.9863
Epoch 3/30
6000/6000 [==============================] - 0s - loss: 0.0692 - acc: 0.9767 - val_loss: 0.0516 - val_acc: 0.9808
Epoch 4/30
6000/6000 [==============================] - 0s - loss: 0.0562 - acc: 0.9808 - val_loss: 0.1070 - val_acc: 0.9607
Epoch 5/30
6000/6000 [==============================] - 0s - loss: 0.0430 - acc: 0.9858 - val_loss: 0.0649 - val_acc: 0.9767
Epoch 6/30
6000/6000 [==============================] - 0s - loss: 0.0337 - acc: 0.9887 - val_loss: 0.0341 - val_acc: 0.9875
Epoch 7/30
6000/6000 [==============================] - 0s - loss: 0.0284 - acc: 0.9913 - val_loss: 0.0433 - val_acc: 0.9843
Epoch 8/30
6000/6000 [==============================] - 0s - loss: 0.0228 - acc: 0.9930 - val_loss: 0.0437 - val_acc: 0.9857
Epoch 9/30
6000/6000 [==============================] - 0s - loss: 0.0182 - acc: 0.9953 - val_loss: 0.0372 - val_acc: 0.9870
Epoch 10/30
6000/6000 [==============================] - 0s - loss: 0.0136 - acc: 0.9968 - val_loss: 0.0274 - val_acc: 0.9908
Epoch 11/30
6000/6000 [==============================] - 0s - loss: 0.0132 - acc: 0.9970 - val_loss: 0.0662 - val_acc: 0.9772
Epoch 12/30
6000/6000 [==============================] - 1s - loss: 0.0102 - acc: 0.9973 - val_loss: 0.0457 - val_acc: 0.9843
Epoch 13/30
6000/6000 [==============================] - 1s - loss: 0.0085 - acc: 0.9982 - val_loss: 0.0358 - val_acc: 0.9870
Epoch 14/30
6000/6000 [==============================] - 0s - loss: 0.0068 - acc: 0.9990 - val_loss: 0.0362 - val_acc: 0.9873
Epoch 15/30
6000/6000 [==============================] - 0s - loss: 0.0058 - acc: 0.9992 - val_loss: 0.0394 - val_acc: 0.9865
Epoch 16/30
6000/6000 [==============================] - 0s - loss: 0.0045 - acc: 0.9997 - val_loss: 0.0430 - val_acc: 0.9847
Epoch 17/30
6000/6000 [==============================] - 0s - loss: 0.0049 - acc: 0.9998 - val_loss: 0.0276 - val_acc: 0.9912
Epoch 18/30
6000/6000 [==============================] - 0s - loss: 0.0035 - acc: 0.9995 - val_loss: 0.0294 - val_acc: 0.9897
Epoch 19/30
6000/6000 [==============================] - 0s - loss: 0.0041 - acc: 0.9997 - val_loss: 0.0521 - val_acc: 0.9838
Epoch 20/30
6000/6000 [==============================] - 0s - loss: 0.0031 - acc: 0.9998 - val_loss: 0.0378 - val_acc: 0.9877
Epoch 21/30
6000/6000 [==============================] - 0s - loss: 0.0024 - acc: 1.0000 - val_loss: 0.0461 - val_acc: 0.9855
Epoch 22/30
6000/6000 [==============================] - 1s - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0292 - val_acc: 0.9910
Epoch 23/30
6000/6000 [==============================] - 0s - loss: 0.0017 - acc: 1.0000 - val_loss: 0.0371 - val_acc: 0.9890
Epoch 24/30
6000/6000 [==============================] - 0s - loss: 0.0016 - acc: 1.0000 - val_loss: 0.0361 - val_acc: 0.9895
Epoch 25/30
6000/6000 [==============================] - 1s - loss: 0.0015 - acc: 0.9998 - val_loss: 0.0388 - val_acc: 0.9883
Epoch 26/30
6000/6000 [==============================] - 0s - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0444 - val_acc: 0.9868
Epoch 27/30
6000/6000 [==============================] - 0s - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0454 - val_acc: 0.9870
Epoch 28/30
6000/6000 [==============================] - 0s - loss: 0.0011 - acc: 1.0000 - val_loss: 0.0407 - val_acc: 0.9888
Epoch 29/30
6000/6000 [==============================] - 0s - loss: 0.0012 - acc: 1.0000 - val_loss: 0.0366 - val_acc: 0.9905
Epoch 30/30
6000/6000 [==============================] - 0s - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0410 - val_acc: 0.9880
Test score: 0.040969708725
Test accuracy: 0.988
CPU times: user 56 s, sys: 908 ms, total: 56.9 s
Wall time: 29.2 s

In [22]:
#Function for constructing the multi-layer perceptron
# 2 Hidden Layers
def build_layer_2(nb_epoch,X_train,Y_train):
    
    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))
    
    model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

    model.fit(X_train, Y_train, batch_size=128, nb_epoch=nb_epoch,verbose=1,
              validation_data=(X_test, Y_test))
          

    score = model.evaluate(X_test, Y_test, verbose=0)
    print('Test score:', score[0])
    print('Test accuracy:', score[1])

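The longer runs in this notebook reach 100% training accuracy while the validation loss keeps fluctuating, which points at overfitting. One option (not used in the original experiments) is Keras' EarlyStopping callback; a sketch of how it would slot into these builder functions:

from keras.callbacks import EarlyStopping

#Stop training once val_loss has not improved for 3 consecutive epochs
early_stop = EarlyStopping(monitor='val_loss', patience=3)
model.fit(X_train, Y_train, batch_size=128, nb_epoch=nb_epoch, verbose=1,
          validation_data=(X_test, Y_test), callbacks=[early_stop])
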
In [23]:
%%time

build_layer_2(1,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/1
2000/2000 [==============================] - 0s - loss: 0.3443 - acc: 0.8510 - val_loss: 0.0929 - val_acc: 0.9707
Test score: 0.0928595305085
Test accuracy: 0.970666666667
CPU times: user 3.7 s, sys: 56 ms, total: 3.76 s
Wall time: 2.51 s

In [24]:
%%time

build_layer_2(1,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/1
6000/6000 [==============================] - 1s - loss: 0.2310 - acc: 0.9052 - val_loss: 0.0911 - val_acc: 0.9677
Test score: 0.0911035261527
Test accuracy: 0.967666666667
CPU times: user 5.15 s, sys: 108 ms, total: 5.26 s
Wall time: 3.13 s

In [25]:
%%time

build_layer_2(5,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/5
2000/2000 [==============================] - 0s - loss: 0.3333 - acc: 0.8600 - val_loss: 0.1484 - val_acc: 0.9485
Epoch 2/5
2000/2000 [==============================] - 0s - loss: 0.1090 - acc: 0.9640 - val_loss: 0.0775 - val_acc: 0.9718
Epoch 3/5
2000/2000 [==============================] - 0s - loss: 0.1177 - acc: 0.9610 - val_loss: 0.0544 - val_acc: 0.9795
Epoch 4/5
2000/2000 [==============================] - 0s - loss: 0.0476 - acc: 0.9840 - val_loss: 0.0805 - val_acc: 0.9725
Epoch 5/5
2000/2000 [==============================] - 0s - loss: 0.0501 - acc: 0.9830 - val_loss: 0.0527 - val_acc: 0.9802
Test score: 0.0526903004448
Test accuracy: 0.980166666667
CPU times: user 9.15 s, sys: 80 ms, total: 9.23 s
Wall time: 5.12 s

In [26]:
%%time

build_layer_2(5,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/5
6000/6000 [==============================] - 1s - loss: 0.2302 - acc: 0.9098 - val_loss: 0.1235 - val_acc: 0.9577
Epoch 2/5
6000/6000 [==============================] - 1s - loss: 0.0902 - acc: 0.9672 - val_loss: 0.1286 - val_acc: 0.9513
Epoch 3/5
6000/6000 [==============================] - 1s - loss: 0.0594 - acc: 0.9782 - val_loss: 0.0576 - val_acc: 0.9795
Epoch 4/5
6000/6000 [==============================] - 1s - loss: 0.0453 - acc: 0.9842 - val_loss: 0.0628 - val_acc: 0.9770
Epoch 5/5
6000/6000 [==============================] - 1s - loss: 0.0281 - acc: 0.9910 - val_loss: 0.0370 - val_acc: 0.9880
Test score: 0.0369898467099
Test accuracy: 0.988
CPU times: user 18.4 s, sys: 256 ms, total: 18.7 s
Wall time: 9.97 s

In [27]:
%%time

build_layer_2(10,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/10
2000/2000 [==============================] - 0s - loss: 0.3182 - acc: 0.8745 - val_loss: 0.0571 - val_acc: 0.9795
Epoch 2/10
2000/2000 [==============================] - 0s - loss: 0.1132 - acc: 0.9595 - val_loss: 0.0623 - val_acc: 0.9770
Epoch 3/10
2000/2000 [==============================] - 0s - loss: 0.0798 - acc: 0.9735 - val_loss: 0.0441 - val_acc: 0.9832
Epoch 4/10
2000/2000 [==============================] - 0s - loss: 0.0514 - acc: 0.9820 - val_loss: 0.1014 - val_acc: 0.9658
Epoch 5/10
2000/2000 [==============================] - 0s - loss: 0.0449 - acc: 0.9855 - val_loss: 0.0559 - val_acc: 0.9800
Epoch 6/10
2000/2000 [==============================] - 0s - loss: 0.0286 - acc: 0.9910 - val_loss: 0.0311 - val_acc: 0.9880
Epoch 7/10
2000/2000 [==============================] - 0s - loss: 0.0453 - acc: 0.9865 - val_loss: 0.0656 - val_acc: 0.9765
Epoch 8/10
2000/2000 [==============================] - 0s - loss: 0.0124 - acc: 0.9970 - val_loss: 0.0591 - val_acc: 0.9802
Epoch 9/10
2000/2000 [==============================] - 0s - loss: 0.0125 - acc: 0.9965 - val_loss: 0.0584 - val_acc: 0.9807
Epoch 10/10
2000/2000 [==============================] - 0s - loss: 0.0083 - acc: 0.9985 - val_loss: 0.0537 - val_acc: 0.9820
Test score: 0.0537190124753
Test accuracy: 0.982
CPU times: user 18.4 s, sys: 248 ms, total: 18.7 s
Wall time: 10.1 s

In [28]:
%%time

build_layer_2(10,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/10
6000/6000 [==============================] - 1s - loss: 0.2253 - acc: 0.9133 - val_loss: 0.0691 - val_acc: 0.9750
Epoch 2/10
6000/6000 [==============================] - 1s - loss: 0.0880 - acc: 0.9690 - val_loss: 0.0289 - val_acc: 0.9900
Epoch 3/10
6000/6000 [==============================] - 1s - loss: 0.0552 - acc: 0.9823 - val_loss: 0.0400 - val_acc: 0.9838
Epoch 4/10
6000/6000 [==============================] - 1s - loss: 0.0492 - acc: 0.9843 - val_loss: 0.0530 - val_acc: 0.9793
Epoch 5/10
6000/6000 [==============================] - 1s - loss: 0.0303 - acc: 0.9888 - val_loss: 0.0262 - val_acc: 0.9915
Epoch 6/10
6000/6000 [==============================] - 1s - loss: 0.0220 - acc: 0.9922 - val_loss: 0.0516 - val_acc: 0.9845
Epoch 7/10
6000/6000 [==============================] - 1s - loss: 0.0147 - acc: 0.9955 - val_loss: 0.0711 - val_acc: 0.9797
Epoch 8/10
6000/6000 [==============================] - 1s - loss: 0.0132 - acc: 0.9953 - val_loss: 0.0392 - val_acc: 0.9880
Epoch 9/10
6000/6000 [==============================] - 1s - loss: 0.0113 - acc: 0.9967 - val_loss: 0.0283 - val_acc: 0.9900
Epoch 10/10
6000/6000 [==============================] - 1s - loss: 0.0075 - acc: 0.9980 - val_loss: 0.0250 - val_acc: 0.9918
Test score: 0.0250399414618
Test accuracy: 0.991833333333
CPU times: user 34.6 s, sys: 540 ms, total: 35.1 s
Wall time: 18.3 s

In [29]:
%%time

build_layer_2(30,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/30
2000/2000 [==============================] - 0s - loss: 0.3226 - acc: 0.8645 - val_loss: 0.0575 - val_acc: 0.9820
Epoch 2/30
2000/2000 [==============================] - 0s - loss: 0.0994 - acc: 0.9645 - val_loss: 0.0418 - val_acc: 0.9857
Epoch 3/30
2000/2000 [==============================] - 0s - loss: 0.0679 - acc: 0.9785 - val_loss: 0.0919 - val_acc: 0.9678
Epoch 4/30
2000/2000 [==============================] - 0s - loss: 0.0476 - acc: 0.9815 - val_loss: 0.0345 - val_acc: 0.9875
Epoch 5/30
2000/2000 [==============================] - 0s - loss: 0.0437 - acc: 0.9860 - val_loss: 0.0377 - val_acc: 0.9870
Epoch 6/30
2000/2000 [==============================] - 0s - loss: 0.0364 - acc: 0.9875 - val_loss: 0.0696 - val_acc: 0.9755
Epoch 7/30
2000/2000 [==============================] - 0s - loss: 0.0144 - acc: 0.9965 - val_loss: 0.0375 - val_acc: 0.9862
Epoch 8/30
2000/2000 [==============================] - 0s - loss: 0.0156 - acc: 0.9945 - val_loss: 0.0492 - val_acc: 0.9818
Epoch 9/30
2000/2000 [==============================] - 0s - loss: 0.0120 - acc: 0.9975 - val_loss: 0.0641 - val_acc: 0.9800
Epoch 10/30
2000/2000 [==============================] - 0s - loss: 0.0055 - acc: 1.0000 - val_loss: 0.0529 - val_acc: 0.9830
Epoch 11/30
2000/2000 [==============================] - 0s - loss: 0.0051 - acc: 0.9990 - val_loss: 0.0517 - val_acc: 0.9828
Epoch 12/30
2000/2000 [==============================] - 0s - loss: 0.0029 - acc: 1.0000 - val_loss: 0.0832 - val_acc: 0.9767
Epoch 13/30
2000/2000 [==============================] - 0s - loss: 0.0024 - acc: 1.0000 - val_loss: 0.0407 - val_acc: 0.9858
Epoch 14/30
2000/2000 [==============================] - 0s - loss: 0.0021 - acc: 1.0000 - val_loss: 0.0748 - val_acc: 0.9807
Epoch 15/30
2000/2000 [==============================] - 0s - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0667 - val_acc: 0.9825
Epoch 16/30
2000/2000 [==============================] - 0s - loss: 0.0014 - acc: 1.0000 - val_loss: 0.0830 - val_acc: 0.9790
Epoch 17/30
2000/2000 [==============================] - 0s - loss: 9.1546e-04 - acc: 1.0000 - val_loss: 0.0759 - val_acc: 0.9812
Epoch 18/30
2000/2000 [==============================] - 0s - loss: 0.0019 - acc: 0.9995 - val_loss: 0.0612 - val_acc: 0.9830
Epoch 19/30
2000/2000 [==============================] - 0s - loss: 7.1941e-04 - acc: 1.0000 - val_loss: 0.0693 - val_acc: 0.9817
Epoch 20/30
2000/2000 [==============================] - 0s - loss: 5.5569e-04 - acc: 1.0000 - val_loss: 0.0733 - val_acc: 0.9817
Epoch 21/30
2000/2000 [==============================] - 0s - loss: 5.3586e-04 - acc: 1.0000 - val_loss: 0.0665 - val_acc: 0.9833
Epoch 22/30
2000/2000 [==============================] - 0s - loss: 3.6450e-04 - acc: 1.0000 - val_loss: 0.0737 - val_acc: 0.9823
Epoch 23/30
2000/2000 [==============================] - 0s - loss: 5.1240e-04 - acc: 1.0000 - val_loss: 0.0824 - val_acc: 0.9802
Epoch 24/30
2000/2000 [==============================] - 0s - loss: 4.3361e-04 - acc: 1.0000 - val_loss: 0.0847 - val_acc: 0.9807
Epoch 25/30
2000/2000 [==============================] - 0s - loss: 3.9501e-04 - acc: 1.0000 - val_loss: 0.0903 - val_acc: 0.9793
Epoch 26/30
2000/2000 [==============================] - 0s - loss: 3.2571e-04 - acc: 1.0000 - val_loss: 0.0859 - val_acc: 0.9808
Epoch 27/30
2000/2000 [==============================] - 0s - loss: 3.2725e-04 - acc: 1.0000 - val_loss: 0.0775 - val_acc: 0.9825
Epoch 28/30
2000/2000 [==============================] - 0s - loss: 2.6452e-04 - acc: 1.0000 - val_loss: 0.0879 - val_acc: 0.9808
Epoch 29/30
2000/2000 [==============================] - 0s - loss: 2.7790e-04 - acc: 1.0000 - val_loss: 0.0818 - val_acc: 0.9817
Epoch 30/30
2000/2000 [==============================] - 0s - loss: 2.4069e-04 - acc: 1.0000 - val_loss: 0.0818 - val_acc: 0.9815
Test score: 0.0817941982662
Test accuracy: 0.9815
CPU times: user 49.2 s, sys: 612 ms, total: 49.8 s
Wall time: 25.7 s

In [30]:
%%time

build_layer_2(30,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/30
6000/6000 [==============================] - 1s - loss: 0.2107 - acc: 0.9122 - val_loss: 0.0451 - val_acc: 0.9843
Epoch 2/30
6000/6000 [==============================] - 1s - loss: 0.0963 - acc: 0.9642 - val_loss: 0.0309 - val_acc: 0.9878
Epoch 3/30
6000/6000 [==============================] - 1s - loss: 0.0684 - acc: 0.9748 - val_loss: 0.0841 - val_acc: 0.9723
Epoch 4/30
6000/6000 [==============================] - 1s - loss: 0.0449 - acc: 0.9843 - val_loss: 0.0543 - val_acc: 0.9812
Epoch 5/30
6000/6000 [==============================] - 1s - loss: 0.0322 - acc: 0.9890 - val_loss: 0.0752 - val_acc: 0.9753
Epoch 6/30
6000/6000 [==============================] - 1s - loss: 0.0246 - acc: 0.9918 - val_loss: 0.0487 - val_acc: 0.9842
Epoch 7/30
6000/6000 [==============================] - 1s - loss: 0.0166 - acc: 0.9947 - val_loss: 0.0305 - val_acc: 0.9903
Epoch 8/30
6000/6000 [==============================] - 1s - loss: 0.0113 - acc: 0.9967 - val_loss: 0.0289 - val_acc: 0.9913
Epoch 9/30
6000/6000 [==============================] - 1s - loss: 0.0131 - acc: 0.9958 - val_loss: 0.0474 - val_acc: 0.9860
Epoch 10/30
6000/6000 [==============================] - 1s - loss: 0.0069 - acc: 0.9982 - val_loss: 0.0516 - val_acc: 0.9855
Epoch 11/30
6000/6000 [==============================] - 1s - loss: 0.0062 - acc: 0.9988 - val_loss: 0.0357 - val_acc: 0.9887
Epoch 12/30
6000/6000 [==============================] - 1s - loss: 0.0047 - acc: 0.9987 - val_loss: 0.0534 - val_acc: 0.9855
Epoch 13/30
6000/6000 [==============================] - 1s - loss: 0.0028 - acc: 0.9995 - val_loss: 0.0807 - val_acc: 0.9805
Epoch 14/30
6000/6000 [==============================] - 1s - loss: 0.0040 - acc: 0.9990 - val_loss: 0.0531 - val_acc: 0.9853
Epoch 15/30
6000/6000 [==============================] - 1s - loss: 0.0018 - acc: 0.9997 - val_loss: 0.0431 - val_acc: 0.9888
Epoch 16/30
6000/6000 [==============================] - 1s - loss: 9.6927e-04 - acc: 1.0000 - val_loss: 0.0435 - val_acc: 0.9890
Epoch 17/30
6000/6000 [==============================] - 1s - loss: 0.0015 - acc: 0.9995 - val_loss: 0.0563 - val_acc: 0.9870
Epoch 18/30
6000/6000 [==============================] - 1s - loss: 6.1235e-04 - acc: 1.0000 - val_loss: 0.0552 - val_acc: 0.9880
Epoch 19/30
6000/6000 [==============================] - 1s - loss: 3.5288e-04 - acc: 1.0000 - val_loss: 0.0570 - val_acc: 0.9875
Epoch 20/30
6000/6000 [==============================] - 1s - loss: 3.6509e-04 - acc: 1.0000 - val_loss: 0.0738 - val_acc: 0.9855
Epoch 21/30
6000/6000 [==============================] - 1s - loss: 3.7298e-04 - acc: 1.0000 - val_loss: 0.0620 - val_acc: 0.9878
Epoch 22/30
6000/6000 [==============================] - 1s - loss: 2.2176e-04 - acc: 1.0000 - val_loss: 0.0499 - val_acc: 0.9892
Epoch 23/30
6000/6000 [==============================] - 1s - loss: 3.6705e-04 - acc: 1.0000 - val_loss: 0.0571 - val_acc: 0.9893
Epoch 24/30
6000/6000 [==============================] - 1s - loss: 3.1907e-04 - acc: 1.0000 - val_loss: 0.0595 - val_acc: 0.9892
Epoch 25/30
6000/6000 [==============================] - 1s - loss: 1.5169e-04 - acc: 1.0000 - val_loss: 0.0616 - val_acc: 0.9885
Epoch 26/30
6000/6000 [==============================] - 1s - loss: 2.2348e-04 - acc: 1.0000 - val_loss: 0.0629 - val_acc: 0.9882
Epoch 27/30
6000/6000 [==============================] - 1s - loss: 1.4265e-04 - acc: 1.0000 - val_loss: 0.0605 - val_acc: 0.9890
Epoch 28/30
6000/6000 [==============================] - 1s - loss: 3.1081e-04 - acc: 1.0000 - val_loss: 0.0660 - val_acc: 0.9877
Epoch 29/30
6000/6000 [==============================] - 1s - loss: 1.3303e-04 - acc: 1.0000 - val_loss: 0.0621 - val_acc: 0.9885
Epoch 30/30
6000/6000 [==============================] - 1s - loss: 1.9673e-04 - acc: 1.0000 - val_loss: 0.0558 - val_acc: 0.9900
Test score: 0.0558245769374
Test accuracy: 0.99
CPU times: user 1min 44s, sys: 1.6 s, total: 1min 46s
Wall time: 54.8 s

In [31]:
#Function for constructing the multi-layer perceptron
# 3 Hidden Layers
def build_layer_3(nb_epoch,X_train,Y_train):
    
    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))
    
    model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

    model.fit(X_train, Y_train, batch_size=128, nb_epoch=nb_epoch,verbose=1,
              validation_data=(X_test, Y_test))
          

    score = model.evaluate(X_test, Y_test, verbose=0)
    print('Test score:', score[0])
    print('Test accuracy:', score[1])

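Since build_layer_1, build_layer_2 and build_layer_3 differ only in the number of hidden blocks, a single parameterised builder (a sketch, not part of the original notebook) expresses the same comparison more compactly:

def build_layer_n(n_hidden, nb_epoch, X_train, Y_train):
    #n_hidden identical Dense(512) + ReLU + Dropout blocks, then a softmax output layer
    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    model.add(Activation('relu'))
    model.add(Dropout(0.2))
    for _ in range(n_hidden - 1):
        model.add(Dense(512))
        model.add(Activation('relu'))
        model.add(Dropout(0.2))
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))

    model.compile(loss='categorical_crossentropy', optimizer='rmsprop',
                  metrics=['accuracy'])
    model.fit(X_train, Y_train, batch_size=128, nb_epoch=nb_epoch, verbose=1,
              validation_data=(X_test, Y_test))

    score = model.evaluate(X_test, Y_test, verbose=0)
    print('Test score:', score[0])
    print('Test accuracy:', score[1])

#e.g. build_layer_n(3, 10, rotated_X_train, rotated_Y_train) reproduces the 3-hidden-layer setup
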
In [32]:
%%time

build_layer_3(1,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/1
2000/2000 [==============================] - 0s - loss: 0.4082 - acc: 0.8380 - val_loss: 0.1264 - val_acc: 0.9617
Test score: 0.126446414361
Test accuracy: 0.961666666667
CPU times: user 4.94 s, sys: 48 ms, total: 4.99 s
Wall time: 3.36 s

In [33]:
%%time

build_layer_3(1,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/1
6000/6000 [==============================] - 1s - loss: 0.2440 - acc: 0.8995 - val_loss: 0.2011 - val_acc: 0.9278
Test score: 0.201079314381
Test accuracy: 0.927833333333
CPU times: user 6.78 s, sys: 120 ms, total: 6.9 s
Wall time: 4.1 s

In [34]:
%%time

build_layer_3(5,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/5
2000/2000 [==============================] - 0s - loss: 0.4085 - acc: 0.8165 - val_loss: 0.1072 - val_acc: 0.9667
Epoch 2/5
2000/2000 [==============================] - 0s - loss: 0.1114 - acc: 0.9620 - val_loss: 0.0614 - val_acc: 0.9763
Epoch 3/5
2000/2000 [==============================] - 1s - loss: 0.0597 - acc: 0.9800 - val_loss: 0.2765 - val_acc: 0.9078
Epoch 4/5
2000/2000 [==============================] - 1s - loss: 0.0629 - acc: 0.9760 - val_loss: 0.0876 - val_acc: 0.9708
Epoch 5/5
2000/2000 [==============================] - 0s - loss: 0.0424 - acc: 0.9850 - val_loss: 0.0331 - val_acc: 0.9870
Test score: 0.0331381960697
Test accuracy: 0.987
CPU times: user 12.9 s, sys: 144 ms, total: 13 s
Wall time: 7.14 s

In [35]:
%%time

build_layer_3(5,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/5
6000/6000 [==============================] - 2s - loss: 0.2501 - acc: 0.8952 - val_loss: 0.1598 - val_acc: 0.9417
Epoch 2/5
6000/6000 [==============================] - 2s - loss: 0.0897 - acc: 0.9692 - val_loss: 0.0754 - val_acc: 0.9735
Epoch 3/5
6000/6000 [==============================] - 2s - loss: 0.0594 - acc: 0.9788 - val_loss: 0.0585 - val_acc: 0.9813
Epoch 4/5
6000/6000 [==============================] - 2s - loss: 0.0393 - acc: 0.9850 - val_loss: 0.0376 - val_acc: 0.9870
Epoch 5/5
6000/6000 [==============================] - 2s - loss: 0.0296 - acc: 0.9907 - val_loss: 0.0654 - val_acc: 0.9813
Test score: 0.0653600090276
Test accuracy: 0.981333333333
CPU times: user 25 s, sys: 240 ms, total: 25.3 s
Wall time: 13.9 s

In [36]:
%%time

build_layer_3(10,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/10
2000/2000 [==============================] - 1s - loss: 0.3768 - acc: 0.8190 - val_loss: 0.0626 - val_acc: 0.9803
Epoch 2/10
2000/2000 [==============================] - 1s - loss: 0.0973 - acc: 0.9660 - val_loss: 0.0868 - val_acc: 0.9703
Epoch 3/10
2000/2000 [==============================] - 1s - loss: 0.0765 - acc: 0.9730 - val_loss: 0.0426 - val_acc: 0.9857
Epoch 4/10
2000/2000 [==============================] - 1s - loss: 0.0425 - acc: 0.9890 - val_loss: 0.0583 - val_acc: 0.9783
Epoch 5/10
2000/2000 [==============================] - 1s - loss: 0.0871 - acc: 0.9705 - val_loss: 0.0424 - val_acc: 0.9825
Epoch 6/10
2000/2000 [==============================] - 1s - loss: 0.0277 - acc: 0.9895 - val_loss: 0.0317 - val_acc: 0.9875
Epoch 7/10
2000/2000 [==============================] - 1s - loss: 0.0149 - acc: 0.9965 - val_loss: 0.0345 - val_acc: 0.9883
Epoch 8/10
2000/2000 [==============================] - 1s - loss: 0.0126 - acc: 0.9965 - val_loss: 0.0443 - val_acc: 0.9850
Epoch 9/10
2000/2000 [==============================] - 1s - loss: 0.0160 - acc: 0.9945 - val_loss: 0.0352 - val_acc: 0.9900
Epoch 10/10
2000/2000 [==============================] - 1s - loss: 0.0228 - acc: 0.9915 - val_loss: 0.0490 - val_acc: 0.9840
Test score: 0.0490468299028
Test accuracy: 0.984
CPU times: user 24.7 s, sys: 276 ms, total: 25 s
Wall time: 13.5 s

In [37]:
%%time

build_layer_3(10,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/10
6000/6000 [==============================] - 1s - loss: 0.2291 - acc: 0.9023 - val_loss: 0.0883 - val_acc: 0.9715
Epoch 2/10
6000/6000 [==============================] - 1s - loss: 0.0810 - acc: 0.9732 - val_loss: 0.0742 - val_acc: 0.9760
Epoch 3/10
6000/6000 [==============================] - 1s - loss: 0.0636 - acc: 0.9778 - val_loss: 0.0582 - val_acc: 0.9797
Epoch 4/10
6000/6000 [==============================] - 1s - loss: 0.0406 - acc: 0.9863 - val_loss: 0.0206 - val_acc: 0.9925
Epoch 5/10
6000/6000 [==============================] - 1s - loss: 0.0307 - acc: 0.9898 - val_loss: 0.0462 - val_acc: 0.9847
Epoch 6/10
6000/6000 [==============================] - 1s - loss: 0.0243 - acc: 0.9928 - val_loss: 0.0581 - val_acc: 0.9825
Epoch 7/10
6000/6000 [==============================] - 1s - loss: 0.0202 - acc: 0.9915 - val_loss: 0.0582 - val_acc: 0.9837
Epoch 8/10
6000/6000 [==============================] - 1s - loss: 0.0119 - acc: 0.9952 - val_loss: 0.2725 - val_acc: 0.9412
Epoch 9/10
6000/6000 [==============================] - 1s - loss: 0.0098 - acc: 0.9972 - val_loss: 0.0384 - val_acc: 0.9905
Epoch 10/10
6000/6000 [==============================] - 1s - loss: 0.0118 - acc: 0.9960 - val_loss: 0.0572 - val_acc: 0.9852
Test score: 0.0572001973776
Test accuracy: 0.985166666667
CPU times: user 41.7 s, sys: 420 ms, total: 42.1 s
Wall time: 21.9 s

In [38]:
%%time

build_layer_3(30,X_train,Y_train)


Train on 2000 samples, validate on 6000 samples
Epoch 1/30
2000/2000 [==============================] - 0s - loss: 0.4505 - acc: 0.8350 - val_loss: 0.1202 - val_acc: 0.9683
Epoch 2/30
2000/2000 [==============================] - 0s - loss: 0.0997 - acc: 0.9690 - val_loss: 0.0584 - val_acc: 0.9807
Epoch 3/30
2000/2000 [==============================] - 0s - loss: 0.0750 - acc: 0.9770 - val_loss: 0.0790 - val_acc: 0.9715
Epoch 4/30
2000/2000 [==============================] - 0s - loss: 0.0546 - acc: 0.9815 - val_loss: 0.1913 - val_acc: 0.9347
Epoch 5/30
2000/2000 [==============================] - 0s - loss: 0.0573 - acc: 0.9810 - val_loss: 0.0345 - val_acc: 0.9875
Epoch 6/30
2000/2000 [==============================] - 1s - loss: 0.0332 - acc: 0.9880 - val_loss: 0.0325 - val_acc: 0.9880
Epoch 7/30
2000/2000 [==============================] - 1s - loss: 0.0220 - acc: 0.9925 - val_loss: 0.0365 - val_acc: 0.9875
Epoch 8/30
2000/2000 [==============================] - 1s - loss: 0.0098 - acc: 0.9965 - val_loss: 0.1293 - val_acc: 0.9645
Epoch 9/30
2000/2000 [==============================] - 1s - loss: 0.0092 - acc: 0.9985 - val_loss: 0.1057 - val_acc: 0.9733
Epoch 10/30
2000/2000 [==============================] - 1s - loss: 0.0058 - acc: 0.9975 - val_loss: 0.0569 - val_acc: 0.9847
Epoch 11/30
2000/2000 [==============================] - 1s - loss: 0.0123 - acc: 0.9950 - val_loss: 0.0467 - val_acc: 0.9853
Epoch 12/30
2000/2000 [==============================] - 1s - loss: 0.0057 - acc: 0.9985 - val_loss: 0.0896 - val_acc: 0.9782
Epoch 13/30
2000/2000 [==============================] - 1s - loss: 0.0020 - acc: 1.0000 - val_loss: 0.0692 - val_acc: 0.9840
Epoch 14/30
2000/2000 [==============================] - 1s - loss: 9.3738e-04 - acc: 1.0000 - val_loss: 0.0821 - val_acc: 0.9820
Epoch 15/30
2000/2000 [==============================] - 1s - loss: 7.6186e-04 - acc: 1.0000 - val_loss: 0.0693 - val_acc: 0.9838
Epoch 16/30
2000/2000 [==============================] - 1s - loss: 3.8493e-04 - acc: 1.0000 - val_loss: 0.0903 - val_acc: 0.9828
Epoch 17/30
2000/2000 [==============================] - 1s - loss: 5.3016e-04 - acc: 1.0000 - val_loss: 0.0818 - val_acc: 0.9835
Epoch 18/30
2000/2000 [==============================] - 1s - loss: 3.8072e-04 - acc: 1.0000 - val_loss: 0.1057 - val_acc: 0.9812
Epoch 19/30
2000/2000 [==============================] - 1s - loss: 8.1995e-04 - acc: 1.0000 - val_loss: 0.1371 - val_acc: 0.9755
Epoch 20/30
2000/2000 [==============================] - 1s - loss: 5.5471e-04 - acc: 1.0000 - val_loss: 0.0864 - val_acc: 0.9818
Epoch 21/30
2000/2000 [==============================] - 1s - loss: 2.4686e-04 - acc: 1.0000 - val_loss: 0.1016 - val_acc: 0.9818
Epoch 22/30
2000/2000 [==============================] - 1s - loss: 2.3794e-04 - acc: 1.0000 - val_loss: 0.0870 - val_acc: 0.9833
Epoch 23/30
2000/2000 [==============================] - 1s - loss: 1.4427e-04 - acc: 1.0000 - val_loss: 0.0954 - val_acc: 0.9823
Epoch 24/30
2000/2000 [==============================] - 1s - loss: 0.0026 - acc: 0.9995 - val_loss: 0.0973 - val_acc: 0.9817
Epoch 25/30
2000/2000 [==============================] - 1s - loss: 1.9721e-04 - acc: 1.0000 - val_loss: 0.0919 - val_acc: 0.9825
Epoch 26/30
2000/2000 [==============================] - 1s - loss: 1.7058e-04 - acc: 1.0000 - val_loss: 0.0990 - val_acc: 0.9822
Epoch 27/30
2000/2000 [==============================] - 1s - loss: 0.0013 - acc: 0.9990 - val_loss: 0.0868 - val_acc: 0.9818
Epoch 28/30
2000/2000 [==============================] - 1s - loss: 1.8148e-04 - acc: 1.0000 - val_loss: 0.0952 - val_acc: 0.9822
Epoch 29/30
2000/2000 [==============================] - 1s - loss: 1.8259e-04 - acc: 1.0000 - val_loss: 0.1064 - val_acc: 0.9817
Epoch 30/30
2000/2000 [==============================] - 1s - loss: 7.5019e-04 - acc: 1.0000 - val_loss: 0.0746 - val_acc: 0.9843
Test score: 0.0745642485984
Test accuracy: 0.984333333333
CPU times: user 1min 6s, sys: 840 ms, total: 1min 7s
Wall time: 34.5 s

In [39]:
%%time

build_layer_3(30,rotated_X_train,rotated_Y_train)


Train on 6000 samples, validate on 6000 samples
Epoch 1/30
6000/6000 [==============================] - 1s - loss: 0.2422 - acc: 0.9018 - val_loss: 0.1862 - val_acc: 0.9260
Epoch 2/30
6000/6000 [==============================] - 2s - loss: 0.0901 - acc: 0.9695 - val_loss: 0.0568 - val_acc: 0.9800
Epoch 3/30
6000/6000 [==============================] - 1s - loss: 0.0621 - acc: 0.9773 - val_loss: 0.0240 - val_acc: 0.9912
Epoch 4/30
6000/6000 [==============================] - 2s - loss: 0.0406 - acc: 0.9867 - val_loss: 0.0377 - val_acc: 0.9868
Epoch 5/30
6000/6000 [==============================] - 2s - loss: 0.0321 - acc: 0.9882 - val_loss: 0.0205 - val_acc: 0.9932
Epoch 6/30
6000/6000 [==============================] - 2s - loss: 0.0182 - acc: 0.9932 - val_loss: 0.0695 - val_acc: 0.9828
Epoch 7/30
6000/6000 [==============================] - 2s - loss: 0.0120 - acc: 0.9968 - val_loss: 0.0475 - val_acc: 0.9882
Epoch 8/30
6000/6000 [==============================] - 2s - loss: 0.0124 - acc: 0.9960 - val_loss: 0.0383 - val_acc: 0.9883
Epoch 9/30
6000/6000 [==============================] - 2s - loss: 0.0129 - acc: 0.9955 - val_loss: 0.0386 - val_acc: 0.9908
Epoch 10/30
6000/6000 [==============================] - 2s - loss: 0.0055 - acc: 0.9978 - val_loss: 0.0747 - val_acc: 0.9848
Epoch 11/30
6000/6000 [==============================] - 2s - loss: 0.0081 - acc: 0.9977 - val_loss: 0.0686 - val_acc: 0.9832
Epoch 12/30
6000/6000 [==============================] - 2s - loss: 0.0047 - acc: 0.9987 - val_loss: 0.0289 - val_acc: 0.9927
Epoch 13/30
6000/6000 [==============================] - 2s - loss: 0.0020 - acc: 0.9995 - val_loss: 0.0537 - val_acc: 0.9870
Epoch 14/30
6000/6000 [==============================] - 2s - loss: 0.0014 - acc: 0.9995 - val_loss: 0.0473 - val_acc: 0.9883
Epoch 15/30
6000/6000 [==============================] - 2s - loss: 0.0022 - acc: 0.9993 - val_loss: 0.0662 - val_acc: 0.9870
Epoch 16/30
6000/6000 [==============================] - 2s - loss: 0.0010 - acc: 0.9997 - val_loss: 0.0773 - val_acc: 0.9855
Epoch 17/30
6000/6000 [==============================] - 2s - loss: 5.8606e-04 - acc: 0.9998 - val_loss: 0.0867 - val_acc: 0.9865
Epoch 18/30
6000/6000 [==============================] - 2s - loss: 0.0036 - acc: 0.9987 - val_loss: 0.0573 - val_acc: 0.9882
Epoch 19/30
6000/6000 [==============================] - 2s - loss: 0.0021 - acc: 0.9992 - val_loss: 0.0713 - val_acc: 0.9865
Epoch 20/30
6000/6000 [==============================] - 2s - loss: 2.9991e-04 - acc: 1.0000 - val_loss: 0.0927 - val_acc: 0.9847
Epoch 21/30
6000/6000 [==============================] - 2s - loss: 1.5188e-04 - acc: 1.0000 - val_loss: 0.0781 - val_acc: 0.9872
Epoch 22/30
6000/6000 [==============================] - 2s - loss: 1.0698e-04 - acc: 1.0000 - val_loss: 0.0759 - val_acc: 0.9863
Epoch 23/30
6000/6000 [==============================] - 2s - loss: 1.8544e-04 - acc: 1.0000 - val_loss: 0.0583 - val_acc: 0.9900
Epoch 24/30
6000/6000 [==============================] - 2s - loss: 1.2133e-04 - acc: 1.0000 - val_loss: 0.0849 - val_acc: 0.9850
Epoch 25/30
6000/6000 [==============================] - 2s - loss: 2.4216e-04 - acc: 1.0000 - val_loss: 0.0820 - val_acc: 0.9870
Epoch 26/30
6000/6000 [==============================] - 2s - loss: 9.6081e-04 - acc: 0.9998 - val_loss: 0.0798 - val_acc: 0.9862
Epoch 27/30
6000/6000 [==============================] - 2s - loss: 8.2947e-05 - acc: 1.0000 - val_loss: 0.0702 - val_acc: 0.9875
Epoch 28/30
6000/6000 [==============================] - 2s - loss: 1.6723e-04 - acc: 1.0000 - val_loss: 0.0747 - val_acc: 0.9875
Epoch 29/30
6000/6000 [==============================] - 2s - loss: 1.9794e-04 - acc: 1.0000 - val_loss: 0.0828 - val_acc: 0.9875
Epoch 30/30
6000/6000 [==============================] - 2s - loss: 1.7124e-04 - acc: 1.0000 - val_loss: 0.0672 - val_acc: 0.9897
Test score: 0.0671975688212
Test accuracy: 0.989666666667
CPU times: user 2min 16s, sys: 1.69 s, total: 2min 18s
Wall time: 1min 10s

In [ ]: