In [1]:
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from joblib import Parallel, delayed


Using Theano backend.

In [2]:
#Loading the training and testing data
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255

In [3]:
#Seed for reproducibilty
np.random.seed(1338)

#Selecting 6000 random examples from the test data
test_rows = np.random.randint(0,X_test.shape[0],6000)
X_test = X_test[test_rows]
Y = y_test[test_rows]
#Converting the output to binary classification(Six=1,Not Six=0)
Y_test = Y == 6
Y_test = Y_test.astype(int)

#Selecting the 5918 examples where the output is 6
X_six = X_train[y_train == 6]
Y_six = y_train[y_train == 6]
#Selecting the examples where the output is not 6
X_not_six = X_train[y_train != 6]
Y_not_six = y_train[y_train != 6]

#Selecting 6000 random examples from the data that contains only the data where the output is not 6
random_rows = np.random.randint(0,X_six.shape[0],6000)
X_not_six = X_not_six[random_rows]
Y_not_six = Y_not_six[random_rows]

In [4]:
#Appending the data with output as 6 and data with output as not six
X_train = np.append(X_six,X_not_six)
#Reshaping the appended data to appropraite form
X_train = X_train.reshape(X_six.shape[0] + X_not_six.shape[0], 784)
#Appending the labels and converting the labels to binary classification(Six=1,Not Six=0)
Y_labels = np.append(Y_six,Y_not_six)
Y_train = Y_labels == 6 
Y_train = Y_train.astype(int)

In [5]:
#Converting the classes to its binary categorical form
nb_classes = 2
Y_train = np_utils.to_categorical(Y_train, nb_classes)
Y_test = np_utils.to_categorical(Y_test, nb_classes)

In [6]:
#Initializing the values for the convolution neural network
batch_size = 128
nb_epoch = 20

In [7]:
#Buliding the first layer (Dense Layer) of the network
def build_layer_1():
    
    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    return model

In [8]:
#Adding dropout to the model(network)
def add_droput(model, x):
    
    model.add(Dropout(x))
    return model

In [9]:
#Adding a dense layer to the model(network)
def add_dense(model,nodes):
    
    model.add(Dense(nodes))
    return model

In [10]:
#Adding a activation layer to the model(network)
def add_activation(model, activation):
    
    model.add(Activation(activation))
    return model

In [11]:
#The final step in building the model, compiling and fitting the model to the data
def build_layer_final(model):
    
    model.add(Dense(nb_classes))
    model.add(Activation('softmax'))
    
    compile_fit_score_model(model)

In [12]:
#Compiling, fitting and scoring the model
def compile_fit_score_model(model_final):
    
    model_final.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
    
    model_final.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch,verbose=1,
              validation_data=(X_test, Y_test))
    
    score = model_final.evaluate(X_test, Y_test, verbose=0)
    print('Test score:', score[0])
    print('Test accuracy:', score[1])

In [13]:
#1.Initial Dense layer
def build_layers():
    np.random.seed(1338)
    
    model = build_layer_1()
    model = add_activation(model, 'relu')
    build_layer_final(model)

In [14]:
#1.Initial dense layer, 2.Dropout(0.2), 3.Dense Layer, 4.Dropout(0.2)
def build_layers_multiple():
    np.random.seed(1338)
    
    model = build_layer_1()
    model = add_activation(model, 'relu')
    model = add_droput(model,0.2)

    model = add_dense(model,512)
    model = add_activation(model, 'relu')
    model = add_droput(model,0.2)
    
    build_layer_final(model)

In [15]:
%timeit -n1 -r1 build_layers()


Train on 11918 samples, validate on 6000 samples
Epoch 1/20
11918/11918 [==============================] - 1s - loss: 0.1224 - acc: 0.9550 - val_loss: 0.0492 - val_acc: 0.9817
Epoch 2/20
11918/11918 [==============================] - 1s - loss: 0.0461 - acc: 0.9856 - val_loss: 0.0368 - val_acc: 0.9848
Epoch 3/20
11918/11918 [==============================] - 0s - loss: 0.0256 - acc: 0.9931 - val_loss: 0.0308 - val_acc: 0.9888
Epoch 4/20
11918/11918 [==============================] - 0s - loss: 0.0163 - acc: 0.9954 - val_loss: 0.0261 - val_acc: 0.9913
Epoch 5/20
11918/11918 [==============================] - 1s - loss: 0.0116 - acc: 0.9970 - val_loss: 0.0531 - val_acc: 0.9820
Epoch 6/20
11918/11918 [==============================] - 1s - loss: 0.0070 - acc: 0.9986 - val_loss: 0.0230 - val_acc: 0.9922
Epoch 7/20
11918/11918 [==============================] - 1s - loss: 0.0044 - acc: 0.9996 - val_loss: 0.0306 - val_acc: 0.9890
Epoch 8/20
11918/11918 [==============================] - 1s - loss: 0.0034 - acc: 0.9994 - val_loss: 0.0301 - val_acc: 0.9887
Epoch 9/20
11918/11918 [==============================] - 1s - loss: 0.0024 - acc: 0.9999 - val_loss: 0.0320 - val_acc: 0.9878
Epoch 10/20
11918/11918 [==============================] - 1s - loss: 0.0017 - acc: 1.0000 - val_loss: 0.0409 - val_acc: 0.9870
Epoch 11/20
11918/11918 [==============================] - 1s - loss: 0.0013 - acc: 1.0000 - val_loss: 0.0376 - val_acc: 0.9875
Epoch 12/20
11918/11918 [==============================] - 1s - loss: 0.0011 - acc: 1.0000 - val_loss: 0.0417 - val_acc: 0.9873
Epoch 13/20
11918/11918 [==============================] - 1s - loss: 8.7534e-04 - acc: 1.0000 - val_loss: 0.0437 - val_acc: 0.9872
Epoch 14/20
11918/11918 [==============================] - 1s - loss: 7.7118e-04 - acc: 1.0000 - val_loss: 0.0360 - val_acc: 0.9882
Epoch 15/20
11918/11918 [==============================] - 1s - loss: 6.7760e-04 - acc: 1.0000 - val_loss: 0.0272 - val_acc: 0.9918
Epoch 16/20
11918/11918 [==============================] - 1s - loss: 6.4711e-04 - acc: 1.0000 - val_loss: 0.0352 - val_acc: 0.9898
Epoch 17/20
11918/11918 [==============================] - 1s - loss: 5.8935e-04 - acc: 1.0000 - val_loss: 0.0456 - val_acc: 0.9875
Epoch 18/20
11918/11918 [==============================] - 1s - loss: 4.8157e-04 - acc: 1.0000 - val_loss: 0.0444 - val_acc: 0.9870
Epoch 19/20
11918/11918 [==============================] - 1s - loss: 4.4039e-04 - acc: 1.0000 - val_loss: 0.0469 - val_acc: 0.9868
Epoch 20/20
11918/11918 [==============================] - 1s - loss: 4.0436e-04 - acc: 1.0000 - val_loss: 0.0478 - val_acc: 0.9867
Test score: 0.0477670164979
Test accuracy: 0.986666666667
1 loop, best of 1: 23.8 s per loop

In [16]:
%timeit -n1 -r1 build_layers_multiple()


Train on 11918 samples, validate on 6000 samples
Epoch 1/20
11918/11918 [==============================] - 2s - loss: 0.1219 - acc: 0.9537 - val_loss: 0.0598 - val_acc: 0.9760
Epoch 2/20
11918/11918 [==============================] - 2s - loss: 0.0389 - acc: 0.9867 - val_loss: 0.0251 - val_acc: 0.9928
Epoch 3/20
11918/11918 [==============================] - 2s - loss: 0.0209 - acc: 0.9930 - val_loss: 0.0248 - val_acc: 0.9915
Epoch 4/20
11918/11918 [==============================] - 2s - loss: 0.0135 - acc: 0.9961 - val_loss: 0.0266 - val_acc: 0.9912
Epoch 5/20
11918/11918 [==============================] - 2s - loss: 0.0091 - acc: 0.9971 - val_loss: 0.0324 - val_acc: 0.9898
Epoch 6/20
11918/11918 [==============================] - 2s - loss: 0.0059 - acc: 0.9982 - val_loss: 0.0500 - val_acc: 0.9853
Epoch 7/20
11918/11918 [==============================] - 2s - loss: 0.0035 - acc: 0.9988 - val_loss: 0.0474 - val_acc: 0.9878
Epoch 8/20
11918/11918 [==============================] - 2s - loss: 0.0018 - acc: 0.9995 - val_loss: 0.0482 - val_acc: 0.9892
Epoch 9/20
11918/11918 [==============================] - 2s - loss: 0.0018 - acc: 0.9997 - val_loss: 0.0443 - val_acc: 0.9895
Epoch 10/20
11918/11918 [==============================] - 2s - loss: 0.0011 - acc: 0.9997 - val_loss: 0.0406 - val_acc: 0.9903
Epoch 11/20
11918/11918 [==============================] - 2s - loss: 4.7290e-04 - acc: 0.9999 - val_loss: 0.0491 - val_acc: 0.9885
Epoch 12/20
11918/11918 [==============================] - 2s - loss: 2.8724e-04 - acc: 1.0000 - val_loss: 0.0413 - val_acc: 0.9902
Epoch 13/20
11918/11918 [==============================] - 2s - loss: 3.5255e-04 - acc: 0.9999 - val_loss: 0.0499 - val_acc: 0.9880
Epoch 14/20
11918/11918 [==============================] - 2s - loss: 2.0966e-04 - acc: 1.0000 - val_loss: 0.0475 - val_acc: 0.9890
Epoch 15/20
11918/11918 [==============================] - 3s - loss: 1.6114e-04 - acc: 1.0000 - val_loss: 0.0533 - val_acc: 0.9883
Epoch 16/20
11918/11918 [==============================] - 2s - loss: 1.0213e-04 - acc: 1.0000 - val_loss: 0.0547 - val_acc: 0.9885
Epoch 17/20
11918/11918 [==============================] - 2s - loss: 1.0450e-04 - acc: 1.0000 - val_loss: 0.0565 - val_acc: 0.9888
Epoch 18/20
11918/11918 [==============================] - 2s - loss: 1.1771e-04 - acc: 1.0000 - val_loss: 0.0523 - val_acc: 0.9890
Epoch 19/20
11918/11918 [==============================] - 2s - loss: 1.4454e-04 - acc: 1.0000 - val_loss: 0.0558 - val_acc: 0.9888
Epoch 20/20
11918/11918 [==============================] - 2s - loss: 9.7533e-05 - acc: 1.0000 - val_loss: 0.0549 - val_acc: 0.9890
Test score: 0.0549472076656
Test accuracy: 0.989
1 loop, best of 1: 51.5 s per loop