In [1]:
import numpy as np
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils


Using Theano backend.

In [2]:
# Load MNIST and flatten each 28x28 image into a 784-dim float32 vector in [0, 1]
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# 60000 training / 10000 test images, normalized from 0-255 pixel intensities
X_train = X_train.reshape(60000, 784).astype('float32') / 255
X_test = X_test.reshape(10000, 784).astype('float32') / 255

In [3]:
# Seed for reproducibility
np.random.seed(1338)

# Draw 6000 random examples (with replacement) from the test data
test_rows = np.random.randint(0, X_test.shape[0], 6000)
X_test = X_test[test_rows]
Y = y_test[test_rows]
# Binarize the test labels: six -> 1, not six -> 0
Y_test = (Y == 6).astype(int)

# Split the training data by digit: the 5918 sixes vs. everything else
six_mask = y_train == 6
X_six = X_train[six_mask]
Y_six = y_train[six_mask]
X_not_six = X_train[~six_mask]
Y_not_six = y_train[~six_mask]

# Keep only 6000 random non-six training examples (balances the classes)
random_rows = np.random.randint(0, X_not_six.shape[0], 6000)
X_not_six = X_not_six[random_rows]
Y_not_six = Y_not_six[random_rows]

In [4]:
# Stack the six and not-six examples into one training set.
# np.concatenate preserves the 2-D (n_samples, 784) shape directly,
# avoiding the flatten-then-manual-reshape that np.append required.
X_train = np.concatenate((X_six, X_not_six), axis=0)

# Concatenate the 1-D label arrays and binarize (six -> 1, not six -> 0)
Y_labels = np.append(Y_six, Y_not_six)
Y_train = (Y_labels == 6).astype(int)

In [5]:
# One-hot encode the 0/1 labels into two-column categorical form
nb_classes = 2
Y_train, Y_test = (np_utils.to_categorical(y, nb_classes)
                   for y in (Y_train, Y_test))

In [6]:
# Training hyperparameters shared by every multi layer perceptron run
batch_size = 128
nb_epoch = 20

# Hidden-layer activation functions to compare against each other
activations = ['relu','tanh','sigmoid']

In [7]:
#Function for constructing the multi layer perceptron
def build_model(activation):
    """Build, train, and evaluate a two-hidden-layer MLP on the six/not-six task.

    Parameters
    ----------
    activation : str
        Name of the Keras activation used for both hidden layers
        (e.g. 'relu', 'tanh', 'sigmoid').

    Returns
    -------
    keras.models.Sequential
        The trained model, after evaluation on the test set.  (Previously the
        model was discarded; returning it is backward-compatible since the
        timing loop ignores the return value.)
    """
    # Re-seed so each activation trains from identical initial weights,
    # making the per-activation comparisons fair
    np.random.seed(1338)

    model = Sequential()
    model.add(Dense(512, input_shape=(784,)))
    model.add(Activation(activation))
    model.add(Dropout(0.2))
    model.add(Dense(512))
    model.add(Activation(activation))
    model.add(Dropout(0.2))
    model.add(Dense(nb_classes))
    # 2-way softmax over six / not-six
    model.add(Activation('softmax'))

    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])

    # Keras 1.x API: epochs are passed as nb_epoch
    model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=1,
              validation_data=(X_test, Y_test))

    score = model.evaluate(X_test, Y_test, verbose=0)
    print('Test score:', score[0])
    print('Test accuracy:', score[1])

    return model

In [8]:
# Train and time one model per activation function.
# %timeit -n1 -r1 executes build_model exactly once (1 loop, 1 repeat),
# since each call trains a full network and is far too slow to repeat.
for i in activations:
    print('ACTIVATION',i,'\n')
    %timeit -n1 -r1 build_model(i)
    print('\n')


ACTIVATION relu 

Train on 11918 samples, validate on 6000 samples
Epoch 1/20
11918/11918 [==============================] - 2s - loss: 0.1884 - acc: 0.9362 - val_loss: 0.0419 - val_acc: 0.9837
Epoch 2/20
11918/11918 [==============================] - 2s - loss: 0.0514 - acc: 0.9827 - val_loss: 0.0459 - val_acc: 0.9815
Epoch 3/20
11918/11918 [==============================] - 2s - loss: 0.0283 - acc: 0.9909 - val_loss: 0.0346 - val_acc: 0.9875
Epoch 4/20
 8192/11918 [===================>..........] - ETA: 0s - loss: 0.0204 - acc: 0.9930
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-8-d8ea9d8e22dc> in <module>()
      1 for i in activations:
      2     print('ACTIVATION',i,'\n')
----> 3     get_ipython().magic('timeit -n1 -r1 build_model(i)')
      4     print('\n')

/home/prajwal/anaconda3/lib/python3.5/site-packages/IPython/core/interactiveshell.py in magic(self, arg_s)
   2161         magic_name, _, magic_arg_s = arg_s.partition(' ')
   2162         magic_name = magic_name.lstrip(prefilter.ESC_MAGIC)
-> 2163         return self.run_line_magic(magic_name, magic_arg_s)
   2164 
   2165     #-------------------------------------------------------------------------

/home/prajwal/anaconda3/lib/python3.5/site-packages/IPython/core/interactiveshell.py in run_line_magic(self, magic_name, line)
   2082                 kwargs['local_ns'] = sys._getframe(stack_depth).f_locals
   2083             with self.builtin_trap:
-> 2084                 result = fn(*args,**kwargs)
   2085             return result
   2086 

<decorator-gen-59> in timeit(self, line, cell)

/home/prajwal/anaconda3/lib/python3.5/site-packages/IPython/core/magic.py in <lambda>(f, *a, **k)
    191     # but it's overkill for just that one bit of state.
    192     def magic_deco(arg):
--> 193         call = lambda f, *a, **k: f(*a, **k)
    194 
    195         if callable(arg):

/home/prajwal/anaconda3/lib/python3.5/site-packages/IPython/core/magics/execution.py in timeit(self, line, cell)
   1044                     break
   1045                 number *= 10
-> 1046         all_runs = timer.repeat(repeat, number)
   1047         best = min(all_runs) / number
   1048 

/home/prajwal/anaconda3/lib/python3.5/timeit.py in repeat(self, repeat, number)
    204         r = []
    205         for i in range(repeat):
--> 206             t = self.timeit(number)
    207             r.append(t)
    208         return r

/home/prajwal/anaconda3/lib/python3.5/site-packages/IPython/core/magics/execution.py in timeit(self, number)
    135         gc.disable()
    136         try:
--> 137             timing = self.inner(it, self.timer)
    138         finally:
    139             if gcold:

<magic-timeit> in inner(_it, _timer)

<ipython-input-7-532e00c72577> in build_model(activation)
     18 
     19     model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch,verbose=1,
---> 20               validation_data=(X_test, Y_test))
     21 
     22 

/home/prajwal/anaconda3/lib/python3.5/site-packages/keras/models.py in fit(self, x, y, batch_size, nb_epoch, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, **kwargs)
    403                               shuffle=shuffle,
    404                               class_weight=class_weight,
--> 405                               sample_weight=sample_weight)
    406 
    407     def evaluate(self, x, y, batch_size=32, verbose=1,

/home/prajwal/anaconda3/lib/python3.5/site-packages/keras/engine/training.py in fit(self, x, y, batch_size, nb_epoch, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight)
   1044                               verbose=verbose, callbacks=callbacks,
   1045                               val_f=val_f, val_ins=val_ins, shuffle=shuffle,
-> 1046                               callback_metrics=callback_metrics)
   1047 
   1048     def evaluate(self, x, y, batch_size=32, verbose=1, sample_weight=None):

/home/prajwal/anaconda3/lib/python3.5/site-packages/keras/engine/training.py in _fit_loop(self, f, ins, out_labels, batch_size, nb_epoch, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics)
    782                 batch_logs['size'] = len(batch_ids)
    783                 callbacks.on_batch_begin(batch_index, batch_logs)
--> 784                 outs = f(ins_batch)
    785                 if type(outs) != list:
    786                     outs = [outs]

/home/prajwal/anaconda3/lib/python3.5/site-packages/keras/backend/theano_backend.py in __call__(self, inputs)
    505     def __call__(self, inputs):
    506         assert type(inputs) in {list, tuple}
--> 507         return self.function(*inputs)
    508 
    509 

/home/prajwal/anaconda3/lib/python3.5/site-packages/theano/compile/function_module.py in __call__(self, *args, **kwargs)
    860         try:
    861             outputs =\
--> 862                 self.fn() if output_subset is None else\
    863                 self.fn(output_subset=output_subset)
    864         except Exception:

KeyboardInterrupt: 

In [ ]:
#%timeit -n1 -r1 build_model()

In [ ]: