In [1]:
# Only use this line to show in jupyter
%matplotlib inline

# from __future__ import print_function

import numpy as np
np.random.seed(1337)  # for reproducibility
import matplotlib.pyplot as plt


---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
<ipython-input-1-110cb0eb3f6d> in <module>()
      1 # Only use this line to show in jupyter
----> 2 get_ipython().magic(u'matplotlib inline')

[... IPython-internal frames elided ...]

ImportError: No module named matplotlib

In [2]:
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.utils import np_utils


Using Theano backend.
Batch size = the number of training examples in one forward/backward pass. The larger the batch size, the more memory you will need. If you have 1,000 training examples and your batch size is 500, then it will take 2 iterations to complete 1 epoch. The batch size also affects how 'noisy' the gradient is and therefore the path the minimization follows; if you decrease the batch size, you should probably decrease the learning rate as well and train for more iterations. Here is a very interesting paper on the subject: http://axon.cs.byu.edu/papers/Wilson.nn03.batch.pdf

One epoch = one forward pass and one backward pass over all of the training examples.

The classes are the digits 0 to 9.
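A quick arithmetic sketch of the relation between batch size, iterations, and epochs (using the 1,000/500 example above and the 60,000-sample, batch-size-128 MNIST setup used later in this notebook; purely illustrative):

def iterations_per_epoch(n_samples, batch_size):
    # number of forward/backward passes needed to see every training example once
    return (n_samples + batch_size - 1) // batch_size

print(iterations_per_epoch(1000, 500))   # 2 iterations = 1 epoch
print(iterations_per_epoch(60000, 128))  # 469 iterations per epoch on MNIST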

In [3]:
batch_size = 128
nb_classes = 10
nb_epoch = 12
Input image dimensions are set in the next cell: nb_filters is the number of convolutional filters to use, nb_pool is the size of the pooling area for max pooling, and nb_conv is the convolution kernel size. A small sketch right after that cell shows how these choices determine the feature-map sizes.

In [4]:
img_rows, img_cols = 28, 28  
nb_filters = 32
nb_pool = 2
nb_conv = 3
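Given these values, you can already work out the feature-map sizes that model.summary() will report further down: a 'valid' 3x3 convolution shrinks each spatial dimension by 2, and 2x2 max pooling halves it. A minimal sketch of that arithmetic:

# 'valid' convolution: output = input - kernel + 1; max pooling divides by the pool size
conv1_size = img_rows - nb_conv + 1        # 28 -> 26
conv2_size = conv1_size - nb_conv + 1      # 26 -> 24
pooled_size = conv2_size // nb_pool        # 24 -> 12
flat_size = nb_filters * pooled_size ** 2  # 32 * 12 * 12 = 4608
print(conv1_size, conv2_size, pooled_size, flat_size)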

In [5]:
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()

In [6]:
# Just take a look at the shape of the pictures
print(len(X_train[0]))
print(len(X_train[0][0]))
print(X_train[0])


28
28
[[  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   3  18  18  18 126 136
  175  26 166 255 247 127   0   0   0   0]
 [  0   0   0   0   0   0   0   0  30  36  94 154 170 253 253 253 253 253
  225 172 253 242 195  64   0   0   0   0]
 [  0   0   0   0   0   0   0  49 238 253 253 253 253 253 253 253 253 251
   93  82  82  56  39   0   0   0   0   0]
 [  0   0   0   0   0   0   0  18 219 253 253 253 253 253 198 182 247 241
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0  80 156 107 253 253 205  11   0  43 154
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0  14   1 154 253  90   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0 139 253 190   2   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0  11 190 253  70   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0  35 241 225 160 108   1
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0  81 240 253 253 119
   25   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0  45 186 253 253
  150  27   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0  16  93 252
  253 187   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0 249
  253 249  64   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0  46 130 183 253
  253 207   2   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0  39 148 229 253 253 253
  250 182   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0  24 114 221 253 253 253 253 201
   78   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0  23  66 213 253 253 253 253 198  81   2
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0  18 171 219 253 253 253 253 195  80   9   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0  55 172 226 253 253 253 253 244 133  11   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0 136 253 253 253 212 135 132  16   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0   0
    0   0   0   0   0   0   0   0   0   0]]

In [7]:
plt.imshow(X_train[0], cmap='gray')


Out[7]:
<matplotlib.image.AxesImage at 0x10a200438>

In [8]:
# reshape to (n_samples, 1, rows, cols): channels-first, as expected by the Theano backend
X_train = X_train.reshape(X_train.shape[0], 1, img_rows, img_cols)
X_test = X_test.reshape(X_test.shape[0], 1, img_rows, img_cols)
# convert to float32 and scale pixel values from [0, 255] to [0, 1]
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')


X_train shape: (60000, 1, 28, 28)
60000 train samples
10000 test samples

In [9]:
print(len(X_train[0]))
print(len(X_train[0][0]))
print(X_train[0])


1
28
[[[ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.01176471
    0.07058824  0.07058824  0.07058824  0.49411765  0.53333336  0.68627453
    0.10196079  0.65098041  1.          0.96862745  0.49803922  0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.11764706  0.14117648  0.36862746  0.60392159  0.66666669
    0.99215686  0.99215686  0.99215686  0.99215686  0.99215686  0.88235295
    0.67450982  0.99215686  0.94901961  0.7647059   0.25098041  0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.19215687  0.93333334  0.99215686  0.99215686  0.99215686  0.99215686
    0.99215686  0.99215686  0.99215686  0.99215686  0.98431373  0.36470589
    0.32156864  0.32156864  0.21960784  0.15294118  0.          0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.07058824  0.85882354  0.99215686  0.99215686  0.99215686  0.99215686
    0.99215686  0.7764706   0.71372551  0.96862745  0.94509804  0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.3137255   0.61176473  0.41960785  0.99215686  0.99215686
    0.80392158  0.04313726  0.          0.16862746  0.60392159  0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.05490196  0.00392157  0.60392159  0.99215686
    0.35294119  0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.54509807  0.99215686
    0.74509805  0.00784314  0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.04313726  0.74509805
    0.99215686  0.27450982  0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.13725491
    0.94509804  0.88235295  0.627451    0.42352942  0.00392157  0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.
    0.31764707  0.94117647  0.99215686  0.99215686  0.46666667  0.09803922
    0.          0.          0.          0.          0.          0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.17647059  0.72941178  0.99215686  0.99215686  0.58823532  0.10588235
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.0627451   0.36470589  0.98823529  0.99215686  0.73333335
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.97647059  0.99215686  0.97647059
    0.25098041  0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.18039216  0.50980395  0.71764708  0.99215686  0.99215686  0.81176472
    0.00784314  0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.15294118
    0.58039218  0.89803922  0.99215686  0.99215686  0.99215686  0.98039216
    0.71372551  0.          0.          0.          0.          0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.09411765  0.44705883  0.86666667
    0.99215686  0.99215686  0.99215686  0.99215686  0.78823531  0.30588236
    0.          0.          0.          0.          0.          0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.09019608  0.25882354  0.83529413  0.99215686  0.99215686
    0.99215686  0.99215686  0.7764706   0.31764707  0.00784314  0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.        ]
  [ 0.          0.          0.          0.          0.          0.
    0.07058824  0.67058825  0.85882354  0.99215686  0.99215686  0.99215686
    0.99215686  0.7647059   0.3137255   0.03529412  0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.21568628  0.67450982
    0.88627452  0.99215686  0.99215686  0.99215686  0.99215686  0.95686275
    0.52156866  0.04313726  0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.53333336  0.99215686
    0.99215686  0.99215686  0.83137256  0.52941179  0.51764709  0.0627451
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.        ]
  [ 0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.
    0.          0.          0.          0.          0.          0.          0.        ]]]

In [10]:
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)

In [11]:
print(y_train[0])
print(Y_train[0])


5
[ 0.  0.  0.  0.  0.  1.  0.  0.  0.  0.]
Useful Keras documentation links:
- Sequential: http://keras.io/models/sequential/#the-sequential-model-api
- Convolution2D: http://keras.io/layers/convolutional/#convolution2d
- Activation: http://keras.io/layers/core/#activation | http://keras.io/activations/
- MaxPooling2D: http://keras.io/layers/convolutional/#maxpooling2d
- Dropout: http://keras.io/layers/core/#dropout
- Flatten: http://keras.io/layers/core/#flatten
- Dense: http://keras.io/layers/core/#dense
- Compile: http://keras.io/models/sequential/#sequential-model-methods

In [12]:
model = Sequential()
model.add(Convolution2D(nb_filters, nb_conv, nb_conv,
                        border_mode='valid',
                        input_shape=(1, img_rows, img_cols)))
model.add(Activation('relu'))
model.add(Convolution2D(nb_filters, nb_conv, nb_conv))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
model.add(Dropout(0.25))

model.add(Flatten())
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy',
              optimizer='adadelta',
              metrics=['accuracy'])
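The cell above uses the Keras 1 API (Convolution2D, border_mode, and later nb_epoch). If you are on a newer Keras 2.x install, the equivalent model would look roughly like the sketch below; this is an assumption about your environment, not part of the original notebook. With the default TensorFlow backend, Keras 2 expects channels-last input, so X_train would be reshaped to (60000, 28, 28, 1) instead of (60000, 1, 28, 28), and fit() takes epochs= rather than nb_epoch=.

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dense, Dropout, Activation, Flatten

model2 = Sequential()
model2.add(Conv2D(nb_filters, (nb_conv, nb_conv), padding='valid',
                  input_shape=(img_rows, img_cols, 1)))  # channels-last input
model2.add(Activation('relu'))
model2.add(Conv2D(nb_filters, (nb_conv, nb_conv)))
model2.add(Activation('relu'))
model2.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
model2.add(Dropout(0.25))
model2.add(Flatten())
model2.add(Dense(128))
model2.add(Activation('relu'))
model2.add(Dropout(0.5))
model2.add(Dense(nb_classes))
model2.add(Activation('softmax'))
model2.compile(loss='categorical_crossentropy', optimizer='adadelta',
               metrics=['accuracy'])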

In [13]:
# Let's explore the model
# http://keras.io/models/about-keras-models/
print(model.summary())  # summary() prints the table itself and returns None, hence the trailing "None" below


____________________________________________________________________________________________________
Layer (type)                       Output Shape        Param #     Connected to                     
====================================================================================================
convolution2d_1 (Convolution2D)    (None, 32, 26, 26)  320         convolution2d_input_1[0][0]      
____________________________________________________________________________________________________
activation_1 (Activation)          (None, 32, 26, 26)  0           convolution2d_1[0][0]            
____________________________________________________________________________________________________
convolution2d_2 (Convolution2D)    (None, 32, 24, 24)  9248        activation_1[0][0]               
____________________________________________________________________________________________________
activation_2 (Activation)          (None, 32, 24, 24)  0           convolution2d_2[0][0]            
____________________________________________________________________________________________________
maxpooling2d_1 (MaxPooling2D)      (None, 32, 12, 12)  0           activation_2[0][0]               
____________________________________________________________________________________________________
dropout_1 (Dropout)                (None, 32, 12, 12)  0           maxpooling2d_1[0][0]             
____________________________________________________________________________________________________
flatten_1 (Flatten)                (None, 4608)        0           dropout_1[0][0]                  
____________________________________________________________________________________________________
dense_1 (Dense)                    (None, 128)         589952      flatten_1[0][0]                  
____________________________________________________________________________________________________
activation_3 (Activation)          (None, 128)         0           dense_1[0][0]                    
____________________________________________________________________________________________________
dropout_2 (Dropout)                (None, 128)         0           activation_3[0][0]               
____________________________________________________________________________________________________
dense_2 (Dense)                    (None, 10)          1290        dropout_2[0][0]                  
____________________________________________________________________________________________________
activation_4 (Activation)          (None, 10)          0           dense_2[0][0]                    
====================================================================================================
Total params: 600810
____________________________________________________________________________________________________
None
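The "Param #" column can be checked by hand: a convolution layer has kernel_height x kernel_width x input_channels weights plus one bias per filter, and a dense layer has inputs x outputs weights plus one bias per output. A quick sanity check of the numbers above:

conv1_params = (nb_conv * nb_conv * 1 + 1) * nb_filters             # (3*3*1 + 1) * 32 = 320
conv2_params = (nb_conv * nb_conv * nb_filters + 1) * nb_filters    # (3*3*32 + 1) * 32 = 9248
dense1_params = (4608 + 1) * 128                                     # 589952
dense2_params = (128 + 1) * nb_classes                               # 1290
print(conv1_params + conv2_params + dense1_params + dense2_params)  # 600810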

In [14]:
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch,
          verbose=1, validation_data=(X_test, Y_test))


Train on 60000 samples, validate on 10000 samples
Epoch 1/12
60000/60000 [==============================] - 114s - loss: 0.2407 - acc: 0.9255 - val_loss: 0.0586 - val_acc: 0.9826
Epoch 2/12
60000/60000 [==============================] - 117s - loss: 0.0903 - acc: 0.9731 - val_loss: 0.0413 - val_acc: 0.9864
Epoch 3/12
60000/60000 [==============================] - 122s - loss: 0.0666 - acc: 0.9802 - val_loss: 0.0335 - val_acc: 0.9898
Epoch 4/12
60000/60000 [==============================] - 117s - loss: 0.0573 - acc: 0.9829 - val_loss: 0.0324 - val_acc: 0.9894
Epoch 5/12
60000/60000 [==============================] - 118s - loss: 0.0503 - acc: 0.9847 - val_loss: 0.0280 - val_acc: 0.9915
Epoch 6/12
60000/60000 [==============================] - 117s - loss: 0.0444 - acc: 0.9864 - val_loss: 0.0289 - val_acc: 0.9912
Epoch 7/12
60000/60000 [==============================] - 119s - loss: 0.0388 - acc: 0.9881 - val_loss: 0.0289 - val_acc: 0.9914
Epoch 8/12
60000/60000 [==============================] - 118s - loss: 0.0369 - acc: 0.9886 - val_loss: 0.0270 - val_acc: 0.9916
Epoch 9/12
60000/60000 [==============================] - 115s - loss: 0.0326 - acc: 0.9899 - val_loss: 0.0275 - val_acc: 0.9920
Epoch 10/12
60000/60000 [==============================] - 114s - loss: 0.0312 - acc: 0.9904 - val_loss: 0.0303 - val_acc: 0.9902
Epoch 11/12
60000/60000 [==============================] - 117s - loss: 0.0289 - acc: 0.9909 - val_loss: 0.0254 - val_acc: 0.9929
Epoch 12/12
60000/60000 [==============================] - 116s - loss: 0.0251 - acc: 0.9920 - val_loss: 0.0283 - val_acc: 0.9920
Out[14]:
<keras.callbacks.History at 0x10cfc50b8>

In [15]:
score = model.evaluate(X_test, Y_test, verbose=0)

In [16]:
print('Test score:', score[0])
print('Test accuracy:', score[1])


Test score: 0.0283172680599
Test accuracy: 0.992

In [21]:
res = model.predict_classes(X_test[100:109])
print(res)


9/9 [==============================] - 0s
[6 0 5 4 9 9 2 1 9]
See http://matplotlib.org/faq/installing_faq.html for installation help. If you have trouble running matplotlib inside a virtualenv, see http://stackoverflow.com/questions/29433824/unable-to-import-matplotlib-pyplot-as-plt-in-virtualenv: run $ cd ~/.matplotlib, then $ nano matplotlibrc, and write backend: TkAgg in that file.
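Alternatively, the backend can be selected from Python before pyplot is imported; and if matplotlib is missing entirely (as in the ImportError at the top of this notebook), installing it with pip is the first step. A minimal sketch:

# pip install matplotlib   (if the module is missing entirely)
import matplotlib
matplotlib.use('TkAgg')  # must be called before importing pyplot
import matplotlib.pyplot as plt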
#%matplotlib inline  # Only use this line to show in jupyter
#import matplotlib.pyplot as plt
#plt.figure(figsize=(10, 10))

In [22]:
for i in range(9):
    plt.subplot(3, 3, i+1)
    plt.imshow(X_test[i+100, 0], cmap='gray')
    plt.gca().get_xaxis().set_ticks([])
    plt.gca().get_yaxis().set_ticks([])
    plt.ylabel("prediction = %d" % res[i], fontsize=8)
    #plt.show()
    #print("Prediction = {}".format(res[i]))