In [2]:
# Adapted from https://raw.githubusercontent.com/fchollet/keras/master/examples/mnist_mlp.py
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD, Adam, RMSprop

from mnist import load_data

from keras.utils import np_utils


Using Theano backend.
Using gpu device 0: GeForce GTX 960 (CNMeM is disabled)
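The upstream mnist_mlp.py example seeds NumPy right after the imports so runs are reproducible; the same line fits here:

np.random.seed(1337)  # for reproducibility, as in the upstream example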

In [4]:
## Load dataset ##
datasets = load_data('mnist.pkl.gz')

train_set_x, train_set_y = datasets[0]
valid_set_x, valid_set_y = datasets[1]
test_set_x, test_set_y = datasets[2]


... loading data
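A quick sanity check on the loaded splits (assuming load_data returns NumPy arrays with each image already flattened to 784 floats in [0, 1], as in the standard mnist.pkl.gz splits):

print(train_set_x.shape)  # expected: (50000, 784)
print(valid_set_x.shape)  # expected: (10000, 784)
print(test_set_x.shape)   # expected: (10000, 784)
print(train_set_x.dtype)  # expected: float32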

In [5]:
batch_size = 128  # samples per gradient update
nb_classes = 10   # digits 0-9
nb_epoch = 20     # full passes over the training set

In [6]:
# Convert integer labels to one-hot vectors for categorical_crossentropy.
Y_train = np_utils.to_categorical(train_set_y, nb_classes)
Y_test = np_utils.to_categorical(test_set_y, nb_classes)
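np_utils.to_categorical maps each integer label k to a one-hot row of length nb_classes; a minimal NumPy sketch of the same transform:

# Minimal NumPy sketch of what to_categorical does:
# label k becomes a row of zeros with a 1 at index k.
def one_hot(labels, n_classes):
    labels = np.asarray(labels, dtype='int')
    out = np.zeros((len(labels), n_classes))
    out[np.arange(len(labels)), labels] = 1.0
    return out

assert np.array_equal(one_hot(test_set_y, nb_classes), Y_test)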

In [7]:
model = Sequential()
# Hidden layer 1: 784 flattened pixels -> 512 units.
model.add(Dense(512, input_shape=(784,)))
model.add(Activation('relu'))
model.add(Dropout(0.2))  # randomly zero 20% of activations during training
# Hidden layer 2: 512 -> 512 units.
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.2))
# Output layer: 512 -> 10 class probabilities.
model.add(Dense(10))
model.add(Activation('softmax'))
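Each Dense layer holds inputs × outputs weights plus one bias per output, so the 784-512-512-10 stack works out as:

# Dense parameters = inputs * outputs + biases:
# (784*512 + 512) + (512*512 + 512) + (512*10 + 10)
n_params = (784 * 512 + 512) + (512 * 512 + 512) + (512 * 10 + 10)
print(n_params)  # 669706 trainable parameters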

In [9]:
# RMSprop with Keras defaults; SGD and Adam imported above go unused.
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
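With one-hot targets, categorical cross-entropy is just the negative log-probability the softmax assigns to the true class, averaged over the batch; a NumPy sketch of the formula (not Keras's actual backend code):

# Sketch of categorical cross-entropy over a batch:
# -mean over samples of log p(true class).
def categorical_crossentropy(y_true, y_pred, eps=1e-7):
    y_pred = np.clip(y_pred, eps, 1.0)  # avoid log(0)
    return -np.mean(np.sum(y_true * np.log(y_pred), axis=1))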

In [11]:
# nb_epoch and show_accuracy are legacy Keras argument names (later
# replaced by epochs and compile-time metrics). Note the test set
# doubles as validation data here, as in the upstream example.
model.fit(train_set_x, Y_train,
          batch_size=batch_size, nb_epoch=nb_epoch,
          show_accuracy=True, verbose=2,
          validation_data=(test_set_x, Y_test))


Train on 50000 samples, validate on 10000 samples
Epoch 1/20
2s - loss: 0.3091 - acc: 0.9051 - val_loss: 0.1268 - val_acc: 0.9603
Epoch 2/20
2s - loss: 0.1247 - acc: 0.9613 - val_loss: 0.0871 - val_acc: 0.9723
Epoch 3/20
2s - loss: 0.0871 - acc: 0.9734 - val_loss: 0.0755 - val_acc: 0.9758
Epoch 4/20
2s - loss: 0.0676 - acc: 0.9785 - val_loss: 0.0684 - val_acc: 0.9780
Epoch 5/20
2s - loss: 0.0519 - acc: 0.9835 - val_loss: 0.0793 - val_acc: 0.9762
Epoch 6/20
2s - loss: 0.0436 - acc: 0.9863 - val_loss: 0.0732 - val_acc: 0.9783
Epoch 7/20
2s - loss: 0.0365 - acc: 0.9885 - val_loss: 0.0672 - val_acc: 0.9812
Epoch 8/20
2s - loss: 0.0303 - acc: 0.9904 - val_loss: 0.0571 - val_acc: 0.9833
Epoch 9/20
2s - loss: 0.0259 - acc: 0.9915 - val_loss: 0.0655 - val_acc: 0.9816
Epoch 10/20
2s - loss: 0.0223 - acc: 0.9925 - val_loss: 0.0645 - val_acc: 0.9835
Epoch 11/20
2s - loss: 0.0196 - acc: 0.9933 - val_loss: 0.0640 - val_acc: 0.9838
Epoch 12/20
2s - loss: 0.0178 - acc: 0.9939 - val_loss: 0.0757 - val_acc: 0.9806
Epoch 13/20
2s - loss: 0.0153 - acc: 0.9949 - val_loss: 0.0728 - val_acc: 0.9824
Epoch 14/20
2s - loss: 0.0140 - acc: 0.9951 - val_loss: 0.0632 - val_acc: 0.9850
Epoch 15/20
2s - loss: 0.0126 - acc: 0.9957 - val_loss: 0.0657 - val_acc: 0.9842
Epoch 16/20
2s - loss: 0.0111 - acc: 0.9962 - val_loss: 0.0717 - val_acc: 0.9830
Epoch 17/20
2s - loss: 0.0100 - acc: 0.9967 - val_loss: 0.0685 - val_acc: 0.9827
Epoch 18/20
2s - loss: 0.0107 - acc: 0.9964 - val_loss: 0.0667 - val_acc: 0.9841
Epoch 19/20
3s - loss: 0.0083 - acc: 0.9973 - val_loss: 0.0720 - val_acc: 0.9836
Epoch 20/20
2s - loss: 0.0086 - acc: 0.9970 - val_loss: 0.0695 - val_acc: 0.9851
Out[11]:
<keras.callbacks.History at 0x7f38ef946c90>
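fit returns the History callback shown above; if this Keras version populates History.history the way later releases do (per-epoch lists under 'loss', 'acc', 'val_loss', 'val_acc' when show_accuracy=True), the curves can be plotted directly. A sketch, assuming matplotlib is installed and fit is re-run with its return value captured:

import matplotlib.pyplot as plt

history = model.fit(train_set_x, Y_train,
                    batch_size=batch_size, nb_epoch=nb_epoch,
                    show_accuracy=True, verbose=2,
                    validation_data=(test_set_x, Y_test))

# Training vs. validation loss per epoch; the widening gap after
# epoch ~8 matches the mild overfitting visible in the log above.
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend()
plt.show()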

In [12]:
# Evaluate on the held-out test set; with show_accuracy=True this
# returns [test loss, test accuracy].
score = model.evaluate(test_set_x, Y_test,
                       show_accuracy=True, verbose=0)
print(score)


[0.069471946638752974, 0.98509999999999998]
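At ~98.5% test accuracy, roughly 150 of the 10,000 test digits are still misclassified; Sequential.predict_classes (available in this Keras generation) makes it easy to find them:

# Indices of misclassified test digits.
preds = model.predict_classes(test_set_x, verbose=0)
errors = np.nonzero(preds != test_set_y)[0]
print(len(errors))   # ~150 mistakes out of 10000 at 98.5% accuracy
print(errors[:10])   # first few offending indices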