In [1]:
import keras
import tensorflow as tf

from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras.optimizers import Adadelta

from sklearn.preprocessing import StandardScaler, MinMaxScaler, Normalizer


Using TensorFlow backend.

In [2]:
batch_size = 128
num_classes = 10
epochs = 12

In [3]:
(x_train, y_train), (x_test, y_test) = mnist.load_data()

img_rows, img_cols = 28, 28

x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)

input_shape = (img_rows, img_cols, 1)

x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
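
A quick shape check confirms that the reshape added a single channel axis in channels-last layout (a sketch cell, not part of the original run; the expected sizes follow from MNIST's 60,000 training and 10,000 test images):

In [ ]:
# sketch: verify the (samples, rows, cols, channels) layout
print(x_train.shape, x_test.shape)  # expected: (60000, 28, 28, 1) (10000, 28, 28, 1)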

In [5]:
# similar in effect to MinMaxScaler with feature_range=(0, 1): pixels span [0, 255],
# so dividing by 255 maps them into [0, 1] (fixed global range, not per-feature min/max)
x_train = x_train / 255
x_test = x_test / 255
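
As a sanity check, the scaled values should now lie in [0, 1] (sketch cell, not part of the original run):

In [ ]:
# sketch: confirm the value range after scaling
print(x_train.min(), x_train.max())  # expected: 0.0 1.0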

In [6]:
# one-hot encode the integer labels into vectors of length num_classes
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
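
To see what to_categorical produces, here is a tiny illustration (sketch cell, not from the original run): each integer label becomes a one-hot row of length num_classes.

In [ ]:
# sketch: one-hot encoding of two example labels
print(keras.utils.to_categorical([3, 1], num_classes))
# expected (floats):
# [[0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]
#  [0. 1. 0. 0. 0. 0. 0. 0. 0. 0.]]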

In [17]:
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
                 activation='relu',
                 input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
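
Before compiling, model.summary() is a convenient way to inspect output shapes and parameter counts layer by layer (optional sketch cell, not part of the original run):

In [ ]:
# sketch: print per-layer output shapes and parameter counts
model.summary()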

In [18]:
model.compile(loss="categorical_crossentropy",
              optimizer=Adadelta(),
              metrics=['accuracy'])

In [19]:
model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=epochs,
          verbose=1,
          validation_data=(x_test, y_test))


Train on 60000 samples, validate on 10000 samples
Epoch 1/12
60000/60000 [==============================] - 16s - loss: 0.3142 - acc: 0.9048 - val_loss: 0.0744 - val_acc: 0.9758
Epoch 2/12
60000/60000 [==============================] - 16s - loss: 0.1084 - acc: 0.9682 - val_loss: 0.0494 - val_acc: 0.9841
Epoch 3/12
60000/60000 [==============================] - 16s - loss: 0.0852 - acc: 0.9751 - val_loss: 0.0435 - val_acc: 0.9856
Epoch 4/12
60000/60000 [==============================] - 17s - loss: 0.0682 - acc: 0.9798 - val_loss: 0.0388 - val_acc: 0.9867
Epoch 5/12
60000/60000 [==============================] - 16s - loss: 0.0606 - acc: 0.9824 - val_loss: 0.0347 - val_acc: 0.9884
Epoch 6/12
60000/60000 [==============================] - 16s - loss: 0.0540 - acc: 0.9840 - val_loss: 0.0327 - val_acc: 0.9893
Epoch 7/12
60000/60000 [==============================] - 16s - loss: 0.0504 - acc: 0.9849 - val_loss: 0.0325 - val_acc: 0.9884
Epoch 8/12
60000/60000 [==============================] - 16s - loss: 0.0462 - acc: 0.9863 - val_loss: 0.0325 - val_acc: 0.9890
Epoch 9/12
60000/60000 [==============================] - 17s - loss: 0.0429 - acc: 0.9870 - val_loss: 0.0291 - val_acc: 0.9903
Epoch 10/12
60000/60000 [==============================] - 17s - loss: 0.0398 - acc: 0.9879 - val_loss: 0.0287 - val_acc: 0.9898
Epoch 11/12
60000/60000 [==============================] - 16s - loss: 0.0390 - acc: 0.9887 - val_loss: 0.0309 - val_acc: 0.9903
Epoch 12/12
60000/60000 [==============================] - 17s - loss: 0.0360 - acc: 0.9891 - val_loss: 0.0314 - val_acc: 0.9906
Out[19]:
<keras.callbacks.History at 0x7f5a2c16afd0>
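
A natural follow-up is to score the trained model on the held-out set with model.evaluate (sketch cell, not part of the original run; because validation_data was the test set, the scores should match the final epoch's val_loss and val_acc above):

In [ ]:
# sketch: evaluate the trained model on the test set
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])      # expect ~0.0314
print('Test accuracy:', score[1])  # expect ~0.9906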
