In [1]:
import keras
import tensorflow as tf

from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import RMSprop

from sklearn.preprocessing import StandardScaler, MinMaxScaler, Normalizer


Using TensorFlow backend.

In [2]:
# Training configuration: mini-batch size, number of target classes
# (digits 0-9), and number of full passes over the training set.
batch_size, num_classes, epochs = 128, 10, 20

In [3]:
# Load MNIST and flatten each 28x28 image into a 784-dimensional float vector.
# Using -1 lets NumPy infer the sample count instead of hard-coding
# 60000/10000, so the cell keeps working if the dataset split ever changes.
(x_train, y_train), (x_test, y_test) = mnist.load_data()

x_train = x_train.reshape(-1, 784).astype('float32')
x_test = x_test.reshape(-1, 784).astype('float32')

In [4]:
# Scale pixel values into [0, 1].
# NOTE: the scaler is fitted on the training set ONLY; the test set is
# transformed with the training-set min/max. The original code called
# fit_transform on x_test, which re-fits the scaler on test data (data
# leakage) and makes train/test features inconsistent.
sc = MinMaxScaler(feature_range=(0, 1))

x_train = sc.fit_transform(x_train)
x_test = sc.transform(x_test)  # transform only — reuse train-set statistics

In [5]:
# One-hot encode the integer digit labels into length-10 indicator vectors,
# matching the softmax output layer and categorical cross-entropy loss.
y_train, y_test = (
    keras.utils.to_categorical(labels, num_classes)
    for labels in (y_train, y_test)
)

In [6]:
# Single-hidden-layer MLP: 784 inputs -> 800 ReLU units -> 10 softmax
# class probabilities (one per digit).
model = Sequential([
    Dense(800, activation='relu', input_shape=(784,)),
    Dense(10, activation='softmax'),
])

In [7]:
# Configure training: categorical cross-entropy loss (one-hot targets),
# RMSprop optimizer with default settings, and accuracy reporting.
optimizer = RMSprop()
model.compile(optimizer=optimizer,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

In [8]:
# Train the network, evaluating on the held-out test set after each epoch;
# `history` records per-epoch loss/accuracy curves.
fit_params = {
    'batch_size': batch_size,
    'epochs': epochs,
    'verbose': 1,
    'validation_data': (x_test, y_test),
}
history = model.fit(x_train, y_train, **fit_params)


Train on 60000 samples, validate on 10000 samples
Epoch 1/20
60000/60000 [==============================] - 2s - loss: 0.2379 - acc: 0.9308 - val_loss: 0.1137 - val_acc: 0.9660
Epoch 2/20
60000/60000 [==============================] - 1s - loss: 0.0921 - acc: 0.9721 - val_loss: 0.0882 - val_acc: 0.9729
Epoch 3/20
60000/60000 [==============================] - 1s - loss: 0.0602 - acc: 0.9819 - val_loss: 0.0772 - val_acc: 0.9759
Epoch 4/20
60000/60000 [==============================] - 1s - loss: 0.0421 - acc: 0.9872 - val_loss: 0.0717 - val_acc: 0.9778
Epoch 5/20
60000/60000 [==============================] - 1s - loss: 0.0311 - acc: 0.9905 - val_loss: 0.0733 - val_acc: 0.9782
Epoch 6/20
60000/60000 [==============================] - 1s - loss: 0.0234 - acc: 0.9928 - val_loss: 0.0676 - val_acc: 0.9813
Epoch 7/20
60000/60000 [==============================] - 1s - loss: 0.0175 - acc: 0.9946 - val_loss: 0.0670 - val_acc: 0.9819
Epoch 8/20
60000/60000 [==============================] - 1s - loss: 0.0129 - acc: 0.9961 - val_loss: 0.0721 - val_acc: 0.9816
Epoch 9/20
60000/60000 [==============================] - 1s - loss: 0.0100 - acc: 0.9968 - val_loss: 0.0670 - val_acc: 0.9834
Epoch 10/20
60000/60000 [==============================] - 1s - loss: 0.0081 - acc: 0.9976 - val_loss: 0.0726 - val_acc: 0.9829
Epoch 11/20
60000/60000 [==============================] - 1s - loss: 0.0063 - acc: 0.9981 - val_loss: 0.0739 - val_acc: 0.9828
Epoch 12/20
60000/60000 [==============================] - 1s - loss: 0.0046 - acc: 0.9986 - val_loss: 0.0852 - val_acc: 0.9810
Epoch 13/20
60000/60000 [==============================] - 1s - loss: 0.0044 - acc: 0.9987 - val_loss: 0.0806 - val_acc: 0.9831
Epoch 14/20
60000/60000 [==============================] - 2s - loss: 0.0030 - acc: 0.9992 - val_loss: 0.0770 - val_acc: 0.9844
Epoch 15/20
60000/60000 [==============================] - 1s - loss: 0.0024 - acc: 0.9994 - val_loss: 0.0857 - val_acc: 0.9834
Epoch 16/20
60000/60000 [==============================] - 1s - loss: 0.0020 - acc: 0.9993 - val_loss: 0.0856 - val_acc: 0.9842
Epoch 17/20
60000/60000 [==============================] - 2s - loss: 0.0014 - acc: 0.9996 - val_loss: 0.1032 - val_acc: 0.9819
Epoch 18/20
60000/60000 [==============================] - 2s - loss: 0.0015 - acc: 0.9996 - val_loss: 0.1036 - val_acc: 0.9816
Epoch 19/20
60000/60000 [==============================] - 1s - loss: 0.0013 - acc: 0.9997 - val_loss: 0.1034 - val_acc: 0.9816
Epoch 20/20
60000/60000 [==============================] - 1s - loss: 0.0012 - acc: 0.9997 - val_loss: 0.1036 - val_acc: 0.9819

In [ ]: