In [1]:
import keras
import tensorflow as tf

from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import RMSprop

from sklearn.preprocessing import StandardScaler, MinMaxScaler, Normalizer


Using TensorFlow backend.

In [2]:
# Training hyperparameters.
num_classes = 10    # MNIST digits 0-9
epochs = 20         # full passes over the training set
batch_size = 128    # samples per gradient update

In [3]:
# Load MNIST: 28x28 grayscale digit images with integer labels 0-9.
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Flatten each 28x28 image into a 784-length float vector.
# Use -1 for the sample axis so the reshape adapts to however many
# samples load_data() returns, instead of hard-coding 60000/10000.
x_train = x_train.reshape(-1, 784).astype('float32')
x_test = x_test.reshape(-1, 784).astype('float32')

In [4]:
# Scale pixel values into [0, 1].
sc = MinMaxScaler(feature_range=(0, 1))

# Fit the scaler on the training data only, then apply the SAME
# transformation to the test data. The original code called
# fit_transform on x_test too, which re-fits the scaler on the test
# set — a train/test inconsistency (data leakage) that can silently
# skew evaluation when the test distribution differs.
x_train = sc.fit_transform(x_train)
x_test = sc.transform(x_test)

In [5]:
# One-hot encode the integer labels (0-9) into 10-dimensional vectors,
# matching the softmax output layer and categorical cross-entropy loss.
y_train, y_test = (
    keras.utils.to_categorical(labels, num_classes)
    for labels in (y_train, y_test)
)

In [6]:
# Simple MLP: one hidden layer of 800 ReLU units over the 784 flattened
# pixels, followed by a softmax output over the 10 digit classes.
model = Sequential([
    Dense(800, activation='relu', input_shape=(784,)),
    Dense(10, activation='softmax'),
])

In [7]:
# Categorical cross-entropy pairs with the one-hot targets; RMSprop is
# used with its default learning rate, and accuracy is tracked per epoch.
model.compile(
    optimizer=RMSprop(),
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)

In [8]:
# Train for `epochs` passes over the data, reporting loss/accuracy on the
# held-out test set after each epoch. Left as the cell's last expression
# so the returned History object is displayed by the notebook.
model.fit(
    x_train,
    y_train,
    batch_size=batch_size,
    epochs=epochs,
    verbose=1,
    validation_data=(x_test, y_test),
)


Train on 60000 samples, validate on 10000 samples
Epoch 1/20
60000/60000 [==============================] - 14s - loss: 0.2408 - acc: 0.9277 - val_loss: 0.1174 - val_acc: 0.9658
Epoch 2/20
60000/60000 [==============================] - 1s - loss: 0.0945 - acc: 0.9719 - val_loss: 0.0909 - val_acc: 0.9724
Epoch 3/20
60000/60000 [==============================] - 1s - loss: 0.0610 - acc: 0.9813 - val_loss: 0.0669 - val_acc: 0.9797
Epoch 4/20
60000/60000 [==============================] - 1s - loss: 0.0440 - acc: 0.9866 - val_loss: 0.0642 - val_acc: 0.9809
Epoch 5/20
60000/60000 [==============================] - 1s - loss: 0.0319 - acc: 0.9908 - val_loss: 0.0671 - val_acc: 0.9804
Epoch 6/20
60000/60000 [==============================] - 1s - loss: 0.0235 - acc: 0.9931 - val_loss: 0.0645 - val_acc: 0.9811
Epoch 7/20
60000/60000 [==============================] - 1s - loss: 0.0176 - acc: 0.9950 - val_loss: 0.0710 - val_acc: 0.9815
Epoch 8/20
60000/60000 [==============================] - 2s - loss: 0.0132 - acc: 0.9961 - val_loss: 0.0772 - val_acc: 0.9801
Epoch 9/20
60000/60000 [==============================] - 1s - loss: 0.0101 - acc: 0.9971 - val_loss: 0.0721 - val_acc: 0.9826
Epoch 10/20
60000/60000 [==============================] - 1s - loss: 0.0082 - acc: 0.9975 - val_loss: 0.0800 - val_acc: 0.9813
Epoch 11/20
60000/60000 [==============================] - 1s - loss: 0.0061 - acc: 0.9984 - val_loss: 0.0788 - val_acc: 0.9826
Epoch 12/20
60000/60000 [==============================] - 1s - loss: 0.0052 - acc: 0.9984 - val_loss: 0.0836 - val_acc: 0.9830
Epoch 13/20
60000/60000 [==============================] - 2s - loss: 0.0035 - acc: 0.9989 - val_loss: 0.0826 - val_acc: 0.9832
Epoch 14/20
60000/60000 [==============================] - 1s - loss: 0.0031 - acc: 0.9992 - val_loss: 0.0897 - val_acc: 0.9823
Epoch 15/20
60000/60000 [==============================] - 1s - loss: 0.0028 - acc: 0.9992 - val_loss: 0.0827 - val_acc: 0.9833
Epoch 16/20
60000/60000 [==============================] - 2s - loss: 0.0022 - acc: 0.9994 - val_loss: 0.0891 - val_acc: 0.9825
Epoch 17/20
60000/60000 [==============================] - 1s - loss: 0.0020 - acc: 0.9994 - val_loss: 0.0855 - val_acc: 0.9837
Epoch 18/20
60000/60000 [==============================] - 1s - loss: 0.0012 - acc: 0.9997 - val_loss: 0.0921 - val_acc: 0.9830
Epoch 19/20
60000/60000 [==============================] - 1s - loss: 0.0014 - acc: 0.9996 - val_loss: 0.0926 - val_acc: 0.9842
Epoch 20/20
60000/60000 [==============================] - 1s - loss: 0.0010 - acc: 0.9998 - val_loss: 0.0970 - val_acc: 0.9832
Out[8]:
<keras.callbacks.History at 0x7ffb42d70f98>

In [ ]: