In [121]:
import tensorflow as tf
mnist = tf.keras.datasets.mnist

(x_train, y_train),(x_test, y_test) = mnist.load_data()
# Scale pixel values from [0, 255] to [0, 1]; the division also promotes the
# uint8 arrays to dtype('float64').
x_train, x_test = x_train / 255.0, x_test / 255.0
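
A quick sanity check on what load_data() returns can be useful here. The shapes below are the standard MNIST split (60,000 training and 10,000 test images of 28x28 pixels); this is a sketch, not output from the original run:

print(x_train.shape, y_train.shape)   # (60000, 28, 28) (60000,)
print(x_test.shape, y_test.shape)     # (10000, 28, 28) (10000,)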

In [122]:
model = tf.keras.models.Sequential([
  # Flatten each 28x28 image into a 784-element vector; giving the input
  # shape up front lets Keras build the model before training.
  tf.keras.layers.Flatten(input_shape=(28, 28)),
  tf.keras.layers.Dense(512, activation=tf.nn.relu),    # fully connected hidden layer
  tf.keras.layers.Dropout(0.2),                         # drop 20% of activations during training
  tf.keras.layers.Dense(10, activation=tf.nn.softmax)   # one output probability per digit class
])
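
With an input shape given to Flatten (as above), the model is built immediately and can be inspected before training. The parameter counts follow from the layer sizes: 784*512 + 512 = 401,920 for the hidden layer and 512*10 + 10 = 5,130 for the output layer, about 407,050 in total; model.summary() prints the same breakdown.

model.summary()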

In [123]:
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
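
The loss is sparse_categorical_crossentropy because y_train holds integer class labels (0-9) rather than one-hot vectors. For reference, the one-hot equivalent is sketched below using tf.keras.utils.to_categorical; it is an alternative, not what this notebook uses:

y_train_onehot = tf.keras.utils.to_categorical(y_train, num_classes=10)
y_test_onehot = tf.keras.utils.to_categorical(y_test, num_classes=10)
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])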

In [124]:
model.fit(x_train, y_train, epochs=25, validation_data=(x_test, y_test))


Train on 60000 samples, validate on 10000 samples
Epoch 1/25
60000/60000 [==============================] - 14s 235us/step - loss: 0.2024 - acc: 0.9401 - val_loss: 0.0994 - val_acc: 0.9695
Epoch 2/25
60000/60000 [==============================] - 16s 258us/step - loss: 0.0832 - acc: 0.9750 - val_loss: 0.0823 - val_acc: 0.9745
Epoch 3/25
60000/60000 [==============================] - 15s 247us/step - loss: 0.0533 - acc: 0.9831 - val_loss: 0.0680 - val_acc: 0.9788
Epoch 4/25
60000/60000 [==============================] - 16s 269us/step - loss: 0.0376 - acc: 0.9883 - val_loss: 0.0658 - val_acc: 0.9793
Epoch 5/25
60000/60000 [==============================] - 16s 264us/step - loss: 0.0281 - acc: 0.9908 - val_loss: 0.0634 - val_acc: 0.9818
Epoch 6/25
60000/60000 [==============================] - 16s 274us/step - loss: 0.0223 - acc: 0.9924 - val_loss: 0.0744 - val_acc: 0.9788
Epoch 7/25
60000/60000 [==============================] - 16s 265us/step - loss: 0.0156 - acc: 0.9950 - val_loss: 0.0772 - val_acc: 0.9788
Epoch 8/25
60000/60000 [==============================] - 17s 280us/step - loss: 0.0157 - acc: 0.9948 - val_loss: 0.0910 - val_acc: 0.9777
Epoch 9/25
60000/60000 [==============================] - 16s 267us/step - loss: 0.0119 - acc: 0.9958 - val_loss: 0.0923 - val_acc: 0.9803
Epoch 10/25
60000/60000 [==============================] - 17s 281us/step - loss: 0.0115 - acc: 0.9961 - val_loss: 0.0825 - val_acc: 0.9809
Epoch 11/25
60000/60000 [==============================] - 15s 257us/step - loss: 0.0093 - acc: 0.9967 - val_loss: 0.0894 - val_acc: 0.9817
Epoch 12/25
60000/60000 [==============================] - 14s 231us/step - loss: 0.0081 - acc: 0.9973 - val_loss: 0.0848 - val_acc: 0.9821
Epoch 13/25
60000/60000 [==============================] - 13s 217us/step - loss: 0.0090 - acc: 0.9974 - val_loss: 0.0972 - val_acc: 0.9799
Epoch 14/25
60000/60000 [==============================] - 13s 215us/step - loss: 0.0088 - acc: 0.9971 - val_loss: 0.1000 - val_acc: 0.9794
Epoch 15/25
60000/60000 [==============================] - 12s 196us/step - loss: 0.0087 - acc: 0.9971 - val_loss: 0.0940 - val_acc: 0.9813
Epoch 16/25
60000/60000 [==============================] - 12s 198us/step - loss: 0.0066 - acc: 0.9980 - val_loss: 0.1179 - val_acc: 0.9785
Epoch 17/25
60000/60000 [==============================] - 13s 218us/step - loss: 0.0072 - acc: 0.9979 - val_loss: 0.1196 - val_acc: 0.9799
Epoch 18/25
60000/60000 [==============================] - 12s 204us/step - loss: 0.0071 - acc: 0.9978 - val_loss: 0.1161 - val_acc: 0.9788
Epoch 19/25
60000/60000 [==============================] - 11s 186us/step - loss: 0.0062 - acc: 0.9980 - val_loss: 0.0931 - val_acc: 0.9831
Epoch 20/25
60000/60000 [==============================] - 16s 265us/step - loss: 0.0068 - acc: 0.9980 - val_loss: 0.1123 - val_acc: 0.9797
Epoch 21/25
60000/60000 [==============================] - 12s 204us/step - loss: 0.0045 - acc: 0.9986 - val_loss: 0.1175 - val_acc: 0.9810
Epoch 22/25
60000/60000 [==============================] - 12s 195us/step - loss: 0.0070 - acc: 0.9979 - val_loss: 0.1224 - val_acc: 0.9808
Epoch 23/25
60000/60000 [==============================] - 12s 202us/step - loss: 0.0050 - acc: 0.9984 - val_loss: 0.1045 - val_acc: 0.9836
Epoch 24/25
60000/60000 [==============================] - 11s 187us/step - loss: 0.0045 - acc: 0.9986 - val_loss: 0.1096 - val_acc: 0.9819
Epoch 25/25
60000/60000 [==============================] - 13s 213us/step - loss: 0.0058 - acc: 0.9984 - val_loss: 0.1099 - val_acc: 0.9829
Out[124]:
<tensorflow.python.keras.callbacks.History at 0x7faa75a45e48>
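
The log above shows training accuracy climbing to about 0.998 while validation loss roughly doubles after epoch 5, a classic sign of overfitting. One way to monitor and limit this is sketched below: capture the History object that fit() returns and stop early on val_loss. This assumes matplotlib is available; note the metric keys here are 'acc'/'val_acc', while newer TensorFlow versions name them 'accuracy'/'val_accuracy'. Strictly, a held-out validation split rather than the test set would be the cleaner choice for early stopping.

import matplotlib.pyplot as plt

early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=3)
history = model.fit(x_train, y_train, epochs=25,
                    validation_data=(x_test, y_test),
                    callbacks=[early_stop])

plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.xlabel('epoch')
plt.legend()
plt.show()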

In [126]:
model.evaluate(x_test, y_test)


10000/10000 [==============================] - 1s 52us/step
Out[126]:
[0.1099198426593226, 0.9829]
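
evaluate() returns the loss followed by the metrics passed to compile(), so this list is [test loss, test accuracy] and matches the final epoch's val_loss/val_acc above. A minimal sketch of unpacking it:

test_loss, test_acc = model.evaluate(x_test, y_test)
print('test accuracy: {:.4f}'.format(test_acc))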

In [127]:
y_test.shape


Out[127]:
(10000,)

In [103]:
import numpy as np
prediction = model.predict(x_test[:50])   # softmax class probabilities, shape (50, 10)

In [102]:
np.argmax(prediction, axis=1)   # predicted digit = index of the highest probability


Out[102]:
array([7, 2, 1, 0, 4, 1, 4, 9, 5, 9, 0, 6, 9, 0, 1, 5, 9, 7, 3, 4, 9, 6,
       6, 5, 4, 0, 7, 4, 0, 1, 3, 1, 3, 4, 7, 2, 7, 1, 2, 1, 1, 7, 4, 2,
       3, 5, 1, 2, 4, 4])
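
These are the predicted digits for the first 50 test images. A quick sketch of checking them against the ground-truth labels in y_test:

predicted_digits = np.argmax(prediction, axis=1)
print(np.mean(predicted_digits == y_test[:50]))   # fraction of correct predictions in this slice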

In [71]:
# Persist the trained model (architecture, weights, and training config) to disk.
tf.keras.models.save_model(model, 'test_tf_model')

In [74]:
# compile=False reloads only the architecture and weights; the model must be
# compiled again before calling fit() or evaluate() on it, but predict() works.
new_model = tf.keras.models.load_model('test_tf_model', compile=False)

In [85]:
# The reloaded model should make the same predictions as the original one.
all(np.argmax(new_model.predict(x_test[:50]), axis=1) == np.argmax(prediction, axis=1))


Out[85]:
True
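
Because the model was reloaded with compile=False, evaluate() and fit() are unavailable until it is compiled again. A minimal sketch, reusing the same compile settings as before:

new_model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
new_model.evaluate(x_test, y_test)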

In [97]:
# Reload the raw data to inspect the dtype before and after scaling.
(x_train, y_train), (x_test, y_test) = mnist.load_data()

In [98]:
x_train[0].dtype


Out[98]:
dtype('uint8')

In [99]:
x_train, x_test = x_train / 255.0, x_test / 255.0
x_train[0].dtype


Out[99]:
dtype('float64')
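
Plain division promotes the uint8 images to float64, which is more precision (and memory) than the network needs. A common alternative, sketched here starting again from the raw data, is an explicit float32 cast; this is an option, not something the notebook above does:

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.astype('float32') / 255.0
x_test = x_test.astype('float32') / 255.0
print(x_train.dtype)   # float32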