In [1]:
import tensorflow as tf

In [2]:
mnist = tf.keras.datasets.mnist

In [8]:
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 25.0

In [9]:
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(512, activation=tf.nn.relu),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(10, activation=tf.nn.softmax)
])

In [10]:
model.compile(optimizer='adam',
             loss='sparse_categorical_crossentropy',
             metrics=['accuracy'])

In [13]:
model.fit(x_train, y_train, epochs = 5)


Epoch 1/5
60000/60000 [==============================] - 6s 100us/step - loss: 0.2181 - acc: 0.9355
Epoch 2/5
60000/60000 [==============================] - 6s 94us/step - loss: 0.0964 - acc: 0.9705
Epoch 3/5
60000/60000 [==============================] - 6s 95us/step - loss: 0.0697 - acc: 0.9785
Epoch 4/5
60000/60000 [==============================] - 6s 101us/step - loss: 0.0543 - acc: 0.9825
Epoch 5/5
60000/60000 [==============================] - 6s 105us/step - loss: 0.0425 - acc: 0.9861
Out[13]:
<tensorflow.python.keras.callbacks.History at 0x12843ec10>

In [14]:
model.evaluate(x_test, y_test)


10000/10000 [==============================] - 0s 33us/step
Out[14]:
[0.24156368299915448, 0.9806]

In [ ]: