In this notebook, we improve on our intermediate neural net from Lesson 2 by applying the theory we've covered since: dropout, batch normalization, and the Adam optimizer.
In [1]:
import numpy as np
np.random.seed(42)
In [2]:
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout # new!
from keras.layers.normalization import BatchNormalization # new!
from keras import regularizers # new!
from keras.optimizers import SGD
Using TensorFlow backend.
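Note that the regularizers module is imported above but no weight penalty is actually attached to any layer in the model that follows. If you wanted to experiment with one, a minimal sketch (penalty strength chosen arbitrarily for illustration, not part of this notebook's architecture) would look like:

# Hypothetical: a hidden layer with an L2 weight penalty attached (not used below).
Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.01))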
In [3]:
(X_train, y_train), (X_test, y_test) = mnist.load_data()
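mnist.load_data() returns the images as 28-by-28 pixel arrays with integer labels, which is why the next cell flattens each image into a 784-dimensional vector (28 x 28 = 784). A quick shape check, with expected values shown as comments:

X_train.shape   # (60000, 28, 28) -- 60,000 training images, 28x28 pixels each
X_test.shape    # (10000, 28, 28)
y_train.shape   # (60000,) -- integer class labels 0 through 9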
In [4]:
X_train = X_train.reshape(60000, 784).astype('float32')
X_test = X_test.reshape(10000, 784).astype('float32')
In [5]:
X_train /= 255
X_test /= 255
In [6]:
n_classes = 10
y_train = keras.utils.to_categorical(y_train, n_classes)
y_test = keras.utils.to_categorical(y_test, n_classes)
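to_categorical converts each integer label into a length-10 one-hot vector, matching the 10-unit softmax output layer and the categorical cross-entropy loss used below. For example (output shown as a comment):

keras.utils.to_categorical([5], n_classes)
# array([[0., 0., 0., 0., 0., 1., 0., 0., 0., 0.]])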
In [7]:
model = Sequential()
model.add(Dense(64, activation='relu', input_shape=(784,)))
model.add(BatchNormalization())
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(0.5))
# model.add(Dense(64, activation='relu'))
# model.add(BatchNormalization())
# model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))
In [8]:
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense_1 (Dense)              (None, 64)                50240
_________________________________________________________________
batch_normalization_1 (Batch (None, 64)                256
_________________________________________________________________
dropout_1 (Dropout)          (None, 64)                0
_________________________________________________________________
dense_2 (Dense)              (None, 64)                4160
_________________________________________________________________
batch_normalization_2 (Batch (None, 64)                256
_________________________________________________________________
dropout_2 (Dropout)          (None, 64)                0
_________________________________________________________________
dense_3 (Dense)              (None, 10)                650
=================================================================
Total params: 55,562
Trainable params: 55,306
Non-trainable params: 256
_________________________________________________________________
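The parameter counts above can be verified by hand: each Dense layer has (inputs x units) weights plus one bias per unit, and each BatchNormalization layer carries four length-64 vectors (gamma, beta, moving mean, moving variance), of which only gamma and beta are trained. A quick check:

784 * 64 + 64   # dense_1: 50,240
4 * 64          # batch_normalization_1: 256 (128 trainable: gamma, beta;
                #                            128 non-trainable: moving mean, variance)
64 * 64 + 64    # dense_2: 4,160
4 * 64          # batch_normalization_2: 256
64 * 10 + 10    # dense_3: 650
# Total: 55,562; non-trainable: 2 x 128 = 256; trainable: 55,306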
In [8]:
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
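The model is compiled with Adam; the SGD class imported at the top is not actually used. As a point of comparison, a sketch of compiling with plain stochastic gradient descent instead (learning rate chosen for illustration only):

# Hypothetical alternative: plain SGD in place of Adam.
# model.compile(loss='categorical_crossentropy',
#               optimizer=SGD(lr=0.1),
#               metrics=['accuracy'])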
In [9]:
model.fit(X_train, y_train, batch_size=128, epochs=200, verbose=1, validation_data=(X_test, y_test))
Train on 60000 samples, validate on 10000 samples
Epoch 1/200
60000/60000 [==============================] - 3s - loss: 0.8586 - acc: 0.7308 - val_loss: 0.2594 - val_acc: 0.9230
Epoch 2/200
60000/60000 [==============================] - 2s - loss: 0.4370 - acc: 0.8721 - val_loss: 0.2086 - val_acc: 0.9363
Epoch 3/200
60000/60000 [==============================] - 2s - loss: 0.3658 - acc: 0.8940 - val_loss: 0.1912 - val_acc: 0.9410
Epoch 4/200
60000/60000 [==============================] - 2s - loss: 0.3258 - acc: 0.9051 - val_loss: 0.1642 - val_acc: 0.9504
Epoch 5/200
60000/60000 [==============================] - 2s - loss: 0.2998 - acc: 0.9129 - val_loss: 0.1597 - val_acc: 0.9526
Epoch 6/200
60000/60000 [==============================] - 2s - loss: 0.2850 - acc: 0.9167 - val_loss: 0.1424 - val_acc: 0.9552
Epoch 7/200
60000/60000 [==============================] - 2s - loss: 0.2733 - acc: 0.9197 - val_loss: 0.1401 - val_acc: 0.9562
Epoch 8/200
60000/60000 [==============================] - 2s - loss: 0.2643 - acc: 0.9239 - val_loss: 0.1382 - val_acc: 0.9578
Epoch 9/200
60000/60000 [==============================] - 2s - loss: 0.2496 - acc: 0.9268 - val_loss: 0.1317 - val_acc: 0.9614
Epoch 10/200
60000/60000 [==============================] - 2s - loss: 0.2445 - acc: 0.9285 - val_loss: 0.1287 - val_acc: 0.9599
Epoch 11/200
60000/60000 [==============================] - 2s - loss: 0.2413 - acc: 0.9305 - val_loss: 0.1233 - val_acc: 0.9639
Epoch 12/200
60000/60000 [==============================] - 2s - loss: 0.2287 - acc: 0.9328 - val_loss: 0.1197 - val_acc: 0.9650
Epoch 13/200
60000/60000 [==============================] - 2s - loss: 0.2277 - acc: 0.9332 - val_loss: 0.1202 - val_acc: 0.9637
Epoch 14/200
60000/60000 [==============================] - 2s - loss: 0.2210 - acc: 0.9361 - val_loss: 0.1170 - val_acc: 0.9637
Epoch 15/200
60000/60000 [==============================] - 2s - loss: 0.2176 - acc: 0.9362 - val_loss: 0.1168 - val_acc: 0.9666
Epoch 16/200
60000/60000 [==============================] - 2s - loss: 0.2121 - acc: 0.9367 - val_loss: 0.1164 - val_acc: 0.9665
Epoch 17/200
60000/60000 [==============================] - 2s - loss: 0.2100 - acc: 0.9380 - val_loss: 0.1160 - val_acc: 0.9653
Epoch 18/200
60000/60000 [==============================] - 2s - loss: 0.2063 - acc: 0.9399 - val_loss: 0.1174 - val_acc: 0.9652
Epoch 19/200
60000/60000 [==============================] - 2s - loss: 0.2077 - acc: 0.9391 - val_loss: 0.1086 - val_acc: 0.9697
Epoch 20/200
60000/60000 [==============================] - 2s - loss: 0.2038 - acc: 0.9407 - val_loss: 0.1086 - val_acc: 0.9684
Epoch 21/200
60000/60000 [==============================] - 2s - loss: 0.2003 - acc: 0.9411 - val_loss: 0.1123 - val_acc: 0.9678
Epoch 22/200
60000/60000 [==============================] - 2s - loss: 0.1957 - acc: 0.9415 - val_loss: 0.1130 - val_acc: 0.9663
Epoch 23/200
60000/60000 [==============================] - 2s - loss: 0.2009 - acc: 0.9415 - val_loss: 0.1104 - val_acc: 0.9687
Epoch 24/200
60000/60000 [==============================] - 2s - loss: 0.1961 - acc: 0.9422 - val_loss: 0.1078 - val_acc: 0.9691
Epoch 25/200
60000/60000 [==============================] - 2s - loss: 0.1976 - acc: 0.9422 - val_loss: 0.1100 - val_acc: 0.9682
Epoch 26/200
60000/60000 [==============================] - 2s - loss: 0.1908 - acc: 0.9443 - val_loss: 0.1038 - val_acc: 0.9708
Epoch 27/200
60000/60000 [==============================] - 2s - loss: 0.1942 - acc: 0.9437 - val_loss: 0.1138 - val_acc: 0.9685
Epoch 28/200
60000/60000 [==============================] - 2s - loss: 0.1893 - acc: 0.9440 - val_loss: 0.1081 - val_acc: 0.9709
Epoch 29/200
60000/60000 [==============================] - 2s - loss: 0.1850 - acc: 0.9455 - val_loss: 0.1020 - val_acc: 0.9699
Epoch 30/200
60000/60000 [==============================] - 2s - loss: 0.1865 - acc: 0.9456 - val_loss: 0.1075 - val_acc: 0.9679
Epoch 31/200
60000/60000 [==============================] - 2s - loss: 0.1851 - acc: 0.9457 - val_loss: 0.1046 - val_acc: 0.9708
Epoch 32/200
60000/60000 [==============================] - 2s - loss: 0.1814 - acc: 0.9457 - val_loss: 0.1048 - val_acc: 0.9694
Epoch 33/200
60000/60000 [==============================] - 2s - loss: 0.1858 - acc: 0.9454 - val_loss: 0.1076 - val_acc: 0.9695
Epoch 34/200
60000/60000 [==============================] - 2s - loss: 0.1764 - acc: 0.9481 - val_loss: 0.1076 - val_acc: 0.9695
Epoch 35/200
60000/60000 [==============================] - 2s - loss: 0.1835 - acc: 0.9464 - val_loss: 0.1056 - val_acc: 0.9697
Epoch 36/200
60000/60000 [==============================] - 2s - loss: 0.1821 - acc: 0.9463 - val_loss: 0.1034 - val_acc: 0.9696
Epoch 37/200
60000/60000 [==============================] - 2s - loss: 0.1805 - acc: 0.9468 - val_loss: 0.1058 - val_acc: 0.9710
Epoch 38/200
60000/60000 [==============================] - 2s - loss: 0.1744 - acc: 0.9487 - val_loss: 0.1059 - val_acc: 0.9691
Epoch 39/200
60000/60000 [==============================] - 2s - loss: 0.1792 - acc: 0.9470 - val_loss: 0.1032 - val_acc: 0.9693
Epoch 40/200
60000/60000 [==============================] - 2s - loss: 0.1767 - acc: 0.9472 - val_loss: 0.1055 - val_acc: 0.9700
Epoch 41/200
60000/60000 [==============================] - 2s - loss: 0.1753 - acc: 0.9474 - val_loss: 0.1076 - val_acc: 0.9696
Epoch 42/200
60000/60000 [==============================] - 2s - loss: 0.1740 - acc: 0.9489 - val_loss: 0.1059 - val_acc: 0.9698
Epoch 43/200
60000/60000 [==============================] - 3s - loss: 0.1729 - acc: 0.9473 - val_loss: 0.1054 - val_acc: 0.9687
Epoch 44/200
60000/60000 [==============================] - 2s - loss: 0.1705 - acc: 0.9481 - val_loss: 0.1036 - val_acc: 0.9700
Epoch 45/200
60000/60000 [==============================] - 2s - loss: 0.1689 - acc: 0.9496 - val_loss: 0.1075 - val_acc: 0.9703
Epoch 46/200
60000/60000 [==============================] - 2s - loss: 0.1752 - acc: 0.9491 - val_loss: 0.1088 - val_acc: 0.9690
Epoch 47/200
60000/60000 [==============================] - 3s - loss: 0.1749 - acc: 0.9481 - val_loss: 0.1064 - val_acc: 0.9708
Epoch 48/200
60000/60000 [==============================] - 3s - loss: 0.1723 - acc: 0.9488 - val_loss: 0.1047 - val_acc: 0.9693
Epoch 49/200
60000/60000 [==============================] - 2s - loss: 0.1689 - acc: 0.9499 - val_loss: 0.1098 - val_acc: 0.9691
Epoch 50/200
60000/60000 [==============================] - 2s - loss: 0.1674 - acc: 0.9502 - val_loss: 0.1037 - val_acc: 0.9703
Epoch 51/200
60000/60000 [==============================] - 2s - loss: 0.1682 - acc: 0.9490 - val_loss: 0.1037 - val_acc: 0.9698
Epoch 52/200
60000/60000 [==============================] - 2s - loss: 0.1646 - acc: 0.9515 - val_loss: 0.1047 - val_acc: 0.9703
Epoch 53/200
60000/60000 [==============================] - 2s - loss: 0.1700 - acc: 0.9489 - val_loss: 0.1055 - val_acc: 0.9705
Epoch 54/200
60000/60000 [==============================] - 2s - loss: 0.1679 - acc: 0.9499 - val_loss: 0.1083 - val_acc: 0.9689
Epoch 55/200
60000/60000 [==============================] - 2s - loss: 0.1640 - acc: 0.9514 - val_loss: 0.1078 - val_acc: 0.9693
Epoch 56/200
60000/60000 [==============================] - 2s - loss: 0.1672 - acc: 0.9494 - val_loss: 0.1112 - val_acc: 0.9696
Epoch 57/200
60000/60000 [==============================] - 2s - loss: 0.1615 - acc: 0.9518 - val_loss: 0.1036 - val_acc: 0.9701
Epoch 58/200
60000/60000 [==============================] - 2s - loss: 0.1609 - acc: 0.9521 - val_loss: 0.1049 - val_acc: 0.9703
Epoch 59/200
60000/60000 [==============================] - 2s - loss: 0.1637 - acc: 0.9511 - val_loss: 0.1056 - val_acc: 0.9709
Epoch 60/200
60000/60000 [==============================] - 2s - loss: 0.1662 - acc: 0.9501 - val_loss: 0.1078 - val_acc: 0.9698
Epoch 61/200
60000/60000 [==============================] - 2s - loss: 0.1622 - acc: 0.9507 - val_loss: 0.1032 - val_acc: 0.9706
Epoch 62/200
60000/60000 [==============================] - 2s - loss: 0.1602 - acc: 0.9522 - val_loss: 0.1052 - val_acc: 0.9708
Epoch 63/200
60000/60000 [==============================] - 2s - loss: 0.1626 - acc: 0.9514 - val_loss: 0.1007 - val_acc: 0.9716
Epoch 64/200
60000/60000 [==============================] - 2s - loss: 0.1632 - acc: 0.9512 - val_loss: 0.1084 - val_acc: 0.9692
Epoch 65/200
60000/60000 [==============================] - 2s - loss: 0.1644 - acc: 0.9507 - val_loss: 0.1003 - val_acc: 0.9714
Epoch 66/200
60000/60000 [==============================] - 2s - loss: 0.1617 - acc: 0.9517 - val_loss: 0.1002 - val_acc: 0.9705
Epoch 67/200
60000/60000 [==============================] - 2s - loss: 0.1589 - acc: 0.9523 - val_loss: 0.1037 - val_acc: 0.9712
Epoch 68/200
60000/60000 [==============================] - 2s - loss: 0.1551 - acc: 0.9535 - val_loss: 0.1055 - val_acc: 0.9707
Epoch 69/200
60000/60000 [==============================] - 2s - loss: 0.1586 - acc: 0.9521 - val_loss: 0.1038 - val_acc: 0.9699
Epoch 70/200
60000/60000 [==============================] - 2s - loss: 0.1600 - acc: 0.9514 - val_loss: 0.1037 - val_acc: 0.9700
Epoch 71/200
60000/60000 [==============================] - 2s - loss: 0.1569 - acc: 0.9533 - val_loss: 0.1044 - val_acc: 0.9697
Epoch 72/200
60000/60000 [==============================] - 2s - loss: 0.1543 - acc: 0.9534 - val_loss: 0.1083 - val_acc: 0.9700
Epoch 73/200
60000/60000 [==============================] - 2s - loss: 0.1608 - acc: 0.9516 - val_loss: 0.1029 - val_acc: 0.9712
Epoch 74/200
60000/60000 [==============================] - 2s - loss: 0.1583 - acc: 0.9523 - val_loss: 0.1025 - val_acc: 0.9718
Epoch 75/200
60000/60000 [==============================] - 2s - loss: 0.1542 - acc: 0.9543 - val_loss: 0.1072 - val_acc: 0.9698
Epoch 76/200
60000/60000 [==============================] - 2s - loss: 0.1526 - acc: 0.9539 - val_loss: 0.1041 - val_acc: 0.9708
Epoch 77/200
60000/60000 [==============================] - 2s - loss: 0.1594 - acc: 0.9522 - val_loss: 0.1062 - val_acc: 0.9701
Epoch 78/200
60000/60000 [==============================] - 2s - loss: 0.1565 - acc: 0.9524 - val_loss: 0.1060 - val_acc: 0.9700
Epoch 79/200
60000/60000 [==============================] - 2s - loss: 0.1558 - acc: 0.9529 - val_loss: 0.1024 - val_acc: 0.9714
Epoch 80/200
60000/60000 [==============================] - 2s - loss: 0.1530 - acc: 0.9543 - val_loss: 0.1018 - val_acc: 0.9719
Epoch 81/200
60000/60000 [==============================] - 2s - loss: 0.1519 - acc: 0.9536 - val_loss: 0.1053 - val_acc: 0.9716
Epoch 82/200
60000/60000 [==============================] - 2s - loss: 0.1513 - acc: 0.9548 - val_loss: 0.1024 - val_acc: 0.9711
Epoch 83/200
60000/60000 [==============================] - 2s - loss: 0.1568 - acc: 0.9524 - val_loss: 0.1047 - val_acc: 0.9707
Epoch 84/200
60000/60000 [==============================] - 2s - loss: 0.1520 - acc: 0.9554 - val_loss: 0.1014 - val_acc: 0.9701
Epoch 85/200
60000/60000 [==============================] - 2s - loss: 0.1538 - acc: 0.9535 - val_loss: 0.1044 - val_acc: 0.9709
Epoch 86/200
60000/60000 [==============================] - 3s - loss: 0.1525 - acc: 0.9538 - val_loss: 0.1061 - val_acc: 0.9702
Epoch 87/200
60000/60000 [==============================] - 2s - loss: 0.1480 - acc: 0.9559 - val_loss: 0.1017 - val_acc: 0.9716
Epoch 88/200
60000/60000 [==============================] - 2s - loss: 0.1533 - acc: 0.9538 - val_loss: 0.1016 - val_acc: 0.9723
Epoch 89/200
60000/60000 [==============================] - 2s - loss: 0.1518 - acc: 0.9550 - val_loss: 0.1019 - val_acc: 0.9710
Epoch 90/200
60000/60000 [==============================] - 3s - loss: 0.1468 - acc: 0.9562 - val_loss: 0.1049 - val_acc: 0.9712
Epoch 91/200
60000/60000 [==============================] - 3s - loss: 0.1487 - acc: 0.9542 - val_loss: 0.1042 - val_acc: 0.9705
Epoch 92/200
60000/60000 [==============================] - 3s - loss: 0.1475 - acc: 0.9556 - val_loss: 0.1086 - val_acc: 0.9704
Epoch 93/200
60000/60000 [==============================] - 2s - loss: 0.1513 - acc: 0.9539 - val_loss: 0.1044 - val_acc: 0.9706
Epoch 94/200
60000/60000 [==============================] - 3s - loss: 0.1513 - acc: 0.9537 - val_loss: 0.1053 - val_acc: 0.9706
Epoch 95/200
60000/60000 [==============================] - 3s - loss: 0.1455 - acc: 0.9567 - val_loss: 0.1016 - val_acc: 0.9707
Epoch 96/200
60000/60000 [==============================] - 3s - loss: 0.1487 - acc: 0.9557 - val_loss: 0.1041 - val_acc: 0.9703
Epoch 97/200
60000/60000 [==============================] - 2s - loss: 0.1512 - acc: 0.9548 - val_loss: 0.1040 - val_acc: 0.9715
Epoch 98/200
60000/60000 [==============================] - 2s - loss: 0.1496 - acc: 0.9540 - val_loss: 0.1030 - val_acc: 0.9701
Epoch 99/200
60000/60000 [==============================] - 2s - loss: 0.1478 - acc: 0.9557 - val_loss: 0.1058 - val_acc: 0.9702
Epoch 100/200
60000/60000 [==============================] - 2s - loss: 0.1497 - acc: 0.9552 - val_loss: 0.1080 - val_acc: 0.9696
Epoch 101/200
60000/60000 [==============================] - 2s - loss: 0.1486 - acc: 0.9547 - val_loss: 0.1029 - val_acc: 0.9709
Epoch 102/200
60000/60000 [==============================] - 2s - loss: 0.1472 - acc: 0.9563 - val_loss: 0.1056 - val_acc: 0.9711
Epoch 103/200
60000/60000 [==============================] - 2s - loss: 0.1484 - acc: 0.9552 - val_loss: 0.1041 - val_acc: 0.9710
Epoch 104/200
60000/60000 [==============================] - 3s - loss: 0.1440 - acc: 0.9566 - val_loss: 0.1112 - val_acc: 0.9707
Epoch 105/200
60000/60000 [==============================] - 2s - loss: 0.1430 - acc: 0.9565 - val_loss: 0.1115 - val_acc: 0.9710
Epoch 106/200
60000/60000 [==============================] - 2s - loss: 0.1500 - acc: 0.9552 - val_loss: 0.1031 - val_acc: 0.9711
Epoch 107/200
60000/60000 [==============================] - 2s - loss: 0.1478 - acc: 0.9550 - val_loss: 0.1083 - val_acc: 0.9707
Epoch 108/200
60000/60000 [==============================] - 2s - loss: 0.1450 - acc: 0.9556 - val_loss: 0.1046 - val_acc: 0.9701
Epoch 109/200
60000/60000 [==============================] - 2s - loss: 0.1448 - acc: 0.9559 - val_loss: 0.1096 - val_acc: 0.9692
Epoch 110/200
60000/60000 [==============================] - 2s - loss: 0.1471 - acc: 0.9553 - val_loss: 0.1070 - val_acc: 0.9698
Epoch 111/200
60000/60000 [==============================] - 2s - loss: 0.1432 - acc: 0.9572 - val_loss: 0.1054 - val_acc: 0.9699
Epoch 112/200
60000/60000 [==============================] - 2s - loss: 0.1477 - acc: 0.9548 - val_loss: 0.1088 - val_acc: 0.9698
Epoch 113/200
60000/60000 [==============================] - 2s - loss: 0.1449 - acc: 0.9556 - val_loss: 0.1026 - val_acc: 0.9719
Epoch 114/200
60000/60000 [==============================] - 2s - loss: 0.1419 - acc: 0.9569 - val_loss: 0.1085 - val_acc: 0.9709
Epoch 115/200
60000/60000 [==============================] - 2s - loss: 0.1469 - acc: 0.9547 - val_loss: 0.1056 - val_acc: 0.9696
Epoch 116/200
60000/60000 [==============================] - 2s - loss: 0.1434 - acc: 0.9567 - val_loss: 0.1121 - val_acc: 0.9705
Epoch 117/200
60000/60000 [==============================] - 2s - loss: 0.1438 - acc: 0.9569 - val_loss: 0.1105 - val_acc: 0.9692
Epoch 118/200
60000/60000 [==============================] - 2s - loss: 0.1428 - acc: 0.9568 - val_loss: 0.1087 - val_acc: 0.9706
Epoch 119/200
60000/60000 [==============================] - 2s - loss: 0.1439 - acc: 0.9564 - val_loss: 0.1105 - val_acc: 0.9696
Epoch 120/200
60000/60000 [==============================] - 2s - loss: 0.1463 - acc: 0.9560 - val_loss: 0.1055 - val_acc: 0.9696
Epoch 121/200
60000/60000 [==============================] - 2s - loss: 0.1428 - acc: 0.9569 - val_loss: 0.1078 - val_acc: 0.9703
Epoch 122/200
60000/60000 [==============================] - 2s - loss: 0.1455 - acc: 0.9556 - val_loss: 0.1061 - val_acc: 0.9704
Epoch 123/200
60000/60000 [==============================] - 2s - loss: 0.1421 - acc: 0.9581 - val_loss: 0.1098 - val_acc: 0.9696
Epoch 124/200
60000/60000 [==============================] - 2s - loss: 0.1430 - acc: 0.9567 - val_loss: 0.1079 - val_acc: 0.9699
Epoch 125/200
60000/60000 [==============================] - 3s - loss: 0.1424 - acc: 0.9571 - val_loss: 0.1096 - val_acc: 0.9706
Epoch 126/200
60000/60000 [==============================] - 2s - loss: 0.1425 - acc: 0.9570 - val_loss: 0.1087 - val_acc: 0.9700
Epoch 127/200
60000/60000 [==============================] - 2s - loss: 0.1426 - acc: 0.9560 - val_loss: 0.1093 - val_acc: 0.9703
Epoch 128/200
60000/60000 [==============================] - 2s - loss: 0.1437 - acc: 0.9560 - val_loss: 0.1134 - val_acc: 0.9681
Epoch 129/200
60000/60000 [==============================] - 2s - loss: 0.1415 - acc: 0.9568 - val_loss: 0.1079 - val_acc: 0.9709
Epoch 130/200
60000/60000 [==============================] - 2s - loss: 0.1415 - acc: 0.9570 - val_loss: 0.1117 - val_acc: 0.9694
Epoch 131/200
60000/60000 [==============================] - 2s - loss: 0.1387 - acc: 0.9576 - val_loss: 0.1128 - val_acc: 0.9700
Epoch 132/200
60000/60000 [==============================] - 2s - loss: 0.1426 - acc: 0.9577 - val_loss: 0.1075 - val_acc: 0.9702
Epoch 133/200
60000/60000 [==============================] - 2s - loss: 0.1404 - acc: 0.9571 - val_loss: 0.1128 - val_acc: 0.9700
Epoch 134/200
60000/60000 [==============================] - 2s - loss: 0.1431 - acc: 0.9568 - val_loss: 0.1074 - val_acc: 0.9700
Epoch 135/200
60000/60000 [==============================] - 2s - loss: 0.1406 - acc: 0.9567 - val_loss: 0.1111 - val_acc: 0.9697
Epoch 136/200
60000/60000 [==============================] - 2s - loss: 0.1420 - acc: 0.9571 - val_loss: 0.1067 - val_acc: 0.9704
Epoch 137/200
60000/60000 [==============================] - 2s - loss: 0.1404 - acc: 0.9577 - val_loss: 0.1090 - val_acc: 0.9685
Epoch 138/200
60000/60000 [==============================] - 2s - loss: 0.1392 - acc: 0.9572 - val_loss: 0.1064 - val_acc: 0.9702
Epoch 139/200
60000/60000 [==============================] - 2s - loss: 0.1388 - acc: 0.9564 - val_loss: 0.1086 - val_acc: 0.9703
Epoch 140/200
60000/60000 [==============================] - 2s - loss: 0.1364 - acc: 0.9589 - val_loss: 0.1064 - val_acc: 0.9707
Epoch 141/200
60000/60000 [==============================] - 2s - loss: 0.1424 - acc: 0.9568 - val_loss: 0.1095 - val_acc: 0.9697
Epoch 142/200
60000/60000 [==============================] - 2s - loss: 0.1348 - acc: 0.9594 - val_loss: 0.1082 - val_acc: 0.9695
Epoch 143/200
60000/60000 [==============================] - 2s - loss: 0.1340 - acc: 0.9585 - val_loss: 0.1077 - val_acc: 0.9712
Epoch 144/200
60000/60000 [==============================] - 2s - loss: 0.1380 - acc: 0.9569 - val_loss: 0.1079 - val_acc: 0.9706
Epoch 145/200
60000/60000 [==============================] - 2s - loss: 0.1392 - acc: 0.9582 - val_loss: 0.1113 - val_acc: 0.9689
Epoch 146/200
60000/60000 [==============================] - 3s - loss: 0.1416 - acc: 0.9573 - val_loss: 0.1091 - val_acc: 0.9706
Epoch 147/200
60000/60000 [==============================] - 2s - loss: 0.1388 - acc: 0.9577 - val_loss: 0.1079 - val_acc: 0.9702
Epoch 148/200
60000/60000 [==============================] - 2s - loss: 0.1389 - acc: 0.9584 - val_loss: 0.1095 - val_acc: 0.9692
Epoch 149/200
60000/60000 [==============================] - 2s - loss: 0.1357 - acc: 0.9590 - val_loss: 0.1080 - val_acc: 0.9706
Epoch 150/200
60000/60000 [==============================] - 2s - loss: 0.1374 - acc: 0.9585 - val_loss: 0.1081 - val_acc: 0.9701
Epoch 151/200
60000/60000 [==============================] - 3s - loss: 0.1384 - acc: 0.9587 - val_loss: 0.1080 - val_acc: 0.9708
Epoch 152/200
60000/60000 [==============================] - 2s - loss: 0.1379 - acc: 0.9580 - val_loss: 0.1095 - val_acc: 0.9709
Epoch 153/200
60000/60000 [==============================] - 2s - loss: 0.1388 - acc: 0.9580 - val_loss: 0.1089 - val_acc: 0.9708
Epoch 154/200
60000/60000 [==============================] - 2s - loss: 0.1400 - acc: 0.9575 - val_loss: 0.1106 - val_acc: 0.9699
Epoch 155/200
60000/60000 [==============================] - 2s - loss: 0.1374 - acc: 0.9586 - val_loss: 0.1076 - val_acc: 0.9713
Epoch 156/200
60000/60000 [==============================] - 2s - loss: 0.1360 - acc: 0.9595 - val_loss: 0.1119 - val_acc: 0.9698
Epoch 157/200
60000/60000 [==============================] - 2s - loss: 0.1374 - acc: 0.9579 - val_loss: 0.1104 - val_acc: 0.9711
Epoch 158/200
60000/60000 [==============================] - 2s - loss: 0.1392 - acc: 0.9586 - val_loss: 0.1085 - val_acc: 0.9702
Epoch 159/200
60000/60000 [==============================] - 2s - loss: 0.1330 - acc: 0.9594 - val_loss: 0.1085 - val_acc: 0.9710
Epoch 160/200
60000/60000 [==============================] - 2s - loss: 0.1345 - acc: 0.9588 - val_loss: 0.1085 - val_acc: 0.9701
Epoch 161/200
60000/60000 [==============================] - 2s - loss: 0.1344 - acc: 0.9596 - val_loss: 0.1104 - val_acc: 0.9693
Epoch 162/200
60000/60000 [==============================] - 2s - loss: 0.1363 - acc: 0.9590 - val_loss: 0.1082 - val_acc: 0.9697
Epoch 163/200
60000/60000 [==============================] - 2s - loss: 0.1389 - acc: 0.9577 - val_loss: 0.1089 - val_acc: 0.9696
Epoch 164/200
60000/60000 [==============================] - 2s - loss: 0.1342 - acc: 0.9585 - val_loss: 0.1106 - val_acc: 0.9706
Epoch 165/200
60000/60000 [==============================] - 2s - loss: 0.1369 - acc: 0.9590 - val_loss: 0.1086 - val_acc: 0.9702
Epoch 166/200
60000/60000 [==============================] - 2s - loss: 0.1357 - acc: 0.9585 - val_loss: 0.1083 - val_acc: 0.9700
Epoch 167/200
60000/60000 [==============================] - 3s - loss: 0.1338 - acc: 0.9594 - val_loss: 0.1116 - val_acc: 0.9696
Epoch 168/200
60000/60000 [==============================] - 2s - loss: 0.1381 - acc: 0.9577 - val_loss: 0.1108 - val_acc: 0.9706
Epoch 169/200
60000/60000 [==============================] - 2s - loss: 0.1357 - acc: 0.9586 - val_loss: 0.1108 - val_acc: 0.9704
Epoch 170/200
60000/60000 [==============================] - 2s - loss: 0.1383 - acc: 0.9579 - val_loss: 0.1137 - val_acc: 0.9700
Epoch 171/200
60000/60000 [==============================] - 2s - loss: 0.1325 - acc: 0.9593 - val_loss: 0.1139 - val_acc: 0.9687
Epoch 172/200
60000/60000 [==============================] - 3s - loss: 0.1383 - acc: 0.9581 - val_loss: 0.1124 - val_acc: 0.9687
Epoch 173/200
60000/60000 [==============================] - 3s - loss: 0.1375 - acc: 0.9589 - val_loss: 0.1092 - val_acc: 0.9696
Epoch 174/200
60000/60000 [==============================] - 2s - loss: 0.1349 - acc: 0.9592 - val_loss: 0.1099 - val_acc: 0.9699
Epoch 175/200
60000/60000 [==============================] - 2s - loss: 0.1360 - acc: 0.9582 - val_loss: 0.1127 - val_acc: 0.9692
Epoch 176/200
60000/60000 [==============================] - 2s - loss: 0.1333 - acc: 0.9589 - val_loss: 0.1140 - val_acc: 0.9702
Epoch 177/200
60000/60000 [==============================] - 2s - loss: 0.1382 - acc: 0.9576 - val_loss: 0.1126 - val_acc: 0.9692
Epoch 178/200
60000/60000 [==============================] - 2s - loss: 0.1334 - acc: 0.9589 - val_loss: 0.1100 - val_acc: 0.9692
Epoch 179/200
60000/60000 [==============================] - 2s - loss: 0.1318 - acc: 0.9597 - val_loss: 0.1080 - val_acc: 0.9702
Epoch 180/200
60000/60000 [==============================] - 2s - loss: 0.1348 - acc: 0.9591 - val_loss: 0.1074 - val_acc: 0.9702
Epoch 181/200
60000/60000 [==============================] - 2s - loss: 0.1312 - acc: 0.9608 - val_loss: 0.1130 - val_acc: 0.9698
Epoch 182/200
60000/60000 [==============================] - 3s - loss: 0.1337 - acc: 0.9593 - val_loss: 0.1101 - val_acc: 0.9703
Epoch 183/200
60000/60000 [==============================] - 2s - loss: 0.1346 - acc: 0.9586 - val_loss: 0.1072 - val_acc: 0.9712
Epoch 184/200
60000/60000 [==============================] - 3s - loss: 0.1324 - acc: 0.9598 - val_loss: 0.1112 - val_acc: 0.9693
Epoch 185/200
60000/60000 [==============================] - 2s - loss: 0.1357 - acc: 0.9595 - val_loss: 0.1089 - val_acc: 0.9710
Epoch 186/200
60000/60000 [==============================] - 2s - loss: 0.1298 - acc: 0.9600 - val_loss: 0.1089 - val_acc: 0.9714
Epoch 187/200
60000/60000 [==============================] - 3s - loss: 0.1343 - acc: 0.9585 - val_loss: 0.1123 - val_acc: 0.9709
Epoch 188/200
60000/60000 [==============================] - 2s - loss: 0.1350 - acc: 0.9584 - val_loss: 0.1083 - val_acc: 0.9713
Epoch 189/200
60000/60000 [==============================] - 3s - loss: 0.1345 - acc: 0.9590 - val_loss: 0.1098 - val_acc: 0.9705
Epoch 190/200
60000/60000 [==============================] - 3s - loss: 0.1321 - acc: 0.9584 - val_loss: 0.1099 - val_acc: 0.9708
Epoch 191/200
60000/60000 [==============================] - 2s - loss: 0.1354 - acc: 0.9595 - val_loss: 0.1127 - val_acc: 0.9703
Epoch 192/200
60000/60000 [==============================] - 3s - loss: 0.1326 - acc: 0.9596 - val_loss: 0.1122 - val_acc: 0.9686
Epoch 193/200
60000/60000 [==============================] - 3s - loss: 0.1319 - acc: 0.9605 - val_loss: 0.1102 - val_acc: 0.9699
Epoch 194/200
60000/60000 [==============================] - 3s - loss: 0.1312 - acc: 0.9604 - val_loss: 0.1091 - val_acc: 0.9707
Epoch 195/200
60000/60000 [==============================] - 2s - loss: 0.1353 - acc: 0.9589 - val_loss: 0.1097 - val_acc: 0.9704
Epoch 196/200
60000/60000 [==============================] - 3s - loss: 0.1344 - acc: 0.9580 - val_loss: 0.1131 - val_acc: 0.9701
Epoch 197/200
60000/60000 [==============================] - 3s - loss: 0.1319 - acc: 0.9603 - val_loss: 0.1116 - val_acc: 0.9698
Epoch 198/200
60000/60000 [==============================] - 3s - loss: 0.1328 - acc: 0.9591 - val_loss: 0.1096 - val_acc: 0.9701
Epoch 199/200
60000/60000 [==============================] - 3s - loss: 0.1316 - acc: 0.9603 - val_loss: 0.1127 - val_acc: 0.9696
Epoch 200/200
60000/60000 [==============================] - 2s - loss: 0.1323 - acc: 0.9589 - val_loss: 0.1136 - val_acc: 0.9690
Out[9]:
<keras.callbacks.History at 0x7f321175a748>
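Because validation_data is the held-out test set, the val_acc column above already reports test-set accuracy after each epoch (settling around 0.97). If desired, a final explicit evaluation could be added as a follow-up cell, for example:

score = model.evaluate(X_test, y_test, verbose=0)  # returns [test loss, test accuracy]
print(score)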
In [ ]:
Content source: the-deep-learners/TensorFlow-LiveLessons