In this notebook, we use Keras to build a neural network that classifies MNIST handwritten digits, putting into practice the theory we've learned thus far.
In [ ]:
import numpy as np
np.random.seed(42)
In [ ]:
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout # new!
from keras.optimizers import SGD
In [ ]:
# load MNIST: 60,000 training and 10,000 test images of handwritten digits, each 28x28 pixels
(X_train, y_train), (X_test, y_test) = mnist.load_data()
In [ ]:
# flatten each 28x28 image into a 784-element vector of 32-bit floats
X_train = X_train.reshape(60000, 784).astype('float32')
X_test = X_test.reshape(10000, 784).astype('float32')
In [ ]:
# scale pixel intensities from the integer range [0, 255] down to [0, 1]
X_train /= 255
X_test /= 255
In [ ]:
# one-hot encode the integer labels, e.g. 3 becomes [0, 0, 0, 1, 0, 0, 0, 0, 0, 0]
n_classes = 10
y_train = keras.utils.to_categorical(y_train, n_classes)
y_test = keras.utils.to_categorical(y_test, n_classes)
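If we want to confirm the preprocessing, a quick optional sanity check of the array shapes (not part of the original pipeline) looks like this:
In [ ]:
# expect (60000, 784) and (60000, 10) for training, (10000, 784) and (10000, 10) for test
print(X_train.shape, y_train.shape)
print(X_test.shape, y_test.shape)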
In [ ]:
model = Sequential()
model.add(Dense(64, activation='sigmoid', input_shape=(784,)))  # hidden layer: 64 sigmoid neurons
model.add(Dense(10, activation='softmax'))  # output layer: one probability per digit class
In [ ]:
model.summary()
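The summary should report 50,890 trainable parameters: 784 × 64 weights + 64 biases = 50,240 in the hidden layer, and 64 × 10 weights + 10 biases = 650 in the output layer.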
In [ ]:
# quadratic (MSE) cost, optimized with plain stochastic gradient descent at a learning rate of 0.01
model.compile(loss='mean_squared_error', optimizer=SGD(lr=0.01), metrics=['accuracy'])
In [ ]:
# train for 200 epochs in mini-batches of 128, reporting accuracy on the held-out test set each epoch
model.fit(X_train, y_train, batch_size=128, epochs=200, verbose=1, validation_data=(X_test, y_test))
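Finally, we can score the trained model on the test set. This is a minimal sketch using Keras's standard evaluate method, which returns the test loss and accuracy given the metric configured above:
In [ ]:
# returns [test loss, test accuracy] for the compiled loss and metrics
model.evaluate(X_test, y_test)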