In [1]:
# Instantiating a small convolutional neural network
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
In [2]:
# Convolutional base: three Conv2D stages, with 2x2 max-pooling after
# each of the first two convolutions to shrink the feature maps.
model = Sequential()
model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
model.add(MaxPooling2D((2, 2)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D((2, 2)))
model.add(Conv2D(64, (3, 3), activation='relu'))
In [3]:
# Display the architecture of the convolutional base so far
# (layer types, output shapes, and parameter counts).
model.summary()
In [4]:
from keras.layers import Dense, Flatten
In [5]:
# Classifier head: flatten the 3D feature maps into a vector, then a
# fully connected layer followed by a 10-way softmax output layer.
model.add(Flatten())
model.add(Dense(64, activation='relu'))
model.add(Dense(10, activation='softmax'))
In [6]:
# Display the full architecture again, now including the
# Flatten and Dense layers added above.
model.summary()
In [7]:
from keras.datasets import mnist
In [8]:
# Load the MNIST dataset: 60,000 training and 10,000 test images
# of handwritten digits (28x28 pixels), with their integer labels.
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
In [9]:
# Add a trailing channels axis so each image matches the network's
# expected input shape (28, 28, 1). Using -1 lets NumPy infer the
# sample count instead of hard-coding 60000/10000, so this cell keeps
# working if the dataset split size ever changes.
train_images = train_images.reshape((-1, 28, 28, 1))
test_images = test_images.reshape((-1, 28, 28, 1))
In [10]:
# Cast the image arrays to float32, the floating-point
# dtype used by Keras, ahead of the scaling step below.
train_images, test_images = (
    images.astype('float32') for images in (train_images, test_images)
)
In [11]:
# Normalization: pixel values arrive in the range [0, 255];
# scale them into [0, 1], which is preferred for training.
train_images = train_images / 255
test_images = test_images / 255
In [12]:
from keras.utils import to_categorical
In [13]:
# Inspect the first training label before encoding
# (displayed below as an integer class id).
train_labels[0]
Out[13]:
In [14]:
# One-hot encoding: convert the integer class labels into binary
# class vectors, as required by the categorical_crossentropy loss.
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)
In [15]:
# Inspect the same label after encoding (now a one-hot vector).
train_labels[0]
Out[15]:
In [16]:
# Configure the learning process: RMSprop optimizer, categorical
# cross-entropy loss, and accuracy reported during training.
compile_kwargs = dict(
    optimizer='rmsprop',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
model.compile(**compile_kwargs)
In [17]:
# Train the network for 5 epochs on mini-batches of 64 images.
model.fit(x=train_images, y=train_labels, epochs=5, batch_size=64)
Out[17]:
In [18]:
# Evaluate the trained model on the held-out test set; returns the
# loss and the accuracy metric configured in model.compile above.
test_loss, test_acc = model.evaluate(test_images, test_labels)
In [19]:
# Display the test-set accuracy.
test_acc
Out[19]:
The network improves on the predictions of the previous architectures, even though it was trained for only 5 epochs.