In [1]:
from keras.layers import Input, Dense, Dropout, BatchNormalization
from keras.models import Model
from keras.datasets import mnist
from keras.callbacks import EarlyStopping
from keras.models import Sequential, load_model
from keras.optimizers import RMSprop
from keras.callbacks import TensorBoard
from __future__ import print_function
from IPython.display import SVG, Image
from keras import regularizers, Model
from matplotlib import rc

import keras
import matplotlib.pyplot as plt
import numpy as np


Using TensorFlow backend.

In [10]:
%matplotlib inline
font = {'family' : 'monospace',
        'weight' : 'bold',
        'size'   : 20}

rc('font', **font)

In [2]:
num_classes = 10
input_dim = 784
batch_size = 256

(x_train, y_train), (x_val, y_val) = mnist.load_data()

x_train = x_train.astype('float32') / 255.
x_val = x_val.astype('float32') / 255.

# x_train = np.concatenate((x_train, x_val))

x_train = x_train.reshape((len(x_train), np.prod(x_train.shape[1:])))
x_val = x_val.reshape((len(x_val), np.prod(x_val.shape[1:])))
# print(x_train.shape)

y_train = keras.utils.to_categorical(y_train, num_classes)
y_val = keras.utils.to_categorical(y_val, num_classes)

Simple feed forward network with one hidden layer


In [14]:
hidden1_dim = 512
hidden2_dim = 512

In [15]:
input_data = Input(shape=(input_dim,), dtype='float32', name='main_input')
x = Dense(hidden1_dim, activation='relu', kernel_initializer='normal')(input_data)
x = Dropout(0.2)(x)
x = Dense(hidden2_dim, activation='relu', kernel_initializer='normal')(x)
x = Dropout(0.2)(x)
output_layer = Dense(num_classes, activation='softmax', kernel_initializer='normal')(x)

model = Model(input_data, output_layer)

model.compile(loss='binary_crossentropy',
              optimizer=RMSprop(),
              metrics=['accuracy'])

In [16]:
model = keras.models.load_model('models/model.h5')
# model.fit(x_train, y_train, 
#           batch_size=batch_size,
#           epochs=20,
#           shuffle=True,
#           verbose=0,
#           validation_split=0.1)
# model.save('models/model.h5')


Out[16]:
<keras.callbacks.History at 0x24bedcb5e80>

In [17]:
score = model.evaluate(x_val, y_val, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])


Test loss: 0.0161738830416
Test accuracy: 0.996779996109

In [18]:
fig = plt.figure(figsize=(20,10))
plt.plot(model.history.history['val_acc'])
plt.plot(model.history.history['acc'])
plt.axhline(y=score[1], c="red")
plt.text(0, score[1], "test: " + str(round(score[1], 4)), fontdict=font)
plt.title('model accuracy for neural net with 2 hidden layers')
plt.ylabel('accuracy')
plt.xlabel('epochs')
plt.legend(['valid', 'train'], loc='lower right')
plt.show()


Deep Autoencoder


In [19]:
encoding_dim1 = 128
encoding_dim2 = 64
encoding_dim3 = 32
decoding_dim1 = 64
decoding_dim2 = 128
decoding_dim3 = input_dim
epochs = 100
batch_size = 256

In [20]:
input_img = Input(shape=(input_dim,))
encoded = Dense(encoding_dim1, activation='relu')(input_img)
encoded = Dense(encoding_dim2, activation='relu')(encoded)
encoded = Dense(encoding_dim3, activation='relu')(encoded)

decoded = Dense(decoding_dim1, activation='relu')(encoded)
decoded = Dense(decoding_dim2, activation='relu')(decoded)
decoded = Dense(decoding_dim3, activation='sigmoid')(decoded)

In [22]:
deep_autoencoder = Model(input_img, decoded)
deep_autoencoder.compile(optimizer=RMSprop(), loss='binary_crossentropy')

In [23]:
deep_autoencoder = keras.models.load_model('models/deep_autoencoder.h5')
# deep_autoencoder.fit(x_train, x_train,
#                 epochs=epochs,
#                 batch_size=batch_size,
#                 shuffle=True,
#                 validation_split=0.1)
# deep_autoencoder.save('models/deep_autoencoder.h5')


Train on 54000 samples, validate on 6000 samples
Epoch 1/100
54000/54000 [==============================] - 8s 144us/step - loss: 0.2342 - val_loss: 0.1833s: 0.23
Epoch 2/100
54000/54000 [==============================] - 7s 125us/step - loss: 0.1696 - val_loss: 0.1593
Epoch 3/100
54000/54000 [==============================] - 7s 134us/step - loss: 0.1526 - val_loss: 0.1440
Epoch 4/100
54000/54000 [==============================] - 7s 126us/step - loss: 0.1425 - val_loss: 0.1369
Epoch 5/100
54000/54000 [==============================] - 6s 118us/step - loss: 0.1350 - val_loss: 0.1304
Epoch 6/100
54000/54000 [==============================] - 6s 120us/step - loss: 0.1298 - val_loss: 0.1256
Epoch 7/100
54000/54000 [==============================] - 7s 122us/step - loss: 0.1258 - val_loss: 0.1230
Epoch 8/100
54000/54000 [==============================] - 6s 118us/step - loss: 0.1226 - val_loss: 0.1201
Epoch 9/100
54000/54000 [==============================] - 6s 119us/step - loss: 0.1199 - val_loss: 0.1182
Epoch 10/100
54000/54000 [==============================] - 6s 120us/step - loss: 0.1174 - val_loss: 0.1196
Epoch 11/100
54000/54000 [==============================] - 7s 123us/step - loss: 0.1152 - val_loss: 0.1141
Epoch 12/100
54000/54000 [==============================] - 7s 123us/step - loss: 0.1134 - val_loss: 0.1135
Epoch 13/100
54000/54000 [==============================] - 6s 120us/step - loss: 0.1119 - val_loss: 0.1114
Epoch 14/100
54000/54000 [==============================] - 7s 122us/step - loss: 0.1106 - val_loss: 0.1123
Epoch 15/100
54000/54000 [==============================] - 7s 121us/step - loss: 0.1094 - val_loss: 0.1095
Epoch 16/100
54000/54000 [==============================] - 6s 119us/step - loss: 0.1083 - val_loss: 0.1081
Epoch 17/100
54000/54000 [==============================] - 6s 120us/step - loss: 0.1074 - val_loss: 0.1093
Epoch 18/100
54000/54000 [==============================] - 7s 121us/step - loss: 0.1064 - val_loss: 0.1048
Epoch 19/100
54000/54000 [==============================] - 6s 120us/step - loss: 0.1055 - val_loss: 0.1073
Epoch 20/100
54000/54000 [==============================] - 7s 120us/step - loss: 0.1048 - val_loss: 0.1052
Epoch 21/100
54000/54000 [==============================] - 7s 125us/step - loss: 0.1040 - val_loss: 0.1045
Epoch 22/100
54000/54000 [==============================] - 7s 125us/step - loss: 0.1033 - val_loss: 0.1040
Epoch 23/100
54000/54000 [==============================] - 7s 130us/step - loss: 0.1027 - val_loss: 0.1025
Epoch 24/100
54000/54000 [==============================] - 7s 128us/step - loss: 0.1021 - val_loss: 0.1022
Epoch 25/100
54000/54000 [==============================] - 7s 122us/step - loss: 0.1016 - val_loss: 0.1014
Epoch 26/100
54000/54000 [==============================] - 7s 123us/step - loss: 0.1010 - val_loss: 0.1000
Epoch 27/100
54000/54000 [==============================] - 7s 120us/step - loss: 0.1007 - val_loss: 0.0997
Epoch 28/100
54000/54000 [==============================] - 7s 121us/step - loss: 0.1002 - val_loss: 0.1019
Epoch 29/100
54000/54000 [==============================] - 7s 122us/step - loss: 0.0998 - val_loss: 0.0999
Epoch 30/100
54000/54000 [==============================] - 7s 122us/step - loss: 0.0995 - val_loss: 0.0984
Epoch 31/100
54000/54000 [==============================] - 6s 117us/step - loss: 0.0991 - val_loss: 0.1023
Epoch 32/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0988 - val_loss: 0.1018
Epoch 33/100
54000/54000 [==============================] - 6s 117us/step - loss: 0.0984 - val_loss: 0.0995
Epoch 34/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0982 - val_loss: 0.0986
Epoch 35/100
54000/54000 [==============================] - 6s 118us/step - loss: 0.0979 - val_loss: 0.1011
Epoch 36/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0976 - val_loss: 0.0979
Epoch 37/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0974 - val_loss: 0.0984
Epoch 38/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0972 - val_loss: 0.0977
Epoch 39/100
54000/54000 [==============================] - 6s 116us/step - loss: 0.0970 - val_loss: 0.0984
Epoch 40/100
54000/54000 [==============================] - 7s 121us/step - loss: 0.0967 - val_loss: 0.0970
Epoch 41/100
54000/54000 [==============================] - 6s 118us/step - loss: 0.0965 - val_loss: 0.0980
Epoch 42/100
54000/54000 [==============================] - 6s 118us/step - loss: 0.0963 - val_loss: 0.0957
Epoch 43/100
54000/54000 [==============================] - 6s 119us/step - loss: 0.0961 - val_loss: 0.0983
Epoch 44/100
54000/54000 [==============================] - 6s 118us/step - loss: 0.0959 - val_loss: 0.0967
Epoch 45/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0958 - val_loss: 0.0980
Epoch 46/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0956 - val_loss: 0.0964
Epoch 47/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0954 - val_loss: 0.0968
Epoch 48/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0953 - val_loss: 0.0984
Epoch 49/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0951 - val_loss: 0.0955
Epoch 50/100
54000/54000 [==============================] - 6s 109us/step - loss: 0.0949 - val_loss: 0.0976
Epoch 51/100
54000/54000 [==============================] - 6s 109us/step - loss: 0.0948 - val_loss: 0.0969
Epoch 52/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0947 - val_loss: 0.0959
Epoch 53/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0944 - val_loss: 0.0943
Epoch 54/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0943 - val_loss: 0.0964
Epoch 55/100
54000/54000 [==============================] - 6s 112us/step - loss: 0.0942 - val_loss: 0.0964
Epoch 56/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0940 - val_loss: 0.0947
Epoch 57/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0939 - val_loss: 0.0952
Epoch 58/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0937 - val_loss: 0.0960
Epoch 59/100
54000/54000 [==============================] - 7s 124us/step - loss: 0.0936 - val_loss: 0.0946
Epoch 60/100
54000/54000 [==============================] - 6s 120us/step - loss: 0.0935 - val_loss: 0.0942
Epoch 61/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0934 - val_loss: 0.0937
Epoch 62/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0932 - val_loss: 0.0950
Epoch 63/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0931 - val_loss: 0.0950
Epoch 64/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0930 - val_loss: 0.0938
Epoch 65/100
54000/54000 [==============================] - 6s 119us/step - loss: 0.0928 - val_loss: 0.0955
Epoch 66/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0927 - val_loss: 0.0941
Epoch 67/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0926 - val_loss: 0.0932
Epoch 68/100
54000/54000 [==============================] - 6s 112us/step - loss: 0.0925 - val_loss: 0.0959
Epoch 69/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0924 - val_loss: 0.0932
Epoch 70/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0924 - val_loss: 0.0929
Epoch 71/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0922 - val_loss: 0.0921
Epoch 72/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0921 - val_loss: 0.0952
Epoch 73/100
54000/54000 [==============================] - 6s 115us/step - loss: 0.0920 - val_loss: 0.0932
Epoch 74/100
54000/54000 [==============================] - 6s 116us/step - loss: 0.0919 - val_loss: 0.0944
Epoch 75/100
54000/54000 [==============================] - 6s 110us/step - loss: 0.0919 - val_loss: 0.0921
Epoch 76/100
54000/54000 [==============================] - 6s 108us/step - loss: 0.0917 - val_loss: 0.0938
Epoch 77/100
54000/54000 [==============================] - 6s 120us/step - loss: 0.0917 - val_loss: 0.0920
Epoch 78/100
54000/54000 [==============================] - 7s 122us/step - loss: 0.0916 - val_loss: 0.0932
Epoch 79/100
54000/54000 [==============================] - 7s 122us/step - loss: 0.0914 - val_loss: 0.0919
Epoch 80/100
54000/54000 [==============================] - 6s 112us/step - loss: 0.0914 - val_loss: 0.0933
Epoch 81/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0913 - val_loss: 0.0944
Epoch 82/100
54000/54000 [==============================] - 6s 113us/step - loss: 0.0913 - val_loss: 0.0917
Epoch 83/100
54000/54000 [==============================] - 6s 114us/step - loss: 0.0911 - val_loss: 0.0935
Epoch 84/100
54000/54000 [==============================] - 7s 131us/step - loss: 0.0911 - val_loss: 0.0930
Epoch 85/100
54000/54000 [==============================] - 7s 132us/step - loss: 0.0911 - val_loss: 0.0938
Epoch 86/100
54000/54000 [==============================] - 6s 120us/step - loss: 0.0909 - val_loss: 0.0916
Epoch 87/100
54000/54000 [==============================] - 6s 111us/step - loss: 0.0909 - val_loss: 0.0937
Epoch 88/100
54000/54000 [==============================] - 7s 126us/step - loss: 0.0908 - val_loss: 0.0919
Epoch 89/100
54000/54000 [==============================] - 7s 128us/step - loss: 0.0907 - val_loss: 0.0899
Epoch 90/100
54000/54000 [==============================] - 6s 119us/step - loss: 0.0906 - val_loss: 0.0922
Epoch 91/100
54000/54000 [==============================] - 6s 117us/step - loss: 0.0906 - val_loss: 0.0944
Epoch 92/100
54000/54000 [==============================] - 6s 117us/step - loss: 0.0906 - val_loss: 0.0930
Epoch 93/100
54000/54000 [==============================] - 7s 123us/step - loss: 0.0905 - val_loss: 0.0909
Epoch 94/100
54000/54000 [==============================] - 7s 124us/step - loss: 0.0904 - val_loss: 0.0918
Epoch 95/100
54000/54000 [==============================] - 7s 124us/step - loss: 0.0903 - val_loss: 0.0922
Epoch 96/100
54000/54000 [==============================] - 7s 121us/step - loss: 0.0902 - val_loss: 0.0921
Epoch 97/100
54000/54000 [==============================] - 7s 130us/step - loss: 0.0902 - val_loss: 0.0923
Epoch 98/100
54000/54000 [==============================] - 7s 128us/step - loss: 0.0902 - val_loss: 0.0903
Epoch 99/100
54000/54000 [==============================] - 7s 125us/step - loss: 0.0901 - val_loss: 0.0920
Epoch 100/100
54000/54000 [==============================] - 7s 127us/step - loss: 0.0900 - val_loss: 0.0916
Out[23]:
<keras.callbacks.History at 0x24bf01072e8>

In [24]:
score = deep_autoencoder.evaluate(x_val, x_val, verbose=0)
print(score)


0.0907726646543

In [26]:
decoded_imgs = deep_autoencoder.predict(x_val)

In [27]:
n = 10  # how many digits we will display
plt.figure(figsize=(20, 4))
for i in range(n):
    # display original
    ax = plt.subplot(2, n, i + 1)
    plt.imshow(x_val[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # display reconstruction
    ax = plt.subplot(2, n, i + 1 + n)
    plt.imshow(decoded_imgs[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()


Deep Autoencoder Classifier


In [ ]:
dc_encoded = Dense(encoding_dim1, activation='relu')(input_img)
dc_encoded = Dense(encoding_dim2, activation='relu')(dc_encoded)
dc_encoded = Dense(encoding_dim3, activation='relu')(dc_encoded)
dc_class_layer = Dense(num_classes, activation='softmax')(dc_encoded)

dc = Model(inputs=input_img, outputs=dc_class_layer)

dc.layers[1].set_weights(deep_autoencoder.layers[1].get_weights())
dc.layers[2].set_weights(deep_autoencoder.layers[2].get_weights())
dc.layers[3].set_weights(deep_autoencoder.layers[3].get_weights())
dc.compile(loss='binary_crossentropy', optimizer=RMSprop(), metrics=['accuracy'])

In [ ]:
dc = keras.models.load_model('models/cd.h5')
# dc.fit(x_train, y_train
#        , epochs=7
#        , verbose=True
#        , batch_size=batch_size
#        , validation_split=0.1
#        , shuffle=True)
# dc.save('models/dc.h5')

In [36]:
df_score = dc.evaluate(x_val, y_val)
print('Test loss:', df_score[0])
print('Test accuracy:', df_score[1])


Train on 54000 samples, validate on 6000 samples
Epoch 1/20
54000/54000 [==============================] - 3s 52us/step - loss: 0.0900 - acc: 0.9714 - val_loss: 0.0276 - val_acc: 0.9913
Epoch 2/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0257 - acc: 0.9914 - val_loss: 0.0171 - val_acc: 0.9940
Epoch 3/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0180 - acc: 0.9941 - val_loss: 0.0143 - val_acc: 0.9952
Epoch 4/20
54000/54000 [==============================] - 2s 40us/step - loss: 0.0140 - acc: 0.9953 - val_loss: 0.0143 - val_acc: 0.9950
Epoch 5/20
54000/54000 [==============================] - 2s 40us/step - loss: 0.0116 - acc: 0.9963 - val_loss: 0.0128 - val_acc: 0.9956
Epoch 6/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0095 - acc: 0.9969 - val_loss: 0.0140 - val_acc: 0.9950
Epoch 7/20
54000/54000 [==============================] - 2s 40us/step - loss: 0.0079 - acc: 0.9974 - val_loss: 0.0132 - val_acc: 0.9956
Epoch 8/20
54000/54000 [==============================] - 2s 40us/step - loss: 0.0067 - acc: 0.9979 - val_loss: 0.0136 - val_acc: 0.9956
Epoch 9/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0055 - acc: 0.9983 - val_loss: 0.0120 - val_acc: 0.9962
Epoch 10/20
54000/54000 [==============================] - 2s 40us/step - loss: 0.0047 - acc: 0.9985 - val_loss: 0.0145 - val_acc: 0.9955
Epoch 11/20
54000/54000 [==============================] - 2s 41us/step - loss: 0.0039 - acc: 0.9988 - val_loss: 0.0147 - val_acc: 0.9956
Epoch 12/20
54000/54000 [==============================] - 2s 40us/step - loss: 0.0033 - acc: 0.9990 - val_loss: 0.0177 - val_acc: 0.9950
Epoch 13/20
54000/54000 [==============================] - 2s 41us/step - loss: 0.0029 - acc: 0.9991 - val_loss: 0.0138 - val_acc: 0.9962
Epoch 14/20
54000/54000 [==============================] - 2s 41us/step - loss: 0.0023 - acc: 0.9993 - val_loss: 0.0144 - val_acc: 0.9961
Epoch 15/20
54000/54000 [==============================] - 2s 42us/step - loss: 0.0019 - acc: 0.9995 - val_loss: 0.0185 - val_acc: 0.9956
Epoch 16/20
54000/54000 [==============================] - 2s 43us/step - loss: 0.0017 - acc: 0.9995 - val_loss: 0.0171 - val_acc: 0.9960
Epoch 17/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0014 - acc: 0.9996 - val_loss: 0.0162 - val_acc: 0.9958
Epoch 18/20
54000/54000 [==============================] - 2s 41us/step - loss: 0.0013 - acc: 0.9997 - val_loss: 0.0171 - val_acc: 0.9960
Epoch 19/20
54000/54000 [==============================] - 2s 41us/step - loss: 0.0011 - acc: 0.9997 - val_loss: 0.0174 - val_acc: 0.9963
Epoch 20/20
54000/54000 [==============================] - 2s 41us/step - loss: 9.5712e-04 - acc: 0.9997 - val_loss: 0.0188 - val_acc: 0.9959
10000/10000 [==============================] - 1s 53us/step
Test loss: [0.018384219086539498, 0.99570999622344969]
Test accuracy: [0.018384219086539498, 0.99570999622344969]

Stacked Autoencoder

First layer


In [ ]:
encoding_dim1 = 128
epoch1 = 8

input_img = Input(shape=(input_dim,))
encoded1 = Dense(encoding_dim1, activation='relu')(input_img)
decoded1 = Dense(input_dim, activation='relu')(encoded1)
class1 = Dense(num_classes, activation='softmax')(decoded1)

autoencoder1 = Model(input_img, class1)
autoencoder1.compile(optimizer=RMSprop(), loss='binary_crossentropy', metrics=['accuracy'])
encoder1 = Model(input_img, encoded1)
encoder1.compile(optimizer=RMSprop(), loss='binary_crossentropy')

In [40]:
autoencoder1 = keras.models.load_model('models/autoencoder1.h5')
encoder1 = keras.models.load_model('models/encoder1.h5')
# autoencoder1.fit(x_train
#                  , y_train
#                  , epochs=epoch1
#                  , batch_size=batch_size
#                  , shuffle=True
#                  , verbose=False
#                  , validation_split=0.1
#                  )
# autoencoder1.save('models/autoencoder1.h5')
# encoder1.save('models/encoder1.h5')

In [28]:
score1 = autoencoder1.evaluate(x_val, y_val, verbose=0)
print('Test loss:', score1[0])
print('Test accuracy:', score1[1])


Test loss: 0.0133086971708
Test accuracy: 0.996199996948

Second Layer


In [ ]:
first_layer_code = encoder1.predict(x_train)

encoding_dim2 = 64
epoch2 = 5

encoded_2_input = Input(shape=(encoding_dim1,))
encoded2 = Dense(encoding_dim2, activation='relu')(encoded_2_input)
decoded2 = Dense(encoding_dim1, activation='relu')(encoded2)
class2 = Dense(num_classes, activation='softmax')(decoded2)

autoencoder2 = Model(encoded_2_input, class2)
autoencoder2.compile(optimizer=RMSprop(), loss='binary_crossentropy', metrics=['accuracy'])
encoder2 = Model(encoded_2_input, encoded2)
encoder2.compile(optimizer=RMSprop(), loss='binary_crossentropy')

In [41]:
autoencoder2 = keras.models.load_model('models/autoencoder2.h5')
encoder2 = keras.models.load_model('models/encoder2.h5')
# autoencoder2.fit(first_layer_code
#                  , y_train
#                  , epochs=epoch2
#                  , batch_size=batch_size
#                  , shuffle=True
#                  , verbose=False
#                  , validation_split=0.1
#                  )
# autoencoder2.save('models/autoencoder2.h5')
# encoder2.save('models/encoder2.h5')

In [29]:
first_layer_code_val = encoder1.predict(x_val)

score2 = autoencoder2.evaluate(first_layer_code_val, y_val, verbose=0)
print('Test loss:', score2[0])
print('Test accuracy:', score2[1])


Train on 54000 samples, validate on 6000 samples
Epoch 1/5
54000/54000 [==============================] - 1s 27us/step - loss: 0.0437 - acc: 0.9853 - val_loss: 0.0156 - val_acc: 0.9946
Epoch 2/5
54000/54000 [==============================] - 1s 17us/step - loss: 0.0086 - acc: 0.9973 - val_loss: 0.0148 - val_acc: 0.9954
Epoch 3/5
54000/54000 [==============================] - 1s 17us/step - loss: 0.0060 - acc: 0.9981 - val_loss: 0.0141 - val_acc: 0.9958
Epoch 4/5
54000/54000 [==============================] - 1s 18us/step - loss: 0.0049 - acc: 0.9985 - val_loss: 0.0155 - val_acc: 0.9957
Epoch 5/5
54000/54000 [==============================] - 1s 17us/step - loss: 0.0042 - acc: 0.9986 - val_loss: 0.0177 - val_acc: 0.9953
Test loss: 0.0155990094126
Test accuracy: 0.994939997673

Third layer


In [ ]:
second_layer_code = encoder2.predict(encoder1.predict(x_train))

encoding_dim3 = 32
epoch3 = 5

encoded_3_input = Input(shape=(encoding_dim2,))
encoded3 = Dense(encoding_dim3, activation='relu')(encoded_3_input)
decoded3 = Dense(encoding_dim1, activation='relu')(encoded3)
class3 = Dense(num_classes, activation='softmax')(decoded3)

autoencoder3 = Model(encoded_3_input, class3)
autoencoder3.compile(optimizer=RMSprop(), loss='binary_crossentropy', metrics=['accuracy'])
encoder3 = Model(encoded_3_input, encoded3)
encoder3.compile(optimizer=RMSprop(), loss='binary_crossentropy')

In [42]:
autoencoder2 = keras.models.load_model('models/autoencoder2.h5')
encoder2 = keras.models.load_model('models/encoder2.h5')
# autoencoder3.fit(second_layer_code
#                  , y_train
#                  , epochs=epoch3
#                  , batch_size=batch_size
#                  , shuffle=True
#                  , verbose=False
#                  , validation_split=0.1
#                  )
# autoencoder2.save('models/autoencoder2.h5')
# encoder2.save('models/encoder2.h5')

In [30]:
second_layer_code_val = encoder2.predict(encoder1.predict(x_val))

score3 = autoencoder3.evaluate(second_layer_code_val, y_val, verbose=0)
print('Test loss:', score3[0])
print('Test accuracy:', score3[1])


Test loss: 0.0151252853352
Test accuracy: 0.995599996376

Stacked image reconstruction


In [31]:
epoch4 = 10

sae_encoded1 = Dense(encoding_dim1, activation='relu')(input_img)
sae_encoded2 = Dense(encoding_dim2, activation='relu')(sae_encoded1)
sae_encoded3 = Dense(encoding_dim3, activation='relu')(sae_encoded2)
sae_decoded1 = Dense(encoding_dim2, activation='relu')(sae_encoded3)
sae_decoded2 = Dense(encoding_dim1, activation='relu')(sae_decoded1)
sae_decoded3 = Dense(input_dim, activation='sigmoid')(sae_decoded2)

sae = Model(input_img, sae_decoded3)

sae.layers[1].set_weights(autoencoder1.layers[1].get_weights())
sae.layers[2].set_weights(autoencoder2.layers[1].get_weights())
sae.layers[3].set_weights(autoencoder3.layers[1].get_weights())
# sae.layers[4].set_weights(autoencoder3.layers[2].get_weights())
# sae.layers[5].set_weights(autoencoder2.layers[2].get_weights())
# sae.layers[6].set_weights(autoencoder1.layers[2].get_weights())

sae.compile(loss='binary_crossentropy', optimizer=RMSprop())
sae.fit(x_train
        , x_train
        , epochs=epoch4
        , batch_size=batch_size
        , shuffle=True
        , verbose=False
        , validation_split=0.1
        )

score4 = sae.evaluate(x_val, x_val, verbose=0)
print('Test loss:', score4)


Test loss: 0.113078065169

In [32]:
decoded_imgs = sae.predict(x_val)
n = 10  # how many digits we will display
plt.figure(figsize=(20, 4))
for i in range(n):
    # display original
    ax = plt.subplot(2, n, i + 1)
    plt.imshow(x_val[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # display reconstruction
    ax = plt.subplot(2, n, i + 1 + n)
    plt.imshow(decoded_imgs[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()


Classification with stacked autoencoder


In [43]:
input_img = Input(shape=(input_dim,))
sae_classifier_encoded1 = Dense(encoding_dim1, activation='relu')(input_img)
sae_classifier_encoded2 = Dense(encoding_dim2, activation='relu')(sae_classifier_encoded1)
sae_classifier_encoded3 = Dense(encoding_dim3, activation='relu')(sae_classifier_encoded2)
class_layer = Dense(num_classes, activation='softmax')(sae_classifier_encoded3)

sae_classifier = Model(inputs=input_img, outputs=class_layer)

sae_classifier.layers[1].set_weights(autoencoder1.layers[1].get_weights())
sae_classifier.layers[2].set_weights(autoencoder2.layers[1].get_weights())
sae_classifier.layers[3].set_weights(autoencoder3.layers[1].get_weights())
sae_classifier.compile(loss='binary_crossentropy', optimizer=RMSprop(), metrics=['accuracy'])

In [45]:
sae_classifier = keras.models.load_model('models/sae_classifier.h5')
# sae_classifier.fit(x_train, y_train
#                , epochs=7
#                    , verbose=False
#                , batch_size=batch_size
#                , validation_split=0.1
#                , shuffle=True)
# sae_classifier.save('models/sae_classifier.h5')

In [35]:
score5 = classifier.evaluate(x_val, y_val)
print('Test loss:', score5[0])
print('Test accuracy:', score5[1])


Train on 54000 samples, validate on 6000 samples
Epoch 1/20
54000/54000 [==============================] - 3s 49us/step - loss: 0.0377 - acc: 0.9876 - val_loss: 0.0168 - val_acc: 0.9948
Epoch 2/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0116 - acc: 0.9962 - val_loss: 0.0131 - val_acc: 0.9961
Epoch 3/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0080 - acc: 0.9974 - val_loss: 0.0133 - val_acc: 0.9959
Epoch 4/20
54000/54000 [==============================] - 2s 38us/step - loss: 0.0064 - acc: 0.9978 - val_loss: 0.0132 - val_acc: 0.9963
Epoch 5/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0050 - acc: 0.9984 - val_loss: 0.0138 - val_acc: 0.9958
Epoch 6/20
54000/54000 [==============================] - 2s 38us/step - loss: 0.0042 - acc: 0.9986 - val_loss: 0.0136 - val_acc: 0.9962
Epoch 7/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0034 - acc: 0.9989 - val_loss: 0.0133 - val_acc: 0.9961
Epoch 8/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0027 - acc: 0.9991 - val_loss: 0.0137 - val_acc: 0.9964
Epoch 9/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0024 - acc: 0.9993 - val_loss: 0.0156 - val_acc: 0.9962
Epoch 10/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0020 - acc: 0.9994 - val_loss: 0.0152 - val_acc: 0.9963
Epoch 11/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0018 - acc: 0.9994 - val_loss: 0.0178 - val_acc: 0.9957
Epoch 12/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0014 - acc: 0.9995 - val_loss: 0.0202 - val_acc: 0.9958
Epoch 13/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0012 - acc: 0.9997 - val_loss: 0.0172 - val_acc: 0.9965
Epoch 14/20
54000/54000 [==============================] - 2s 39us/step - loss: 0.0012 - acc: 0.9996 - val_loss: 0.0173 - val_acc: 0.9963
Epoch 15/20
54000/54000 [==============================] - 2s 40us/step - loss: 0.0010 - acc: 0.9997 - val_loss: 0.0186 - val_acc: 0.9962
Epoch 16/20
54000/54000 [==============================] - 2s 40us/step - loss: 9.9780e-04 - acc: 0.9997 - val_loss: 0.0189 - val_acc: 0.9961
Epoch 17/20
54000/54000 [==============================] - 2s 41us/step - loss: 8.5736e-04 - acc: 0.9997 - val_loss: 0.0194 - val_acc: 0.9965
Epoch 18/20
54000/54000 [==============================] - 2s 40us/step - loss: 6.7966e-04 - acc: 0.9998 - val_loss: 0.0184 - val_acc: 0.9965
Epoch 19/20
54000/54000 [==============================] - 2s 41us/step - loss: 6.0000e-04 - acc: 0.9998 - val_loss: 0.0202 - val_acc: 0.9963
Epoch 20/20
54000/54000 [==============================] - 2s 40us/step - loss: 5.4855e-04 - acc: 0.9998 - val_loss: 0.0198 - val_acc: 0.9966
10000/10000 [==============================] - 1s 55us/step
Test loss: 0.0217641029964
Test accuracy: 0.995709997559