Convolutional autoencoder

Since our inputs are images, it makes sense to use convolutional neural networks (convnets) as encoders and decoders. In practical settings, autoencoders applied to images are always convolutional autoencoders --they simply perform much better.

Let's implement one. The encoder will consist in a stack of Conv2D and MaxPooling2D layers (max pooling being used for spatial down-sampling), while the decoder will consist in a stack of Conv2D and UpSampling2D layers.


In [6]:
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.models import Model
from keras import backend as K
import numpy as np

In [23]:
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.models import Model
from keras import backend as K

input_img = Input(shape=(32, 32, 3))  # adapt this if using `channels_first` image data format

x1 = Conv2D(16, (3, 3), activation='relu', padding='same')(input_img)
x2 = MaxPooling2D((2, 2), padding='same')(x1)
x3 = Conv2D(8, (6, 6), activation='relu', padding='same')(x2)
x4 = MaxPooling2D((2, 2), padding='same')(x3)
x5 = Conv2D(8, (9, 9), activation='relu', padding='same')(x4)
encoded = MaxPooling2D((2, 2), padding='same')(x5)

# at this point the representation is (4, 4, 8) i.e. 128-dimensional

x6 = Conv2D(8, (9, 9), activation='relu', padding='same')(encoded)
x7 = UpSampling2D((2, 2))(x6)
x8 = Conv2D(8, (6, 6), activation='relu', padding='same')(x7)
x9 = UpSampling2D((2, 2))(x8)
x10 = Conv2D(16, (3, 3), activation='relu', padding='same')(x9)
x11 = UpSampling2D((2, 2))(x10)
decoded = Conv2D(3, (3, 3), activation='sigmoid', padding='same')(x11)

autoencoder = Model(input_img, decoded)
autoencoder.compile(optimizer='adagrad', loss='binary_crossentropy')

In [24]:
from keras.datasets import cifar10
import numpy as np

(x_train, _), (x_test, _) = cifar10.load_data()

x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
x_train = np.reshape(x_train, (len(x_train), 32, 32, 3))  # adapt this if using `channels_first` image data format
x_test = np.reshape(x_test, (len(x_test), 32, 32, 3))  # adapt this if using `channels_first` image data format

In [25]:
autoencoder.fit(x_train, x_train,
                epochs=50,
                batch_size=128,
                shuffle=True,
                validation_data=(x_test, x_test))


Train on 50000 samples, validate on 10000 samples
Epoch 1/50
50000/50000 [==============================] - 148s - loss: 0.6373 - val_loss: 0.6144
Epoch 2/50
50000/50000 [==============================] - 148s - loss: 0.6095 - val_loss: 0.6055
Epoch 3/50
50000/50000 [==============================] - 149s - loss: 0.6035 - val_loss: 0.6015
Epoch 4/50
50000/50000 [==============================] - 149s - loss: 0.5995 - val_loss: 0.5982
Epoch 5/50
50000/50000 [==============================] - 148s - loss: 0.5964 - val_loss: 0.5952
Epoch 6/50
50000/50000 [==============================] - 149s - loss: 0.5939 - val_loss: 0.5937
Epoch 7/50
50000/50000 [==============================] - 147s - loss: 0.5918 - val_loss: 0.5911
Epoch 8/50
50000/50000 [==============================] - 147s - loss: 0.5904 - val_loss: 0.5906
Epoch 9/50
50000/50000 [==============================] - 149s - loss: 0.5894 - val_loss: 0.5897
Epoch 10/50
50000/50000 [==============================] - 147s - loss: 0.5887 - val_loss: 0.5891
Epoch 11/50
50000/50000 [==============================] - 148s - loss: 0.5879 - val_loss: 0.5890
Epoch 12/50
50000/50000 [==============================] - 149s - loss: 0.5873 - val_loss: 0.5877
Epoch 13/50
50000/50000 [==============================] - 148s - loss: 0.5867 - val_loss: 0.5870
Epoch 14/50
50000/50000 [==============================] - 148s - loss: 0.5862 - val_loss: 0.5868
Epoch 15/50
50000/50000 [==============================] - 148s - loss: 0.5857 - val_loss: 0.5858
Epoch 16/50
50000/50000 [==============================] - 149s - loss: 0.5852 - val_loss: 0.5861
Epoch 17/50
50000/50000 [==============================] - 148s - loss: 0.5849 - val_loss: 0.5859
Epoch 18/50
50000/50000 [==============================] - 149s - loss: 0.5845 - val_loss: 0.5848
Epoch 19/50
50000/50000 [==============================] - 150s - loss: 0.5842 - val_loss: 0.5846
Epoch 20/50
50000/50000 [==============================] - 149s - loss: 0.5838 - val_loss: 0.5857
Epoch 21/50
50000/50000 [==============================] - 148s - loss: 0.5836 - val_loss: 0.5838
Epoch 22/50
50000/50000 [==============================] - 148s - loss: 0.5832 - val_loss: 0.5845
Epoch 23/50
50000/50000 [==============================] - 149s - loss: 0.5830 - val_loss: 0.5834
Epoch 24/50
50000/50000 [==============================] - 149s - loss: 0.5827 - val_loss: 0.5833
Epoch 25/50
50000/50000 [==============================] - 149s - loss: 0.5824 - val_loss: 0.5830
Epoch 26/50
50000/50000 [==============================] - 149s - loss: 0.5822 - val_loss: 0.5832
Epoch 27/50
50000/50000 [==============================] - 148s - loss: 0.5819 - val_loss: 0.5824
Epoch 28/50
50000/50000 [==============================] - 149s - loss: 0.5818 - val_loss: 0.5821
Epoch 29/50
50000/50000 [==============================] - 148s - loss: 0.5816 - val_loss: 0.5834
Epoch 30/50
50000/50000 [==============================] - 148s - loss: 0.5814 - val_loss: 0.5818
Epoch 31/50
50000/50000 [==============================] - 149s - loss: 0.5813 - val_loss: 0.5820
Epoch 32/50
50000/50000 [==============================] - 148s - loss: 0.5811 - val_loss: 0.5821
Epoch 33/50
50000/50000 [==============================] - 149s - loss: 0.5809 - val_loss: 0.5813
Epoch 34/50
50000/50000 [==============================] - 148s - loss: 0.5808 - val_loss: 0.5821
Epoch 35/50
50000/50000 [==============================] - 149s - loss: 0.5806 - val_loss: 0.5814
Epoch 36/50
50000/50000 [==============================] - 149s - loss: 0.5805 - val_loss: 0.5811
Epoch 37/50
50000/50000 [==============================] - 149s - loss: 0.5804 - val_loss: 0.5809
Epoch 38/50
50000/50000 [==============================] - 149s - loss: 0.5802 - val_loss: 0.5807
Epoch 39/50
50000/50000 [==============================] - 149s - loss: 0.5802 - val_loss: 0.5810
Epoch 40/50
50000/50000 [==============================] - 149s - loss: 0.5800 - val_loss: 0.5811
Epoch 41/50
50000/50000 [==============================] - 148s - loss: 0.5799 - val_loss: 0.5806
Epoch 42/50
50000/50000 [==============================] - 148s - loss: 0.5799 - val_loss: 0.5810
Epoch 43/50
50000/50000 [==============================] - 148s - loss: 0.5797 - val_loss: 0.5805
Epoch 44/50
50000/50000 [==============================] - 148s - loss: 0.5796 - val_loss: 0.5804
Epoch 45/50
50000/50000 [==============================] - 149s - loss: 0.5796 - val_loss: 0.5807
Epoch 46/50
50000/50000 [==============================] - 150s - loss: 0.5795 - val_loss: 0.5801
Epoch 47/50
50000/50000 [==============================] - 173s - loss: 0.5794 - val_loss: 0.5800
Epoch 48/50
50000/50000 [==============================] - 180s - loss: 0.5793 - val_loss: 0.5804
Epoch 49/50
50000/50000 [==============================] - 181s - loss: 0.5793 - val_loss: 0.5806
Epoch 50/50
50000/50000 [==============================] - 180s - loss: 0.5792 - val_loss: 0.5800
Out[25]:
<keras.callbacks.History at 0x121c5b70>

In [2]:
from keras.models import load_model

#autoencoder.save('cifar10_autoencoders.h5')  # creates a HDF5 file 'my_model.h5'
#del model  # deletes the existing model.


Using TensorFlow backend.

In [3]:
# returns a compiled model
# identical to the previous one
autoencoder = load_model('cifar10_autoencoders.h5')

In [26]:
import matplotlib.pyplot as plt

decoded_imgs = autoencoder.predict(x_test)

n = 10
plt.figure(figsize=(20, 4))
for i in range(n):
    # display original
    ax = plt.subplot(2, n, i + 1)
    plt.imshow(x_test[i].reshape(32, 32, 3))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    
    # display reconstruction
    ax = plt.subplot(2, n, i + n + 1)
    plt.imshow(decoded_imgs[i].reshape(32, 32, 3))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()


Plotting the weights from the first layer


In [16]:
import matplotlib.pyplot as plt

n = 8

for i in range(n):
    fig = plt.figure(figsize=(1,1))
    conv_1 = np.asarray(autoencoder.layers[1].get_weights())[0][:,:,0,i]
    ax = fig.add_subplot(111)
    plt.imshow(conv_1.transpose(), cmap = 'gray')
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    plt.show()



In [22]:
autoencoder.layers[3].get_weights()


Out[22]:
[array([[[[  8.15488771e-03,   6.07780479e-02,   3.52038592e-02, ...,
            -2.57759839e-02,   2.50405464e-02,   1.37642130e-01],
          [ -3.99612933e-02,  -4.31965627e-02,   1.55702168e-02, ...,
            -8.22575167e-02,  -4.86114547e-02,  -1.07376175e-02],
          [  2.30774172e-02,  -7.45867491e-02,   1.08525492e-01, ...,
             1.43861622e-01,  -6.99539408e-02,  -7.41299391e-02],
          ..., 
          [  7.24775046e-02,   8.09480175e-02,   3.54284123e-02, ...,
             5.34150703e-03,  -2.38414649e-02,   1.27396472e-02],
          [ -2.81182081e-02,  -6.24120310e-02,  -6.73905341e-03, ...,
            -4.32974026e-02,   2.54292060e-02,   1.36202216e-01],
          [ -2.24752557e-02,   5.53492494e-02,   5.22947162e-02, ...,
            -1.27160829e-02,   6.90510496e-02,  -5.76434610e-03]],
 
         [[  2.35560397e-03,  -2.85049598e-03,  -4.39689942e-02, ...,
             3.96229811e-02,  -5.18459417e-02,  -1.98029634e-02],
          [  4.41494137e-02,   2.84471195e-02,  -3.18532400e-02, ...,
            -7.00206608e-02,  -8.59402269e-02,  -4.19642627e-02],
          [ -2.08359919e-02,   1.25960514e-01,   1.35708213e-01, ...,
             1.28474548e-01,   4.85068560e-02,  -1.24842830e-01],
          ..., 
          [  1.10193323e-02,   9.11960155e-02,  -2.42395755e-02, ...,
             7.30983764e-02,  -4.56133112e-02,  -8.70422870e-02],
          [ -4.40177619e-02,   4.56810519e-02,  -8.63666181e-03, ...,
             8.98648333e-03,  -1.56854078e-01,  -6.16108961e-02],
          [  9.71770845e-03,   9.55173373e-03,   1.03723682e-01, ...,
             1.99029520e-02,   9.76625085e-02,   2.23657861e-02]],
 
         [[  2.43661441e-02,  -3.17463130e-02,   1.04382243e-02, ...,
             1.61210019e-02,  -8.25976059e-02,   9.17967409e-02],
          [ -9.65607241e-02,   1.78900640e-03,  -5.01459800e-02, ...,
             2.50101257e-02,  -1.12152867e-01,   2.57570613e-02],
          [  3.13362777e-02,   6.66133091e-02,   9.68927443e-02, ...,
             1.86010331e-01,   3.86416614e-02,  -4.07423601e-02],
          ..., 
          [ -5.56126906e-05,   6.33124560e-02,  -2.01575700e-02, ...,
             1.87463332e-02,  -7.48115554e-02,  -7.21468683e-03],
          [ -4.41328995e-02,  -5.79332607e-03,   5.15018292e-02, ...,
            -2.82765925e-02,  -1.21696703e-01,  -4.30125520e-02],
          [  3.73466723e-02,  -6.54583797e-02,   1.15404688e-02, ...,
             9.43128541e-02,   8.45833570e-02,   6.29317537e-02]],
 
         [[ -3.77502739e-02,  -7.01722130e-02,  -5.84836714e-02, ...,
             2.73709036e-02,  -5.31258248e-02,   5.31927086e-02],
          [ -5.59373647e-02,  -5.11206780e-03,   3.37519087e-02, ...,
            -2.45643798e-02,   6.01391271e-02,   1.43368626e-02],
          [  5.79551831e-02,   5.45302816e-02,   9.11809206e-02, ...,
             1.81425631e-01,   1.26154833e-02,   2.66123679e-03],
          ..., 
          [ -2.12858319e-02,   6.40505925e-02,   1.04396589e-01, ...,
             1.43214524e-01,   5.23820408e-02,   6.15856536e-02],
          [  5.47284707e-02,  -2.38099732e-02,  -2.01224033e-02, ...,
            -8.12831894e-02,  -6.62480965e-02,  -1.84862435e-01],
          [ -2.03584004e-02,  -3.72815616e-02,  -3.12591001e-04, ...,
            -2.36115288e-02,   7.04920441e-02,  -4.06279340e-02]],
 
         [[ -2.15785624e-03,   3.36857922e-02,   4.65018675e-02, ...,
            -2.47731097e-02,   2.13127863e-02,  -6.59838365e-03],
          [ -1.13975313e-02,   1.50718319e-04,  -5.76116033e-02, ...,
            -5.47358319e-02,  -7.89858922e-02,   7.31434673e-02],
          [  5.95194586e-02,  -2.77179293e-02,   1.09195877e-02, ...,
             3.25132087e-02,  -2.31842082e-02,  -1.75454274e-01],
          ..., 
          [  8.33434686e-02,  -2.39252076e-02,   6.62853718e-02, ...,
             1.31794605e-02,  -5.14929593e-02,  -1.34523064e-01],
          [  6.70597656e-03,  -2.96540763e-02,  -7.46163726e-02, ...,
            -1.16696861e-02,  -1.03112403e-02,   4.25161123e-02],
          [ -1.04142077e-01,  -1.85377393e-02,   7.08616003e-02, ...,
             8.46524090e-02,   4.51045819e-02,  -1.12381592e-01]],
 
         [[  1.88587271e-02,  -4.94744480e-02,  -5.29769226e-04, ...,
            -3.66450511e-02,  -1.11116230e-01,   9.06324759e-03],
          [ -2.87081990e-02,   6.12530410e-02,  -6.09595291e-02, ...,
            -9.54228733e-03,  -6.96167815e-04,   5.51921874e-02],
          [  6.47173524e-02,   9.35782120e-02,   1.57151241e-02, ...,
             3.40428464e-02,  -1.31126851e-01,  -9.10089463e-02],
          ..., 
          [ -3.91775332e-02,  -8.89271125e-02,   3.58739495e-02, ...,
             6.52120542e-03,  -1.53603479e-02,  -1.10902034e-01],
          [ -3.58873489e-03,   3.64888385e-02,   5.49892634e-02, ...,
             6.08769618e-03,   4.23621573e-02,   1.52414158e-01],
          [ -6.45089969e-02,   4.93879914e-02,  -7.26864533e-03, ...,
            -4.10980079e-03,   2.91536804e-02,  -7.60001317e-02]]],
 
 
        [[[  1.00557342e-01,  -2.71129664e-02,  -5.28943911e-02, ...,
            -1.40729388e-02,   1.25879077e-02,   6.41918331e-02],
          [ -8.03123415e-02,  -2.09099483e-02,  -7.26153553e-02, ...,
            -1.18201144e-01,  -4.32270765e-02,  -8.17033052e-02],
          [ -5.04983962e-02,  -1.37400091e-01,  -6.03797697e-02, ...,
             9.21723247e-02,   5.36192581e-02,   1.15747517e-03],
          ..., 
          [  5.67950495e-02,  -1.94253977e-02,  -2.52444856e-02, ...,
             1.18387900e-02,  -5.71467634e-03,  -3.42605775e-03],
          [ -3.25414836e-02,   7.12487102e-02,   4.15924489e-02, ...,
             1.54236713e-02,  -2.84408834e-02,   3.32619585e-02],
          [ -7.52423555e-02,  -3.19194458e-02,   5.57624437e-02, ...,
             1.13002189e-01,   1.59068033e-01,  -3.27825081e-03]],
 
         [[ -1.98856108e-02,  -4.74649370e-02,  -4.60873246e-02, ...,
             8.42018984e-03,   4.72627720e-03,   6.90516531e-02],
          [ -1.77549701e-02,  -1.60417929e-01,  -1.34817332e-01, ...,
            -2.22158618e-02,  -1.23625092e-01,  -5.52937668e-03],
          [ -2.14281660e-02,   5.20871617e-02,   3.18478122e-02, ...,
             2.36526310e-01,   1.04377568e-01,  -6.98784813e-02],
          ..., 
          [  1.06353596e-01,   1.73708603e-01,  -3.85744944e-02, ...,
             1.22929543e-01,   1.86013766e-02,  -1.78970248e-02],
          [ -1.39227184e-02,   1.66608673e-02,  -3.32555287e-02, ...,
            -4.10671048e-02,  -1.32577315e-01,   1.83035638e-02],
          [ -9.45393369e-02,  -9.74400491e-02,   3.55494469e-02, ...,
             4.75995578e-02,   9.21674743e-02,  -4.79348451e-02]],
 
         [[  2.57226564e-02,  -6.67381957e-02,  -2.74203252e-02, ...,
            -7.48067051e-02,   5.16217090e-02,   1.09181486e-01],
          [  9.69948806e-03,  -9.13356990e-02,  -2.19279397e-02, ...,
            -3.56516726e-02,  -1.16810121e-01,  -5.72131090e-02],
          [  2.33353134e-02,  -5.50339855e-02,  -1.71788391e-02, ...,
             1.09318726e-01,   5.18999696e-02,  -9.34572741e-02],
          ..., 
          [  1.09444655e-01,   1.58705432e-02,   3.15426327e-02, ...,
             9.18552354e-02,  -2.35716859e-03,   3.48371305e-02],
          [ -3.95224290e-03,  -4.18616198e-02,  -1.62127092e-02, ...,
            -3.32983918e-02,  -7.91325346e-02,   6.65472075e-02],
          [ -1.09098265e-02,  -9.12308246e-02,   1.03960820e-01, ...,
             9.72316861e-02,   1.75331861e-01,   8.19618702e-02]],
 
         [[ -7.81249404e-02,   2.07060315e-02,   2.19846773e-03, ...,
            -1.04751572e-01,   7.38447951e-03,  -2.43146904e-02],
          [  6.89462870e-02,  -1.04056284e-01,  -9.25099477e-02, ...,
            -2.23668348e-02,  -7.35798255e-02,   6.56073987e-02],
          [ -1.21484742e-01,   1.60056222e-02,   4.35819402e-02, ...,
             1.16918705e-01,   9.09676254e-02,  -1.32970616e-01],
          ..., 
          [  1.27910860e-02,   1.82573110e-01,   1.52800595e-02, ...,
             9.70081240e-02,   2.58851610e-03,   3.74579430e-02],
          [  6.74975887e-02,   6.07646890e-02,  -2.23683622e-02, ...,
            -4.13589589e-02,  -3.58287944e-03,  -1.21026270e-01],
          [ -8.42484683e-02,  -8.37746635e-02,   2.44115479e-02, ...,
             3.83624695e-02,   2.38783211e-02,   9.63523611e-03]],
 
         [[ -4.34817262e-02,  -1.34269651e-02,  -6.49513751e-02, ...,
            -1.94570869e-02,  -4.63641109e-03,  -3.69829088e-02],
          [  7.38101080e-02,  -1.35274306e-01,  -3.66121307e-02, ...,
            -1.00958273e-01,  -1.95575561e-02,   2.09086519e-02],
          [  9.48411226e-03,   1.10780254e-01,  -9.21369810e-03, ...,
             1.54104710e-01,  -2.96171140e-02,  -9.03490260e-02],
          ..., 
          [  4.55689989e-02,   6.15534410e-02,  -3.72874327e-02, ...,
             1.05997346e-01,   4.17429842e-02,  -1.30916998e-01],
          [  7.37766996e-02,   2.07799207e-02,   4.91999201e-02, ...,
            -2.08070781e-02,   4.45860578e-03,  -1.87225584e-02],
          [ -5.58097064e-02,   2.91696806e-02,   9.32755768e-02, ...,
             1.07358083e-01,   6.41658232e-02,  -1.21161580e-01]],
 
         [[ -6.88453903e-03,   2.43811477e-02,   2.88314968e-02, ...,
            -1.05299447e-02,   4.79967222e-02,   3.29353958e-02],
          [  2.55747046e-02,   7.63856620e-02,   2.90590664e-03, ...,
             4.23175693e-02,  -1.44153098e-02,   2.53778528e-02],
          [  3.60132894e-04,  -7.72211403e-02,   1.23625563e-03, ...,
             1.43027641e-02,   2.16762582e-03,  -1.78529307e-01],
          ..., 
          [  8.71259496e-02,   1.03167295e-02,   2.72142068e-02, ...,
             7.68349618e-02,   4.68206443e-02,   2.09151977e-03],
          [ -2.62128226e-02,   2.25753952e-02,   2.43017375e-02, ...,
             3.25826518e-02,   3.74532342e-02,   3.86447944e-02],
          [  3.90551165e-02,  -3.52230631e-02,  -7.95772765e-03, ...,
             9.63470489e-02,   1.10747647e-02,  -3.65248583e-02]]],
 
 
        [[[  7.38220811e-02,  -1.04348641e-02,  -3.70120769e-03, ...,
            -4.02443334e-02,  -5.93658648e-02,   5.22378422e-02],
          [ -4.28777980e-03,  -7.73533732e-02,  -7.25436881e-02, ...,
             3.78630590e-03,  -3.48392241e-02,  -7.05557987e-02],
          [ -4.03252095e-02,   4.49285656e-02,   2.79249959e-02, ...,
             7.65607134e-02,   1.55531364e-02,   1.12538159e-01],
          ..., 
          [  7.73540214e-02,   1.46121919e-01,   3.80067602e-02, ...,
            -3.39148194e-02,   1.37618423e-01,  -3.10116373e-02],
          [ -2.97735841e-03,   4.22500409e-02,  -4.39314172e-02, ...,
             4.22640778e-02,   3.47272567e-02,  -1.44610053e-03],
          [  5.25474660e-02,  -7.80408606e-02,  -2.61579566e-02, ...,
             4.69771624e-02,  -1.92886754e-03,   1.66885573e-02]],
 
         [[ -3.81727070e-02,   1.12471148e-01,  -1.84427463e-02, ...,
            -2.98753027e-02,   2.22496837e-02,   2.40008309e-02],
          [ -3.57828811e-02,  -1.44622684e-01,  -5.98841859e-03, ...,
            -5.13469949e-02,  -6.48094565e-02,  -1.20593339e-01],
          [  2.51975339e-02,   3.77182424e-01,   4.37984727e-02, ...,
             6.12337291e-02,   1.27647847e-01,   1.04484670e-01],
          ..., 
          [  7.57888379e-03,   2.50402600e-01,   6.03508279e-02, ...,
            -1.42036220e-02,   8.91295448e-02,  -1.91243906e-02],
          [  1.25446236e-02,   2.64064334e-02,   1.71057805e-02, ...,
            -2.21416112e-02,  -1.10313296e-01,  -1.79377478e-02],
          [ -2.74797976e-02,  -9.89770330e-03,  -1.25115123e-02, ...,
             1.09301753e-01,   6.08573407e-02,  -2.75517423e-02]],
 
         [[  6.25979006e-02,  -8.95343125e-02,  -8.35877657e-02, ...,
            -1.33486345e-01,  -8.92534014e-03,   7.36310557e-02],
          [ -6.21470995e-02,  -4.07660902e-02,   8.57180431e-02, ...,
            -5.82653694e-02,  -5.85885048e-02,  -1.28959924e-01],
          [  4.95768003e-02,   1.32710606e-01,  -8.78357887e-02, ...,
             5.99528365e-02,   7.66934827e-02,   6.91758329e-03],
          ..., 
          [ -6.09158501e-02,   4.90465350e-02,  -4.61659506e-02, ...,
            -4.53452617e-02,   4.81924117e-02,   5.58741279e-02],
          [  1.96587723e-02,  -7.12864846e-02,  -9.53217596e-02, ...,
             9.61192790e-03,  -1.05619922e-01,   5.71334101e-02],
          [ -9.62554961e-02,  -6.62179291e-02,   2.68824622e-02, ...,
             7.70729408e-02,   5.76471239e-02,   1.39062077e-01]],
 
         [[ -9.52606797e-02,  -3.34034525e-02,  -1.57031976e-02, ...,
            -6.95927010e-04,   5.02294861e-02,  -1.01794355e-01],
          [  2.01416723e-02,  -6.36048689e-02,  -1.62611157e-02, ...,
             2.68885191e-03,  -4.79802527e-02,  -9.70436260e-02],
          [  2.12394744e-02,   1.44262120e-01,   2.92877164e-02, ...,
             6.58633709e-02,   2.03250293e-02,   1.15779236e-01],
          ..., 
          [  1.02278262e-01,   1.92665562e-01,   1.20504173e-02, ...,
             9.34756547e-02,   1.32473543e-01,   7.49316290e-02],
          [  5.88040128e-02,  -6.64554089e-02,   6.62606210e-02, ...,
             7.23148957e-02,  -1.05008893e-01,   5.71983233e-02],
          [ -6.53296039e-02,   1.07204542e-01,   6.29804283e-02, ...,
             3.67047749e-02,   8.39167088e-02,  -8.47633369e-03]],
 
         [[  2.56206766e-02,   6.12446433e-03,  -2.97362749e-02, ...,
            -2.43622661e-02,   4.18509319e-02,   4.00221162e-02],
          [ -4.09736671e-03,  -1.63696539e-02,  -4.71577421e-02, ...,
             2.04951260e-02,  -1.73173547e-02,  -1.89826805e-02],
          [  9.56271738e-02,   1.68021038e-01,   4.89185974e-02, ...,
             1.47411585e-01,   3.51788700e-02,   8.04694518e-02],
          ..., 
          [ -8.92448705e-03,   9.18883309e-02,  -7.77343959e-02, ...,
             4.27523591e-02,   3.03159617e-02,  -2.56711822e-02],
          [ -6.50334917e-03,  -6.96459562e-02,   2.82282885e-02, ...,
             2.66418327e-02,  -1.02086384e-02,   6.15069903e-02],
          [ -1.33430406e-01,  -4.01847214e-02,   1.99399125e-02, ...,
             6.72378317e-02,   8.56390372e-02,  -2.87672803e-02]],
 
         [[ -4.29908708e-02,   9.05078277e-02,  -4.94915508e-02, ...,
            -6.90933019e-02,   2.50092838e-02,  -1.63542591e-02],
          [ -3.32709961e-02,  -5.61802536e-02,   2.08179746e-02, ...,
             5.10710403e-02,   1.95197985e-02,   4.88534160e-02],
          [  5.11306487e-02,   1.52355075e-01,  -1.85144786e-02, ...,
            -5.10574225e-03,   6.62518814e-02,   8.51666778e-02],
          ..., 
          [ -3.68064269e-02,   3.07428185e-02,   6.00146875e-02, ...,
            -3.25805955e-02,  -3.30161564e-02,   1.06424421e-01],
          [  7.94030428e-02,  -1.25503801e-02,   2.55726799e-02, ...,
            -4.11240384e-02,   1.95444450e-02,   2.54104808e-02],
          [ -1.35958895e-01,   9.94959474e-02,   6.05127029e-02, ...,
             4.12885025e-02,   7.22690076e-02,   6.20775558e-02]]],
 
 
        [[[  4.87984344e-02,   3.82635603e-03,  -3.02646067e-02, ...,
            -3.01345531e-02,  -7.51951709e-02,  -1.72073096e-02],
          [ -1.29382946e-02,  -1.33416340e-01,   3.85282002e-02, ...,
            -2.41633691e-02,  -1.88254640e-02,  -2.45915391e-02],
          [  6.77572116e-02,   4.28191684e-02,   7.84342363e-03, ...,
             2.74902489e-02,   5.33572137e-02,  -2.07346864e-02],
          ..., 
          [ -9.65531822e-03,  -5.30826300e-02,   2.07172744e-02, ...,
             7.22508803e-02,  -3.09450906e-02,   1.09063037e-01],
          [ -5.26475012e-02,  -2.03607995e-02,   1.08744958e-02, ...,
            -2.22909823e-02,  -2.86886543e-02,   1.20479941e-01],
          [ -8.89099464e-02,   4.07430017e-03,   1.03902720e-01, ...,
            -9.85616893e-02,  -4.61084619e-02,   2.94793863e-02]],
 
         [[ -5.66372387e-02,   3.38524394e-03,  -4.00966033e-02, ...,
            -9.81810912e-02,  -4.79210429e-02,  -4.41820771e-02],
          [ -1.73185077e-02,  -7.17924908e-02,   1.88963730e-02, ...,
            -4.85109463e-02,  -7.72249177e-02,  -8.07375684e-02],
          [  4.17446019e-03,   1.06469490e-01,   5.92258424e-02, ...,
             4.27900208e-03,  -4.92631197e-02,   6.43537641e-02],
          ..., 
          [  1.84694268e-02,   8.09295997e-02,  -3.62539142e-02, ...,
             7.97649622e-02,   7.36091658e-02,   3.25016007e-02],
          [  1.04266955e-02,   7.74856284e-02,   4.42023799e-02, ...,
             6.08078316e-02,  -9.15103182e-02,  -3.09844818e-02],
          [ -1.79493707e-02,   7.35788941e-02,   1.18655764e-01, ...,
             8.88407454e-02,  -1.83795393e-02,   1.23137675e-01]],
 
         [[  5.79181015e-02,  -1.90426931e-02,  -8.26691538e-02, ...,
             2.53518075e-02,  -5.01923375e-02,   7.10078776e-02],
          [ -7.84346461e-02,  -4.90113236e-02,   6.77821562e-02, ...,
            -7.79582411e-02,  -1.12035476e-01,  -8.28219727e-02],
          [  1.07843742e-01,   1.28193662e-01,   8.43294561e-02, ...,
             1.06410272e-01,   2.80122161e-02,  -2.69562546e-02],
          ..., 
          [  1.01791978e-01,   9.99219790e-02,  -3.76271904e-02, ...,
             8.64723772e-02,   5.52418679e-02,   2.28500590e-02],
          [  8.10204148e-02,  -1.54143095e-01,  -9.96950641e-02, ...,
             5.39316982e-02,   3.82038318e-02,   9.37618129e-03],
          [ -8.55061710e-02,   6.30031824e-02,  -2.60896496e-02, ...,
             5.37981912e-02,   4.99940105e-02,   5.72227351e-02]],
 
         [[ -3.53978388e-02,  -5.35722189e-02,  -2.35678768e-03, ...,
            -9.26132724e-02,  -1.32917181e-01,  -9.01023000e-02],
          [  2.98457383e-03,   3.34196258e-03,   9.95259508e-02, ...,
            -7.06269965e-03,  -2.14789081e-02,  -1.95803475e-02],
          [  8.59327242e-02,   1.14443801e-01,   9.64036286e-02, ...,
             2.91637629e-02,  -5.27102761e-02,   9.85997394e-02],
          ..., 
          [  5.77987656e-02,  -2.42412537e-02,  -1.81995879e-03, ...,
            -5.82536943e-02,   5.39437693e-04,   1.01234801e-01],
          [ -6.70049340e-02,  -8.89468417e-02,  -3.79131660e-02, ...,
             2.05968749e-02,   4.02817354e-02,   5.49851507e-02],
          [  1.53711147e-03,   1.05730228e-01,   1.91255175e-02, ...,
             8.97959061e-03,   1.11910813e-02,   1.33069577e-02]],
 
         [[  4.38440181e-02,   1.73315103e-03,   3.51136178e-02, ...,
            -8.37618634e-02,   7.06536788e-03,  -4.31708992e-02],
          [  8.09320286e-02,  -4.38162573e-02,   2.59434860e-02, ...,
            -1.34519329e-02,  -1.55890370e-02,  -6.49991184e-02],
          [  3.83570641e-02,   8.10671374e-02,   1.56622361e-02, ...,
             3.39561626e-02,  -2.63408776e-02,   1.69951953e-02],
          ..., 
          [ -3.63198705e-02,   4.05097120e-02,   9.43309721e-03, ...,
             9.73996297e-02,   6.96874410e-02,  -2.43969690e-02],
          [ -8.97622854e-02,  -2.91408356e-02,  -1.08442716e-01, ...,
             4.26894724e-02,   4.85990234e-02,   4.65343706e-03],
          [ -9.71526206e-02,  -4.21003923e-02,  -6.47213450e-03, ...,
             7.35112745e-03,   3.87683101e-02,  -4.31007519e-02]],
 
         [[ -3.58572677e-02,   5.91715723e-02,  -4.00302373e-02, ...,
             5.33690080e-02,   3.06813475e-02,   2.35909894e-02],
          [ -9.20638517e-02,  -1.05211340e-01,  -3.77955958e-02, ...,
            -2.66206078e-02,  -6.69788523e-03,  -5.47943600e-02],
          [  1.18634067e-01,   1.32305428e-01,   5.19942911e-03, ...,
            -6.10516220e-03,   1.91619352e-03,  -5.05829901e-02],
          ..., 
          [ -1.78921558e-02,   4.49858308e-02,  -5.39338868e-03, ...,
            -1.09130725e-01,  -6.37488440e-02,   7.20574055e-03],
          [  3.67182717e-02,  -4.82475497e-02,   1.73559301e-02, ...,
            -2.10308954e-02,  -3.57839167e-02,   1.20252576e-02],
          [  2.90314816e-02,   1.01917060e-02,  -1.83971375e-02, ...,
            -1.13641657e-02,  -2.54682004e-02,   6.68117031e-02]]],
 
 
        [[[ -8.99978802e-02,  -2.94925719e-02,  -9.42782965e-04, ...,
             2.40799543e-02,  -5.88887110e-02,   2.69635692e-02],
          [ -1.60340092e-03,  -4.38594967e-02,  -6.14226237e-02, ...,
            -4.20257114e-02,  -3.19956280e-02,  -8.71935561e-02],
          [  4.58673798e-02,  -7.94417039e-02,   2.13008188e-02, ...,
             4.86062728e-02,   1.44674780e-03,  -3.11743282e-03],
          ..., 
          [  2.57989727e-02,   1.98712777e-02,   1.09319068e-01, ...,
             3.84522229e-02,  -8.79694670e-02,   6.44393265e-02],
          [  5.01418067e-03,   7.85017386e-03,  -4.24489379e-02, ...,
            -6.72300905e-02,  -3.08005209e-03,   1.39766529e-01],
          [  3.24308500e-02,   3.72905992e-02,  -2.95851212e-02, ...,
            -4.35414538e-03,   8.49785190e-03,  -3.24718319e-02]],
 
         [[  1.38068935e-02,   1.67125631e-02,   4.12911735e-02, ...,
             2.52964869e-02,  -1.46642745e-01,  -4.63260449e-02],
          [ -6.31405935e-02,  -6.74366429e-02,   4.19389382e-02, ...,
             3.14314552e-02,  -9.52009484e-02,  -1.24035381e-01],
          [  1.26097396e-01,  -9.19949263e-02,   1.44375503e-01, ...,
            -2.31679194e-02,   2.52599129e-03,  -1.45270489e-02],
          ..., 
          [ -2.27691839e-03,  -1.44634480e-02,   1.54149206e-02, ...,
             8.49698260e-02,  -1.94311459e-02,   2.11889092e-02],
          [  8.79088119e-02,  -3.52468081e-02,  -3.02978475e-02, ...,
            -4.53305021e-02,  -4.03332449e-02,  -4.10134867e-02],
          [ -3.72284465e-03,  -8.71449430e-03,   8.16605464e-02, ...,
            -9.18604527e-03,   8.30645636e-02,   3.36735584e-02]],
 
         [[ -1.64409727e-02,  -6.61622286e-02,  -5.81443571e-02, ...,
            -6.68718992e-03,   4.16901745e-02,   9.00167972e-02],
          [  6.12213388e-02,  -1.51829952e-02,   2.78297625e-02, ...,
            -2.05440074e-02,  -1.42934490e-02,  -1.06355056e-01],
          [  3.39766853e-02,   3.30882482e-02,  -2.73365155e-02, ...,
             6.16643988e-02,  -1.28084114e-02,  -1.48612878e-03],
          ..., 
          [ -4.32896614e-03,  -3.10563669e-03,   8.11247097e-04, ...,
            -4.59941141e-02,   3.80932688e-05,   1.06197886e-01],
          [ -2.94177197e-02,  -1.25971004e-01,  -1.04876094e-01, ...,
            -7.69111281e-03,   2.86235251e-02,   5.26867658e-02],
          [ -7.83698112e-02,  -1.69695523e-02,   8.35068151e-02, ...,
            -1.05786724e-02,   4.93102893e-02,   9.57765877e-02]],
 
         [[  3.46232927e-03,   7.80474916e-02,   1.94680449e-02, ...,
            -6.42592609e-02,  -3.32256891e-02,  -5.18384613e-02],
          [ -3.21319848e-02,  -5.63893504e-02,  -6.34378046e-02, ...,
            -7.09324926e-02,   8.84460509e-02,   4.69471626e-02],
          [ -3.37296585e-03,  -7.71213025e-02,   4.95207123e-02, ...,
             1.07580811e-01,  -1.42026665e-02,   1.85216144e-02],
          ..., 
          [ -3.56784149e-04,   8.19529127e-03,   8.90885442e-02, ...,
            -7.00715184e-03,   6.60436749e-02,   2.87629850e-02],
          [  5.19402698e-02,  -1.08111478e-01,  -1.13961056e-01, ...,
             2.66744401e-02,   8.05245936e-02,  -2.40285862e-02],
          [ -6.67934073e-04,  -5.73506989e-02,   1.05042040e-01, ...,
             3.36055248e-03,   4.52501550e-02,   6.41404360e-04]],
 
         [[ -6.96594492e-02,  -1.32638244e-02,  -5.40756918e-02, ...,
            -2.74760388e-02,  -8.98154080e-02,  -2.07541306e-02],
          [ -3.11560314e-02,  -5.51240332e-02,   1.82107482e-02, ...,
             1.46228410e-02,  -7.23394752e-03,  -8.59502852e-02],
          [  2.96264701e-02,  -8.76072049e-02,   2.21866313e-02, ...,
             1.03564737e-02,  -1.37905568e-01,   3.83651964e-02],
          ..., 
          [ -5.44812344e-02,   4.87144664e-02,  -3.25739873e-03, ...,
             8.56959671e-02,  -2.21441370e-02,   2.94217914e-02],
          [ -4.61037904e-02,   6.71050474e-02,  -4.57235463e-02, ...,
            -8.17189738e-02,  -1.56176630e-02,   3.67605798e-02],
          [ -5.05104288e-02,   1.88069381e-02,  -6.60914704e-02, ...,
             8.78059864e-02,  -1.68475695e-02,   2.40638154e-03]],
 
         [[ -4.90406267e-02,  -4.05920185e-02,   1.98162440e-02, ...,
            -3.82538028e-02,  -5.59659675e-02,   7.94997364e-02],
          [ -5.87546937e-02,   2.97586521e-05,   9.25237220e-03, ...,
            -1.44370692e-02,   1.43929841e-02,   5.44852279e-02],
          [  8.25015679e-02,  -4.50904034e-02,   7.50404922e-03, ...,
             8.59871283e-02,  -4.44176793e-02,   5.17368689e-02],
          ..., 
          [ -4.27185409e-02,  -1.18686870e-01,  -2.39925217e-02, ...,
            -3.24152894e-02,   4.00165915e-02,   4.51863781e-02],
          [ -5.10140322e-02,   2.39307582e-02,  -3.69259790e-02, ...,
            -1.29959616e-03,  -9.09777731e-02,   4.88356836e-02],
          [ -1.31681664e-02,  -5.46850301e-02,  -5.89500777e-02, ...,
             7.24122748e-02,  -3.55940424e-02,   8.66353661e-02]]],
 
 
        [[[  3.37732658e-02,   4.70141061e-02,  -2.73390524e-02, ...,
             5.69345690e-02,   6.10719472e-02,   7.68560022e-02],
          [ -1.14177978e-02,  -1.90311596e-02,  -7.45443404e-02, ...,
            -7.30257854e-02,   1.35493996e-02,  -6.40842393e-02],
          [ -5.03449403e-02,  -2.13133898e-02,   7.42788389e-02, ...,
             7.12022558e-02,  -2.28929780e-02,   2.41481364e-02],
          ..., 
          [  7.39556178e-02,  -1.59551781e-02,   9.36700106e-02, ...,
             3.27616893e-02,   1.98335778e-02,   1.76787987e-01],
          [  1.25069758e-02,   4.91577154e-03,   1.06137153e-02, ...,
             3.75298061e-03,  -7.81062394e-02,   9.54881012e-02],
          [ -3.28685045e-02,  -5.08762673e-02,   5.41699752e-02, ...,
            -9.86825302e-02,   5.94573542e-02,   8.96935642e-04]],
 
         [[ -6.18150411e-03,   5.30510172e-02,  -3.33897956e-02, ...,
            -3.12347561e-02,  -1.00131124e-01,  -1.36266008e-01],
          [  1.26323318e-02,  -8.47624149e-04,  -3.29525247e-02, ...,
             4.28067259e-02,  -3.24072852e-03,  -4.84415367e-02],
          [ -3.77446935e-02,  -1.18086869e-02,   4.53118235e-02, ...,
             3.04672997e-02,  -5.49185500e-02,   4.19702865e-02],
          ..., 
          [ -3.67772114e-03,   5.47842458e-02,  -3.79045047e-02, ...,
             7.27431923e-02,  -2.79709250e-02,  -1.08677097e-01],
          [ -4.47631478e-02,  -1.37410313e-02,  -7.24175498e-02, ...,
             1.26788523e-02,  -4.95044664e-02,  -7.02855065e-02],
          [ -4.06055599e-02,  -1.31344013e-02,   7.53779113e-02, ...,
             6.59692734e-02,   1.16551612e-02,   4.23581749e-02]],
 
         [[  6.59258515e-02,   6.45051152e-02,  -1.23667106e-01, ...,
            -1.80889815e-02,   4.77154180e-02,  -6.71910122e-02],
          [  5.17802872e-02,  -7.67704397e-02,  -3.30441967e-02, ...,
            -1.01587828e-02,  -3.20533440e-02,  -5.29243052e-02],
          [ -3.64961592e-03,   9.33625698e-02,   3.20100673e-02, ...,
             7.16386214e-02,   1.13253230e-02,   8.25790614e-02],
          ..., 
          [ -2.60269456e-02,   7.87197426e-02,   1.46252208e-03, ...,
             5.34540601e-02,   3.10727656e-02,   8.08137581e-02],
          [  5.57148270e-02,  -4.73770089e-02,  -1.13500394e-01, ...,
            -6.16307184e-02,   9.66131911e-02,   4.80613373e-02],
          [ -9.22719687e-02,   1.40383681e-02,  -3.24634090e-02, ...,
            -6.20407350e-02,   4.16204073e-02,  -4.93977591e-03]],
 
         [[ -4.17911485e-02,   1.41658587e-02,   2.45441925e-02, ...,
             3.89956608e-02,  -3.02535165e-02,  -9.82441157e-02],
          [  3.82802896e-02,  -1.15586612e-02,  -1.90582220e-02, ...,
            -2.77688019e-02,  -3.01499404e-02,  -7.45150894e-02],
          [ -6.84433281e-02,  -6.71280622e-02,  -2.77484376e-02, ...,
             9.10072401e-02,  -1.54824443e-02,  -9.51138884e-03],
          ..., 
          [ -6.21385910e-02,   1.56319533e-02,   8.63514319e-02, ...,
             3.24561894e-02,   6.52559474e-02,   1.14827760e-01],
          [ -4.80031930e-02,   4.71999906e-02,  -7.94476345e-02, ...,
             5.91021515e-02,   6.27506850e-03,  -2.42961217e-02],
          [ -2.43720300e-02,   3.59190032e-02,   1.04922689e-01, ...,
             1.36359632e-02,   1.65708307e-02,   5.79521060e-02]],
 
         [[ -4.03942093e-02,  -3.77871189e-03,  -2.07064953e-02, ...,
             2.17887331e-02,  -8.81602243e-02,  -1.03900902e-01],
          [ -1.24612134e-02,   6.83446601e-02,   6.68213740e-02, ...,
            -6.80247024e-02,   3.67267728e-02,  -2.34887786e-02],
          [  1.84228662e-02,  -1.72797050e-02,   6.87586963e-02, ...,
             1.09961599e-01,  -8.23691022e-03,  -7.89036322e-03],
          ..., 
          [  1.06461933e-02,  -3.28336284e-02,   5.04519753e-02, ...,
             5.63745424e-02,  -6.77858442e-02,  -7.88537264e-02],
          [ -7.04284385e-03,   6.31514266e-02,  -1.05008706e-02, ...,
            -1.16969302e-01,   9.90040749e-02,  -5.53973354e-02],
          [  3.06298379e-02,  -6.69901818e-02,  -3.57860923e-02, ...,
            -7.93785322e-03,  -8.93748328e-02,   1.29387714e-03]],
 
         [[ -3.93752865e-02,  -9.70603619e-03,   2.24248115e-02, ...,
            -4.15351130e-02,  -4.95579615e-02,  -6.59849495e-02],
          [  1.64657086e-02,   4.40087058e-02,  -1.09629463e-02, ...,
            -4.51412387e-02,  -6.94123805e-02,  -1.48318075e-02],
          [  2.08701491e-02,  -3.85760367e-02,   4.55899164e-02, ...,
            -3.78295197e-03,   5.31342924e-02,   9.89002138e-02],
          ..., 
          [ -8.55726842e-03,  -5.22630960e-02,  -3.86141129e-02, ...,
            -8.16830993e-02,   5.95119372e-02,   3.92528921e-02],
          [ -2.67234035e-02,   7.82927349e-02,   3.57640255e-03, ...,
            -2.48036105e-02,  -4.16145101e-02,   4.75415960e-03],
          [ -5.27525209e-02,   2.00328810e-04,  -3.93935591e-02, ...,
             4.37910147e-02,   3.50354868e-03,  -3.08327992e-02]]]], dtype=float32),
 array([-0.03785726, -0.05633533,  0.02821722, -0.05617409, -0.0169664 ,
         0.12677667,  0.07227429,  0.07783621], dtype=float32)]

In [42]:
from keras import backend as K

In [27]:
# K.learning_phase() is a flag that indicates if the network is in training or
# predict phase. It allow layer (e.g. Dropout) to only be applied during training
inputs = [K.learning_phase()] + autoencoder.inputs

_layer1_f = K.function(inputs, [x2])
def convout1_f(X):
    # The [0] is to disable the training phase flag
    return _layer1_f([0] + [X])

#_lay_f = K.function(inputs, [x1])
#def convout1_f(X):
    # The [0] is to disable the training phase flag
#    return _layer1_f([0] + [X])

_layer2_f = K.function(inputs, [x4])
def convout2_f(X):
    # The [0] is to disable the training phase flag
    return _layer2_f([0] + [X])

_layer3_f = K.function(inputs, [encoded])
def convout3_f(X):
    # The [0] is to disable the training phase flag
    return _layer3_f([0] + [X])

_up_layer1_f = K.function(inputs, [x6])
def convout4_f(X):
    # The [0] is to disable the training phase flag
    return _up_layer1_f([0] + [X])

_up_layer2_f = K.function(inputs, [x8])
def convout5_f(X):
    # The [0] is to disable the training phase flag
    return _up_layer2_f([0] + [X])

_up_layer3_f = K.function(inputs, [x10])
def convout6_f(X):
    # The [0] is to disable the training phase flag
    return _up_layer3_f([0] + [X])

_up_layer4_f = K.function(inputs, [decoded])
def convout7_f(X):
    # The [0] is to disable the training phase flag
    return _up_layer4_f([0] + [X])

In [44]:
x2


Out[44]:
<tf.Tensor 'max_pooling2d_4/MaxPool:0' shape=(?, 16, 16, 16) dtype=float32>

In [45]:
i = 1
x = x_test[i:i+1]

Visualizing the first convnet/output layer_1 with sample first test image


In [46]:
np.squeeze(np.squeeze(np.array(convout1_f(x)),0),0).shape


Out[46]:
(16, 16, 16)

In [81]:
#Plotting conv_1
for i in range(4):
    #i = 3
    x = x_test[i:i+1]
    check = np.squeeze(np.squeeze(np.array(convout1_f(x)),0),0)

    temp = x[0,:,:,:]
    fig, axes = plt.subplots(1, 1, figsize=(3, 3))
    plt.imshow(temp)
    plt.show()


    k = 0
    while k < check.shape[2]:
        #plt.figure()
        #plt.subplot(231 + i)
        fig, axes = plt.subplots(4, 4, figsize=(5, 5))
        for i in range(4):
            for j in range(4):
                axes[i,j].imshow(check[:,:,k], cmap = 'gray')
                k += 1
        #axes[0, 0].imshow(R, cmap='jet')
        #plt.imshow(check[:,:,i])

    plt.show()



In [59]:
check.shape


Out[59]:
(16, 16, 16)

Visualizing the second convnet/output layer_2 with sample test image


In [42]:
i = 3
x = x_test[i:i+1]
check = np.squeeze(np.squeeze(np.array(convout2_f(x)),0),0)
check.shape


Out[42]:
(8, 8, 8)

In [82]:
#Plotting conv_2
for i in range(4):
    #i = 3
    x = x_test[i:i+1]
    check = np.squeeze(np.squeeze(np.array(convout1_f(x)),0),0)

    temp = x[0,:,:,:]
    fig, axes = plt.subplots(1, 1, figsize=(3, 3))
    plt.imshow(temp)
    plt.show()


    k = 0
    while k < check.shape[2]:
        #plt.figure()
        #plt.subplot(231 + i)
        fig, axes = plt.subplots(2, 4, figsize=(5, 5))
        for i in range(2):
            for j in range(4):
                axes[i,j].imshow(check[:,:,k])
                k += 1
        #axes[0, 0].imshow(R, cmap='jet')
        #plt.imshow(check[:,:,i])

    plt.show()


Plotting the third convnet/output layer_3 with sample test image


In [50]:
i = 3
x = x_test[i:i+1]
check = np.squeeze(np.squeeze(np.array(convout3_f(x)),0),0)
check.shape


Out[50]:
(4, 4, 8)

In [83]:
#Plotting conv_3
for i in range(4):
    #i = 3
    x = x_test[i:i+1]
    check = np.squeeze(np.squeeze(np.array(convout1_f(x)),0),0)

    temp = x[0,:,:,:]
    fig, axes = plt.subplots(1, 1, figsize=(3, 3))
    plt.imshow(temp)
    plt.show()


    k = 0
    while k < check.shape[2]:
        #plt.figure()
        #plt.subplot(231 + i)
        fig, axes = plt.subplots(2, 4, figsize=(5, 5))
        for i in range(2):
            for j in range(4):
                axes[i,j].imshow(check[:,:,k])
                k += 1
        #axes[0, 0].imshow(R, cmap='jet')
        #plt.imshow(check[:,:,i])

    plt.show()


Visualizing the fourth convnet/decoded/output layer_4 with sample test image


In [58]:
i = 3
x = x_test[i:i+1]
check = np.squeeze(np.squeeze(np.array(convout4_f(x)),0),0)
check.shape


Out[58]:
(4, 4, 8)

In [84]:
#Plotting conv_4
for i in range(4):
    #i = 3
    x = x_test[i:i+1]
    check = np.squeeze(np.squeeze(np.array(convout1_f(x)),0),0)

    temp = x[0,:,:,:]
    fig, axes = plt.subplots(1, 1, figsize=(3, 3))
    plt.imshow(temp)
    plt.show()

    k = 0
    while k < check.shape[2]:
        #plt.figure()
        #plt.subplot(231 + i)
        fig, axes = plt.subplots(2, 4, figsize=(5, 5))
        for i in range(2):
            for j in range(4):
                axes[i,j].imshow(check[:,:,k])
                k += 1
        #axes[0, 0].imshow(R, cmap='jet')
        #plt.imshow(check[:,:,i])

    plt.show()


Visualizing the fifth convnet/decoded/output layer_5 with sample test image


In [66]:
i = 3
x = x_test[i:i+1]
check = np.squeeze(np.squeeze(np.array(convout5_f(x)),0),0)
check.shape


Out[66]:
(8, 8, 8)

In [85]:
#Plotting conv_5
for i in range(4):
    #i = 3
    x = x_test[i:i+1]
    check = np.squeeze(np.squeeze(np.array(convout1_f(x)),0),0)

    temp = x[0,:,:,:]
    fig, axes = plt.subplots(1, 1, figsize=(3, 3))
    plt.imshow(temp)
    plt.show()


    k = 0
    while k < check.shape[2]:
        #plt.figure()
        #plt.subplot(231 + i)
        fig, axes = plt.subplots(2, 4, figsize=(5, 5))
        for i in range(2):
            for j in range(4):
                axes[i,j].imshow(check[:,:,k])
                k += 1
        #axes[0, 0].imshow(R, cmap='jet')
        #plt.imshow(check[:,:,i])

    plt.show()


Visualizing the sixth convnet/decoded/output layer_6 with sample test image


In [74]:
i = 3
x = x_test[i:i+1]
check = np.squeeze(np.squeeze(np.array(convout6_f(x)),0),0)
check.shape


Out[74]:
(16, 16, 16)

In [86]:
#Plotting conv_6
for i in range(4):
    #i = 3
    x = x_test[i:i+1]
    check = np.squeeze(np.squeeze(np.array(convout1_f(x)),0),0)

    temp = x[0,:,:,:]
    fig, axes = plt.subplots(1, 1, figsize=(3, 3))
    plt.imshow(temp)
    plt.show()


    k = 0
    while k < check.shape[2]:
        #plt.figure()
        #plt.subplot(231 + i)
        fig, axes = plt.subplots(4, 4, figsize=(5, 5))
        for i in range(4):
            for j in range(4):
                axes[i,j].imshow(check[:,:,k])
                k += 1
        #axes[0, 0].imshow(R, cmap='jet')
        #plt.imshow(check[:,:,i])

    plt.show()


Visualizing the final decoded/output layer with sample test image


In [76]:
i = 1
x = x_test[i:i+1]
check = np.squeeze(np.squeeze(np.array(convout7_f(x)),0),0)
check.shape


Out[76]:
(32, 32, 3)

In [80]:
#Plot final decoded layer

decoded_imgs = autoencoder.predict(x_test)

n = 4
plt.figure(figsize=(20, 4))
for i in range(n):
    # display original
    ax = plt.subplot(2, n, i + 1)
    plt.imshow(x_test[i].reshape(32, 32, 3))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # display reconstruction
    ax = plt.subplot(2, n, i + n + 1)
    plt.imshow(decoded_imgs[i].reshape(32, 32, 3))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    
plt.show()



In [ ]: