In [1]:
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm

from keras.layers import Input, Dense, Lambda, Layer
from keras.models import Model
from keras import backend as K
from keras import metrics
from keras.datasets import cifar10

batch_size = 10000   # unusually large batch: only 5 gradient updates per epoch
original_dim = 3072  # 32 * 32 * 3, a flattened RGB CIFAR-10 image
latent_dim = 2
intermediate_dim = 1024
epochs = 50
epsilon_std = 1.0


x = Input(shape=(original_dim,))
h = Dense(intermediate_dim, activation='relu')(x)
z_mean = Dense(latent_dim)(h)
z_log_var = Dense(latent_dim)(h)


def sampling(args):
    z_mean, z_log_var = args
    epsilon = K.random_normal(shape=(K.shape(z_mean)[0], latent_dim), mean=0.,
                              stddev=epsilon_std)
    return z_mean + K.exp(z_log_var / 2) * epsilon

# note that "output_shape" isn't necessary with the TensorFlow backend
z = Lambda(sampling, output_shape=(latent_dim,))([z_mean, z_log_var])
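# The Lambda layer implements the reparameterization trick: z = mu + sigma * eps
# with eps ~ N(0, I) and sigma = exp(z_log_var / 2), so sampling stays
# differentiable with respect to z_mean and z_log_var.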

# we instantiate these layers separately so as to reuse them later
decoder_h = Dense(intermediate_dim, activation='relu')
decoder_mean = Dense(original_dim, activation='sigmoid')
h_decoded = decoder_h(z)
x_decoded_mean = decoder_mean(h_decoded)


# Custom loss layer
class CustomVariationalLayer(Layer):
    def __init__(self, **kwargs):
        self.is_placeholder = True
        super(CustomVariationalLayer, self).__init__(**kwargs)

    def vae_loss(self, x, x_decoded_mean):
        xent_loss = original_dim * metrics.binary_crossentropy(x, x_decoded_mean)
        kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
        return K.mean(xent_loss + kl_loss)
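    # kl_loss above is the closed form of KL(N(mu, sigma^2) || N(0, I)) per sample:
    # -0.5 * sum(1 + log sigma^2 - mu^2 - sigma^2), where z_log_var = log sigma^2.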

    def call(self, inputs):
        x = inputs[0]
        x_decoded_mean = inputs[1]
        loss = self.vae_loss(x, x_decoded_mean)
        self.add_loss(loss, inputs=inputs)
        # We won't actually use the output.
        return x

y = CustomVariationalLayer()([x, x_decoded_mean])
vae = Model(x, y)
vae.compile(optimizer='rmsprop', loss=None)
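# Because the loss is attached inside CustomVariationalLayer via add_loss,
# compile() takes loss=None and fit() is given no target array; this is what
# triggers the "missing from loss dictionary" UserWarning in the output below.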


# train the VAE on cifar10 images
(x_train, y_train), (x_test, y_test) = cifar10.load_data()

x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
# flatten each (32, 32, 3) image into a single 3072-dimensional vector
x_train = x_train.reshape((len(x_train), np.prod(x_train.shape[1:])))
x_test = x_test.reshape((len(x_test), np.prod(x_test.shape[1:])))

vae.fit(x_train,
        shuffle=True,
        epochs=epochs,
        batch_size=batch_size,
        validation_data=(x_test, None))

# build a model to project inputs on the latent space
encoder = Model(x, z_mean)

# display a 2D plot of the 10 classes in the latent space
x_test_encoded = encoder.predict(x_test, batch_size=batch_size)
plt.figure(figsize=(6, 6))
plt.scatter(x_test_encoded[:, 0], x_test_encoded[:, 1], c=y_test.flatten())
plt.colorbar()
plt.show()

# build a generator that can sample from the learned distribution
decoder_input = Input(shape=(latent_dim,))
_h_decoded = decoder_h(decoder_input)
_x_decoded_mean = decoder_mean(_h_decoded)
generator = Model(decoder_input, _x_decoded_mean)

# display a 2D manifold of images sampled from the latent space
n = 15  # a 15x15 grid of sampled images
img_size = 32
digit_size = img_size  # alias used by the sampling loop below (kept from the MNIST example)
figure = np.zeros((img_size * n, img_size * n))
# linearly spaced coordinates on the unit square are transformed through the
# inverse CDF (ppf) of the Gaussian to produce values of the latent variables z,
# since the prior of the latent space is Gaussian
grid_x = norm.ppf(np.linspace(0.05, 0.95, n))
grid_y = norm.ppf(np.linspace(0.05, 0.95, n))

for i, yi in enumerate(grid_x):
    for j, xi in enumerate(grid_y):
        z_sample = np.array([[xi, yi]])
        x_decoded = generator.predict(z_sample)
        # NOTE: this reshape assumes a single-channel image; it fails for RGB
        # CIFAR-10 output (see the traceback below)
        digit = x_decoded[0].reshape(digit_size, digit_size)
        figure[i * digit_size: (i + 1) * digit_size,
               j * digit_size: (j + 1) * digit_size] = digit


Using TensorFlow backend.
C:\Users\Tejas\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel_launcher.py:62: UserWarning: Output "custom_variational_layer_1" missing from loss dictionary. We assume this was done on purpose, and we will not be expecting any data to be passed to "custom_variational_layer_1" during training.
Train on 50000 samples, validate on 10000 samples
Epoch 1/50
50000/50000 [==============================] - 63s - loss: 3974.7093 - val_loss: 2144.8538
Epoch 2/50
50000/50000 [==============================] - 61s - loss: 2148.0521 - val_loss: 2139.4617
Epoch 3/50
50000/50000 [==============================] - 61s - loss: 2123.7954 - val_loss: 2174.3137
Epoch 4/50
50000/50000 [==============================] - 62s - loss: 2143.6012 - val_loss: 2101.9033
Epoch 5/50
50000/50000 [==============================] - 61s - loss: 2090.3918 - val_loss: 2073.8547
Epoch 6/50
50000/50000 [==============================] - 59s - loss: 2193.2839 - val_loss: 2085.0769
Epoch 7/50
50000/50000 [==============================] - 56s - loss: 2092.1875 - val_loss: 2094.6724
Epoch 8/50
50000/50000 [==============================] - 69s - loss: 2086.9613 - val_loss: 2075.7393
Epoch 9/50
50000/50000 [==============================] - 72s - loss: 2087.1264 - val_loss: 2064.2078
Epoch 10/50
50000/50000 [==============================] - 69s - loss: 2100.6018 - val_loss: 2072.0889
Epoch 11/50
50000/50000 [==============================] - 54s - loss: 2085.7930 - val_loss: 2064.1113
Epoch 12/50
50000/50000 [==============================] - 69s - loss: 2086.9650 - val_loss: 2070.0732
Epoch 13/50
50000/50000 [==============================] - 51s - loss: 2079.4016 - val_loss: 2055.5593
Epoch 14/50
50000/50000 [==============================] - 54s - loss: 2068.8963 - val_loss: 2091.3955
Epoch 15/50
50000/50000 [==============================] - 56s - loss: 2073.7952 - val_loss: 2067.0669
Epoch 16/50
50000/50000 [==============================] - 59s - loss: 2076.2647 - val_loss: 2109.4629
Epoch 17/50
50000/50000 [==============================] - 54s - loss: 2087.4368 - val_loss: 2070.3955
Epoch 18/50
50000/50000 [==============================] - 57s - loss: 2058.7924 - val_loss: 2097.4165
Epoch 19/50
50000/50000 [==============================] - 56s - loss: 2073.5842 - val_loss: 2053.0764
Epoch 20/50
50000/50000 [==============================] - 55s - loss: 2077.3159 - val_loss: 2053.3333
Epoch 21/50
50000/50000 [==============================] - 55s - loss: 2065.8666 - val_loss: 2044.5048
Epoch 22/50
50000/50000 [==============================] - 58s - loss: 2061.5883 - val_loss: 2048.3730
Epoch 23/50
50000/50000 [==============================] - 57s - loss: 2051.6351 - val_loss: 2084.1123
Epoch 24/50
50000/50000 [==============================] - 60s - loss: 2077.8114 - val_loss: 2067.5015
Epoch 25/50
50000/50000 [==============================] - 58s - loss: 2056.1819 - val_loss: 2059.5132
Epoch 26/50
50000/50000 [==============================] - 57s - loss: 2036.1025 - val_loss: 2051.8533
Epoch 27/50
50000/50000 [==============================] - 55s - loss: 2065.8786 - val_loss: 2041.1178
Epoch 28/50
50000/50000 [==============================] - 56s - loss: 2040.7001 - val_loss: 2048.2126
Epoch 29/50
50000/50000 [==============================] - 58s - loss: 2046.9919 - val_loss: 2030.1764
Epoch 30/50
50000/50000 [==============================] - 59s - loss: 2035.4444 - val_loss: 2041.8000
Epoch 31/50
50000/50000 [==============================] - 60s - loss: 2034.7125 - val_loss: 2051.2319
Epoch 32/50
50000/50000 [==============================] - 59s - loss: 2034.9371 - val_loss: 2046.2388
Epoch 33/50
50000/50000 [==============================] - 60s - loss: 2034.6800 - val_loss: 2052.5581
Epoch 34/50
50000/50000 [==============================] - 60s - loss: 2037.8751 - val_loss: 2011.7676
Epoch 35/50
50000/50000 [==============================] - 58s - loss: 2033.8749 - val_loss: 2015.3127
Epoch 36/50
50000/50000 [==============================] - 59s - loss: 2054.6790 - val_loss: 2020.8348
Epoch 37/50
50000/50000 [==============================] - 58s - loss: 2018.4781 - val_loss: 2041.1860
Epoch 38/50
50000/50000 [==============================] - 59s - loss: 2021.7364 - val_loss: 2022.5480
Epoch 39/50
50000/50000 [==============================] - 69s - loss: 2026.3729 - val_loss: 2034.2290
Epoch 40/50
50000/50000 [==============================] - 63s - loss: 2025.0910 - val_loss: 2036.3732
Epoch 41/50
50000/50000 [==============================] - 65s - loss: 2020.6958 - val_loss: 2032.9189
Epoch 42/50
50000/50000 [==============================] - 67s - loss: 2025.4242 - val_loss: 2049.5176
Epoch 43/50
50000/50000 [==============================] - 71s - loss: 2030.7198 - val_loss: 2023.4816
Epoch 44/50
50000/50000 [==============================] - 70s - loss: 2029.3148 - val_loss: 2044.8036
Epoch 45/50
50000/50000 [==============================] - 62s - loss: 2022.1904 - val_loss: 2016.2356
Epoch 46/50
50000/50000 [==============================] - 62s - loss: 2013.2806 - val_loss: 2013.2356
Epoch 47/50
50000/50000 [==============================] - 65s - loss: 2015.6434 - val_loss: 2006.2128
Epoch 48/50
50000/50000 [==============================] - 61s - loss: 2009.0099 - val_loss: 2009.9510
Epoch 49/50
50000/50000 [==============================] - 60s - loss: 2021.7947 - val_loss: 2017.2244
Epoch 50/50
50000/50000 [==============================] - 60s - loss: 2010.7574 - val_loss: 2001.8315
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-1-3bf13f81c5f6> in <module>()
    106         z_sample = np.array([[xi, yi]])
    107         x_decoded = generator.predict(z_sample)
--> 108         digit = x_decoded[0].reshape(digit_size, digit_size)
    109         figure[i * digit_size: (i + 1) * digit_size,
    110                j * digit_size: (j + 1) * digit_size] = digit

ValueError: cannot reshape array of size 3072 into shape (32,32)
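The reshape fails because the decoder emits 3072 values per sample, i.e. a flattened 32 x 32 x 3 RGB image rather than a 32 x 32 grayscale one. A minimal fix for the manifold display, sketched here on the assumption that the grid, generator, n, and img_size defined above are reused (this version was not re-run), keeps the channel axis and allocates a 3-channel canvas:

figure = np.zeros((img_size * n, img_size * n, 3))  # 3-channel RGB canvas

for i, yi in enumerate(grid_x):
    for j, xi in enumerate(grid_y):
        z_sample = np.array([[xi, yi]])
        x_decoded = generator.predict(z_sample)
        img = x_decoded[0].reshape(img_size, img_size, 3)  # keep the channels
        figure[i * img_size: (i + 1) * img_size,
               j * img_size: (j + 1) * img_size] = img

plt.figure(figsize=(10, 10))
plt.imshow(figure)
plt.show()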

In [5]:
# keep the RGB channel axis when reshaping the decoder output
digit = x_decoded[0].reshape(img_size, img_size, 3)

In [7]:
digit.shape


Out[7]:
(32, 32, 3)
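Since 32 * 32 * 3 = 3072, the reshape now accounts for every value in the decoder output.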

In [11]:
fig = plt.figure(figsize=(5, 5))
ax4 = fig.add_subplot(111)
ax4.imshow(digit)  # digit is an RGB array, so a cmap would be ignored
plt.show()


