In [ ]:
# imports and logger!
from opendeep.log import config_root_logger
from opendeep.models import GSN
from opendeep.optimization import SGD
from opendeep.data import MNIST
from opendeep.utils.misc import closest_to_square_factors
from PIL import Image as pil_img
from opendeep.utils.image import tile_raster_images
import numpy
config_root_logger()
In [ ]:
# A denoising autoencoder (DAE) is a special case of a generative stochastic network (GSN)
# with a single hidden layer. GSNs can have multiple denoising layers that interact with
# each other both above and below, and `walkbacks` sets how many noisy encode/decode
# steps the network takes during training.
dae = GSN(
    input_size=28*28,
    hidden_size=1000,
    visible_activation='sigmoid',
    hidden_activation='tanh',
    layers=1,
    walkbacks=3,
    input_noise='salt_and_pepper',
    input_noise_level=0.3
)
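In [ ]:
# For intuition, a minimal numpy sketch of salt-and-pepper corruption (an
# approximation, not OpenDeep's exact implementation): each pixel is kept
# with probability 1 - corruption_level, otherwise replaced by 0 or 1
# chosen uniformly at random.
def salt_and_pepper_sketch(x, corruption_level=0.3, rng=numpy.random):
    keep = rng.binomial(1, 1 - corruption_level, size=x.shape)  # 1 = keep pixel
    coins = rng.binomial(1, 0.5, size=x.shape)                  # salt (1) or pepper (0)
    return x * keep + coins * (1 - keep)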
In [ ]:
# Create the MNIST data object
mnist = MNIST(concat_train_valid=True)
# Create the optimizer object
optimizer = SGD(dataset=mnist,
                epochs=40,
                batch_size=100,
                learning_rate=.25,
                lr_decay='exponential',
                lr_decay_factor=.995,
                momentum=.5,
                nesterov_momentum=False)
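In [ ]:
# Sanity check on the schedule (assuming one decay step per epoch, which may
# differ from OpenDeep's exact bookkeeping): exponential decay gives
# lr_t = learning_rate * lr_decay_factor ** t.
for t in (0, 10, 40):
    print("epoch %d: lr = %.4f" % (t, .25 * .995 ** t))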
In [ ]:
# Train the model with the optimizer on the mnist dataset!
dae.train(optimizer)
In [ ]:
# Run some test examples through the model to inspect the reconstructions
n_examples = 100
xs_test = mnist.test_inputs[:n_examples]
noisy_xs_test = dae.f_noise(xs_test)
reconstructed = dae.run(noisy_xs_test)
# Stack the examples into display rows: for each group of 10 digits, lay out
# the originals, the noisy inputs, and the reconstructions as consecutive rows
# of the tiled image.
stacked = numpy.vstack(
    [numpy.vstack([xs_test[i * 10: (i + 1) * 10],
                   noisy_xs_test[i * 10: (i + 1) * 10],
                   reconstructed[i * 10: (i + 1) * 10]])
     for i in range(10)])
number_reconstruction = pil_img.fromarray(
    tile_raster_images(stacked, (dae.image_height, dae.image_width), (10, 30), (1, 1))
)
number_reconstruction.save("dae_opendeep_test.png")
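# Sanity check (assuming the dataset hands back flattened numpy arrays):
# 10 groups x (10 originals + 10 noisy + 10 reconstructed) = 300 images
# of 28*28 pixels each, matching the (10, 30) tile grid above.
assert stacked.shape == (n_examples * 3, 28 * 28)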
# Construct an image from the weight matrix: each row of the transposed
# weights is one hidden unit's 28x28 filter.
image = pil_img.fromarray(
    tile_raster_images(
        X=dae.weights_list[0].get_value(borrow=True).T,
        img_shape=(28, 28),
        tile_shape=closest_to_square_factors(dae.layer_sizes[1]),
        tile_spacing=(1, 1)
    )
)
image.save("dae_opendeep_filters.png")
print("Done!")
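In [ ]:
# For reference, a minimal sketch of the factor-pair helper used above (the
# real one lives in opendeep.utils.misc): it returns a factorization of n
# that is as close to square as possible, used as the filter tile grid.
def closest_to_square_factors_sketch(n):
    a = int(numpy.floor(numpy.sqrt(n)))
    while n % a != 0:
        a -= 1
    return n // a, a  # e.g. 1000 -> (40, 25)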
In [ ]: