In [1]:
import os
import scipy.misc
import scipy.io
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops
# Clear any graph left over from a previous run so re-execution starts clean.
ops.reset_default_graph()

In [2]:
# TF1-style session shared by every cell below (graph building + execution).
sess = tf.Session()

In [26]:
# NOTE(review): absolute local paths -- make these configurable (e.g. via a
# DATA_DIR constant or environment variable) for portability.
original_image_file = '/Users/shouzeluo/Downloads/book_cover.jpg'
style_image_file = '/Users/shouzeluo/Downloads/starry_night.jpg'

# Pretrained VGG-19 weights in MATLAB .mat format.
vgg_path = '/Users/shouzeluo/Downloads/imagenet-vgg-verydeep-19.mat'

# Relative weights of the three loss terms (content, style, total variation).
original_image_weight = 5.0
style_image_weight = 500.0
regularization_weight = 100
learning_rate = 0.001
generations = 5000        # total optimization steps
output_generations = 250  # print/save a snapshot every this many steps
beta1 = 0.9   # For the Adam optimizer
beta2 = 0.999 # For the Adam optimizer

In [4]:
# Load the content and style images.
# NOTE(review): scipy.misc.imread/imresize were removed in SciPy >= 1.2;
# this notebook assumes an older SciPy (Python 2.7 / Canopy environment).
original_image = scipy.misc.imread(original_image_file)
style_image = scipy.misc.imread(style_image_file)
# Scale the style image by the ratio of widths (shape[1] of an HxWxC array)
# so its width matches the content image's width.
style_image = scipy.misc.imresize(style_image, float(original_image.shape[1]) / style_image.shape[1])

In [5]:
# VGG-19 Layer Setup
# From paper
# VGG-19 Layer Setup
# From paper
# Layer names in forward order; the trailing fully-connected layers are
# omitted because only the convolutional features are used for style transfer.
# The index of each name must line up with the weight records in the .mat file
# (see vgg_network, which indexes network_weights by position in this list).
vgg_layers = ['conv1_1', 'relu1_1',
              'conv1_2', 'relu1_2', 'pool1',
              'conv2_1', 'relu2_1',
              'conv2_2', 'relu2_2', 'pool2',
              'conv3_1', 'relu3_1',
              'conv3_2', 'relu3_2',
              'conv3_3', 'relu3_3',
              'conv3_4', 'relu3_4', 'pool3',
              'conv4_1', 'relu4_1',
              'conv4_2', 'relu4_2',
              'conv4_3', 'relu4_3',
              'conv4_4', 'relu4_4', 'pool4',
              'conv5_1', 'relu5_1',
              'conv5_2', 'relu5_2',
              'conv5_3', 'relu5_3',
              'conv5_4', 'relu5_4']

In [6]:
def extract_net_info(path_to_params):
    vgg_data = scipy.io.loadmat(path_to_params)
    normalization_matrix = vgg_data['normalization'][0][0][0]
    mat_mean = np.mean(normalization_matrix, axis=(0,1))
    network_weights = vgg_data['layers'][0]
    return(mat_mean, network_weights)

In [7]:
def vgg_network(network_weights, init_image):
    """Build the VGG-19 feature-extraction graph on top of `init_image`.

    network_weights: per-layer weight records (from extract_net_info).
    init_image: 4-D input tensor (placeholder or variable), shape (1, H, W, C).

    Returns a dict mapping each name in `vgg_layers` to the tensor that layer
    produces, so individual activations can be fetched later.
    """
    network = {}
    image = init_image

    for i, layer in enumerate(vgg_layers):
        if layer[0] == 'c':  # convolution layer
            # Each record holds this conv layer's (weights, bias).
            # NOTE(review): relies on vgg_layers aligning index-for-index with
            # the records in the .mat file -- confirm for other weight files.
            weights, bias = network_weights[i][0][0][0][0]
            # Swap the first two (spatial) kernel axes to TF's expected layout.
            weights = np.transpose(weights, (1, 0, 2, 3))
            bias = bias.reshape(-1)
            conv_layer = tf.nn.conv2d(image, tf.constant(weights), (1, 1, 1, 1), 'SAME')
            image = tf.nn.bias_add(conv_layer, bias)
        elif layer[0] == 'r':  # relu layer
            image = tf.nn.relu(image)
        else:  # pool layer: 2x2 max pooling with stride 2
            image = tf.nn.max_pool(image, (1, 2, 2, 1), (1, 2, 2, 1), 'SAME')
        network[layer] = image
    return(network)

In [8]:
# Layers used for the content loss and the style loss, respectively.
original_layer = 'relu4_2'
style_layers = ['relu1_1', 'relu2_1', 'relu3_1', 'relu4_1', 'relu5_1']

In [9]:
# Read the per-channel normalization mean and raw layer weights from the .mat file.
normalization_mean, network_weights = extract_net_info(vgg_path)

# Prepend a batch dimension of 1 to form the network input shapes.
shape = (1,) + original_image.shape
style_shape = (1,) + style_image.shape
original_features = {}  # filled below with content-layer activations
style_features = {}     # filled below with per-layer style Gram matrices

In [10]:
# Placeholder sized for the content image; build the VGG graph over it.
image = tf.placeholder('float', shape=shape)
vgg_net = vgg_network(network_weights, image)

In [11]:
# Mean-center the content image and capture its 'relu4_2' activations once.
original_minus_mean = original_image - normalization_mean
original_norm = np.array([original_minus_mean])  # add batch dimension
original_features[original_layer] = sess.run(vgg_net[original_layer], feed_dict={image: original_norm})

In [12]:
# Rebuild the graph with a placeholder sized for the style image.
# NOTE: this rebinds `image` and `vgg_net` from the previous cells.
image = tf.placeholder('float', shape=style_shape)
vgg_net = vgg_network(network_weights, image)
style_minus_mean = style_image - normalization_mean
style_norm = np.array([style_minus_mean])  # add batch dimension

# Precompute the style targets: one normalized Gram matrix per style layer.
for layer in style_layers:
    layer_output = sess.run(vgg_net[layer], feed_dict={image: style_norm})
    # Flatten spatial dims: rows are positions, columns are channels.
    layer_output = np.reshape(layer_output, (-1, layer_output.shape[3]))
    # Channel-by-channel inner products, normalized by total element count.
    style_gram_matrix = np.matmul(layer_output.T, layer_output) / layer_output.size
    style_features[layer] = style_gram_matrix

In [13]:
# The generated image: start from small random noise (scaled by 0.256) as the
# single trainable variable, and rebuild the VGG graph over it.
initial = tf.random_normal(shape) * 0.256
image = tf.Variable(initial)
vgg_net = vgg_network(network_weights, image)

In [14]:
# Loss
# Content loss: squared distance between the generated image's 'relu4_2'
# activations and the content image's, normalized by the element count.
original_loss = original_image_weight * (2 * tf.nn.l2_loss(vgg_net[original_layer] - original_features[original_layer]) /
                original_features[original_layer].size)

# Style loss: for each style layer, compare the generated image's Gram matrix
# against the precomputed style Gram matrix.
style_loss = 0
style_losses = []
for style_layer in style_layers:
    layer = vgg_net[style_layer]
    feats, height, width, channels = [x.value for x in layer.get_shape()]
    size = height * width * channels
    features = tf.reshape(layer, (-1, channels))
    style_gram_matrix = tf.matmul(tf.transpose(features), features) / size
    style_expected = style_features[style_layer]
    style_losses.append(2 * tf.nn.l2_loss(style_gram_matrix - style_expected) / style_expected.size)
style_loss += style_image_weight * tf.reduce_sum(style_losses)

# Total variation loss smooths the result by penalizing differences between
# neighboring pixels in both directions.
# BUG FIX: each direction's term is now normalized by its OWN element count
# (the original divided the vertical-difference loss by the horizontal count
# and vice versa; the counts differ only slightly, but the pairing was wrong).
total_var_y = sess.run(tf.reduce_prod(image[:,1:,:,:].get_shape()))  # vertical diffs
total_var_x = sess.run(tf.reduce_prod(image[:,:,1:,:].get_shape()))  # horizontal diffs
first_term = regularization_weight * 2
second_term = tf.nn.l2_loss(image[:,1:,:,:] - image[:,:shape[1]-1,:,:]) / total_var_y
third_term = tf.nn.l2_loss(image[:,:,1:,:] - image[:,:,:shape[2]-1,:]) / total_var_x
total_variation_loss = first_term * (second_term + third_term)

# Combined Loss
loss = original_loss + style_loss + total_variation_loss

In [15]:
# NOTE(review): leftover debug cell. It recomputes the 'relu2_1' Gram loss and
# appends it to `style_losses`, but `style_loss` was already built from
# tf.reduce_sum(style_losses) in the previous cell, so this append does not
# change the loss being optimized. Safe to delete.
style_layer = 'relu2_1'
layer = vgg_net[style_layer]
feats, height, width, channels = [x.value for x in layer.get_shape()]
size = height * width * channels
features = tf.reshape(layer, (-1, channels))
style_gram_matrix = tf.matmul(tf.transpose(features), features) / size
style_expected = style_features[style_layer]
style_losses.append(2 * tf.nn.l2_loss(style_gram_matrix - style_expected) / style_expected.size)

In [16]:
# Declare Optimization Algorithm
# Declare Optimization Algorithm
# NOTE(review): this Adam setup is superseded by the GradientDescent cell
# further down, which rebinds `optimizer`/`train_step` and re-initializes
# all variables before training. Keep only one of the two.
optimizer = tf.train.AdamOptimizer(learning_rate, beta1, beta2)
train_step = optimizer.minimize(loss)

# Initialize Variables and start Training
sess.run(tf.global_variables_initializer())

In [17]:
# Debug: inspect each style layer's activations for the current (random) image.
# NOTE(review): printing full activation tensors floods the notebook with
# megabytes of output -- prefer printing shapes or summary statistics instead.
for style_layer in style_layers:
    print('-------Layer: {} -------'.format(style_layer))
    layer = vgg_net[style_layer]
    print(sess.run(layer))
    feats, height, width, channels = [x.value for x in layer.get_shape()]
    size = height * width * channels
    print(size)
    print('')


-------Layer: relu1_1 -------
[[[[ 1.06823254  0.11015341  0.16431621 ...,  1.17520785  0.69044644
     0.76868963]
   [ 1.21858847  0.0363241   0.03528029 ...,  1.13480961  0.55015796
     0.58493781]
   [ 0.80623901  0.11544328  0.1386127  ...,  1.21436143  0.7710135
     0.63534391]
   ..., 
   [ 0.9099049   0.18666521  0.         ...,  1.11785877  0.15313232
     0.10561061]
   [ 0.46581382  0.22070536  0.         ...,  1.11768508  0.30681044
     0.01905251]
   [ 0.28489742  0.19358587  0.         ...,  1.09997785  0.44224295
     0.17892423]]

  [[ 1.11893606  0.          0.05088263 ...,  1.14737499  0.52848512
     0.59910756]
   [ 1.14851952  0.          0.         ...,  1.19754612  0.45327955
     0.43737614]
   [ 1.12203133  0.040387    0.09709938 ...,  1.38273478  0.84626943
     0.80368125]
   ..., 
   [ 0.86333048  0.24856645  0.02632043 ...,  1.18711758  0.15307793
     0.08068445]
   [ 0.10731632  0.30550557  0.         ...,  1.1756953   0.42172512
     0.14618507]
   [ 0.31364417  0.28033549  0.03655057 ...,  1.09155715  0.36218742
     0.23817539]]

  [[ 0.48659146  0.          0.         ...,  1.11118162  0.08967164
     0.05007377]
   [ 0.21551651  0.          0.         ...,  1.0626955   0.          0.        ]
   [ 0.32361898  0.          0.         ...,  1.17900825  0.          0.        ]
   ..., 
   [ 0.8326869   0.13696301  0.01081971 ...,  1.11791825  0.53032857
     0.3208307 ]
   [ 0.44713467  0.1711835   0.09100785 ...,  1.1248467   0.8918246
     0.70946503]
   [ 1.02595043  0.22235441  0.15880854 ...,  1.00591815  0.60717428
     0.61357403]]

  ..., 
  [[ 0.53563386  0.072506    0.         ...,  1.05170739  0.31511655
     0.2280218 ]
   [ 0.10847521  0.11497661  0.         ...,  0.9073838   0.2224662   0.        ]
   [ 0.51844209  0.18746287  0.08287978 ...,  0.96761215  0.38579237
     0.38165802]
   ..., 
   [ 0.73459548  0.0245952   0.11511433 ...,  1.01711941  0.64310306
     0.60402852]
   [ 0.98489666  0.          0.03828888 ...,  1.01780927  0.26565832
     0.35455069]
   [ 0.56917274  0.          0.         ...,  0.97259778  0.          0.        ]]

  [[ 0.77985275  0.16170874  0.142619   ...,  1.06674492  0.60680532
     0.56303573]
   [ 0.65494299  0.25802544  0.15821421 ...,  0.94744521  0.69104475
     0.52932042]
   [ 1.26285791  0.30069104  0.3038789  ...,  1.10791349  0.85291749
     1.02107525]
   ..., 
   [ 0.98499632  0.          0.04553895 ...,  0.97694707  0.48263958
     0.45544904]
   [ 1.03551722  0.00558875  0.13908088 ...,  1.24446845  0.6928466
     0.77140445]
   [ 0.89340824  0.          0.         ...,  1.2188549   0.48889375
     0.48959404]]

  [[ 0.82776332  0.1201492   0.0633904  ...,  1.03969634  0.56918442
     0.53981721]
   [ 0.78443527  0.15343496  0.03355509 ...,  0.89745522  0.64597231
     0.47633833]
   [ 1.18537378  0.15255615  0.11937086 ...,  1.06455088  0.61370003
     0.75920665]
   ..., 
   [ 0.86043334  0.05607176  0.03200537 ...,  1.06160688  0.35212165
     0.29328489]
   [ 0.79698455  0.          0.00599258 ...,  1.07841516  0.549465
     0.4675104 ]
   [ 0.8912558   0.          0.         ...,  1.03959334  0.56237054
     0.487809  ]]]]
9555712

-------Layer: relu2_1 -------
[[[[  0.           0.           0.         ...,   4.84566259   0.
     12.49402237]
   [  0.           0.           0.         ...,   5.3123045    0.
     18.50726128]
   [  0.           0.25236556   0.         ...,   8.8816061    0.
     17.33383942]
   ..., 
   [  0.           2.37972236   0.         ...,   5.39513874   0.
     13.73983383]
   [  0.           0.14727315   0.         ...,   6.21737719   0.
     14.73579025]
   [  0.           1.72488904   0.         ...,   3.81897712   0.
     11.08298302]]

  [[  0.           0.           0.         ...,   3.7355361    0.24682124
      3.01860118]
   [  0.           0.           0.         ...,   5.36848116   3.78581047
      3.22222638]
   [  0.           0.           0.10930822 ...,   5.81380939   0.
      3.06669831]
   ..., 
   [  0.           1.82248664   0.         ...,   9.57259464   0.
      0.14355962]
   [  0.           0.           0.         ...,   4.23341131   0.
      3.74216962]
   [  0.           3.06596422   0.         ...,   5.6204071    0.
      3.92507577]]

  [[  0.           0.           1.26102114 ...,   3.12884092   0.
      0.2956478 ]
   [  0.           0.           0.57829905 ...,   2.95822024   0.           0.        ]
   [  0.           0.           0.         ...,   3.77234411   0.           0.        ]
   ..., 
   [  0.           1.11886144   0.12922761 ...,   7.58935642   0.
      1.0112468 ]
   [  0.           0.           0.         ...,   5.76835155   0.
      1.84017289]
   [  0.           3.96817064   0.         ...,   7.09755898   3.92252707
      1.68414366]]

  ..., 
  [[  0.           0.           0.         ...,   1.95069766   0.           0.        ]
   [  0.           0.           0.         ...,   2.62579107   0.
      0.631015  ]
   [  0.           0.5034253    0.09307144 ...,   6.06257772   0.
      4.03381634]
   ..., 
   [  0.           0.15236232   0.         ...,   7.26456213   0.
      0.41215914]
   [  0.           0.78867871   0.         ...,   4.42117739   0.           0.        ]
   [  0.           0.94455427   1.00472069 ...,   2.4496336    0.
      0.79402655]]

  [[  0.           0.           0.         ...,   3.37944603   0.           0.        ]
   [  0.           0.           0.         ...,   6.47639942   4.12387753
      0.        ]
   [  0.           0.           1.99368429 ...,   9.71406937   1.78322732
      0.        ]
   ..., 
   [  0.           0.           0.         ...,   4.17576599   1.23014438
      0.        ]
   [  0.           1.26564753   0.         ...,   6.81092501   0.77571446
      0.        ]
   [  0.           2.5177505    0.         ...,   5.8119626    2.8001883
      0.        ]]

  [[  0.           0.           2.43787313 ...,   6.51941824   0.74310404
      0.        ]
   [  0.           0.88544923   3.92413759 ...,   8.31076527   2.76247168
      0.        ]
   [  0.           0.           4.5048418  ...,   6.47554779   2.48635221
      0.        ]
   ..., 
   [  0.           0.49532017   1.84271336 ...,   5.28180122   3.20004559
      0.        ]
   [  0.           0.68915164   2.26557994 ...,   5.65185165   2.49482727
      0.        ]
   [  0.           2.16225338   2.46268177 ...,   4.07326317   1.51848245
      0.        ]]]]
4777856

-------Layer: relu3_1 -------
[[[[  0.00000000e+00   6.71974421e+00   1.25062630e-01 ...,
      0.00000000e+00   1.98959625e+00   1.26387863e+01]
   [  2.45350337e+00   3.10330796e+00   4.53936195e+00 ...,
      0.00000000e+00   7.50107145e+00   1.01076479e+01]
   [  4.29452181e+00   0.00000000e+00   1.44453096e+00 ...,
      0.00000000e+00   5.81023455e+00   1.13092976e+01]
   ..., 
   [  2.29894423e+00   0.00000000e+00   3.03170830e-01 ...,
      4.11429214e+00   5.85599327e+00   1.23297129e+01]
   [  2.76403117e+00   0.00000000e+00   7.90732265e-01 ...,
      1.01598942e+00   6.60001850e+00   6.45528078e+00]
   [  1.23788559e+00   0.00000000e+00   0.00000000e+00 ...,
      0.00000000e+00   8.71704102e+00   0.00000000e+00]]

  [[  0.00000000e+00   3.80184746e+00   0.00000000e+00 ...,
      7.09051549e-01   0.00000000e+00   1.37725325e+01]
   [  2.42347145e+00   0.00000000e+00   4.17432690e+00 ...,
      2.21850586e+00   0.00000000e+00   8.18712807e+00]
   [  5.01415133e-01   0.00000000e+00   0.00000000e+00 ...,
      0.00000000e+00   0.00000000e+00   6.99228144e+00]
   ..., 
   [  1.13831556e+00   0.00000000e+00   0.00000000e+00 ...,
      4.76150513e+00   6.12177134e+00   1.19388485e+01]
   [  1.96520245e+00   0.00000000e+00   1.10383809e+00 ...,
      3.41560173e+00   7.55078077e+00   3.94581842e+00]
   [  3.25662374e-01   0.00000000e+00   0.00000000e+00 ...,
      1.40577590e+00   1.07317133e+01   0.00000000e+00]]

  [[  0.00000000e+00   0.00000000e+00   0.00000000e+00 ...,
      2.31482196e+00   4.46509457e+00   1.69733715e+01]
   [  2.17569685e+00   0.00000000e+00   3.29854417e+00 ...,
      3.61963582e+00   4.41946697e+00   9.67645931e+00]
   [  1.26454401e+00   0.00000000e+00   0.00000000e+00 ...,
      5.74727774e-01   0.00000000e+00   5.91210032e+00]
   ..., 
   [  1.99272466e+00   0.00000000e+00   0.00000000e+00 ...,
      2.42437053e+00   1.00854244e+01   1.20215902e+01]
   [  1.94243181e+00   1.11487782e+00   1.27941780e-02 ...,
      2.90377140e+00   1.31821527e+01   3.18597269e+00]
   [  0.00000000e+00   0.00000000e+00   0.00000000e+00 ...,
      1.06519914e+00   1.60003471e+01   0.00000000e+00]]

  ..., 
  [[  0.00000000e+00   6.33040524e+00   0.00000000e+00 ...,
      1.10977173e-01   0.00000000e+00   1.20686541e+01]
   [  2.84334540e+00   0.00000000e+00   2.14882946e+00 ...,
      0.00000000e+00   0.00000000e+00   9.19086361e+00]
   [  3.04600167e+00   0.00000000e+00   0.00000000e+00 ...,
      0.00000000e+00   2.66800642e-01   4.80191660e+00]
   ..., 
   [  1.77500522e+00   0.00000000e+00   0.00000000e+00 ...,
      6.16667652e+00   0.00000000e+00   2.51191330e+00]
   [  1.51523101e+00   0.00000000e+00   8.75605702e-01 ...,
      4.78366196e-01   2.13777065e+00   0.00000000e+00]
   [  2.35781860e+00   1.19421077e+00   0.00000000e+00 ...,
      0.00000000e+00   8.22249413e+00   0.00000000e+00]]

  [[  9.99140143e-01   2.72387600e+00   0.00000000e+00 ...,
      1.36217785e+00   0.00000000e+00   1.39687109e+01]
   [  2.09030128e+00   0.00000000e+00   5.89677334e+00 ...,
      0.00000000e+00   0.00000000e+00   1.06337662e+01]
   [  4.39104509e+00   0.00000000e+00   1.49351525e+00 ...,
      0.00000000e+00   0.00000000e+00   4.04934502e+00]
   ..., 
   [  4.40276670e+00   0.00000000e+00   7.93305278e-01 ...,
      2.77095079e+00   0.00000000e+00   2.72082901e+00]
   [  4.78408003e+00   1.99823511e+00   0.00000000e+00 ...,
      4.62143540e-01   2.12469149e+00   0.00000000e+00]
   [  3.31783009e+00   3.77507353e+00   0.00000000e+00 ...,
      0.00000000e+00   5.39687920e+00   0.00000000e+00]]

  [[  0.00000000e+00   0.00000000e+00   0.00000000e+00 ...,
      8.45000207e-01   0.00000000e+00   1.14023428e+01]
   [  0.00000000e+00   0.00000000e+00   3.91858840e+00 ...,
      1.82360661e+00   0.00000000e+00   1.01210985e+01]
   [  0.00000000e+00   0.00000000e+00   2.66941953e+00 ...,
      1.03537810e+00   7.01941699e-02   8.75932789e+00]
   ..., 
   [  0.00000000e+00   2.51642752e+00   2.44818616e+00 ...,
      2.51129651e+00   0.00000000e+00   7.71949911e+00]
   [  0.00000000e+00   4.20468140e+00   2.21890509e-01 ...,
      8.91991794e-01   7.77386010e-01   5.26134253e+00]
   [  0.00000000e+00   3.53835201e+00   0.00000000e+00 ...,
      7.09007680e-01   2.80391288e+00   2.17866397e+00]]]]
2414080

-------Layer: relu4_1 -------
[[[[ 13.86879158   0.           0.         ...,   0.          31.06976509
      1.74210608]
   [ 13.99967194   0.           0.         ...,   0.          18.75124359
      4.15607882]
   [  4.8088398    9.79184628   0.         ...,   0.          12.05978966
     17.69343185]
   ..., 
   [ 16.97172928  25.02368546   0.         ...,   0.          13.67253399
      6.94924212]
   [ 15.02601242  25.41618156   0.         ...,   0.           0.
     17.3018837 ]
   [  9.94981766  28.34239769   5.94055223 ...,   0.           0.
     24.33323669]]

  [[ 21.36205101   1.85927761   0.         ...,   0.          47.87252808
      0.        ]
   [  0.97121727  14.85327148   0.         ...,   0.          23.6798954
      0.        ]
   [  0.          25.74872971   0.         ...,   0.          11.67242241
      0.        ]
   ..., 
   [ 17.47617912  38.01199722   0.         ...,   0.          20.38186455
      0.        ]
   [  9.13005447  41.74302292   0.         ...,   0.           0.
      3.16584587]
   [  0.20681065  44.68583679  17.64965248 ...,   0.           0.
     25.70185661]]

  [[ 22.02315712  20.98425865   0.         ...,   0.          66.15026855
      0.        ]
   [  4.08737898  35.28104401   0.73844814 ...,   0.          21.59782219
      0.        ]
   [  2.28797364  33.03773499   0.         ...,   0.           3.46475768
      0.        ]
   ..., 
   [  0.          33.91624451   0.         ...,   0.          29.00991631
      0.        ]
   [  0.          44.44845963   0.         ...,   0.           0.
      1.00129819]
   [  0.          47.46167374  15.81106472 ...,   0.           0.
     22.02981758]]

  ..., 
  [[ 18.41979599  13.30069733   0.         ...,   0.          50.836586
      0.        ]
   [  0.          27.34680748   0.         ...,   0.          31.96951103
      0.        ]
   [  0.          25.72644043   0.         ...,   0.          20.06382179
      0.        ]
   ..., 
   [  0.          30.13908958   0.         ...,   0.          31.91316414
      0.        ]
   [  0.          42.08649063   0.         ...,   0.           6.10801363
      1.46824205]
   [  0.          46.87562943   8.64092445 ...,   0.           0.
     24.21432304]]

  [[  2.67179227  12.6603117    0.         ...,   0.          41.26224899
      0.        ]
   [  0.          28.44793701   0.         ...,   0.          29.53791237
      0.        ]
   [  0.          28.87229538   0.         ...,   0.          16.96293831
      0.        ]
   ..., 
   [  0.          32.48650742   0.         ...,   0.          31.1956749
      0.        ]
   [  0.          46.913517     0.         ...,   0.          16.96985245
      0.        ]
   [  0.          46.71961212  13.20678616 ...,   0.           0.
     21.7707901 ]]

  [[ 15.86958599   7.80876827   3.94851303 ...,   0.          29.77599335
      0.        ]
   [  1.69952726  25.39976501  16.05968475 ...,   0.          24.00768471
      0.        ]
   [  0.          26.03476906   9.1994009  ...,   0.          18.36257744
      0.        ]
   ..., 
   [  0.          27.43281937   2.75648785 ...,   0.          23.2630825
      0.        ]
   [  0.          38.46162415  12.98642635 ...,   0.          19.58389473
      0.        ]
   [  0.          36.2350769   24.26500511 ...,   0.           0.
     10.97179985]]]]
1217536

-------Layer: relu5_1 -------
[[[[ 0.          0.          4.39754009 ...,  1.65491593  0.          0.        ]
   [ 1.25098133  0.          4.30199146 ...,  0.74937618  0.          0.        ]
   [ 1.42344201  0.          2.29107809 ...,  0.          0.          0.        ]
   ..., 
   [ 1.09629631  0.          1.61408079 ...,  0.          0.          0.        ]
   [ 0.          0.          4.67718363 ...,  0.          0.          0.        ]
   [ 2.60027719  0.          4.59681892 ...,  0.          0.          0.        ]]

  [[ 0.          0.          0.         ...,  0.2549499   0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   ..., 
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 1.56839573  0.          0.         ...,  0.          0.          0.        ]]

  [[ 0.          0.          0.         ...,  0.37249827  0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   ..., 
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]]

  ..., 
  [[ 0.          0.          0.         ...,  0.69679576  0.          0.        ]
   [ 0.          0.          0.         ...,  0.09975313  0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   ..., 
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]]

  [[ 0.          0.          0.         ...,  1.15358555  0.          0.        ]
   [ 0.          0.          0.         ...,  0.19800051  0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   ..., 
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.31291139 ...,  0.          0.          0.        ]]

  [[ 0.          0.          0.         ...,  0.15569028  0.          0.59514487]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.29704553  0.          0.        ]
   ..., 
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]
   [ 0.          0.          0.         ...,  0.          0.          0.        ]]]]
311808


In [25]:
# Declare Optimization Algorithm
# Declare Optimization Algorithm
optimizer = tf.train.GradientDescentOptimizer(learning_rate)
train_step = optimizer.minimize(loss)

# Initialize Variables and start Training
sess.run(tf.global_variables_initializer())
for i in range(generations):

    sess.run(train_step)

    # Print update and save temporary output
    # NOTE(review): the recorded run diverged (loss -> inf/nan within a few
    # steps); consider a smaller learning rate or gradient clipping.
    if (i+1) % output_generations == 0:
        print('Generation {} out of {}, loss: {}'.format(i + 1, generations, sess.run(loss)))
        image_eval = sess.run(image)
        # Drop the batch dimension and undo the mean-centering before saving.
        best_image_add_mean = image_eval.reshape(shape[1:]) + normalization_mean
        # BUG FIX: name the file after the 1-based generation number so it
        # matches the printed progress message (was 'temp_output_{i}', which
        # produced e.g. temp_output_249.jpg for "Generation 250").
        output_file = 'temp_output_{}.jpg'.format(i + 1)
        scipy.misc.imsave(output_file, best_image_add_mean)


Generation 1 out of 20, loss: 273236416.0
Generation 2 out of 20, loss: 269309024.0
Generation 3 out of 20, loss: 223181504.0
Generation 4 out of 20, loss: 633766144.0
Generation 5 out of 20, loss: 3.02298333184e+11
Generation 6 out of 20, loss: 4.32774593665e+19
Generation 7 out of 20, loss: inf
Generation 8 out of 20, loss: inf
Generation 9 out of 20, loss: nan
/Users/shouzeluo/Library/Enthought/Canopy_64bit/User/lib/python2.7/site-packages/scipy/misc/pilutil.py:98: RuntimeWarning: invalid value encountered in greater
  bytedata[bytedata > high] = high
/Users/shouzeluo/Library/Enthought/Canopy_64bit/User/lib/python2.7/site-packages/scipy/misc/pilutil.py:99: RuntimeWarning: invalid value encountered in less
  bytedata[bytedata < 0] = 0
Generation 10 out of 20, loss: nan
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-25-ad617238e333> in <module>()
      7 for i in range(generations):
      8 
----> 9     sess.run(train_step)
     10 
     11     # Print update and save temporary output

/Users/shouzeluo/Library/Enthought/Canopy_64bit/User/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in run(self, fetches, feed_dict, options, run_metadata)
    893     try:
    894       result = self._run(None, fetches, feed_dict, options_ptr,
--> 895                          run_metadata_ptr)
    896       if run_metadata:
    897         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/Users/shouzeluo/Library/Enthought/Canopy_64bit/User/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run(self, handle, fetches, feed_dict, options, run_metadata)
   1122     if final_fetches or final_targets or (handle and feed_dict_tensor):
   1123       results = self._do_run(handle, final_targets, final_fetches,
-> 1124                              feed_dict_tensor, options, run_metadata)
   1125     else:
   1126       results = []

/Users/shouzeluo/Library/Enthought/Canopy_64bit/User/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
   1319     if handle is None:
   1320       return self._do_call(_run_fn, self._session, feeds, fetches, targets,
-> 1321                            options, run_metadata)
   1322     else:
   1323       return self._do_call(_prun_fn, self._session, handle, feeds, fetches)

/Users/shouzeluo/Library/Enthought/Canopy_64bit/User/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _do_call(self, fn, *args)
   1325   def _do_call(self, fn, *args):
   1326     try:
-> 1327       return fn(*args)
   1328     except errors.OpError as e:
   1329       message = compat.as_text(e.message)

/Users/shouzeluo/Library/Enthought/Canopy_64bit/User/lib/python2.7/site-packages/tensorflow/python/client/session.pyc in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
   1304           return tf_session.TF_Run(session, options,
   1305                                    feed_dict, fetch_list, target_list,
-> 1306                                    status, run_metadata)
   1307 
   1308     def _prun_fn(session, handle, feed_dict, fetch_list):

KeyboardInterrupt: 

In [ ]:
# Save the final stylized image: drop the batch dim and undo mean-centering.
image_eval = sess.run(image)
best_image_add_mean = image_eval.reshape(shape[1:]) + normalization_mean
output_file = 'final_output.jpg'
scipy.misc.imsave(output_file, best_image_add_mean)

In [ ]: