In [1]:
import os
import math
import zipfile
import time
import requests
from tqdm import tqdm
import numpy as np
from glob import glob
import matplotlib.pyplot as plt
import imageio
import skimage.transform
import tensorflow as tf
tf.enable_eager_execution()

In [2]:
# some stock code for downloading the images
def download_file_from_google_drive(id, destination):
  URL = "https://docs.google.com/uc?export=download"
  session = requests.Session()

  response = session.get(URL, params={ 'id': id }, stream=True)
  token = get_confirm_token(response)

  if token:
    params = { 'id' : id, 'confirm' : token }
    response = session.get(URL, params=params, stream=True)

  save_response_content(response, destination)

def get_confirm_token(response):
  for key, value in response.cookies.items():
    if key.startswith('download_warning'):
      return value
  return None

def save_response_content(response, destination, chunk_size=32*1024):
  total_size = int(response.headers.get('content-length', 0))
  with open(destination, "wb") as f:
    for chunk in tqdm(response.iter_content(chunk_size), total=total_size,
              unit='B', unit_scale=True, desc=destination):
      if chunk: # filter out keep-alive new chunks
        f.write(chunk)

def download_celeb_a(dirpath):
  data_dir = 'celebA'
  if os.path.exists(os.path.join(dirpath, data_dir)):
    print('Found Celeb-A - skip')
    return

  filename, drive_id  = "img_align_celeba.zip", "0B7EVK8r0v71pZjFTYXZWM3FlRnM"
  save_path = os.path.join(dirpath, filename)

  if os.path.exists(save_path):
    print('[*] {} already exists'.format(save_path))
  else:
    download_file_from_google_drive(drive_id, save_path)

  zip_dir = ''
  with zipfile.ZipFile(save_path) as zf:
    zip_dir = zf.namelist()[0]
    zf.extractall(dirpath)
  os.remove(save_path)
  os.rename(os.path.join(dirpath, zip_dir), os.path.join(dirpath, data_dir))
    
download_celeb_a(r'C:\Users\liori\datasets')


Found Celeb-A - skip

In [12]:
def get_image(image_path, input_height, input_width,
              resize_height=28, resize_width=28,
              crop=True):
  image = imageio.imread(image_path).astype(np.float)
  if crop:
    input_width = input_width or input_height #null handling
    crop_start_idx_h = int(round((image.shape[0] - input_height)/2.))
    crop_start_idx_w = int(round((image.shape[1] - input_width)/2.))
    image = image[crop_start_idx_h:crop_start_idx_h+input_height, crop_start_idx_w:crop_start_idx_w+input_width]    
  cropped_image = skimage.transform.resize(image, [resize_height, resize_width])
  return np.array(cropped_image)/127.5 - 1. #Normalize to [-1,1]

def show_image(image):
    plt.imshow(np.copy(image * 127.5 + 127.5).astype('uint8'), cmap='gray')
    plt.show()

def get_image_sample(path_regex, sample_size):
    image_paths = glob(path_regex)[:sample_size] #param: dataset sample size
    return np.array([get_image(image_path, 128, 108,crop=True) for image_path in image_paths]
                   ).astype(np.float32)#param: how much of center image to keep

def get_all_images_in_batches(path_regex, batch_size):
    current_index = 0
    image_paths = glob(path_regex)
    while current_index + batch_size <= len(image_paths):
        data_batch = np.array([get_image(image_path, 128, 108,crop=True) for image_path in 
                               image_paths[current_index:min(current_index+batch_size, len(image_paths))]]
                   ).astype(np.float32)
        current_index += batch_size
        yield data_batch
    
    
images = get_image_sample(r'C:\Users\liori\datasets\celebA\*.jpg', 1000)
show_image(images[32])
print(images.shape)


C:\ProgramData\Anaconda3\lib\site-packages\skimage\transform\_warps.py:84: UserWarning: The default mode, 'constant', will be changed to 'reflect' in skimage 0.15.
  warn("The default mode, 'constant', will be changed to 'reflect' in "
(1000, 28, 28, 3)

In [13]:
class Generator(tf.keras.Model):
  def __init__(self):
    super(Generator, self).__init__()
    self.fc1 = tf.keras.layers.Dense(4*4*128, use_bias=False) #1024 is a param
    self.batchnorm1 = tf.keras.layers.BatchNormalization()
    self.relu1 = tf.keras.layers.LeakyReLU()
    self.drop1 = tf.keras.layers.Dropout(rate=0.2) #0.2 is a param
    
    # 8x8 => 16x16 (conv size and stride only change data dependencies, not size)
    # 512 is a param
    self.conv1 = tf.keras.layers.Conv2DTranspose(64, (5, 5), strides=(2, 2), padding='valid', use_bias=False)
    self.batchnorm2 = tf.keras.layers.BatchNormalization()
    self.relu2 = tf.keras.layers.LeakyReLU()
    self.drop2 = tf.keras.layers.Dropout(rate=0.2)
    
    # 16x16 => 32x32 (conv size and stride only change data dependencies, not size)
    self.conv2 = tf.keras.layers.Conv2DTranspose(64, (5, 5), strides=(2, 2), padding='same', use_bias=False)
    self.batchnorm3 = tf.keras.layers.BatchNormalization()
    self.relu3 = tf.keras.layers.ReLU()
    self.drop3 = tf.keras.layers.Dropout(rate=0.2)
    
     # 16x16 => 32x32 (conv size and stride only change data dependencies, not size)
    #self.conv3 = tf.keras.layers.Conv2DTranspose(128, (5, 5), strides=(2, 2), padding='same', use_bias=False)
    #self.batchnorm4 = tf.keras.layers.BatchNormalization()
    #self.relu4 = tf.keras.layers.ReLU()
    #self.drop4 = tf.keras.layers.Dropout(rate=0.2)
    
    # 32x32 => 64x64. 3 = RGB
    self.conv4 = tf.keras.layers.Conv2DTranspose(3, (5, 5), strides=(2, 2), padding='same', use_bias=False)

  def call(self, x, training=True):
    x = self.fc1(x)
    x = tf.reshape(x, shape=(-1, 4, 4, 128))
    x = self.conv1(x)
    x = self.batchnorm1(x, training=training)
    x = self.relu1(x)
    #x = self.drop1(x, training=training)

    print(x.shape)
    x = self.conv2(x)
    x = self.batchnorm2(x, training=training)
    x = self.relu2(x)
    #x = self.drop2(x, training=training)

    x = self.conv2(x)
    x = self.batchnorm3(x, training=training)
    x = self.relu3(x)
    #x = self.drop3(x, training=training)
    
    #x = self.conv3(x)
    #x = self.batchnorm4(x, training=training)
    #x = self.relu4(x)
    #x = self.drop4(x, training=training)

    x = tf.nn.tanh(self.conv4(x))  
    return x

In [14]:
class Discriminator(tf.keras.Model):
  def __init__(self):
    super(Discriminator, self).__init__()
    self.conv1 = tf.keras.layers.Conv2D(32, (5, 5), strides=(2, 2), padding='same')
    self.batchnorm1 = tf.keras.layers.BatchNormalization()
    self.lrelu1 = tf.keras.layers.LeakyReLU(alpha=0.3) #leaky alpha
    self.conv2 = tf.keras.layers.Conv2D(64, (5, 5), strides=(2, 2), padding='same')
    self.batchnorm2 = tf.keras.layers.BatchNormalization()
    self.lrelu2 = tf.keras.layers.LeakyReLU(alpha=0.3) #leaky alpha
    self.conv3 = tf.keras.layers.Conv2D(128, (5, 5), strides=(1, 1), padding='same')
    self.batchnorm3 = tf.keras.layers.BatchNormalization()
    self.lrelu3 = tf.keras.layers.LeakyReLU(alpha=0.3) #leaky alpha
    self.dropout = tf.keras.layers.Dropout(0.2)
    self.flatten = tf.keras.layers.Flatten()
    self.fc1 = tf.keras.layers.Dense(1)

  def call(self, x, training=True):
    #x = self.lrelu1(self.batchnorm1(self.conv1(x)))
    x = self.lrelu1(self.conv1(x))
    #x = self.dropout(x, training=training)
    x = self.lrelu2(self.batchnorm2(self.conv2(x)))
    #x = self.dropout(x, training=training)
    x = self.lrelu3(self.batchnorm3(self.conv3(x)))
    #x = self.dropout(x, training=training)
    x = tf.reshape(x, (-1, 4*4*128))#self.flatten(x)
    x = self.fc1(x)
    return tf.nn.sigmoid(x)

In [15]:
generator = Generator()
discriminator = Discriminator()

# Defun gives 10 secs/epoch performance boost# Defun 
generator.call = tf.contrib.eager.defun(generator.call)
discriminator.call = tf.contrib.eager.defun(discriminator.call)

def discriminator_loss(real_output, generated_output):
    # [1,1,...,1] with real output since it is true and we want
    # our generated examples to look like it
    
    #smooth_factor = 0.1 #param
    #real_loss = tf.losses.sigmoid_cross_entropy(multi_class_labels=tf.ones_like(real_output)* (1 - smooth_factor), logits=real_output)

    # [0,0,...,0] with generated images since they are fake
    #generated_loss = tf.losses.sigmoid_cross_entropy(multi_class_labels=tf.zeros_like(generated_output), logits=generated_output)

    #total_loss = real_loss + generated_loss
    #return total_loss
    label_smoothing = 0.9
    d_loss_real = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(logits=real_output,
                                                labels=tf.ones_like(real_output) * label_smoothing))
    d_loss_fake = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(logits=generated_output,
                                                labels=tf.zeros_like(generated_output)))
    
    d_loss = d_loss_real + d_loss_fake
    return d_loss
def generator_loss(generated_output):
    #smooth_factor = 0.1 #param
    #return tf.losses.sigmoid_cross_entropy(multi_class_labels=tf.ones_like(generated_output)* (1 - smooth_factor), logits=generated_output)
    label_smoothing = 0.9
    g_loss = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(logits=generated_output,
                                                labels=tf.ones_like(generated_output) * label_smoothing))
    return g_loss

discriminator_optimizer = tf.train.AdamOptimizer(2e-4,beta1=0.5)
generator_optimizer = tf.train.AdamOptimizer(2e-4,beta1=0.5)

In [16]:
EPOCHS = 3

BUFFER_SIZE = 202500
BATCH_SIZE = 64
images = get_image_sample(r'C:\Users\liori\datasets\celebA\*.jpg', min(BATCH_SIZE*EPOCHS,BUFFER_SIZE*2))
train_dataset = tf.data.Dataset.from_tensor_slices(images).shuffle(BUFFER_SIZE).batch(BATCH_SIZE)


noise_dim = 100

def train(dataset, epochs, noise_dim):  
  for epoch in range(epochs):
    start = time.time()
    
    for images in dataset:
      # generating noise from a uniform distribution
      noise = tf.random_normal([BATCH_SIZE, noise_dim])
      
      with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
        generated_images = generator(noise, training=True)
        real_output = discriminator(images, training=True)
        generated_output = discriminator(generated_images, training=True)
        
        gen_loss = generator_loss(generated_output)
        disc_loss = discriminator_loss(real_output, generated_output)
        print(epoch + 1, gen_loss, disc_loss)
        print(tf.is_nan(generated_images[0,:,:,0]))
        #print('magics')
        #print(generated_images)
        
      gradients_of_generator = gen_tape.gradient(gen_loss, generator.variables)
      gradients_of_discriminator = disc_tape.gradient(disc_loss, discriminator.variables)
      
      generator_optimizer.apply_gradients(zip(gradients_of_generator, generator.variables))
      discriminator_optimizer.apply_gradients(zip(gradients_of_discriminator, discriminator.variables))

      
    #if epoch % 1 == 0:
    #  display.clear_output(wait=True)
    #  generate_and_save_images(generator,
    #                           epoch + 1,
    #                           random_vector_for_generation)
    
    # saving (checkpoint) the model every 15 epochs
    #if (epoch + 1) % 15 == 0:
    #  checkpoint.save(file_prefix = checkpoint_prefix)
    
    print ('Time taken for epoch {} is {} sec'.format(epoch + 1,
                                                      time.time()-start))
    
train(train_dataset, EPOCHS, noise_dim)


C:\ProgramData\Anaconda3\lib\site-packages\skimage\transform\_warps.py:84: UserWarning: The default mode, 'constant', will be changed to 'reflect' in skimage 0.15.
  warn("The default mode, 'constant', will be changed to 'reflect' in "
(64, 11, 11, 64)
1 tf.Tensor(0.5243197, shape=(), dtype=float32) tf.Tensor(1.4976066, shape=(), dtype=float32)
tf.Tensor(
[[False False False ... False False False]
 [False False False ... False False False]
 [False False False ... False False False]
 ...
 [False False False ... False False False]
 [False False False ... False False False]
 [False False False ... False False False]], shape=(88, 88), dtype=bool)
1 tf.Tensor(0.5240908, shape=(), dtype=float32) tf.Tensor(1.4982951, shape=(), dtype=float32)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(88, 88), dtype=bool)
1 tf.Tensor(0.5241046, shape=(), dtype=float32) tf.Tensor(1.4981194, shape=(), dtype=float32)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(88, 88), dtype=bool)
Time taken for epoch 1 is 1.5418262481689453 sec
2 tf.Tensor(0.52411866, shape=(), dtype=float32) tf.Tensor(1.4981024, shape=(), dtype=float32)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(88, 88), dtype=bool)
2 tf.Tensor(0.52413255, shape=(), dtype=float32) tf.Tensor(1.4980848, shape=(), dtype=float32)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(88, 88), dtype=bool)
2 tf.Tensor(0.5241464, shape=(), dtype=float32) tf.Tensor(1.4980679, shape=(), dtype=float32)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(88, 88), dtype=bool)
Time taken for epoch 2 is 0.36364269256591797 sec
3 tf.Tensor(0.5241603, shape=(), dtype=float32) tf.Tensor(1.4980505, shape=(), dtype=float32)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(88, 88), dtype=bool)
3 tf.Tensor(0.5241742, shape=(), dtype=float32) tf.Tensor(1.4980333, shape=(), dtype=float32)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(88, 88), dtype=bool)
3 tf.Tensor(0.524188, shape=(), dtype=float32) tf.Tensor(1.4980159, shape=(), dtype=float32)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(88, 88), dtype=bool)
Time taken for epoch 3 is 0.34510302543640137 sec

In [11]:
num_examples_to_generate = 1
random_vector_for_generation = tf.random_normal([num_examples_to_generate,
                                                 noise_dim])
result_image = generator(random_vector_for_generation, training=False)
show_image(result_image[0,:,:,:])
print(tf.is_nan(result_image[0,:,:,0]))


(1, 8, 8, 64)
tf.Tensor(
[[ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 ...
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]
 [ True  True  True ...  True  True  True]], shape=(64, 64), dtype=bool)

In [35]:
print(generator.variables)


[<tf.Variable 'dense_12/kernel:0' shape=(3, 65536) dtype=float32, numpy=
array([[ 0.00184807, -0.00246174,  0.00922283, ...,  0.00848921,
        -0.0033497 ,  0.00650632],
       [-0.00710263, -0.00738738, -0.00247419, ..., -0.00820803,
        -0.00733932,  0.00233856],
       [-0.0091548 , -0.0056524 ,  0.00709413, ...,  0.00108233,
         0.00339369,  0.00022758]], dtype=float32)>, <tf.Variable 'batch_normalization_39/gamma:0' shape=(1024,) dtype=float32, numpy=array([1., 1., 1., ..., 1., 1., 1.], dtype=float32)>, <tf.Variable 'batch_normalization_39/beta:0' shape=(1024,) dtype=float32, numpy=array([0., 0., 0., ..., 0., 0., 0.], dtype=float32)>, <tf.Variable 'conv2d_transpose_21/kernel:0' shape=(5, 5, 512, 1024) dtype=float32, numpy=
array([[[[-7.34566478e-03,  1.28750782e-03,  4.32581361e-03, ...,
           6.88442308e-03, -5.74769964e-03, -4.92076902e-03],
         [-1.08819874e-02,  1.19088674e-02,  4.62886970e-03, ...,
           2.01931316e-03,  6.83691259e-03,  9.10826121e-03],
         [ 4.12610359e-04, -2.78478488e-04,  7.29028601e-03, ...,
           6.47322554e-03,  3.35265789e-03,  3.93498223e-03],
         ...,
         [ 7.95257185e-03, -7.75232632e-03, -1.83283724e-03, ...,
           1.21202786e-03,  1.29913911e-04,  2.99422164e-03],
         [ 1.65063143e-03, -1.15325572e-02,  7.11223576e-03, ...,
          -1.23148225e-03,  5.12276683e-03,  5.57302032e-03],
         [ 3.97081953e-03, -1.11831753e-02, -4.93728835e-03, ...,
          -1.05643040e-02, -8.49157944e-03, -1.17108105e-02]],

        [[ 1.13831470e-02,  6.68046158e-03,  8.05001799e-03, ...,
           1.21047860e-02,  7.37990532e-03,  6.38329145e-03],
         [-1.17080184e-02,  1.20719709e-03, -8.77376180e-03, ...,
          -9.61725973e-03, -1.94856524e-03,  5.54737542e-03],
         [ 4.57730610e-03,  4.75166831e-03, -7.44985044e-03, ...,
           1.10174054e-02, -6.15412602e-03, -3.84956878e-03],
         ...,
         [ 6.02153223e-03, -9.16625839e-03,  1.87812466e-03, ...,
           1.15706027e-03, -9.56423022e-03,  3.75508983e-03],
         [ 9.69713647e-03,  6.76952023e-03,  7.63533358e-03, ...,
          -7.95431994e-03, -9.46624018e-03, -8.26757587e-03],
         [-4.26466204e-03, -8.26700591e-03, -6.17777416e-03, ...,
          -8.56342632e-03,  3.27776466e-03, -3.13912891e-03]],

        [[ 1.23990858e-02,  1.07496744e-02,  1.95680000e-03, ...,
           4.97764628e-03, -1.14751374e-02,  1.08979912e-02],
         [-1.22735146e-02,  6.67284708e-03,  8.52903817e-03, ...,
          -1.09340074e-02, -1.12126861e-02, -7.81871751e-03],
         [ 8.34600907e-03,  1.23411929e-02,  3.17830127e-03, ...,
          -7.69999949e-03, -5.73593704e-03,  2.80445814e-03],
         ...,
         [-6.09115092e-03,  6.41470496e-03, -2.19068490e-03, ...,
          -7.64083862e-03, -6.64802501e-03, -1.16549404e-02],
         [-2.89593637e-03, -1.10504031e-02,  1.61989965e-03, ...,
          -4.10825294e-03,  4.85261437e-03, -1.00906817e-02],
         [ 6.92631956e-03,  1.24510648e-02,  8.24689865e-04, ...,
          -4.14857827e-03,  4.61685751e-03, -4.41251975e-03]],

        [[-1.20383864e-02,  9.06721968e-03, -5.25069563e-03, ...,
           1.15392217e-02, -3.11749987e-04, -1.10456320e-02],
         [-2.76292302e-03,  2.40671914e-03, -1.15414523e-03, ...,
          -3.39785218e-03,  3.27769201e-03,  1.22579867e-02],
         [-3.50107811e-03,  2.55096890e-03,  1.02520911e-02, ...,
          -1.07125007e-03,  3.67092900e-04,  5.81092853e-03],
         ...,
         [ 9.24177188e-03, -7.47991493e-03, -6.36782963e-03, ...,
          -7.08378851e-03,  2.45476048e-03,  9.93577857e-03],
         [ 6.37374911e-03,  5.83728310e-03,  6.72543421e-04, ...,
           9.23295971e-03, -7.93483295e-03, -2.88848579e-03],
         [ 4.28515952e-03,  5.78699168e-03, -4.92213992e-03, ...,
           1.19330361e-05, -1.01657752e-02, -2.05226522e-03]],

        [[ 3.38449236e-03,  5.66041563e-03, -2.94948835e-03, ...,
          -8.21304694e-03,  7.44629186e-03, -5.97595004e-03],
         [-5.61244180e-03, -3.13213468e-03,  2.75884569e-03, ...,
          -1.22823389e-02,  9.49507579e-04,  8.39823857e-04],
         [ 7.70253036e-03, -7.83973653e-03,  2.92268675e-03, ...,
          -1.19279502e-02, -1.01202037e-02, -6.36998704e-03],
         ...,
         [ 5.83753828e-03,  1.00757750e-02, -7.64596788e-03, ...,
          -2.95868143e-04, -7.75857875e-03,  3.99682019e-03],
         [ 6.31300826e-03,  1.10946381e-02,  2.18546670e-03, ...,
          -8.87379050e-04,  2.85941642e-03,  3.01753916e-03],
         [-5.40325930e-03, -6.20963285e-03,  1.24323731e-02, ...,
          -7.52973370e-04, -1.04121417e-02, -1.16689783e-03]]],


       [[[-4.91558015e-03, -5.09607513e-03, -1.11993104e-02, ...,
          -4.13072295e-04,  5.30778524e-03, -1.24886753e-02],
         [ 3.79678793e-04,  1.04645453e-03, -2.69078929e-03, ...,
          -6.35097036e-03,  1.07366471e-02,  5.31190913e-03],
         [ 2.04497948e-04,  1.20766098e-02,  9.73604713e-03, ...,
          -7.82465003e-03,  9.17246658e-03,  6.84111658e-03],
         ...,
         [ 3.93820647e-03, -8.13452899e-03,  2.75775790e-04, ...,
          -5.23431599e-03, -7.52302120e-03,  1.73241179e-03],
         [-5.64172864e-04, -1.12769222e-02,  8.81969929e-04, ...,
           1.10993162e-04, -1.02943387e-02, -6.58870628e-03],
         [ 4.35123499e-03,  6.01016823e-03, -6.46785507e-03, ...,
           6.22640643e-03, -1.03413220e-02,  1.08874952e-02]],

        [[ 3.41376942e-03,  4.27481718e-04, -4.18600719e-03, ...,
           3.62788420e-03,  9.07199364e-03,  1.71184540e-03],
         [ 7.49640446e-03,  1.51425879e-03, -1.08785098e-02, ...,
          -8.30067135e-03, -4.33175266e-03, -7.74530461e-03],
         [-3.73492017e-04, -1.23700416e-02,  9.85480007e-03, ...,
          -3.08723468e-03, -6.17688289e-03,  2.14575790e-03],
         ...,
         [ 1.09377066e-02, -8.97487998e-03, -2.71113496e-03, ...,
           5.64748142e-03,  7.59290811e-03,  9.51690134e-03],
         [-5.06481528e-03, -7.54700322e-03, -4.51567490e-03, ...,
          -1.15366196e-02,  4.55121417e-03,  3.54405493e-05],
         [ 8.48627370e-03,  3.97416670e-03, -1.08999461e-02, ...,
           9.18000937e-04, -6.44285092e-03, -6.94596162e-03]],

        [[ 3.15899868e-03, -6.27163053e-03,  2.89091468e-03, ...,
           1.22255450e-02,  7.50817452e-03,  7.37542287e-04],
         [-8.60915799e-03, -1.23095037e-02, -3.52064334e-03, ...,
           6.79357443e-03,  1.20803183e-02,  8.46226234e-03],
         [-6.00135326e-03, -6.16373448e-03, -6.52035465e-03, ...,
          -8.17800779e-03,  9.74154193e-03, -8.99178721e-03],
         ...,
         [ 7.52240513e-03,  1.21556381e-02,  1.19356988e-02, ...,
          -1.24197602e-02,  1.20341247e-02,  3.93175799e-03],
         [-9.20697488e-03,  2.10385025e-03, -1.08049959e-02, ...,
          -7.29736686e-03,  5.86927217e-03,  2.42968835e-03],
         [-5.69839776e-03,  1.40701793e-03,  8.12746864e-03, ...,
          -3.43855005e-03, -1.09730363e-02,  1.20509202e-02]],

        [[ 8.41824990e-03,  3.78583651e-03, -8.88973475e-04, ...,
           2.14776117e-03, -6.64630253e-03,  5.52095193e-03],
         [-3.37529741e-03,  1.20429778e-02,  1.23964408e-02, ...,
           8.40871874e-03,  1.19046336e-02,  2.68152356e-03],
         [ 3.18507571e-03, -2.13908218e-03,  7.20264297e-03, ...,
           6.69116620e-03,  1.22454343e-02, -5.14854211e-03],
         ...,
         [-1.20660635e-02,  6.48238976e-03,  2.95098126e-03, ...,
          -8.57172906e-03,  4.96075768e-03, -1.71950739e-03],
         [-1.71006657e-03, -6.16301922e-03, -1.12672662e-02, ...,
           3.48445959e-04,  1.00052627e-02,  1.13358861e-02],
         [-1.06756389e-02, -1.15815084e-03,  6.22730050e-03, ...,
          -3.26553918e-03, -8.37550499e-03, -4.45534289e-03]],

        [[-1.17605003e-02,  1.04834205e-02, -1.20651303e-02, ...,
           1.06139453e-02,  8.17102380e-04, -6.32511079e-03],
         [-5.16951084e-04,  1.25169475e-03,  5.59076015e-03, ...,
           4.01559286e-04, -3.22947279e-04,  1.86972879e-03],
         [ 7.47062918e-03, -4.63731587e-03, -6.62712147e-03, ...,
          -1.23359086e-02,  2.00470351e-03,  7.22492021e-03],
         ...,
         [-8.48560035e-03, -1.18640335e-02,  7.53985625e-03, ...,
          -9.87474900e-03,  1.32957101e-03, -6.87860278e-03],
         [ 7.66451936e-03,  4.96238563e-03, -8.01558513e-03, ...,
           6.01982791e-03,  9.91187710e-03,  7.25759286e-03],
         [-7.41945207e-03, -3.50229722e-03,  8.84813163e-03, ...,
           8.05113558e-03,  1.24968318e-02, -1.92367099e-03]]],


       [[[-5.32266824e-03,  4.31823079e-03, -1.08135855e-02, ...,
          -2.44050939e-03, -8.54726695e-03,  9.67958290e-03],
         [-9.52677429e-03, -1.22800469e-03,  4.26373724e-03, ...,
           1.10719083e-02, -1.03808343e-02, -4.18993644e-04],
         [-3.19820084e-03,  6.18906040e-03,  3.18517629e-03, ...,
          -3.59282456e-03,  2.40659993e-03, -3.04145832e-03],
         ...,
         [ 8.77846871e-03,  5.46847563e-03,  7.31248874e-03, ...,
           4.14925907e-03,  4.13777307e-05,  3.78500018e-03],
         [-6.03388576e-03,  9.56296641e-03,  1.17691988e-02, ...,
           3.03681754e-03,  4.92238719e-03, -4.11248486e-03],
         [-7.03171501e-03, -1.03014521e-04, -2.23630108e-03, ...,
           8.58473498e-03, -1.04194339e-02, -7.54760532e-03]],

        [[ 1.23756016e-02,  4.14367300e-03,  5.53765055e-03, ...,
           8.73167533e-03, -5.85939875e-03, -5.71079878e-03],
         [ 8.90646968e-03,  6.11521583e-03, -1.36298873e-03, ...,
          -1.15866987e-02, -2.67256517e-03,  1.22108124e-03],
         [-7.30390847e-03,  3.01129185e-03, -6.95916405e-03, ...,
          -1.21881161e-02, -7.09819794e-03,  2.41164304e-03],
         ...,
         [-1.51292700e-03,  7.41487462e-03,  7.68441241e-03, ...,
           8.92330427e-03,  3.68508510e-04, -3.67622077e-03],
         [ 2.90071685e-03,  8.84341914e-03, -6.86336774e-03, ...,
           2.02159025e-03, -9.43841599e-03,  4.96698637e-03],
         [ 5.50169405e-03,  1.08355796e-02, -1.09440982e-02, ...,
          -1.00642741e-02, -8.39454122e-04,  3.94454692e-03]],

        [[-1.15740811e-02, -1.19118160e-02, -1.06472522e-02, ...,
           1.04630273e-03, -6.88381493e-03,  6.22367021e-03],
         [ 5.67340013e-03, -1.26862247e-03,  2.64674425e-04, ...,
           9.05750971e-03, -9.51546244e-04,  9.61842295e-03],
         [-3.74641083e-03,  7.40122143e-03, -1.20932106e-02, ...,
          -5.54396212e-03, -8.65390524e-04, -7.15362141e-03],
         ...,
         [ 5.03063854e-03, -1.19633051e-02, -8.18672590e-04, ...,
           4.97052446e-04, -5.05388994e-03,  1.21351415e-02],
         [ 6.40222803e-04, -4.41089831e-03,  1.14479447e-02, ...,
           2.67297961e-03, -1.56530738e-03,  4.17146366e-03],
         [-5.10141579e-03, -9.16182995e-03, -4.95347986e-03, ...,
           8.90713278e-03,  1.21936621e-02, -5.95022738e-03]],

        [[-9.24197119e-03,  1.05238417e-02,  8.88588838e-04, ...,
           4.51526139e-03, -2.22722255e-03,  9.97292344e-03],
         [ 5.58429677e-03,  1.13859745e-02, -6.49297563e-03, ...,
          -2.43388116e-03, -4.63813543e-03, -5.11415629e-03],
         [ 1.00683598e-02, -2.84754671e-03,  9.03016422e-03, ...,
           7.95679446e-03,  4.85436898e-03, -3.35317291e-03],
         ...,
         [ 5.66786807e-03,  4.43797652e-03, -2.68530287e-03, ...,
           5.35271224e-03, -8.45953822e-04,  1.76858343e-03],
         [ 7.62009900e-03,  7.41210300e-03,  5.30954357e-03, ...,
          -1.00343321e-02,  2.30541453e-04, -8.98501277e-03],
         [-5.68014383e-03,  5.53801004e-03,  3.12912278e-04, ...,
          -1.55556761e-03,  8.24667607e-03,  5.96408639e-03]],

        [[-5.42069692e-03, -1.85492914e-03,  1.01107638e-03, ...,
           1.03063313e-02,  2.93294806e-03, -8.72696377e-03],
         [ 8.19414575e-03,  5.86797018e-03,  2.73408275e-03, ...,
           8.78642220e-03,  8.53036623e-03, -1.15372809e-02],
         [ 5.04990760e-03, -4.05898131e-03, -9.34737641e-03, ...,
          -1.11009777e-02, -5.87972393e-03, -1.05881840e-02],
         ...,
         [ 6.66869897e-03,  5.81297372e-03,  2.87800748e-03, ...,
           8.30521341e-03,  7.67872948e-03,  2.54058558e-03],
         [ 1.24082481e-02, -1.63654424e-03, -1.14586595e-02, ...,
          -9.23827570e-03, -3.90949845e-03,  8.57350416e-04],
         [-8.86898302e-03,  1.10764522e-03,  4.52330802e-03, ...,
           4.10642382e-03,  4.57621273e-03, -5.23895025e-04]]],


       [[[ 1.19447680e-02, -9.03750397e-03, -7.21413782e-03, ...,
          -7.94878043e-03, -1.06087886e-03,  3.09807062e-03],
         [-4.41394467e-03, -1.68369990e-03, -1.02386270e-02, ...,
          -1.24104880e-03, -5.13319997e-03,  8.81797355e-03],
         [-1.15490230e-02, -4.10833955e-03, -1.09352386e-02, ...,
          -1.00001395e-02,  6.01114053e-03, -9.94539261e-03],
         ...,
         [-2.49650516e-03,  5.90816792e-03,  1.13615906e-02, ...,
           3.39425448e-03,  6.48168381e-03, -2.62799859e-03],
         [ 2.96232663e-03, -2.54845340e-03, -9.02171992e-03, ...,
           2.70944554e-03, -9.83746722e-05,  1.19143724e-03],
         [ 5.65188285e-03,  1.21099474e-02,  6.04815874e-03, ...,
          -8.53199977e-03,  9.20382794e-03,  2.82052159e-03]],

        [[-3.46241891e-03,  5.43377455e-03, -1.08080003e-02, ...,
           1.20860608e-02,  1.19417282e-02,  5.75156230e-03],
         [-4.46972530e-03,  2.14168988e-03, -9.84854996e-03, ...,
           1.99747365e-03, -1.11007458e-02,  9.48625151e-03],
         [ 2.57286709e-03,  6.44679647e-03, -6.27189875e-05, ...,
          -2.28686351e-03, -9.82835889e-04,  3.35070770e-03],
         ...,
         [ 4.30459157e-04, -7.30909733e-03, -4.11226135e-03, ...,
          -4.12090681e-03,  1.20755015e-02, -2.51027662e-03],
         [-1.45387836e-04, -3.18031013e-03, -7.05560762e-03, ...,
          -6.71805767e-03, -7.40628829e-03, -7.48293102e-03],
         [ 9.23226867e-03, -9.31199826e-03,  1.07243685e-02, ...,
          -8.83049332e-03,  7.86050875e-03,  6.89303502e-04]],

        [[ 4.19261213e-03,  7.77358189e-04,  9.84263141e-03, ...,
          -1.11611187e-02,  1.19223958e-02, -2.49428768e-03],
         [-1.23120844e-06, -5.62675903e-03,  8.04681238e-03, ...,
          -1.08868750e-02,  7.09670596e-04, -7.79122999e-03],
         [-4.40262537e-03, -2.81416159e-03,  8.44967086e-03, ...,
          -1.14642680e-02, -5.27468324e-03, -1.22009814e-02],
         ...,
         [-8.53979308e-03, -1.02756619e-02, -4.38071787e-03, ...,
          -6.30733697e-03,  3.33939772e-03, -1.03738513e-02],
         [-6.16466720e-03, -7.83170480e-03, -8.76159687e-03, ...,
          -1.17031019e-03, -2.57309340e-03, -1.20790778e-02],
         [ 1.19842021e-02, -6.83159241e-03, -8.23337212e-03, ...,
           1.15569094e-02, -4.47943248e-03, -1.04826661e-02]],

        [[ 9.13526770e-03,  9.67597123e-03,  4.73788008e-04, ...,
           1.19434474e-02, -2.15542503e-04, -6.07298920e-03],
         [ 3.59996315e-03, -8.43856949e-03, -3.66204418e-03, ...,
          -1.15686981e-02,  1.21383192e-02,  5.18853124e-03],
         [-7.69349327e-03,  6.28986116e-03,  6.17420394e-03, ...,
          -2.27254350e-03, -4.17927466e-03, -2.01028585e-03],
         ...,
         [ 6.37784880e-03, -1.09792380e-02, -5.13510406e-03, ...,
          -9.64288693e-03,  7.97019619e-03,  2.28442810e-03],
         [ 1.10788224e-02,  4.80515976e-03, -1.16188442e-02, ...,
          -1.08488742e-02,  1.04305269e-02,  6.46368321e-03],
         [-6.26015477e-04, -9.75323655e-03,  3.52321286e-03, ...,
           8.20005313e-04,  6.21551648e-04, -3.88483983e-03]],

        [[-5.89582324e-03, -8.09790753e-03, -4.95243678e-03, ...,
          -7.81435147e-03, -1.77118462e-03,  6.76590484e-03],
         [-4.90569789e-03, -1.13487160e-02, -6.71012420e-03, ...,
           7.20730517e-03,  3.08111589e-03,  2.74080597e-03],
         [ 4.09402605e-03, -7.59695796e-03, -5.66076068e-03, ...,
           5.71278390e-03,  5.39160054e-03, -1.03110103e-02],
         ...,
         [-7.42187817e-03, -9.76455491e-03,  7.41060358e-03, ...,
          -6.75602583e-03,  8.90253764e-03, -8.31381697e-03],
         [-3.40473372e-03, -7.27839163e-03,  9.23474226e-03, ...,
           9.51246638e-03,  2.93471478e-03, -6.39853766e-03],
         [ 9.20710806e-03,  1.23079428e-02,  2.91896146e-03, ...,
          -9.66951530e-03,  2.62925029e-03, -9.06210858e-03]]],


       [[[-4.48220968e-03, -2.18868535e-03,  1.16810119e-02, ...,
          -1.19595509e-03, -2.88065337e-03, -1.09609840e-02],
         [ 7.35955220e-03, -7.94998743e-03, -8.36581923e-03, ...,
           7.09971134e-03, -5.26025286e-03,  9.74554662e-03],
         [-6.37766905e-04,  1.46423280e-03, -4.83537605e-03, ...,
           1.57053769e-03,  1.01398202e-02, -6.65321667e-03],
         ...,
         [-9.68602020e-03, -1.70540251e-03,  1.11739049e-02, ...,
          -1.07039455e-02, -2.08330434e-03,  6.71431702e-03],
         [-7.00911600e-03, -1.78650953e-03, -9.92449932e-04, ...,
           6.61059003e-03, -1.00675039e-04,  1.09972516e-02],
         [-5.63430786e-03, -7.71044800e-03,  5.62189240e-03, ...,
           5.86081389e-03,  8.29695445e-03,  7.09589478e-03]],

        [[ 6.40449114e-04, -8.59236717e-03, -1.05874902e-02, ...,
          -1.21146617e-02,  1.59677863e-03,  3.57218925e-03],
         [ 4.64603957e-03,  7.98923429e-03,  7.59684201e-03, ...,
           7.60817248e-03, -1.30409002e-03, -6.11199765e-03],
         [ 1.06651699e-02,  3.51204630e-03, -6.87977998e-03, ...,
          -1.18447561e-03,  3.56539432e-03,  3.70515231e-03],
         ...,
         [ 6.80342410e-03, -6.72942121e-03,  8.95120297e-03, ...,
           1.15252947e-02, -8.98812991e-03,  8.02232418e-03],
         [ 5.30131999e-03,  5.77977393e-03,  1.08891157e-02, ...,
          -1.46875065e-03, -9.32425261e-03,  1.00182155e-02],
         [ 1.09439706e-02,  1.22219333e-02, -2.75931694e-03, ...,
           1.11045875e-03,  5.33916336e-03,  4.78681270e-03]],

        [[ 9.79664270e-03,  1.21459989e-02, -1.22434832e-03, ...,
          -1.16225807e-02,  3.26436106e-03, -3.64749413e-03],
         [-3.68328672e-03, -4.12775017e-03,  6.60781655e-03, ...,
          -4.00818884e-03, -5.60641894e-03, -4.58202045e-03],
         [-4.98810410e-03, -7.14574149e-03, -7.26355007e-03, ...,
           8.68965778e-03, -7.51230447e-03, -2.96394248e-03],
         ...,
         [-9.63451527e-03,  1.13764778e-04, -4.41627484e-03, ...,
          -7.79451756e-03,  7.08412286e-03,  8.12917296e-03],
         [ 7.61299487e-03,  6.64339680e-03,  9.80078336e-03, ...,
          -5.90239186e-03,  4.45355196e-03, -8.36405158e-03],
         [ 2.03458685e-03, -7.79122440e-03,  9.17258020e-03, ...,
           4.39638272e-04,  1.05567658e-02, -8.97194445e-03]],

        [[-1.24431700e-02, -7.96623901e-03,  7.22220168e-04, ...,
          -7.34563498e-03,  7.63564464e-03,  1.13802264e-02],
         [ 4.29713074e-03, -9.84162651e-03, -6.50434801e-03, ...,
           1.11672254e-02,  8.29971675e-03,  1.19954562e-02],
         [ 9.66817979e-03,  1.87230669e-03,  5.75583894e-03, ...,
           6.26901630e-03,  8.93391948e-03,  7.15975184e-03],
         ...,
         [ 9.93905123e-03, -3.11359763e-04,  5.16574178e-03, ...,
           2.40475871e-03,  6.83477614e-03,  1.07606314e-03],
         [-5.50048938e-03,  1.04552740e-02, -3.16220522e-03, ...,
           7.95378629e-03,  7.56176095e-03, -6.61638379e-03],
         [ 3.59709840e-03, -5.03389863e-03,  8.02884344e-03, ...,
          -3.95688135e-03, -1.50317885e-03,  1.10443393e-02]],

        [[-6.42806292e-04, -2.36913003e-03, -7.73311267e-03, ...,
          -9.60389990e-03, -4.66767512e-03,  6.34094793e-03],
         [-7.05849519e-03,  9.44001321e-03,  1.24087660e-02, ...,
          -4.71507618e-03, -7.10980315e-03, -3.04515380e-03],
         [-7.85789452e-03,  5.46079595e-03, -3.44811101e-03, ...,
          -1.20696453e-02,  8.14991351e-03, -6.47230446e-03],
         ...,
         [-2.71767098e-03,  1.14352526e-02, -6.04989519e-03, ...,
           2.39533186e-03, -1.10829445e-02,  6.71353191e-05],
         [ 9.41297133e-03,  9.16793942e-04,  1.20336423e-02, ...,
          -5.02714515e-03, -2.70162802e-03, -1.07820155e-02],
         [ 7.08284322e-03,  5.76360244e-03,  1.14509845e-02, ...,
           1.13323787e-02,  7.97207002e-03, -6.59669051e-03]]]],
      dtype=float32)>, <tf.Variable 'batch_normalization_40/gamma:0' shape=(512,) dtype=float32, numpy=
array([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
       1., 1.], dtype=float32)>, <tf.Variable 'batch_normalization_40/beta:0' shape=(512,) dtype=float32, numpy=
array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
       0., 0.], dtype=float32)>, <tf.Variable 'conv2d_transpose_22/kernel:0' shape=(5, 5, 3, 512) dtype=float32, numpy=
array([[[[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]]],


       [[[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]]],


       [[[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]]],


       [[[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]]],


       [[[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]],

        [[nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan],
         [nan, nan, nan, ..., nan, nan, nan]]]], dtype=float32)>, <tf.Variable 'batch_normalization_39/moving_mean:0' shape=(1024,) dtype=float32, numpy=
array([ 1.5746273e-05,  4.4253811e-06, -1.2583681e-05, ...,
       -5.5467999e-06,  4.4792846e-06, -1.1030195e-05], dtype=float32)>, <tf.Variable 'batch_normalization_39/moving_variance:0' shape=(1024,) dtype=float32, numpy=
array([0.3660924 , 0.36609   , 0.36609125, ..., 0.36609197, 0.36608064,
       0.36609113], dtype=float32)>, <tf.Variable 'batch_normalization_40/moving_mean:0' shape=(512,) dtype=float32, numpy=
array([ 1.22310296e-02, -1.64330890e-03,  7.13012926e-03,  2.11981684e-02,
        1.16097052e-02,  1.37015467e-03, -7.19378283e-03,  3.97166004e-04,
        2.68400591e-02, -2.29420103e-02,  2.53085177e-02, -3.19716111e-02,
       -1.71369296e-02,  2.40344671e-03,  2.01940797e-02, -1.69705984e-03,
        9.88980569e-03, -1.15073193e-02, -1.30738858e-02,  1.65442880e-02,
       -3.29202157e-03, -1.71609633e-02, -2.54389681e-02,  1.33945169e-02,
       -1.05961133e-02,  5.75182587e-03,  4.19741496e-03, -1.19971745e-02,
       -3.39702261e-03, -8.25123675e-03,  1.59506835e-02, -1.56980008e-02,
       -3.72130945e-02,  8.84063914e-03,  2.94681042e-02, -1.52515937e-02,
        2.77025567e-04,  1.61488932e-02, -2.65462254e-03, -9.52607673e-03,
       -1.91715471e-02,  5.84825967e-03, -2.45270971e-02,  2.59787440e-02,
        7.76423048e-03,  9.62328445e-03,  1.88819170e-02,  1.68079380e-02,
        2.56405920e-02,  2.35315762e-03, -1.28309159e-02, -1.62872747e-02,
       -1.05915377e-02,  1.24690880e-03,  3.14727426e-02, -9.46244411e-03,
       -5.22080576e-03, -2.14988342e-03,  1.27015011e-02,  1.29567320e-02,
        4.51081246e-03,  2.18035188e-02,  2.63002366e-02,  3.14284191e-02,
       -1.40431346e-02, -1.06509570e-02,  1.86022169e-06,  6.38224743e-03,
       -1.04951533e-02,  1.51866479e-04,  2.14958843e-02,  1.66071523e-02,
        9.70459171e-03, -4.91117779e-03, -3.42215113e-02, -1.45760253e-02,
       -1.30409421e-02,  6.79344917e-03,  7.76899513e-03,  3.12167918e-03,
        1.54497437e-02, -3.74996499e-03,  1.14910747e-03, -3.84636931e-02,
       -2.19230470e-03, -2.41922364e-02, -1.61969252e-02, -3.07074711e-02,
       -8.22096830e-04,  2.39871256e-03,  7.12818699e-03, -1.94463076e-03,
        1.84101220e-02, -5.41896420e-03,  2.13059541e-02,  2.02550460e-02,
        9.83934291e-03,  8.69336538e-03,  8.08490440e-03,  3.11639439e-02,
        4.30974597e-03, -1.20466435e-02, -4.76395153e-03,  3.82159166e-02,
       -1.06109381e-02,  2.35044323e-02,  5.21154329e-03, -7.57935038e-03,
        2.73537021e-02,  1.20837456e-02, -1.33204851e-02, -6.79745397e-04,
        6.40210463e-03, -2.66424962e-03,  7.14190165e-03,  7.01942295e-03,
       -9.86900739e-03, -1.18704531e-02, -1.06385145e-02, -2.77243461e-02,
        1.06723858e-02,  3.13374698e-02,  3.00360322e-02,  1.19881341e-02,
       -1.96410939e-02, -1.31179458e-02, -5.26292599e-04,  3.36515456e-02,
       -3.29908691e-02, -1.93721000e-02,  4.03394476e-02,  1.20202927e-02,
        4.71816259e-03,  1.01182144e-02, -2.74216617e-03,  1.86606422e-02,
       -1.34509224e-02, -2.18994287e-03,  9.01815109e-03, -3.41159874e-03,
        6.86171930e-03,  8.26347247e-03,  2.50116121e-02, -5.94623433e-03,
       -8.18391796e-03,  3.33581753e-02, -1.59172583e-02,  4.14227368e-04,
        4.41088062e-03,  6.61554467e-03,  6.05364610e-03,  2.67721829e-03,
        1.09921461e-02,  2.36145165e-02,  2.23748293e-02,  4.55715181e-03,
        9.69022512e-03,  1.89868044e-02, -2.44541727e-02,  9.41569544e-03,
        2.30557867e-03,  1.11923069e-02, -1.75329689e-02, -6.80037716e-04,
       -3.28067727e-02, -2.00963579e-02,  2.18043458e-02,  1.06978770e-02,
        6.16468117e-03, -2.97804903e-02, -3.56070092e-03, -6.31896080e-03,
       -3.52325626e-02, -5.81494672e-03,  6.93830149e-03,  6.26159273e-03,
        5.18253772e-03, -1.24629512e-02, -1.90260366e-03,  2.04145699e-03,
       -2.33498011e-02,  3.31814168e-04, -2.61094924e-02,  7.21281767e-03,
       -5.04707359e-03, -1.03274500e-02, -8.01606569e-04,  1.79644153e-02,
       -1.04301088e-02,  1.17300646e-02, -4.88991663e-02,  1.65786967e-02,
       -2.87567042e-02,  8.19760934e-03, -2.11812146e-02,  1.92921422e-02,
       -2.99713574e-02,  2.02816492e-03, -1.94645803e-02, -9.34678502e-03,
        3.02247312e-02,  1.58802997e-02, -1.36661073e-02, -2.94896937e-03,
       -9.04548261e-03, -1.83445169e-03, -1.58459507e-02, -1.82428993e-02,
       -1.38736311e-02,  1.76967196e-02, -2.44956482e-02,  2.69774497e-02,
        1.50485896e-02, -1.48492807e-03, -6.51202630e-03, -2.65182741e-02,
        2.93937139e-02, -1.00523224e-02,  2.18192823e-02,  6.99384185e-03,
       -2.95811910e-02,  6.79713711e-02,  1.16699189e-02,  1.22708885e-03,
       -2.71936506e-02, -2.11159326e-02, -4.58506905e-02,  1.56699792e-02,
       -1.23340208e-02,  2.79215369e-02, -1.47670545e-02, -3.29509489e-02,
       -2.04668548e-02, -1.25635257e-02, -2.67017856e-02, -3.24487151e-03,
       -1.66829992e-02,  2.59981249e-02,  1.19851790e-02, -1.72757986e-03,
       -1.09061170e-02, -1.48513755e-02,  5.61502464e-02, -2.22133081e-02,
       -1.91016577e-03, -1.91563647e-02, -5.69264288e-04, -1.54247507e-02,
       -6.14093151e-03, -4.55939956e-03, -1.79247782e-02,  3.40546258e-02,
       -1.35368649e-02, -4.45971675e-02, -3.47416149e-03, -1.24832811e-02,
       -4.58601955e-03,  7.92926736e-03,  3.51896025e-02, -2.34600455e-02,
        1.14738513e-02,  3.71778058e-03,  2.05838401e-02,  2.01970730e-02,
       -2.46916451e-02, -7.40371505e-03,  1.60438567e-02, -2.61178985e-02,
       -1.68035626e-02,  3.08522489e-03,  1.63679831e-02,  9.54786688e-03,
        2.48396248e-02,  6.84485212e-03, -1.77038107e-02, -1.95708890e-02,
        3.22350450e-02, -2.14373656e-02, -9.00959596e-03,  1.70164201e-02,
       -9.03818407e-04, -5.70650911e-03, -5.40862745e-03,  4.41625342e-03,
       -1.73410692e-03, -1.67202968e-02,  2.68111657e-02, -1.55285420e-03,
       -3.52538005e-02, -9.54224262e-03, -2.21989080e-02, -1.53516186e-02,
       -1.53916618e-02, -8.34255293e-03, -8.37106444e-03,  1.25763388e-02,
       -5.81820612e-04, -3.08664404e-02,  1.82383284e-02,  9.52822994e-03,
        1.38542696e-03, -3.61060649e-02,  1.58797856e-02, -6.73140585e-03,
       -2.39733383e-02,  5.52178768e-04,  3.65155451e-02,  1.65190957e-02,
       -1.70675700e-03, -8.39882717e-03, -4.99014854e-02,  1.07871918e-02,
       -8.56295507e-03,  4.26224843e-02,  2.89712008e-02,  6.42181188e-02,
       -1.40731977e-02,  1.67936292e-02,  8.63572676e-03, -1.35598360e-02,
       -1.45602422e-02,  3.39573785e-03, -1.42239337e-03,  1.63370892e-02,
       -1.49350632e-02,  1.17878756e-02,  5.15916664e-03,  1.51237873e-02,
        1.96897797e-02,  1.09953620e-02,  1.09521132e-02,  1.88704748e-02,
       -2.26163659e-02,  3.10045928e-02, -1.86797362e-02, -1.12730237e-02,
        2.17472436e-03, -1.25989402e-02,  2.71631386e-02, -3.09618632e-03,
       -1.14216597e-03,  2.42768861e-02, -5.89291751e-02, -8.86309426e-03,
        9.47573129e-03,  2.00211126e-02, -2.61113979e-03, -2.13329513e-02,
        1.61036979e-02,  2.89291404e-02,  2.50482224e-02,  1.01744393e-02,
        2.69678701e-02,  1.04940115e-02, -5.38368523e-02, -2.24533994e-02,
       -1.30570550e-02, -2.94579222e-04,  5.48650092e-03,  1.92620941e-02,
        7.37379864e-03,  1.11079868e-03, -7.39027373e-03,  3.41650620e-02,
       -2.18272693e-02, -2.80315969e-02,  1.66068561e-02, -1.41297770e-03,
        8.85984488e-03,  2.76023825e-03,  1.11667821e-02,  3.20627764e-02,
       -3.10727721e-03,  2.99315844e-02,  2.76873130e-02,  4.90293885e-03,
        6.31406554e-04,  3.10113048e-03, -1.38501415e-03, -2.82472670e-02,
        1.80681387e-03, -1.20341778e-02, -1.63151249e-02, -2.29845126e-03,
       -6.45759935e-03, -3.64080891e-02, -2.52640527e-02,  2.24848418e-03,
        2.04798337e-02,  1.64102837e-02, -3.37460474e-03, -7.93703925e-03,
       -2.04052534e-02, -2.42101848e-02,  3.11769685e-03, -6.34020520e-03,
       -1.07226213e-02,  3.71997729e-02, -1.55405058e-02, -2.89877690e-02,
        1.05905803e-02, -1.01335719e-02, -2.17052307e-02, -2.75709387e-02,
        1.85712390e-02,  5.50835626e-03,  4.41633584e-03,  4.42290073e-03,
        1.25585869e-02, -2.15812400e-02,  6.70524463e-02,  9.32214875e-03,
        3.85866035e-03,  1.13837458e-02, -7.38241896e-03, -1.76333394e-02,
       -7.32539315e-03,  1.80781651e-02, -2.59012785e-02,  8.82688165e-03,
       -8.65951367e-03,  1.40838074e-02,  3.37244826e-03, -1.37180751e-02,
        3.25113721e-03,  3.67251132e-03,  1.00571308e-02,  3.98358423e-03,
       -4.66162758e-03,  1.20006772e-02, -2.84317024e-02, -3.37876589e-03,
        1.38500473e-02,  2.24386621e-02, -6.00585481e-03, -2.32232120e-02,
        1.78099629e-02, -1.78052939e-03, -2.28915401e-02,  1.60757895e-03,
       -1.88340172e-02,  2.64763962e-02, -2.45313696e-03, -1.23506300e-02,
        1.38659701e-02, -8.91410001e-03, -6.72313198e-03,  1.61059741e-02,
       -6.84587704e-03, -1.67984664e-02,  2.21317131e-02,  2.33844928e-02,
        3.88394459e-03, -1.84030272e-02,  8.45026597e-03, -8.92886147e-03,
        2.23952252e-02, -3.82141967e-04,  4.61544981e-03,  4.52476926e-03,
        1.93497967e-02,  1.74405389e-02, -1.70737952e-02, -7.35107530e-03,
       -9.74088348e-03,  2.22996473e-02,  1.90251172e-02,  1.18628349e-02,
       -2.04506814e-02,  1.96831711e-02, -2.21576765e-02, -1.15925539e-03,
        7.89003819e-03, -8.30210221e-04,  8.40354338e-03, -2.64056027e-03,
       -5.55231795e-03, -2.45808833e-03, -2.29434874e-02, -2.27313465e-03,
        1.98170003e-02,  2.86884103e-02,  1.10975010e-02,  1.80534401e-03,
       -5.13124373e-03, -1.20459553e-02, -4.99460101e-03,  3.43272239e-02,
       -8.81848740e-04,  1.76821332e-02,  2.06242017e-02,  2.05405299e-02,
       -4.73640673e-03, -2.43757479e-03, -3.01323701e-02, -1.79708917e-02,
        3.25339325e-02,  2.98406114e-04,  1.99625324e-02, -1.33094005e-02,
       -3.20596695e-02, -2.51999740e-02, -1.63889304e-02, -4.04155906e-03,
       -6.16800319e-03,  1.45447245e-02, -9.15659126e-03,  1.05688255e-02,
        3.83419171e-02, -1.80698931e-02,  3.13836499e-03, -7.02987239e-03],
      dtype=float32)>, <tf.Variable 'batch_normalization_40/moving_variance:0' shape=(512,) dtype=float32, numpy=
array([0.37634504, 0.37407592, 0.37450475, 0.37813702, 0.3731643 ,
       0.37490237, 0.37660527, 0.3743985 , 0.3739335 , 0.37595117,
       0.37574843, 0.37604704, 0.37401345, 0.37394628, 0.37420425,
       0.37389413, 0.37870392, 0.3740293 , 0.37476614, 0.37348998,
       0.37671158, 0.37469584, 0.37594652, 0.37367663, 0.37468782,
       0.37373984, 0.37734798, 0.37371108, 0.37317654, 0.37473878,
       0.37675884, 0.37692863, 0.37423795, 0.3735149 , 0.3739327 ,
       0.37586826, 0.3773249 , 0.3740187 , 0.37412614, 0.3758351 ,
       0.37701637, 0.3757623 , 0.37728512, 0.37500563, 0.37355652,
       0.37440673, 0.37358773, 0.37391856, 0.37397498, 0.3753317 ,
       0.37920213, 0.37439516, 0.37315166, 0.3756745 , 0.37428206,
       0.37400895, 0.37640062, 0.37420484, 0.37342915, 0.37342995,
       0.38075686, 0.37403217, 0.37434924, 0.37673515, 0.37344676,
       0.37478483, 0.37357858, 0.3790268 , 0.3750754 , 0.37489685,
       0.37521616, 0.37401602, 0.37376818, 0.3757479 , 0.3759818 ,
       0.37549737, 0.37448302, 0.37356293, 0.37562725, 0.3758677 ,
       0.37423685, 0.3747589 , 0.3739876 , 0.37712106, 0.3750812 ,
       0.37442657, 0.37473872, 0.37698162, 0.3756568 , 0.3739418 ,
       0.3746749 , 0.3739755 , 0.37484893, 0.3745842 , 0.37367463,
       0.3758279 , 0.3757177 , 0.37428454, 0.37341097, 0.37469688,
       0.37729484, 0.374025  , 0.37429267, 0.37463558, 0.37445486,
       0.37557244, 0.3739746 , 0.37597466, 0.37776646, 0.37354875,
       0.37411094, 0.3749462 , 0.3761095 , 0.37357527, 0.3732264 ,
       0.37430626, 0.37406498, 0.37352708, 0.3743299 , 0.37594503,
       0.37822887, 0.37373197, 0.37441212, 0.3731857 , 0.3744515 ,
       0.37533355, 0.37468478, 0.37477857, 0.37377778, 0.3747897 ,
       0.3760608 , 0.37501952, 0.37580532, 0.3755082 , 0.37767276,
       0.37596327, 0.3743693 , 0.37539738, 0.37478337, 0.37354892,
       0.3745285 , 0.3735356 , 0.37439805, 0.37353176, 0.37309384,
       0.37331432, 0.37617242, 0.3735904 , 0.3738208 , 0.37530324,
       0.37362367, 0.3731955 , 0.37670872, 0.37438414, 0.374788  ,
       0.3749288 , 0.37394494, 0.37311777, 0.3797699 , 0.37418514,
       0.37349242, 0.373757  , 0.377095  , 0.37330234, 0.3749548 ,
       0.37395135, 0.37503728, 0.3735551 , 0.37359655, 0.3754203 ,
       0.37435308, 0.3735974 , 0.3739214 , 0.37449628, 0.3741128 ,
       0.37466854, 0.37431538, 0.37322268, 0.37441742, 0.37701717,
       0.37424028, 0.3758187 , 0.37512696, 0.3740064 , 0.37358776,
       0.3754184 , 0.3754832 , 0.3751449 , 0.37717083, 0.37559015,
       0.3741044 , 0.3755641 , 0.37510234, 0.37432754, 0.37335986,
       0.3755127 , 0.37587166, 0.37501055, 0.37650502, 0.37683982,
       0.37348247, 0.37506062, 0.37298676, 0.37328988, 0.3745336 ,
       0.37595645, 0.37585378, 0.37407216, 0.37391558, 0.3798523 ,
       0.3741855 , 0.37459683, 0.37568188, 0.3770006 , 0.3733792 ,
       0.37480792, 0.37417004, 0.3736064 , 0.38000974, 0.37372002,
       0.37572408, 0.37620768, 0.37731722, 0.3739675 , 0.3745029 ,
       0.37360233, 0.3761876 , 0.3745187 , 0.37616876, 0.3741582 ,
       0.37447563, 0.37423307, 0.3749407 , 0.37477955, 0.37507632,
       0.37537658, 0.37843835, 0.3750094 , 0.37440532, 0.37624416,
       0.37720746, 0.37373504, 0.3791209 , 0.37445325, 0.37460297,
       0.37488994, 0.37549856, 0.3737986 , 0.37381783, 0.37360987,
       0.37385738, 0.37380704, 0.3762848 , 0.37515548, 0.37445688,
       0.37322786, 0.375287  , 0.374537  , 0.37523642, 0.373862  ,
       0.37360525, 0.3764375 , 0.3755854 , 0.37688112, 0.37359634,
       0.37386417, 0.37421134, 0.37423137, 0.37793493, 0.37467343,
       0.3815799 , 0.37294313, 0.3747808 , 0.37917927, 0.37583923,
       0.37738988, 0.37630475, 0.3741975 , 0.3838965 , 0.37493023,
       0.37314582, 0.37560222, 0.37341115, 0.3760199 , 0.37469962,
       0.37783387, 0.37481913, 0.37453568, 0.375297  , 0.37359405,
       0.37775254, 0.37492704, 0.37953275, 0.37526184, 0.3731739 ,
       0.3754464 , 0.37586525, 0.37438992, 0.3733806 , 0.3745638 ,
       0.37500143, 0.37403825, 0.3745358 , 0.37455755, 0.37363246,
       0.3735224 , 0.37928125, 0.37457517, 0.37644732, 0.374727  ,
       0.37505916, 0.37712932, 0.37381327, 0.37526134, 0.37405643,
       0.3769198 , 0.3777763 , 0.37794816, 0.37438127, 0.37449712,
       0.37453315, 0.37542728, 0.37634176, 0.37406436, 0.37869942,
       0.37418938, 0.37438348, 0.37501264, 0.3745843 , 0.3738852 ,
       0.3745582 , 0.37410548, 0.37379467, 0.3734857 , 0.37441978,
       0.3765901 , 0.37375388, 0.3749694 , 0.37454957, 0.37372625,
       0.3751138 , 0.37325627, 0.37713167, 0.37721968, 0.373959  ,
       0.3744293 , 0.3757809 , 0.3735659 , 0.37590852, 0.3751923 ,
       0.37420583, 0.37415195, 0.3776373 , 0.37419993, 0.37514806,
       0.37404245, 0.37532565, 0.37416995, 0.37651357, 0.3736476 ,
       0.37594566, 0.3734253 , 0.37425885, 0.37768555, 0.37423792,
       0.37848836, 0.375926  , 0.37940052, 0.37346756, 0.37473643,
       0.3738989 , 0.3746139 , 0.3746897 , 0.3781874 , 0.37397724,
       0.3735661 , 0.37447187, 0.37512127, 0.37479284, 0.37373886,
       0.37399587, 0.37368187, 0.37359372, 0.37769926, 0.3756911 ,
       0.37780023, 0.37818444, 0.3736351 , 0.37588453, 0.37519488,
       0.37411144, 0.37369144, 0.3737458 , 0.37749234, 0.3764013 ,
       0.37494132, 0.37435496, 0.38361707, 0.37398022, 0.37578094,
       0.37587413, 0.37544972, 0.37458712, 0.38611776, 0.37571153,
       0.37344754, 0.37403703, 0.3740526 , 0.3751644 , 0.373776  ,
       0.3765424 , 0.3757253 , 0.37532163, 0.37594992, 0.37515625,
       0.3757093 , 0.37441713, 0.37409836, 0.3753297 , 0.37526548,
       0.37476495, 0.3740599 , 0.37444475, 0.37328014, 0.37482515,
       0.37578672, 0.37343892, 0.37486556, 0.37582776, 0.37370342,
       0.3748591 , 0.375038  , 0.37498885, 0.37406036, 0.3757451 ,
       0.3747645 , 0.37376913, 0.37415174, 0.37450582, 0.37378037,
       0.37527147, 0.37455642, 0.37415674, 0.3740627 , 0.37402794,
       0.37445852, 0.3792307 , 0.37376112, 0.37476277, 0.37368053,
       0.37630272, 0.3742999 , 0.37398654, 0.3750657 , 0.373735  ,
       0.37400374, 0.37835187, 0.37446308, 0.3732299 , 0.37326905,
       0.3747693 , 0.3742761 , 0.3779705 , 0.3731209 , 0.3753056 ,
       0.37799868, 0.37910083, 0.37436402, 0.3769364 , 0.37470305,
       0.37645525, 0.37464738, 0.37426358, 0.373785  , 0.37578765,
       0.3745485 , 0.3743264 , 0.3777577 , 0.37504825, 0.3736937 ,
       0.37368652, 0.37503254, 0.37492183, 0.3760732 , 0.37643003,
       0.3795525 , 0.3748592 , 0.3747286 , 0.37458673, 0.37289277,
       0.3734227 , 0.37511772, 0.3740602 , 0.37574244, 0.37388456,
       0.3739002 , 0.37642282, 0.37347054, 0.37383667, 0.37508804,
       0.37337565, 0.37648958, 0.3741862 , 0.37705058, 0.374776  ,
       0.37462348, 0.37450707, 0.37555307, 0.37475616, 0.37500718,
       0.37689918, 0.3748642 ], dtype=float32)>]

In [ ]: