First TensorFlow Neurons

Load dependencies


In [1]:
import numpy as np
np.random.seed(42)
import tensorflow as tf
tf.set_random_seed(42)
import matplotlib.pyplot as plt
%matplotlib inline

Set number of neurons


In [2]:
n_input = 784
n_dense = 128

Define placeholder Tensor for simulated MNIST digits


In [3]:
x = tf.placeholder(tf.float32, [None, n_input])  # None: any batch size

Create Variable Tensors for neuron biases b and weight matrix W


In [4]:
b = tf.Variable(tf.zeros([n_dense]))
# W = tf.Variable(tf.random_uniform([n_input, n_dense]))  # option 1: uniform init
# W = tf.Variable(tf.random_normal([n_input, n_dense]))   # option 2: normal init
W = tf.get_variable('W', [n_input, n_dense],
                    initializer=tf.contrib.layers.xavier_initializer())
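
Xavier (Glorot) initialization scales the initial weights to the layer's fan-in and fan-out so that early activations neither vanish nor blow up. A quick sanity check of that scale, as a sketch assuming the initializer's default uniform variant:


In [ ]:
# Optional check (sketch): Glorot/Xavier uniform draws W from U(-limit, limit)
# with limit = sqrt(6 / (fan_in + fan_out)).
import math
xavier_limit = math.sqrt(6. / (n_input + n_dense))
print(xavier_limit)  # roughly 0.081 for a 784 -> 128 layer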

Design the computational graph


In [5]:
z = tf.add(tf.matmul(x, W), b)
a = tf.nn.relu(z)  # try tf.sigmoid() first, then switch to tf.tanh() or tf.nn.relu()
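
For intuition only, the same dense-layer forward pass written directly in NumPy; the notebook itself keeps this computation inside the TensorFlow graph:


In [ ]:
# NumPy sketch of the graph above (illustration only):
# z = xW + b, then a = ReLU(z) = max(0, z)
def dense_relu_numpy(x_np, W_np, b_np):
    z_np = np.dot(x_np, W_np) + b_np
    return np.maximum(0., z_np)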

Create op for variable initialization


In [6]:
initializer_op = tf.global_variables_initializer()

Execute the graph in a session


In [7]:
with tf.Session() as session:
    session.run(initializer_op)
    
    layer_output = session.run(a, {x: np.random.random([1, n_input])})

In [8]:
layer_output


Out[8]:
array([[ 0.28484702,  0.66296649,  0.48465875,  0.58361048,  0.        ,
         0.02700119,  0.50025719,  0.35395339,  0.33801708,  0.65332055,
         0.        ,  0.10179006,  0.        ,  0.61159277,  0.        ,
         1.29969335,  0.        ,  0.0700454 ,  0.52458507,  0.60666955,
         0.45962104,  0.84671533,  0.10901574,  0.43929845,  0.        ,
         0.33268207,  0.10732757,  0.        ,  0.        ,  0.38768527,
         0.17347404,  0.        ,  0.        ,  0.        ,  0.04758957,
         0.45743263,  0.04072431,  0.        ,  0.27846065,  0.        ,
         0.64920127,  0.        ,  0.        ,  0.        ,  0.        ,
         0.20801233,  0.        ,  0.        ,  0.        ,  0.54332167,
         1.74375212,  0.19492842,  0.25929692,  0.        ,  0.        ,
         0.58499128,  0.36324847,  0.        ,  0.        ,  0.        ,
         0.        ,  0.8660351 ,  0.        ,  0.        ,  0.83429128,
         1.06981802,  0.64549267,  0.        ,  0.        ,  0.        ,
         0.        ,  0.56380248,  0.        ,  0.        ,  0.        ,
         0.24550769,  0.72879362,  0.28983656,  0.        ,  0.22116758,
         0.        ,  0.        ,  0.16525199,  0.92048723,  0.        ,
         0.        ,  0.        ,  0.71630067,  0.50095582,  0.62799764,
         0.18321247,  0.        ,  0.        ,  0.91466761,  0.        ,
         0.07857732,  1.25968564,  0.        ,  0.49644947,  0.00613178,
         0.        ,  0.70885855,  0.22226629,  0.35115728,  0.        ,
         0.22060682,  0.        ,  0.83496559,  0.82810038,  0.05427774,
         0.        ,  0.13187447,  0.        ,  0.02490139,  0.        ,
         0.08315995,  1.03106618,  0.        ,  0.30102578,  0.        ,
         0.        ,  0.        ,  0.05216391,  0.        ,  0.        ,
         0.        ,  0.        ,  0.        ]], dtype=float32)
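
The output has shape (1, 128): one simulated digit in, 128 activations out. Feeding a batch works the same way; for example, 32 simulated digits yield a (32, 128) array. A sketch that opens a fresh session, since the one above has already closed:


In [ ]:
# Same graph, fed with a batch of 32 simulated digits (sketch):
with tf.Session() as session:
    session.run(initializer_op)
    batch_output = session.run(a, {x: np.random.random([32, n_input])})
print(batch_output.shape)  # (32, 128)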

In [9]:
_ = plt.hist(np.transpose(layer_output))  # transpose to (128, 1) so hist plots the 128 activations as one dataset
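
Because of the ReLU, a sizeable share of the 128 units output exactly zero, which shows up as the tall bar at zero in the histogram. One way to check the proportion, as a sketch:


In [ ]:
# Fraction of units silenced (output exactly 0.) by the ReLU in this forward pass:
print((layer_output == 0.).mean())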


