Automatic differentiation


In [1]:
import time
import numpy as np
import tensorflow as tf

In [2]:
def reset_graph(seed=42):
    """
    Utility function to reset current tensorflow computation graph
    and set the random seed 
    """
    # to make results reproducible across runs
    tf.reset_default_graph()
    tf.set_random_seed(seed)
    np.random.seed(seed)

In [3]:
def my_func(w,x):
    
    f_0 = tf.exp(w[0,0] + w[0,1]*x)
    f_1 = tf.exp(w[1,0] + w[1,1]*f_0)
    f_2 = tf.exp(w[2,0] + w[2,1]*f_1)
    
    return f_0, f_1, f_2

In [4]:
def my_func(w,x):
    
    with tf.name_scope("f_0_level") as scope_0:
        f_0 = tf.exp(w[0,0] + w[0,1]*x)
        
    with tf.name_scope("f_1_level") as scope_1:
        f_1 = tf.exp(w[1,0] + w[1,1]*f_0)
        
    with tf.name_scope("f_2_level") as scope_2:
        f_2 = tf.exp(w[2,0] + w[2,1]*f_1)
        
    return f_0, f_1, f_2

In [5]:
w_0 = np.vstack( (np.zeros(3), np.ones(3)) ).T

In [6]:
w_0


Out[6]:
array([[ 0.,  1.],
       [ 0.,  1.],
       [ 0.,  1.]])

In [7]:
w = tf.Variable(w_0, name="w", dtype=tf.float32)
x = tf.Variable(1.0, name="x", dtype=tf.float32, trainable=False)

In [8]:
f_0, f_1, f_2 = my_func(w, x)

In [9]:
grads = tf.gradients(f_2, w)

In [10]:
grads


Out[10]:
[<tf.Tensor 'gradients/AddN:0' shape=(3, 2) dtype=float32>]

In [11]:
init = tf.global_variables_initializer()

In [ ]:


In [12]:
t0 = time.time()

with tf.Session() as sess:
    sess.run(init)
    
    gradients, function_vals = sess.run([grads, [f_0, f_1, f_2]])
    
t1 = time.time()
dt = t1-t0
print("Function values: {}".format(function_vals))
print("Gradients: {}".format(gradients))


Function values: [2.7182817, 15.154261, 3814273.0]
Gradients: [array([[  1.57123440e+08,   1.57123440e+08],
       [  5.78024880e+07,   1.57123440e+08],
       [  3.81427300e+06,   5.78024880e+07]], dtype=float32)]

Visualizing the graph


In [13]:
from IPython.display import clear_output, Image, display, HTML

def strip_consts(graph_def, max_const_size=32):
    """Strip large constant values from graph_def."""
    strip_def = tf.GraphDef()
    for n0 in graph_def.node:
        n = strip_def.node.add() 
        n.MergeFrom(n0)
        if n.op == 'Const':
            tensor = n.attr['value'].tensor
            size = len(tensor.tensor_content)
            if size > max_const_size:
                tensor.tensor_content = "<stripped %d bytes>"%size
    return strip_def

def show_graph(graph_def, max_const_size=32):
    """Visualize TensorFlow graph."""
    if hasattr(graph_def, 'as_graph_def'):
        graph_def = graph_def.as_graph_def()
    strip_def = strip_consts(graph_def, max_const_size=max_const_size)
    code = """
        <script>
          function load() {{
            document.getElementById("{id}").pbtxt = {data};
          }}
        </script>
        <link rel="import" href="https://tensorboard.appspot.com/tf-graph-basic.build.html" onload=load()>
        <div style="height:600px">
          <tf-graph-basic id="{id}"></tf-graph-basic>
        </div>
    """.format(data=repr(str(strip_def)), id='graph'+str(np.random.rand()))

    iframe = """
        <iframe seamless style="width:1200px;height:620px;border:0" srcdoc="{}"></iframe>
    """.format(code.replace('"', '&quot;'))
    display(HTML(iframe))

In [14]:
show_graph(tf.get_default_graph().as_graph_def())


/home/edy/lib/build/anaconda3/lib/python3.6/site-packages/ipykernel_launcher.py:10: DeprecationWarning: PyUnicode_AsEncodedObject() is deprecated; use PyUnicode_AsEncodedString() to encode from str to bytes or PyCodec_Encode() for generic encoding
  # Remove the CWD from sys.path while we load stuff.

In [ ]: