In [ ]:
import numpy as np
import tensorflow as tf
This is for visualizing a TF graph in an iPython notebook; the details are not interesting. (Borrowed from the DeepDream iPython notebook)
In [ ]:
# This is for graph visualization.
from IPython.display import clear_output, Image, display, HTML
def strip_consts(graph_def, max_const_size=32):
    """Return a copy of `graph_def` with large Const values stripped.

    Keeps the visualization payload small: any Const node whose serialized
    `tensor_content` exceeds `max_const_size` bytes has that content replaced
    by a short placeholder string.

    Args:
        graph_def: a `GraphDef` protocol buffer.
        max_const_size: constants larger than this many bytes are stripped.

    Returns:
        A new `GraphDef` with oversized constants replaced.
    """
    strip_def = tf.GraphDef()
    for n0 in graph_def.node:
        n = strip_def.node.add()
        n.MergeFrom(n0)
        if n.op == 'Const':
            tensor = n.attr['value'].tensor
            size = len(tensor.tensor_content)
            if size > max_const_size:
                # tensor_content is a bytes field in Python 3, so the
                # placeholder must be encoded before assignment.
                tensor.tensor_content = ("<stripped %d bytes>" % size).encode('utf-8')
    return strip_def
def show_graph(graph_def, max_const_size=32):
    """Visualize a TensorFlow graph inline in a Jupyter notebook.

    Accepts either a `tf.Graph` (converted via `as_graph_def()`) or a
    `GraphDef` proto. Large constants are stripped first so the embedded
    pbtxt stays small, then the graph is rendered with TensorBoard's
    polymer component inside an iframe.

    Args:
        graph_def: a `tf.Graph` or `GraphDef` protocol buffer.
        max_const_size: passed through to `strip_consts`.
    """
    if hasattr(graph_def, 'as_graph_def'):
        graph_def = graph_def.as_graph_def()
    strip_def = strip_consts(graph_def, max_const_size=max_const_size)
    # A random id keeps multiple visualizations in one notebook independent.
    code = """
<script>
function load() {{
document.getElementById("{id}").pbtxt = {data};
}}
</script>
<link rel="import" href="https://tensorboard.appspot.com/tf-graph-basic.build.html" onload=load()>
<div style="height:600px">
<tf-graph-basic id="{id}"></tf-graph-basic>
</div>
""".format(data=repr(str(strip_def)), id='graph'+str(np.random.rand()))
    # Quotes must be HTML-escaped to survive embedding in the srcdoc attribute.
    iframe = """
<iframe seamless style="width:1200px;height:620px;border:0" srcdoc="{}"></iframe>
""".format(code.replace('"', '&quot;'))
    display(HTML(iframe))
In [ ]:
# This code only creates the graph. No computation is done yet.
# Each call below adds a node (an Op) to the default graph; no values flow
# until the graph is executed inside a Session.
tf.reset_default_graph()
x = tf.constant(7.0, name="x")
y = tf.add(x, tf.constant(2.0, name="y"), name="add_op")
z = tf.subtract(x, tf.constant(2.0, name="z"), name="sub_op")
w = tf.multiply(y, tf.constant(3.0)) # If no name is given, TF will choose a unique name for us.
# Visualize the graph.
show_graph(tf.get_default_graph().as_graph_def())
There are many built-in Ops for low-level manipulation of numeric Tensors, e.g. tf.add, tf.subtract, tf.multiply, and tf.div.
Almost every object in TensorFlow is an op. Even things that don't look like they are! TensorFlow uses the op abstraction for a surprising range of things:
This can be confusing at first. For now, remember that because many things are Ops, some things have to be done in a somewhat non-obvious fashion.
A list of TF Ops can be found at https://www.tensorflow.org/api_docs/python/.
In [ ]:
# We can also use shorthand syntax
# Notice the default names TF chooses for us.
tf.reset_default_graph()
x = tf.constant(7.0)
y = x + 2
z = x - 2
w = y * 3
# Visualize the graph.
show_graph(tf.get_default_graph().as_graph_def())
In [ ]:
tf.reset_default_graph()
x = tf.constant(7.0, name="x")
y = tf.add(x, tf.constant(2.0, name="y"), name="add_op")
z = y * 3.0
# Create a session, which is the context for running a graph.
with tf.Session() as sess:
# When we call sess.run(y) the session is computing the value of Tensor y.
print(sess.run(y))
print(sess.run(z))
Nothing is computed until you call Session.run(). Constants alone cannot hold state between runs; for that, TensorFlow provides tf.Variable, which maintains a value across calls to Session.run(). This is how model training works: the training loop invokes Session.run() many times, and each call updates the model parameters.
In [ ]:
tf.reset_default_graph()
# tf.get_variable returns a tf.Variable object. Creating such objects directly
# is possible, but does not have a sharing mechanism. Hence, tf.get_variable is
# preferred.
x = tf.get_variable("x", shape=[], initializer=tf.zeros_initializer())
assign_x = tf.assign(x, 10, name="assign_x")
z = tf.add(x, 1, name="z")
# Variables in TensorFlow need to be initialized first. The following op
# conveniently takes care of that and initializes all variables.
init = tf.global_variables_initializer()
# Visualize the graph.
show_graph(tf.get_default_graph().as_graph_def())
Take a guess: what is the output of the code below?
In [ ]:
with tf.Session() as sess:
# Assign an initial value to the instance of the variable in this session,
# determined by the initializer provided above.
sess.run(init)
print (sess.run(z))
The output might surprise you: it's 1.0! The op assign_x
is not a dependency of x or z, and hence is never evaluated.
One way to solve this problem is:
In [ ]:
with tf.Session() as sess:
# When we create a new session we need to initialize all Variables again.
sess.run(init)
sess.run(assign_x)
print (sess.run(z))
So far you have seen Variables, but there is a more basic construct: the placeholder. A placeholder is simply a variable that we will assign data to at a later date. It allows us to create our operations and build our computation graph, without needing the data. In TensorFlow terminology, we then feed data into the graph through these placeholders.
In [ ]:
tf.reset_default_graph()
x = tf.placeholder("float", None)
y = x * 2
# Visualize the graph.
show_graph(tf.get_default_graph().as_graph_def())
At execution time, we feed data into the graph using a feed_dict: for each placeholder, it contains the value we want to assign to it. This can be useful for batching up data, as you will see later.
In [ ]:
with tf.Session() as session:
result = session.run(y, feed_dict={x: [1, 2, 3]})
print(result)
Queues are TensorFlow’s primitives for writing asynchronous code.
See the Threading and Queues guide for more details.
Note: You probably will never need to directly use these low level implementations of queues yourself. Do note, however, that several important operations (for example, reading and batching) are implemented as queues.
In [ ]:
tf.reset_default_graph()
q = tf.FIFOQueue(3, "float", name="q")
initial_enqueue = q.enqueue_many(([0., 0., 0.],), name="init")
x = q.dequeue()
y = x + 1
q_inc = q.enqueue([y])
with tf.Session() as session:
session.run(initial_enqueue)
outputs = []
for _ in range(20):
_, y_val = session.run([q_inc, y])
outputs.append(y_val)
print(outputs)
# Visualize the graph.
show_graph(tf.get_default_graph().as_graph_def())
And now some fun! The Collatz conjecture states that, starting from any positive integer $n$ and repeatedly applying the rule $n \mapsto n/2$ if $n$ is even, $n \mapsto 3n+1$ if $n$ is odd,
a finite number of times, we will end up at $1$ (cf. https://xkcd.com/710/).
Implement the checking routine in TensorFlow (i.e. implement some code that given a number, checks that it satisfies Collatz conjecture). Bonus: use a queue.
In [ ]:
tf.reset_default_graph()
number_to_check = 29
# Define graph.
a = tf.Variable(number_to_check, dtype=tf.int32)
pred = tf.equal(0, tf.mod(a, 2))
b = tf.cast(
tf.cond(
pred,
lambda: tf.div(a, 2),
lambda: tf.add(tf.multiply(a, 3), 1)),
tf.int32)
assign_op = tf.assign(a, b)
with tf.Session() as session:
# 1. Implement graph execution.
pass