This notebook modifies our linear regression example to work with TensorBoard. To make the changes easy to find, I've removed all comments from the previous notebook and added new ones that highlight the TensorBoard functionality.
In [ ]:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os
import numpy as np
import pylab
import tensorflow as tf
%matplotlib inline
In [ ]:
# Create a directory to write summary data
# ./graphs will be created in the same directory
# from which you run this notebook.
# Tip: if you later run into trouble displaying results in
# TensorBoard, delete the contents of this directory
# and run the notebook again.
LOGDIR = './graphs'
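If you prefer to start from a clean slate, you can clear the log directory from within the notebook rather than deleting it by hand. This cell is an optional convenience, not part of the original flow, so feel free to skip it.
In [ ]:
# Optional: remove stale summary data left over from previous runs
import shutil
if os.path.exists(LOGDIR):
    shutil.rmtree(LOGDIR)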
In [ ]:
tf.reset_default_graph()
sess = tf.Session()
In [ ]:
x_train = np.random.rand(100).astype(np.float32)
noise = np.random.normal(scale=0.01, size=len(x_train))
y_train = x_train * 0.1 + 0.3 + noise
In [ ]:
x_eval = np.random.rand(len(x_train)).astype(np.float32)
noise = np.random.normal(scale=0.01, size=len(x_train))
y_eval = x_eval * 0.1 + 0.3 + noise
In [ ]:
with tf.name_scope('placeholders'):
    x = tf.placeholder(shape=[None], dtype=tf.float32, name='x')
    y_label = tf.placeholder(shape=[None], dtype=tf.float32, name='labels')

with tf.name_scope('model'):
    W = tf.Variable(tf.random_normal([1]), name='weights')
    b = tf.Variable(tf.random_normal([1]), name='bias')
    y = W * x + b
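As a quick sanity check (not in the original notebook), we can print the names TensorFlow assigned to these nodes. The 'placeholders/' and 'model/' prefixes come from the name scopes above, and they are what group the ops into collapsible boxes in TensorBoard's graph view.
In [ ]:
# Each node's name is prefixed by its enclosing name_scope,
# e.g. 'model/weights'. These prefixes drive the grouping
# you'll see later in the 'graphs' tab.
print(x.name, y_label.name)
print(W.name, b.name, y.name)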
In [ ]:
with tf.name_scope('training'):
    with tf.name_scope('loss'):
        loss = tf.reduce_mean(tf.square(y - y_label))
    with tf.name_scope('optimizer'):
        optimizer = tf.train.GradientDescentOptimizer(0.5)
        train = optimizer.minimize(loss)
In [ ]:
init = tf.global_variables_initializer()
sess.run(init)
In [ ]:
# Write the graph so we can look at it later in TensorBoard
writer = tf.summary.FileWriter(LOGDIR)
writer.add_graph(sess.graph)
In [ ]:
# Keep track of how the loss changes, so we can visualize it later in TensorBoard
tf.summary.scalar('loss', loss)
# Let's also keep track of how W and b change during training.
# Later, you can see these values under 'histograms' and 'distributions'
tf.summary.histogram('W', W)
tf.summary.histogram('b', b)
# Create an op that will write all of our summary data at once
summary_op = tf.summary.merge_all()
In [ ]:
for step in range(201):
    summary_results, _ = sess.run([summary_op, train], feed_dict={x: x_train, y_label: y_train})
    writer.add_summary(summary_results, step)
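The held-out data we generated earlier (x_eval, y_eval) isn't used by the training loop above. As a quick check, here's a small sketch that compares the final loss on the training and evaluation sets; since both are drawn from the same distribution, the two numbers should be close.
In [ ]:
# Evaluate the trained model on the held-out data.
# No training happens here; we only run the loss op.
train_loss = sess.run(loss, feed_dict={x: x_train, y_label: y_train})
eval_loss = sess.run(loss, feed_dict={x: x_eval, y_label: y_eval})
print('train loss: %f, eval loss: %f' % (train_loss, eval_loss))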
In [ ]:
# To start TensorBoard, run this command from a terminal
# Note: you should run this from the same directory that contains this notebook,
# or provide an absolute path to the 'graphs' directory.
# $ tensorboard --logdir=graphs
# To open TensorBoard, point your browser to http://localhost:6006
# Then, click on the tabs for 'scalars', 'distributions', 'histograms', and 'graphs'
# to learn more.
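If your environment ships TensorBoard's notebook extension (available in TensorBoard 1.14 and later; this is an assumption about your setup, not something the notebook requires), you can also launch it inline. The magics are left commented out so this cell is a no-op on older installs.
In [ ]:
# Uncomment to run TensorBoard inside the notebook
# (requires TensorBoard >= 1.14):
# %load_ext tensorboard
# %tensorboard --logdir graphs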
In [ ]:
# While we're at it, let's demonstrate saving and restoring a model
In [ ]:
def predict(x_in):
    return sess.run(y, feed_dict={x: [x_in]})
In [ ]:
# Save the model
saver = tf.train.Saver()
saver.save(sess, os.path.join(LOGDIR, 'my_checkpoint.ckpt'))
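To confirm what was saved, we can peek inside the checkpoint. This sketch uses tf.train.NewCheckpointReader from the TF 1.x API (to the best of my knowledge; check your version's docs) to list the stored variables and their shapes.
In [ ]:
# List the variables stored in the checkpoint we just wrote
reader = tf.train.NewCheckpointReader(os.path.join(LOGDIR, 'my_checkpoint.ckpt'))
for name, shape in reader.get_variable_to_shape_map().items():
    print(name, shape)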
In [ ]:
# Current prediction
predict(3)
In [ ]:
# Reset the model by running the init op again
sess.run(init)
In [ ]:
# Prediction after variables reinitialized
predict(3)
In [ ]:
saver.restore(sess, os.path.join(LOGDIR, 'my_checkpoint.ckpt'))
In [ ]:
# Predictions after variables restored
predict(3)
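Since pylab is already imported at the top, we can close with a quick plot of the restored model's fit against the training data. This cell wasn't in the original notebook; it's just a visual check that the restored W and b are sensible.
In [ ]:
# Plot the training data and the line learned by the restored model
pylab.plot(x_train, y_train, '.', label='training data')
pylab.plot(x_train, sess.run(y, feed_dict={x: x_train}), label='model fit')
pylab.legend()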