In [1]:
import tensorflow as tf
import tfgraphviz as tfg
In [2]:
calc_g = tf.Graph()
In [3]:
with calc_g.as_default():
    a = tf.constant(1, name="a")
    b = tf.constant(2, name="b")
    c = tf.add(a, b, name="add")
Visualize a graph with tfg.board(...)
In [4]:
tfg.board(calc_g)
Out[4]: (inline graphviz rendering of calc_g)
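In a notebook the returned drawing displays inline; outside Jupyter it can be written to disk instead. A minimal sketch, assuming (per the tfgraphviz README) that tfg.board returns a graphviz.Digraph and that the Graphviz dot binary is installed:

g = tfg.board(calc_g)   # graphviz.Digraph describing calc_g
g.format = "png"        # default output format is PDF
g.render("calc_graph")  # writes calc_graph (DOT source) and calc_graph.png
# g.view()              # alternatively, render and open in the system viewer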
In [5]:
reg_g = tf.Graph()
In [6]:
import numpy as np

with reg_g.as_default():
    # Synthetic data for a simple linear regression: y = 0.1 * x + 0.3
    x_data = np.random.rand(100).astype(np.float32)
    y_data = x_data * 0.1 + 0.3
    # Model parameters and prediction
    W = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
    b = tf.Variable(tf.zeros([1]))
    y = W * x_data + b
    # Mean squared error loss and a gradient-descent training op
    loss = tf.reduce_mean(tf.square(y - y_data))
    optimizer = tf.train.GradientDescentOptimizer(0.5)
    train = optimizer.minimize(loss)
In [7]:
tfg.board(reg_g)
Out[7]: (inline graphviz rendering of reg_g)
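Note that tfg.board only inspects the graph definition; nothing has to be executed to draw it. For completeness, a minimal sketch of actually training the graph above with standard TF1 session APIs (not needed for the visualization):

with reg_g.as_default():
    init = tf.global_variables_initializer()

with tf.Session(graph=reg_g) as sess:
    sess.run(init)
    for step in range(200):
        sess.run(train)
    print(sess.run([W, b]))  # should converge toward W ~ 0.1, b ~ 0.3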
In [8]:
tfg.board(reg_g, depth=2)
Out[8]: (inline graphviz rendering of reg_g at depth=2)
In [9]:
mnist_g = tf.Graph()
In [10]:
with mnist_g.as_default():
    # Softmax regression for MNIST: 784-pixel inputs, 10 output classes
    x = tf.placeholder(tf.float32, [None, 784])
    W = tf.Variable(tf.zeros([784, 10]))
    b = tf.Variable(tf.zeros([10]))
    y = tf.nn.softmax(tf.matmul(x, W) + b)
    # Cross-entropy loss against the one-hot labels and a training op
    y_ = tf.placeholder(tf.float32, [None, 10])
    cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), axis=[1]))
    train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
In [11]:
tfg.board(mnist_g, depth=1)
Out[11]: (inline graphviz rendering of mnist_g at depth=1)
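tfg.board also accepts the default graph, so building an explicit tf.Graph is optional. A sketch along the lines of the tfgraphviz README:

a = tf.constant(1, name="a")
b = tf.constant(2, name="b")
c = tf.add(a, b, name="add")
g = tfg.board(tf.get_default_graph())
g.view()  # renders the drawing and opens it outside a notebook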