In [11]:
# Reload tfutils on every run so edits to the module are picked up live.
# importlib.reload replaces the deprecated `imp` module (deprecated since
# Python 3.4, removed in 3.12).
import importlib
import tfutils
importlib.reload(tfutils)
from tfutils import *  # presumably provides show_graph and the `tf` alias used below — confirm

In [2]:
# Write via a context manager so the handle is flushed and closed
# deterministically; the original leaked the open file object.
with open('/tmp/caca', 'w') as f:
    f.write('hola')


Out[2]:
4

In [8]:
tf.reset_default_graph()
a = tf.constant(1.2)
b = tf.constant([3.14, 7, -1])
# `with tf.Session() as sess` installs the session as default AND closes it
# (releasing its resources) on exit; the original `tf.Session().as_default()`
# never closed the session — a resource leak.
with tf.Session() as sess:
    print(sess.run(a + b * a))


[ 4.96800041  9.60000038  0.        ]

In [9]:
# Render the current default graph inline (show_graph presumably comes from
# the tfutils star import — confirm).
show_graph(tf.get_default_graph())



In [5]:
# Keep a handle to the default graph so its operations can be inspected
# in the next cell.
g=tf.get_default_graph()

In [6]:
# List the name of every operation registered in graph `g`.
[operation.name for operation in g.get_operations()]


Out[6]:
['Const', 'Const_1', 'mul', 'add']

In [21]:
tf.reset_default_graph()
# Fixed batch size: every placeholder below is sized for exactly 100 examples.
batch_size = 100
# 784 inputs per example (28*28 flattened MNIST pixels — see the MNIST .pb
# cells below), 10 output classes.
x = tf.placeholder(tf.float32, [batch_size, 784], name='image')
W = tf.Variable(tf.zeros([784, 10]), name='weights')
b = tf.Variable(tf.zeros([10]), name='bias')
# Linear logits: one unnormalized score per class.
y = tf.matmul(x, W) + b

# Integer class labels, one per example — hence the *sparse* cross-entropy op.
y_ = tf.placeholder(tf.int32, [batch_size], name='expected')

cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y_, logits=y)
mean = tf.reduce_mean(cross_entropy)
# 0.5 is the learning rate; minimize() adds the gradient ops seen in the
# op listing below.
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(mean)
show_graph(tf.get_default_graph())



In [ ]:


In [89]:
# Pair each default-graph operation's name with its op type.
[(operation.name, operation.type) for operation in tf.get_default_graph().get_operations()]


Out[89]:
[('image', 'Placeholder'),
 ('zeros', 'Const'),
 ('weights', 'Variable'),
 ('weights/Assign', 'Assign'),
 ('weights/read', 'Identity'),
 ('zeros_1', 'Const'),
 ('bias', 'Variable'),
 ('bias/Assign', 'Assign'),
 ('bias/read', 'Identity'),
 ('MatMul', 'MatMul'),
 ('add', 'Add'),
 ('expected', 'Placeholder'),
 ('SparseSoftmaxCrossEntropyWithLogits/Shape', 'Const'),
 ('SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits',
  'SparseSoftmaxCrossEntropyWithLogits'),
 ('Const', 'Const'),
 ('Mean', 'Mean'),
 ('gradients/Shape', 'Const'),
 ('gradients/Const', 'Const'),
 ('gradients/Fill', 'Fill'),
 ('gradients/Mean_grad/Reshape/shape', 'Const'),
 ('gradients/Mean_grad/Reshape', 'Reshape'),
 ('gradients/Mean_grad/Tile/multiples', 'Const'),
 ('gradients/Mean_grad/Tile', 'Tile'),
 ('gradients/Mean_grad/Shape', 'Const'),
 ('gradients/Mean_grad/Shape_1', 'Const'),
 ('gradients/Mean_grad/Const', 'Const'),
 ('gradients/Mean_grad/Prod', 'Prod'),
 ('gradients/Mean_grad/Const_1', 'Const'),
 ('gradients/Mean_grad/Prod_1', 'Prod'),
 ('gradients/Mean_grad/Maximum/y', 'Const'),
 ('gradients/Mean_grad/Maximum', 'Maximum'),
 ('gradients/Mean_grad/floordiv', 'Div'),
 ('gradients/Mean_grad/Cast', 'Cast'),
 ('gradients/Mean_grad/truediv', 'Div'),
 ('gradients/zeros_like', 'ZerosLike'),
 ('gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims/dim',
  'Const'),
 ('gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/ExpandDims',
  'ExpandDims'),
 ('gradients/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits_grad/mul',
  'Mul'),
 ('gradients/add_grad/Shape', 'Const'),
 ('gradients/add_grad/Shape_1', 'Const'),
 ('gradients/add_grad/BroadcastGradientArgs', 'BroadcastGradientArgs'),
 ('gradients/add_grad/Sum', 'Sum'),
 ('gradients/add_grad/Reshape', 'Reshape'),
 ('gradients/add_grad/Sum_1', 'Sum'),
 ('gradients/add_grad/Reshape_1', 'Reshape'),
 ('gradients/add_grad/tuple/group_deps', 'NoOp'),
 ('gradients/add_grad/tuple/control_dependency', 'Identity'),
 ('gradients/add_grad/tuple/control_dependency_1', 'Identity'),
 ('gradients/MatMul_grad/MatMul', 'MatMul'),
 ('gradients/MatMul_grad/MatMul_1', 'MatMul'),
 ('gradients/MatMul_grad/tuple/group_deps', 'NoOp'),
 ('gradients/MatMul_grad/tuple/control_dependency', 'Identity'),
 ('gradients/MatMul_grad/tuple/control_dependency_1', 'Identity'),
 ('GradientDescent/learning_rate', 'Const'),
 ('GradientDescent/update_weights/ApplyGradientDescent',
  'ApplyGradientDescent'),
 ('GradientDescent/update_bias/ApplyGradientDescent', 'ApplyGradientDescent'),
 ('GradientDescent', 'NoOp')]

In [22]:
tf.reset_default_graph()
# None makes the leading (batch) dimension variable-sized.
batch_size = None
x = tf.placeholder(tf.float32, [batch_size, 784], name='image')
W = tf.Variable(tf.zeros([784, 10]), name='weights')
b = tf.Variable(tf.zeros([10]), name='bias')
# Linear logits: one unnormalized score per class.
y = tf.matmul(x, W) + b

# Dense (one-hot) labels this time. tf.nn.softmax_cross_entropy_with_logits
# requires labels with the same float dtype as the logits, so the placeholder
# must be tf.float32 — the original tf.int32 fails with a dtype error when
# the graph is run.
y_ = tf.placeholder(tf.float32, [batch_size, 10], name='expected')

cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y)
mean = tf.reduce_mean(cross_entropy)
# 0.5 is the learning rate.
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(mean)
show_graph(tf.get_default_graph())



In [16]:
filename = 'MNIST3Layers.pb'
# use MNIST3LayersNNExamplePlan new graph writeDefToFileNamed: 'MNIST3Layers.pb'
graph = tf.GraphDef()
# Read the serialized GraphDef with a context manager; the original leaked
# the open file handle.
with open(filename, 'rb') as pb_file:
    graph.ParseFromString(pb_file.read())
show_graph(graph)



In [21]:
filename = 'graph.pb'
graph = tf.GraphDef()
graph.ParseFromString(open(filename,'rb').read())
show_graph(graph)



In [ ]: