In [1]:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
In [2]:
%matplotlib inline
In [3]:
a=tf.random_normal([2,20])
In [4]:
sess=tf.Session()
In [5]:
out=sess.run(a)
In [6]:
print(out)
In [7]:
x,y=out
In [8]:
plt.scatter(x,y)
Out[8]:
# Build a small dataflow graph from two scalar constants.
# Each op gets an explicit name so it is legible in TensorBoard.
a = tf.constant(5, name='input_a')
b = tf.constant(3, name='input_b')

# Two independent intermediate nodes feeding one output node.
d = tf.add(a, b, name='add_d')
c = tf.multiply(a, b, name='mul_c')
e = tf.add(c, d, name='add_e')

sess = tf.Session()

# Evaluate the output node: (5*3) + (5+3) = 23.
sess.run(e)

# Serialize the graph definition so TensorBoard can render it.
writer = tf.summary.FileWriter('my_graph', sess.graph)
The command above writes the graph to the `my_graph` directory. Then run `$ tensorboard --logdir="my_graph"` and open http://localhost:6006 in a browser.
In [17]:
# Flush and release the FileWriter's file handle now that the graph is on disk.
writer.close()
In [18]:
# Close the session to free the resources it holds.
sess.close()
# Same idea as the previous graph, but with a single vector input:
# the two scalar inputs collapse into one 1-D constant.
a = tf.constant([5, 3], name='input_a')

# Reduction ops fold the vector down to scalars.
c = tf.reduce_sum(a, name='sum_c')    # 5 + 3 = 8
b = tf.reduce_prod(a, name='prod_b')  # 5 * 3 = 15

d = tf.add(b, c, name='add_d')

sess = tf.Session()

# Evaluate the output node: 15 + 8 = 23.
sess.run(d)
Tensors, as mentioned before, are simply the n-dimensional abstraction of matrices. A 1-D tensor is equivalent to a vector, a 2-D tensor is a matrix, and above that you can just say "N-D tensor".
Compared to the last example, this one has only one input — a vector — which simplifies the graph. It also leaves only a single dependency to check.
# Inspect the placeholder API (produces long help output; useful while learning).
help(tf.placeholder)

# A placeholder is a graph input whose value is supplied at run time via
# feed_dict. Note: shape=(2,) — the original shape=(2) is just the int 2,
# not a tuple; (2,) states the length-2 vector shape unambiguously.
a = tf.placeholder(tf.int32, shape=(2,), name='input_a')

b = tf.reduce_prod(a, name='prod_b')

# FIX: tf.mul was removed in TensorFlow 1.0 (renamed to tf.multiply),
# matching the tf.multiply usage earlier in this notebook.
c = tf.multiply(b, 3, name='mul_c')

sess = tf.Session()

# Bind the placeholder tensor to a concrete numpy value for this run.
input_dict = {a: np.array([3, 5], dtype=np.int32)}

# (3 * 5) * 3 = 45
sess.run(c, feed_dict=input_dict)
In this way we create a placeholder that holds values supplied at run time.