In [1]:
import tensorflow as tf
In [2]:
W = tf.Variable([0.3])            # weight, initialized to 0.3
b = tf.Variable([-0.3])           # bias, initialized to -0.3
x = tf.placeholder(tf.float32)    # input placeholder, fed at run time
linear_model = W * x + b          # model: y_hat = W*x + b
In [3]:
init = tf.global_variables_initializer()   # op that initializes all variables (W and b)
In [4]:
sess = tf.Session()
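For reference, a session can also be used as a context manager so it is closed automatically; a minimal equivalent sketch (not part of this notebook's flow, reusing the ops defined above):

with tf.Session() as s:
    s.run(tf.global_variables_initializer())
    print(s.run(linear_model, {x: [1, 2, 3, 4]}))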
In [5]:
sess.run(init)
In [6]:
print(sess.run(linear_model, {x: [1, 2, 3, 4]}))   # roughly [0., 0.3, 0.6, 0.9] with the initial W and b
In [7]:
y = tf.placeholder(tf.float32)    # placeholder for the target values
In [8]:
squared_deltas = tf.square(linear_model - y)   # element-wise squared error
loss = tf.reduce_sum(squared_deltas)           # sum of squared errors over the batch
In [9]:
print(sess.run(loss, {x: [1, 2, 3, 4], y: [0, -1, -2, -3]}))   # about 23.66 with the initial parameters
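As a sanity check (not part of the original notebook), the same value can be reproduced in NumPy using the initial parameters W=0.3, b=-0.3:

import numpy as np
x_np = np.array([1, 2, 3, 4], dtype=np.float32)
y_np = np.array([0, -1, -2, -3], dtype=np.float32)
pred = 0.3 * x_np - 0.3                    # predictions of the untrained model
print(np.sum((pred - y_np) ** 2))          # ~23.66, matching the TensorFlow loss above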
In [10]:
fixW = tf.assign(W, [-1.])   # manually set W to the optimal value
fixb = tf.assign(b, [1.])    # manually set b to the optimal value
sess.run([fixW, fixb])
Out[10]:
In [11]:
print(sess.run(loss, {x: [1, 2, 3, 4], y: [0, -1, -2, -3]}))   # 0.0: W=-1, b=1 fits the data exactly
In [12]:
optimizer = tf.train.GradientDescentOptimizer(0.01)   # plain gradient descent, learning rate 0.01
train = optimizer.minimize(loss)                       # op that updates W and b along the negative gradient
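For reference, minimize() is shorthand for computing gradients and applying the update; a rough equivalent using the same TF 1.x optimizer API (a sketch, not part of the original notebook):

grads_and_vars = optimizer.compute_gradients(loss)   # list of (gradient, variable) pairs for W and b
train = optimizer.apply_gradients(grads_and_vars)    # the same parameter update minimize() builds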
In [13]:
sess.run(init)   # re-initialize W and b to their starting values before training
In [14]:
# 1000 gradient-descent steps on the four training points, reporting every 100 steps
for i in range(1000):
    sess.run(train, {x: [1, 2, 3, 4], y: [0, -1, -2, -3]})
    if i % 100 == 0:
        print(sess.run([W, b]))
        print("loss:", sess.run(loss, {x: [1, 2, 3, 4], y: [0, -1, -2, -3]}))
In [15]:
tf.summary.histogram("W", W)
tf.summary.histogram("b", b)
Out[15]:
In [16]:
writer = tf.summary.FileWriter("./my_graph_reg", sess.graph)   # writes the graph definition for TensorBoard
writer.close()
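As written, the FileWriter only records the graph; the histogram summaries defined above are never evaluated or written. A minimal sketch (same feeds and ops as above, not part of the original run) that would also log them during training:

merged = tf.summary.merge_all()
writer = tf.summary.FileWriter("./my_graph_reg", sess.graph)
for i in range(1000):
    summary, _ = sess.run([merged, train], {x: [1, 2, 3, 4], y: [0, -1, -2, -3]})
    writer.add_summary(summary, i)
writer.close()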
In [17]:
# sess.close()   # uncomment to release the session when finished
In [20]:
# launch TensorBoard on the log directory (blocks this cell; view at http://localhost:6006)
!tensorboard --logdir=./my_graph_reg
In [19]:
# !rm -rf ./my_graph_reg   # uncomment to delete the TensorBoard log directory
In [ ]: