In [1]:
# TensorFlow 1.x graph-mode API; matplotlib for plotting the raw data.
import tensorflow as tf
import matplotlib.pyplot as plt
# Fix the graph-level random seed so the tf.random_normal initializers below
# are reproducible across runs (TF 1.x API; renamed tf.random.set_seed in TF 2.x).
tf.set_random_seed(777)

In [2]:
# Toy training set lying exactly on the line y = x, so the ideal
# fit for the linear model below is W = 1, b = 0.
x_train = list(range(1, 4))   # [1, 2, 3]
y_train = list(range(1, 4))   # [1, 2, 3]

In [3]:
# Sanity-check plot of the raw data: the points fall exactly on y = x.
# Title and axis labels added so the figure stands alone when skimmed.
plt.plot(x_train, y_train, marker='o')
plt.title('Training data')
plt.xlabel('x_train')
plt.ylabel('y_train')
plt.grid()
plt.show()



In [4]:
# Trainable parameters of the linear model hypothesis = x * W + b.
# Each is a 1-element tensor initialized by sampling a normal distribution;
# NOTE(review): kept in this exact creation order — with a graph-level seed
# set above, reordering op creation would change the sampled initial values.
W = tf.Variable(tf.random_normal([1]), name = 'Weight')
b = tf.Variable(tf.random_normal([1]), name = 'bias')

In [5]:
hypothesis = x_train *W +b

In [6]:
cost = tf.reduce_mean(tf.square(hypothesis - y_train))

In [8]:
# Plain (full-batch) gradient descent; `train` is the op that applies
# one parameter-update step when run inside a session.
LEARNING_RATE = 0.01
optimizer = tf.train.GradientDescentOptimizer(learning_rate=LEARNING_RATE)
train = optimizer.minimize(cost)

In [9]:
# Run 2001 gradient-descent steps, logging cost, W and b every 100 steps.
with tf.Session() as sess:
    # Variables (W, b) must be initialized before any op reads them.
    sess.run(tf.global_variables_initializer())
    for step in range(2001):
        sess.run(train)
        if step % 100 == 0:
            # Fetch all three diagnostics in ONE Session.run call; the
            # original issued three separate runs, re-evaluating the
            # graph once per fetched value.
            cost_val, W_val, b_val = sess.run([cost, W, b])
            print(step, cost_val, W_val, b_val)


0 2.82329 [ 2.12867713] [-0.85235667]
100 0.113226 [ 1.39081788] [-0.88840771]
200 0.0699668 [ 1.30721486] [-0.69837117]
300 0.0432353 [ 1.24149871] [-0.54898357]
400 0.0267167 [ 1.18983996] [-0.4315508]
500 0.0165093 [ 1.14923155] [-0.33923826]
600 0.0102017 [ 1.11730957] [-0.26667204]
700 0.00630405 [ 1.0922159] [-0.20962858]
800 0.00389553 [ 1.07249022] [-0.16478711]
900 0.00240721 [ 1.05698395] [-0.12953788]
1000 0.0014875 [ 1.04479456] [-0.10182849]
1100 0.000919187 [ 1.03521252] [-0.08004645]
1200 0.000567998 [ 1.02768016] [-0.06292368]
1300 0.00035099 [ 1.02175927] [-0.04946378]
1400 0.000216891 [ 1.01710474] [-0.03888312]
1500 0.000134025 [ 1.01344585] [-0.03056567]
1600 8.28196e-05 [ 1.01056981] [-0.02402747]
1700 5.11766e-05 [ 1.00830877] [-0.01888768]
1800 3.16242e-05 [ 1.00653136] [-0.01484741]
1900 1.95421e-05 [ 1.00513422] [-0.01167146]
2000 1.20761e-05 [ 1.00403607] [-0.00917497]

In [ ]: