In [1]:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
print(tf.__version__)


1.2.0

In [2]:
# Training data lies exactly on y = x, so the optimum is W = 1, b = 0
x_data = [1, 2, 3, 4, 5]
y_data = [1, 2, 3, 4, 5]

In [3]:
# Placeholders for inputs and targets, fed at run time via feed_dict
x_train = tf.placeholder(dtype=tf.float32, shape=[None])
y_train = tf.placeholder(dtype=tf.float32, shape=[None])

In [4]:
# Trainable parameters, randomly initialized
W = tf.Variable(tf.random_normal(shape=[1], stddev=0.01), name='weight')
b = tf.Variable(tf.random_normal(shape=[1]), name='bias')

In [5]:
# Linear model: H(x) = W * x + b
hypothesis = x_train * W + b

In [6]:
cost = tf.reduce_mean(tf.square(hypothesis - y_train))
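
For reference, `cost` above is the mean squared error, cost = (1/m) * Σ (W * x_i + b - y_i)^2. A quick sanity check of the same quantity in NumPy (a sketch, not part of the original run; w0 and b0 are hypothetical values):

In [ ]:
# The same MSE computed in NumPy for hypothetical parameters w0, b0
w0, b0 = 1.0, 0.0  # the known optimum for this data
print(np.mean((w0 * np.asarray(x_data, np.float32) + b0
               - np.asarray(y_data, np.float32)) ** 2))  # 0.0 at the optimum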

In [7]:
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)

In [8]:
train = optimizer.minimize(cost)
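
Roughly, `minimize(cost)` computes the gradients of `cost` with respect to the trainable variables and applies the update W ← W − α · ∂cost/∂W (and likewise for b), with α the learning rate set above. A hand-written equivalent (a sketch, not part of the original notebook; `manual_train` is a hypothetical name):

In [ ]:
# Manual equivalent of optimizer.minimize(cost): compute the gradients,
# then apply one gradient-descent update to each variable.
grad_W, grad_b = tf.gradients(cost, [W, b])
manual_train = [tf.assign(W, W - 0.01 * grad_W),
                tf.assign(b, b - 0.01 * grad_b)]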

In [9]:
with tf.Session() as sess:
    # Initialize W and b before the first update
    sess.run(tf.global_variables_initializer())
    print("{} {:>20} {:>20} {:>20}".format("step", "cost", "W", "b"))
    for step in range(2001):
        # Run one gradient-descent step and fetch cost, W, b for logging
        cost_val, w_val, b_val, _ = sess.run([cost, W, b, train],
                                             feed_dict={x_train: x_data, y_train: y_data})
        if step % 20 == 0:
            print("{:>5} {:>20} {:>20} {:>20}".format(str(step), str(cost_val), str(w_val), str(b_val)))


step                 cost                    W                    b
    0              11.0983          [ 0.217838]        [ 0.05545817]
   20             0.010447         [ 0.9313693]        [ 0.23527451]
   40           0.00892597        [ 0.93885547]        [ 0.22069477]
   60           0.00779512        [ 0.94287336]        [ 0.20624502]
   80           0.00680751        [ 0.94661468]        [ 0.19273789]
  100           0.00594505        [ 0.95011091]        [ 0.18011533]
  120           0.00519187         [ 0.9533782]        [ 0.16831946]
  140           0.00453409        [ 0.95643157]        [ 0.15729608]
  160           0.00395966        [ 0.95928484]        [ 0.14699459]
  180             0.003458        [ 0.96195132]        [ 0.13736781]
  200            0.0030199        [ 0.96444315]        [ 0.12837151]
  220            0.0026373        [ 0.96677178]        [ 0.11996433]
  240           0.00230317        [ 0.96894789]        [ 0.11210778]
  260           0.00201137         [ 0.9709816]        [ 0.10476574]
  280           0.00175655        [ 0.97288203]        [ 0.09790455]
  300           0.00153401        [ 0.97465795]        [ 0.09149271]
  320           0.00133967        [ 0.97631764]        [ 0.08550078]
  340           0.00116993        [ 0.97786862]        [ 0.07990126]
  360           0.00102172        [ 0.97931796]        [ 0.07466848]
  380          0.000892274        [ 0.98067254]         [ 0.0697784]
  400          0.000779229        [ 0.98193824]        [ 0.06520854]
  420          0.000680501        [ 0.98312116]        [ 0.06093797]
  440          0.000594293        [ 0.98422658]         [ 0.0569471]
  460          0.000518999        [ 0.98525959]        [ 0.05321755]
  480          0.000453242        [ 0.98622495]         [ 0.0497323]
  500          0.000395824        [ 0.98712713]        [ 0.04647529]
  520          0.000345671        [ 0.98797017]        [ 0.04343156]
  540           0.00030188        [ 0.98875797]        [ 0.04058721]
  560          0.000263634         [ 0.9894942]        [ 0.03792914]
  580          0.000230233        [ 0.99018234]        [ 0.03544514]
  600          0.000201066        [ 0.99082524]        [ 0.03312379]
  620          0.000175591        [ 0.99142611]         [ 0.0309545]
  640          0.000153345        [ 0.99198765]        [ 0.02892726]
  660          0.000133917        [ 0.99251235]         [ 0.0270328]
  680          0.000116951        [ 0.99300271]         [ 0.0252624]
  700          0.000102134        [ 0.99346095]        [ 0.02360796]
  720          8.91952e-05        [ 0.99388921]        [ 0.02206187]
  740          7.78944e-05        [ 0.99428934]        [ 0.02061704]
  760          6.80257e-05        [ 0.99466342]        [ 0.01926685]
  780          5.94083e-05        [ 0.99501294]        [ 0.01800502]
  800          5.18811e-05        [ 0.99533951]        [ 0.01682584]
  820          4.53084e-05        [ 0.99564475]         [ 0.0157239]
  840          3.95683e-05        [ 0.99593002]        [ 0.01469413]
  860          3.45557e-05        [ 0.99619651]        [ 0.01373178]
  880          3.01767e-05         [ 0.9964456]        [ 0.01283248]
  900          2.63546e-05        [ 0.99667841]        [ 0.01199205]
  920          2.30145e-05        [ 0.99689591]        [ 0.01120666]
  940          2.00995e-05        [ 0.99709922]        [ 0.01047273]
  960          1.75524e-05        [ 0.99728918]        [ 0.00978686]
  980          1.53288e-05        [ 0.99746674]         [ 0.0091459]
 1000           1.3387e-05        [ 0.99763268]        [ 0.00854692]
 1020          1.16906e-05        [ 0.99778771]        [ 0.00798717]
 1040          1.02097e-05        [ 0.99793261]        [ 0.00746409]
 1060          8.91634e-06        [ 0.99806798]        [ 0.00697525]
 1080          7.78604e-06        [ 0.99819452]        [ 0.00651844]
 1100          6.79978e-06        [ 0.99831271]        [ 0.00609155]
 1120          5.93855e-06        [ 0.99842322]        [ 0.00569264]
 1140          5.18626e-06        [ 0.99852651]        [ 0.00531984]
 1160          4.52932e-06        [ 0.99862301]        [ 0.00497144]
 1180          3.95535e-06        [ 0.99871314]        [ 0.00464585]
 1200          3.45418e-06        [ 0.99879742]        [ 0.00434162]
 1220          3.01651e-06        [ 0.99887615]        [ 0.00405729]
 1240          2.63436e-06        [ 0.99894977]         [ 0.0037916]
 1260          2.30064e-06        [ 0.99901855]        [ 0.00354329]
 1280           2.0093e-06         [ 0.9990828]        [ 0.00331126]
 1300          1.75471e-06        [ 0.99914289]        [ 0.00309441]
 1320          1.53252e-06        [ 0.99919903]        [ 0.00289177]
 1340          1.33834e-06        [ 0.99925148]        [ 0.00270243]
 1360           1.1689e-06        [ 0.99930048]        [ 0.00252547]
 1380           1.0207e-06        [ 0.99934626]        [ 0.00236008]
 1400          8.91491e-07        [ 0.99938911]        [ 0.00220554]
 1420          7.78467e-07        [ 0.99942911]        [ 0.00206113]
 1440          6.79969e-07        [ 0.99946648]        [ 0.00192616]
 1460          5.93811e-07        [ 0.99950141]        [ 0.00180004]
 1480          5.18429e-07        [ 0.99953407]        [ 0.00168218]
 1500          4.52971e-07        [ 0.99956453]        [ 0.00157205]
 1520          3.95553e-07        [ 0.99959308]        [ 0.00146911]
 1540          3.45387e-07        [ 0.99961972]         [ 0.0013729]
 1560          3.01625e-07        [ 0.99964458]          [ 0.001283]
 1580          2.63453e-07        [ 0.99966782]        [ 0.00119902]
 1600          2.30096e-07        [ 0.99968964]        [ 0.00112054]
 1620          2.00964e-07         [ 0.9997099]        [ 0.00104719]
 1640          1.75548e-07        [ 0.99972892]        [ 0.00097867]
 1660          1.53307e-07        [ 0.99974668]        [ 0.00091458]
 1680          1.33817e-07        [ 0.99976325]        [ 0.00085474]
 1700          1.16972e-07        [ 0.99977875]         [ 0.0007988]
 1720          1.02131e-07        [ 0.99979323]        [ 0.00074652]
 1740          8.92106e-08         [ 0.9998067]        [ 0.00069765]
 1760          7.79037e-08         [ 0.9998194]        [ 0.00065197]
 1780          6.80374e-08        [ 0.99983126]         [ 0.0006093]
 1800          5.94215e-08        [ 0.99984223]        [ 0.00056942]
 1820          5.18827e-08        [ 0.99985254]        [ 0.00053214]
 1840          4.53091e-08        [ 0.99986219]        [ 0.00049736]
 1860          3.95873e-08        [ 0.99987125]        [ 0.00046479]
 1880          3.45639e-08        [ 0.99987966]        [ 0.00043439]
 1900          3.01952e-08        [ 0.99988759]        [ 0.00040593]
 1920            2.638e-08        [ 0.99989486]        [ 0.00037939]
 1940          2.30409e-08        [ 0.99990183]        [ 0.00035453]
 1960          2.01113e-08        [ 0.99990821]        [ 0.00033135]
 1980          1.75652e-08        [ 0.99991417]        [ 0.00030971]
 2000          1.53335e-08        [ 0.99991983]        [ 0.00028941]
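
Because Python's `with` block does not create a new scope, `w_val` and `b_val` remain available after the session closes, so the fit can be visualized and reused. A follow-up sketch (not part of the original run) that puts the matplotlib import from the first cell to use:

In [ ]:
# Plot the training data against the fitted line y = w_val * x + b_val
plt.scatter(x_data, y_data, label='data')
xs = np.linspace(0, 6, 100)
plt.plot(xs, w_val[0] * xs + b_val[0], 'r-', label='fitted line')
plt.legend()
plt.show()

# Predict on unseen inputs with the learned parameters (pure NumPy,
# since the TensorFlow session above is already closed)
print(w_val * np.array([6.0, 7.0]) + b_val)  # approximately [6. 7.]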

In [ ]: