In [1]:
import tensorflow as tf
import numpy
import matplotlib.pyplot as plt
rng = numpy.random  # alias for NumPy's random module (not used further in this example)
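This notebook uses the TensorFlow 1.x graph API (tf.placeholder, tf.Session, tf.train.GradientDescentOptimizer). Under a TensorFlow 2.x install the same cells can usually still be run through the v1 compatibility layer; a minimal sketch, assuming tf.compat.v1 is available, would swap the import above for:

import tensorflow.compat.v1 as tf   # v1-style API exposed by a TF 2.x install
tf.disable_eager_execution()        # restore graph/session execution semantics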

In [2]:
# Parameters
learning_rate = 0.0005
training_epochs = 500
display_step = 10

In [3]:
# Training Data
train_X = numpy.asarray([1,2,4,3,5])
train_Y = numpy.asarray([1,3,3,2,5])
n_samples = train_X.shape[0]

In [4]:
# tf Graph Input
X = tf.placeholder("float")
Y = tf.placeholder("float")

# Set model weights
W = tf.Variable(tf.constant(0.0), name="weight")
b = tf.Variable(tf.constant(0.0), name="bias")

In [5]:
# Construct a linear model
pred = tf.add(tf.multiply(X, W), b)
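tf.multiply builds an element-wise product node, so pred is the graph expression W*X + b. Since TF1 tensors overload the Python operators, an equivalent (sketch, same graph, different spelling) formulation is:

pred = X * W + b  # builds the same multiply/add ops via operator overloading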

In [6]:
# Mean squared error (with a 1/2 factor, which only rescales the gradient)
cost = tf.reduce_sum(tf.pow(pred-Y, 2))/(2*n_samples)
# Gradient descent
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
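The quantity minimized here is half the mean squared error over the n_samples training points, and minimize(cost) applies a plain gradient-descent update to W and b. In notation (my own, with α = learning_rate and n = n_samples):

$$\mathrm{cost}(W,b)=\frac{1}{2n}\sum_{i=1}^{n}\bigl(W x_i + b - y_i\bigr)^2,\qquad W \leftarrow W-\alpha\,\frac{\partial\,\mathrm{cost}}{\partial W},\quad b \leftarrow b-\alpha\,\frac{\partial\,\mathrm{cost}}{\partial b}$$

Note that the training loop below feeds one (x, y) pair per optimizer step, so each update in practice uses the single-sample version of this cost (still divided by 2n), i.e. stochastic rather than full-batch gradient descent.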

In [7]:
# Initializing the variables
init = tf.global_variables_initializer()

In [8]:
# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    # Fit all training data
    for epoch in range(training_epochs):
        for (x, y) in zip(train_X, train_Y):
            sess.run(optimizer, feed_dict={X: x, Y: y})

        # Display logs every display_step epochs
        if (epoch+1) % display_step == 0:
            c = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
            print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(c),
                  "W=", sess.run(W), "b=", sess.run(b))

    print("Optimization Finished!")
    training_cost = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
    print("Training cost=", training_cost, "W=", sess.run(W), "b=", sess.run(b), '\n')
    # Graphic display
    plt.plot(train_X, train_Y, 'ro', label='Original data')
    plt.plot(train_X, sess.run(W) * train_X + sess.run(b), label='Fitted line')
    plt.legend()
    plt.show()


Epoch: 0010 cost= 4.290998459 W= 0.0485945 b= 0.0136148
Epoch: 0020 cost= 3.838841200 W= 0.0943931 b= 0.0264548
Epoch: 0030 cost= 3.437181473 W= 0.137557 b= 0.0385645
Epoch: 0040 cost= 3.080378056 W= 0.178236 b= 0.0499859
Epoch: 0050 cost= 2.763422012 W= 0.216575 b= 0.0607586
Epoch: 0060 cost= 2.481863737 W= 0.252708 b= 0.0709198
Epoch: 0070 cost= 2.231750488 W= 0.286761 b= 0.0805048
Epoch: 0080 cost= 2.009568214 W= 0.318854 b= 0.0895465
Epoch: 0090 cost= 1.812199950 W= 0.3491 b= 0.0980763
Epoch: 0100 cost= 1.636874437 W= 0.377605 b= 0.106124
Epoch: 0110 cost= 1.481128097 W= 0.404469 b= 0.113716
Epoch: 0120 cost= 1.342775702 W= 0.429786 b= 0.12088
Epoch: 0130 cost= 1.219874620 W= 0.453646 b= 0.12764
Epoch: 0140 cost= 1.110698700 W= 0.476132 b= 0.134019
Epoch: 0150 cost= 1.013715386 W= 0.497323 b= 0.140039
Epoch: 0160 cost= 0.927563310 W= 0.517294 b= 0.145721
Epoch: 0170 cost= 0.851032913 W= 0.536114 b= 0.151083
Epoch: 0180 cost= 0.783049941 W= 0.55385 b= 0.156146
Epoch: 0190 cost= 0.722659290 W= 0.570564 b= 0.160925
Epoch: 0200 cost= 0.669012606 W= 0.586316 b= 0.165437
Epoch: 0210 cost= 0.621357441 W= 0.601159 b= 0.169698
Epoch: 0220 cost= 0.579024315 W= 0.615147 b= 0.173721
Epoch: 0230 cost= 0.541418254 W= 0.628329 b= 0.177521
Epoch: 0240 cost= 0.508012950 W= 0.64075 b= 0.18111
Epoch: 0250 cost= 0.478338063 W= 0.652456 b= 0.184501
Epoch: 0260 cost= 0.451976866 W= 0.663486 b= 0.187704
Epoch: 0270 cost= 0.428559870 W= 0.67388 b= 0.190731
Epoch: 0280 cost= 0.407757759 W= 0.683674 b= 0.193592
Epoch: 0290 cost= 0.389278650 W= 0.692904 b= 0.196296
Epoch: 0300 cost= 0.372862905 W= 0.7016 b= 0.198852
Epoch: 0310 cost= 0.358280420 W= 0.709795 b= 0.201269
Epoch: 0320 cost= 0.345326453 W= 0.717516 b= 0.203555
Epoch: 0330 cost= 0.333819270 W= 0.724791 b= 0.205717
Epoch: 0340 cost= 0.323596597 W= 0.731645 b= 0.207762
Epoch: 0350 cost= 0.314515531 W= 0.738104 b= 0.209698
Epoch: 0360 cost= 0.306448370 W= 0.744189 b= 0.21153
Epoch: 0370 cost= 0.299281955 W= 0.749922 b= 0.213264
Epoch: 0380 cost= 0.292915702 W= 0.755324 b= 0.214906
Epoch: 0390 cost= 0.287260324 W= 0.760412 b= 0.216462
Epoch: 0400 cost= 0.282236010 W= 0.765206 b= 0.217936
Epoch: 0410 cost= 0.277772784 W= 0.769723 b= 0.219332
Epoch: 0420 cost= 0.273807555 W= 0.773977 b= 0.220656
Epoch: 0430 cost= 0.270285010 W= 0.777985 b= 0.221912
Epoch: 0440 cost= 0.267155498 W= 0.781761 b= 0.223103
Epoch: 0450 cost= 0.264375299 W= 0.785317 b= 0.224233
Epoch: 0460 cost= 0.261905372 W= 0.788666 b= 0.225306
Epoch: 0470 cost= 0.259710878 W= 0.791821 b= 0.226324
Epoch: 0480 cost= 0.257761091 W= 0.794793 b= 0.227292
Epoch: 0490 cost= 0.256028801 W= 0.797591 b= 0.228212
Epoch: 0500 cost= 0.254489601 W= 0.800227 b= 0.229086
Optimization Finished!
Training cost= 0.25449 W= 0.800227 b= 0.229086 


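As a quick sanity check (not part of the original notebook), the gradient-descent estimates can be compared with the closed-form least-squares line for the same five points; a minimal NumPy sketch, standalone and independent of the TF session:

# Sanity-check sketch: closed-form least-squares fit for the same data.
import numpy

train_X = numpy.asarray([1, 2, 4, 3, 5], dtype=float)
train_Y = numpy.asarray([1, 3, 3, 2, 5], dtype=float)

# numpy.polyfit with degree 1 returns [slope, intercept] of the least-squares line.
slope, intercept = numpy.polyfit(train_X, train_Y, 1)
print("closed-form: W =", slope, "b =", intercept)

For these points the closed-form solution is W = 0.8 and b = 0.4. After 500 epochs the run above has essentially reached the slope (W ≈ 0.8002), while the intercept (b ≈ 0.229) is still converging toward 0.4, consistent with the small learning rate.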
In [ ]: