In [1]:
import tensorflow as tf
import numpy
import matplotlib.pyplot as plt

In [2]:
train_X=numpy.asarray([1,2,4,3,5])
train_Y=numpy.asarray([1,3,3,2,5])
n_samples = train_X.shape[0]

In [3]:
X = tf.placeholder("float")
Y = tf.placeholder("float")
W = tf.Variable(0.0, name="weight")
b = tf.Variable(0.0, name="bias")
pred = tf.add(tf.multiply(X, W), b)
cost = tf.reduce_sum(tf.pow(pred-Y, 2))/(2*n_samples)
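
The model is a straight line, pred = W*X + b, and the cost is the squared error averaged over the n_samples points with the usual 1/2 factor, cost = sum((pred - Y)^2) / (2*n_samples). Since the dataset is tiny, the optimum that the gradient-descent runs below should approach can also be obtained in closed form; a quick reference check with numpy.polyfit (outside the TensorFlow graph, purely for comparison) looks like this:

# Reference only: closed-form least-squares fit to the same five points
W_ls, b_ls = numpy.polyfit(train_X, train_Y, 1)   # returns slope, then intercept
print(W_ls, b_ls)   # roughly 0.8 and 0.4, giving a minimum cost of about 0.24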

In [4]:
learning_rate = 0.05
training_epochs = 100
display_step = 50
print("LEARNING RATE=", learning_rate, "\t Epoch=", training_epochs, "\n\n")
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)

    # Fit all training data
    for epoch in range(training_epochs):
        for (x, y) in zip(train_X, train_Y):
            sess.run(optimizer, feed_dict={X: x, Y: y})

        # Display logs per epoch step
        if (epoch + 1) % display_step == 0:
            c = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(c),
                  "W=", sess.run(W), "b=", sess.run(b))

    print("Optimization Finished!")
    training_cost = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
    print("Training cost=", training_cost, "W=", sess.run(W), "b=", sess.run(b), '\n')

    # Graphic display
    plt.plot(train_X, train_Y, 'ro', label='Original data')
    plt.plot(train_X, sess.run(W) * train_X + sess.run(b), label='Fitted line')
    plt.legend()
    plt.show()


LEARNING RATE= 0.05 	 Epoch= 100 


Epoch: 0050 cost= 0.242040664 W= 0.844209 b= 0.280501
Epoch: 0100 cost= 0.241635278 W= 0.838333 b= 0.303215
Optimization Finished!
Training cost= 0.241635 W= 0.838333 b= 0.303215 
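
Note that the inner loop feeds one (x, y) pair per optimizer step, so the updates are stochastic rather than full-batch. Because X and Y are declared without a fixed shape, the same graph also accepts whole arrays, and a full-batch variant would only need the inner loop replaced by a single step per epoch (a sketch of the alternative, run inside the same session):

# Full-batch alternative: one gradient step per epoch over all samples
for epoch in range(training_epochs):
    sess.run(optimizer, feed_dict={X: train_X, Y: train_Y})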


In [5]:
learning_rate = 0.005
training_epochs = 500
display_step = 50
print("LEARNING RATE=", learning_rate, "\t Epoch=", training_epochs, "\n\n")
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)

    # Fit all training data
    for epoch in range(training_epochs):
        for (x, y) in zip(train_X, train_Y):
            sess.run(optimizer, feed_dict={X: x, Y: y})

        # Display logs per epoch step
        if (epoch + 1) % display_step == 0:
            c = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(c),
                  "W=", sess.run(W), "b=", sess.run(b))

    print("Optimization Finished!")
    training_cost = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
    print("Training cost=", training_cost, "W=", sess.run(W), "b=", sess.run(b), '\n')

    # Graphic display
    plt.plot(train_X, train_Y, 'ro', label='Original data')
    plt.plot(train_X, sess.run(W) * train_X + sess.run(b), label='Fitted line')
    plt.legend()
    plt.show()


LEARNING RATE= 0.005 	 Epoch= 500 


Epoch: 0050 cost= 0.253590971 W= 0.801722 b= 0.229982
Epoch: 0100 cost= 0.242115974 W= 0.840568 b= 0.247629
Epoch: 0150 cost= 0.241937876 W= 0.840941 b= 0.254299
Epoch: 0200 cost= 0.241786718 W= 0.839436 b= 0.260173
Epoch: 0250 cost= 0.241646916 W= 0.837899 b= 0.265778
Epoch: 0300 cost= 0.241518334 W= 0.836421 b= 0.271148
Epoch: 0350 cost= 0.241399959 W= 0.835004 b= 0.276296
Epoch: 0400 cost= 0.241291076 W= 0.833647 b= 0.281229
Epoch: 0450 cost= 0.241190821 W= 0.832345 b= 0.285958
Epoch: 0500 cost= 0.241098478 W= 0.831098 b= 0.290491
Optimization Finished!
Training cost= 0.241098 W= 0.831098 b= 0.290491 


In [6]:
learning_rate = 0.005
training_epochs = 1000
display_step = 50
print("LEARNING RATE=", learning_rate, "\t Epoch=", training_epochs, "\n\n")
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)

    # Fit all training data
    for epoch in range(training_epochs):
        for (x, y) in zip(train_X, train_Y):
            sess.run(optimizer, feed_dict={X: x, Y: y})

        # Display logs per epoch step
        if (epoch + 1) % display_step == 0:
            c = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(c),
                  "W=", sess.run(W), "b=", sess.run(b))

    print("Optimization Finished!")
    training_cost = sess.run(cost, feed_dict={X: train_X, Y: train_Y})
    print("Training cost=", training_cost, "W=", sess.run(W), "b=", sess.run(b), '\n')

    # Graphic display
    plt.plot(train_X, train_Y, 'ro', label='Original data')
    plt.plot(train_X, sess.run(W) * train_X + sess.run(b), label='Fitted line')
    plt.legend()
    plt.show()


LEARNING RATE= 0.005 	 Epoch= 1000 


Epoch: 0050 cost= 0.253590971 W= 0.801722 b= 0.229982
Epoch: 0100 cost= 0.242115974 W= 0.840568 b= 0.247629
Epoch: 0150 cost= 0.241937876 W= 0.840941 b= 0.254299
Epoch: 0200 cost= 0.241786718 W= 0.839436 b= 0.260173
Epoch: 0250 cost= 0.241646916 W= 0.837899 b= 0.265778
Epoch: 0300 cost= 0.241518334 W= 0.836421 b= 0.271148
Epoch: 0350 cost= 0.241399959 W= 0.835004 b= 0.276296
Epoch: 0400 cost= 0.241291076 W= 0.833647 b= 0.281229
Epoch: 0450 cost= 0.241190821 W= 0.832345 b= 0.285958
Epoch: 0500 cost= 0.241098478 W= 0.831098 b= 0.290491
Epoch: 0550 cost= 0.241013572 W= 0.829903 b= 0.294835
Epoch: 0600 cost= 0.240935415 W= 0.828757 b= 0.298999
Epoch: 0650 cost= 0.240863442 W= 0.827658 b= 0.302989
Epoch: 0700 cost= 0.240797281 W= 0.826606 b= 0.306815
Epoch: 0750 cost= 0.240736172 W= 0.825597 b= 0.310481
Epoch: 0800 cost= 0.240679979 W= 0.82463 b= 0.313995
Epoch: 0850 cost= 0.240628317 W= 0.823703 b= 0.317364
Epoch: 0900 cost= 0.240580559 W= 0.822814 b= 0.320592
Epoch: 0950 cost= 0.240536690 W= 0.821962 b= 0.323687
Epoch: 1000 cost= 0.240496278 W= 0.821146 b= 0.326653
Optimization Finished!
Training cost= 0.240496 W= 0.821146 b= 0.326653 
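
Each of the three runs above calls tf.train.GradientDescentOptimizer(...).minimize(cost) again, which keeps adding ops to the same default graph; the results stay independent because sess.run(init) resets W and b every time, but the graph itself grows with each re-run. When re-running cells like these, the TF 1.x graph can be cleared before rebuilding the model:

tf.reset_default_graph()   # drop previously added ops; redefine X, Y, W, b, and cost afterwards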


In [ ]: