Linear Regression Example

Import libraries

This example uses the TensorFlow 1.x graph/session API, together with NumPy and matplotlib.

  • tensorflow
  • numpy
  • matplotlib

In [1]:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

Initialize Data


In [2]:
number_of_points = 200
x_point = []
y_point = []

Initialize the True Parameters a, b


In [3]:
a = 0.22   # true slope
b = 0.78   # true intercept

Generate Data

Each point follows y = a*x + b plus Gaussian noise (standard deviation 0.1), with x drawn from a zero-mean Gaussian of standard deviation 0.5, so training should recover A ≈ 0.22 and B ≈ 0.78.


In [4]:
for i in range(number_of_points):
    x = np.random.normal(0.0, 0.5)                # input: zero mean, std 0.5
    y = a*x + b + np.random.normal(0.0, 0.1)      # target: line plus noise, std 0.1
    x_point.append([x])
    y_point.append([y])
plt.plot(x_point, y_point, 'o', label='Input Data')
plt.legend()
plt.show()



Define the TensorFlow Graph

Initialize the A, B Variables


In [5]:
A = tf.Variable(initial_value=tf.random_uniform([1], -1.0, 1.0))  # slope, random in [-1, 1)
B = tf.Variable(initial_value=tf.zeros([1]))                      # intercept, starts at 0

Define Model


In [8]:
y = A * x_point + B                                               # model prediction
cost_function = tf.reduce_mean(tf.square(y - y_point))            # mean squared error
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.5)
train = optimizer.minimize(cost_function)                         # one training op
model = tf.global_variables_initializer()                         # variable-initialization op
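
For reference, this is what optimizer.minimize computes at each step, sketched by hand in NumPy. A minimal illustration only, not part of the original notebook; it assumes x_point and y_point from the cells above and starts both parameters at zero:

x_np = np.array(x_point).ravel()
t_np = np.array(y_point).ravel()
a_hat, b_hat, lr = 0.0, 0.0, 0.5

for _ in range(21):
    err = a_hat * x_np + b_hat - t_np    # residuals of the current fit
    grad_a = 2.0 * np.mean(err * x_np)   # d(cost)/dA for cost = mean((A*x + B - t)^2)
    grad_b = 2.0 * np.mean(err)          # d(cost)/dB
    a_hat -= lr * grad_a                 # one gradient-descent update
    b_hat -= lr * grad_b

print('manual a: {:.5f}, b: {:.5f}'.format(a_hat, b_hat))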

Start Training


In [9]:
with tf.Session() as session:
    session.run(model)                    # initialize A and B
    for step in range(21):
        session.run(train)                # one gradient-descent step
        if step % 5 == 0:
            cost_ = session.run(cost_function)
            a_ = session.run(A)
            b_ = session.run(B)
            print('cost: {}, a: {}, b: {}'.format(cost_, a_, b_))
            plt.plot(x_point, y_point, 'o', label='Step = {}'.format(step))
            plt.plot(x_point, a_ * x_point + b_)   # current fitted line
            plt.legend()
            plt.show()


cost: 0.0685929134488, a: [ 0.67469645], b: [ 0.80707633]
cost: 0.0156673453748, a: [ 0.31391582], b: [ 0.78573364]
cost: 0.0126269413158, a: [ 0.22741772], b: [ 0.78104818]
cost: 0.0124522699043, a: [ 0.20668516], b: [ 0.77992511]
cost: 0.0124422339723, a: [ 0.20171581], b: [ 0.77965593]
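
As a sanity check, not part of the original run, the same fit can be computed in closed form with ordinary least squares via NumPy (the rcond=None argument assumes NumPy 1.14 or later); the result should land near the values gradient descent converged to:

X = np.hstack([np.array(x_point), np.ones((len(x_point), 1))])   # design matrix [x, 1]
Y = np.array(y_point)
theta, _, _, _ = np.linalg.lstsq(X, Y, rcond=None)               # solves min ||X*theta - Y||^2
print('closed-form a: {}, b: {}'.format(theta[0, 0], theta[1, 0]))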

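The session/graph API above is TensorFlow 1.x only. For readers on TensorFlow 2.x, here is a minimal sketch of the same fit, assuming TF 2.x, using tf.GradientTape and tf.keras.optimizers.SGD; it is a port for illustration, not part of the original notebook:

x_np = np.array(x_point, dtype=np.float32)
t_np = np.array(y_point, dtype=np.float32)

A2 = tf.Variable(tf.random.uniform([1], -1.0, 1.0))    # slope
B2 = tf.Variable(tf.zeros([1]))                        # intercept
opt = tf.keras.optimizers.SGD(learning_rate=0.5)

for step in range(21):
    with tf.GradientTape() as tape:                    # records ops for autodiff
        pred = A2 * x_np + B2
        cost = tf.reduce_mean(tf.square(pred - t_np))
    grads = tape.gradient(cost, [A2, B2])
    opt.apply_gradients(zip(grads, [A2, B2]))          # one SGD step
    if step % 5 == 0:
        print('cost: {}, a: {}, b: {}'.format(cost.numpy(), A2.numpy(), B2.numpy()))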