Linear Regression


In [1]:
%matplotlib inline

In [2]:
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import seaborn as sns

In [3]:
num_data = 1000
# Synthetic data: y = 0.1 * x + 0.3 plus Gaussian noise, so the fit
# should recover a slope near 0.1 and an intercept near 0.3.
x_data = np.float32(np.random.normal(0.0, 0.55, size=num_data))
y_data = np.float32(x_data * 0.1 + 0.3 + np.random.normal(0.0, 0.03, size=num_data))

In [4]:
plt.scatter(x_data,y_data)
plt.show()



In [5]:
linear_regression = tf.Graph()

with linear_regression.as_default():
    # Placeholders for the inputs and targets fed in at run time.
    x = tf.placeholder(tf.float32, shape=None)
    y_ = tf.placeholder(tf.float32, shape=None)
    # Model parameters: slope w and intercept b, both initialized to zero.
    w = tf.Variable(tf.zeros(1))
    b = tf.Variable(tf.zeros(1))
    y = w * x + b
    # Mean squared error, minimized with plain gradient descent (learning rate 0.5).
    loss = tf.reduce_mean(tf.square(y - y_))
    train = tf.train.GradientDescentOptimizer(0.5).minimize(loss)

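For reference, the update that GradientDescentOptimizer(0.5) applies to w and b can be written out by hand. The sketch below uses plain NumPy (the names w_np, b_np, lr are illustrative, not from the graph above) and repeats the same 15 gradient-descent steps on the mean squared error; it should land near the fitted values printed further down.

# Minimal NumPy sketch of the gradient-descent update used above
# (assumes x_data and y_data from the data-generation cell).
w_np, b_np, lr = 0.0, 0.0, 0.5
for _ in range(15):
    err = w_np * x_data + b_np - y_data
    # Gradients of mean((w*x + b - y)^2) with respect to w and b.
    grad_w = 2.0 * np.mean(err * x_data)
    grad_b = 2.0 * np.mean(err)
    w_np -= lr * grad_w
    b_np -= lr * grad_b
print(w_np, b_np)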
In [6]:
with tf.Session(graph=linear_regression) as sess:
    # tf.initialize_all_variables() is deprecated; use global_variables_initializer().
    sess.run(tf.global_variables_initializer())
    for step in range(15):
        _, l = sess.run([train, loss], feed_dict={x: x_data, y_: y_data})
        print("loss = %f" % l)
    slope, intercept = sess.run([w, b])
    print(slope, intercept)


loss = 0.093914
loss = 0.002484
loss = 0.001664
loss = 0.001257
loss = 0.001054
loss = 0.000953
loss = 0.000903
loss = 0.000878
loss = 0.000866
loss = 0.000860
loss = 0.000857
loss = 0.000855
loss = 0.000854
loss = 0.000854
loss = 0.000854
[ 0.10108184] [ 0.30092308]
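As a cross-check, an ordinary least-squares fit should give almost the same slope and intercept. A minimal sketch using NumPy's polyfit (degree 1 returns the slope first, then the intercept; the names slope_ls and intercept_ls are illustrative):

# Closed-form least-squares fit for comparison with the learned parameters.
slope_ls, intercept_ls = np.polyfit(x_data, y_data, 1)
print(slope_ls, intercept_ls)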

In [7]:
xs = np.arange(-2, 2, 0.1)
plt.scatter(x_data, y_data)
# Draw the fitted line using the slope and intercept learned above.
plt.plot(xs, slope * xs + intercept, color='red')
plt.xlim(-2, 2)
plt.show()
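If desired, the quality of the fitted line can also be checked numerically. A short sketch, assuming slope and intercept are the arrays recovered from the session above:

# Mean squared error of the fitted line; should be close to the last printed loss.
fitted = slope * x_data + intercept
print(np.mean((fitted - y_data) ** 2))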


