In [20]:
    
%matplotlib inline
import time
from IPython.html import widgets # Widget definitions
from IPython import display # Used to display widgets in the notebook
from IPython.html.widgets.interaction import interact # create interactive UI for a function
import pylab as pl
    
In [21]:
    
from numpy import *
from numpy.random import randn, seed
# y = mx + b
# m is slope, b is y-intercept
def computeErrorForLineGivenPoints(b, m, points):
    # Mean squared error of the line y = m*x + b over the given points
    totalError = 0
    for i in range(0, len(points)):
        x = points[i, 0]
        y = points[i, 1]
        totalError += (y - (m * x + b)) ** 2
    return totalError / float(len(points))
def stepGradient(b_current, m_current, points, learning_rate):
    # One gradient-descent step: accumulate the partial derivatives of the
    # mean squared error w.r.t. b and m, then move both against the gradient.
    b_gradient = 0
    m_gradient = 0
    N = float(len(points))
    for i in range(0, len(points)):
        x = points[i, 0]
        y = points[i, 1]
        b_gradient += -(2/N) * (y - ((m_current * x) + b_current))
        m_gradient += -(2/N) * x * (y - ((m_current * x) + b_current))
    new_b = b_current - (learning_rate * b_gradient)
    new_m = m_current - (learning_rate * m_gradient)
    return [new_b, new_m]
def generate_random_points():
    seed(100) # seed NumPy's RNG so the generated data is reproducible
    n = 100
    x = randn(n) * 10 + 50        # x values centred around 50
    y = x * 1.5 + randn(n) * 10   # noisy line with true slope 1.5 and intercept 0
    points = array([x, y]).transpose()
    return x, y, points
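For reference, stepGradient performs one step of gradient descent on the mean squared error that computeErrorForLineGivenPoints evaluates:

$$E(b, m) = \frac{1}{N}\sum_{i=1}^{N}\bigl(y_i - (m x_i + b)\bigr)^2$$

$$\frac{\partial E}{\partial b} = -\frac{2}{N}\sum_{i=1}^{N}\bigl(y_i - (m x_i + b)\bigr),\qquad
\frac{\partial E}{\partial m} = -\frac{2}{N}\sum_{i=1}^{N} x_i \bigl(y_i - (m x_i + b)\bigr)$$

Each call accumulates these sums and then moves b and m a step of size learning_rate against the gradient.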
    
In [31]:
    
def find_linear_regression_using_gradient_descent(iterations, learning_rate,
                                                  sleep_time, separate_plots=False):
    line_errors = []
    x, y, points = generate_random_points()
    b = 0 # y-intercept
    m = 0 # slope
    for i in range(iterations):
        b, m = stepGradient(b, m, points, learning_rate=learning_rate)
        if i % 10 == 0:
            line_errors.append((i, b, m, computeErrorForLineGivenPoints(b, m, points)))
        # animate the fit in place, cf. http://stackoverflow.com/a/21361994
        pl.scatter(x, y)
        pl.plot(arange(0, 100), m * arange(0, 100) + b)
        if not separate_plots:
            display.clear_output(wait=True) # clear the previous frame; remove to keep every plot
        display.display(pl.gcf())
        time.sleep(sleep_time)

    for i, b, m, error in line_errors:
        print("iteration {}: b = {}, m = {}, error = {}".format(i, b, m, error))
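As a quick non-interactive sanity check (not part of the original notebook), the gradient-descent estimates can be compared against NumPy's closed-form least-squares fit from polyfit; after enough iterations the two slopes should roughly agree:

In [ ]:
    
# Sanity check (sketch): run plain gradient steps without plotting and
# compare against NumPy's closed-form least-squares fit on the same data.
x, y, points = generate_random_points()
b, m = 0.0, 0.0
for _ in range(1000):
    b, m = stepGradient(b, m, points, learning_rate=0.0001)
m_ls, b_ls = polyfit(x, y, 1)  # degree-1 fit returns [slope, intercept]
print("gradient descent: b = {:.4f}, m = {:.4f}".format(b, m))
print("least squares:    b = {:.4f}, m = {:.4f}".format(b_ls, m_ls))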
    
In [32]:
    
interact(find_linear_regression_using_gradient_descent,
         iterations=widgets.IntTextWidget(min=1, max=100, value=10),
         learning_rate=widgets.FloatSliderWidget(min=0.00001, max=0.1, value=0.0001, step=0.0001),
         sleep_time=widgets.FloatTextWidget(value=0.1))
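The IPython.html widget classes used above were removed in later releases; on a current Jupyter install the same UI can be built with the standalone ipywidgets package. A rough equivalent (a sketch, assuming ipywidgets is installed; not from the original notebook):

In [ ]:
    
# Modern ipywidgets equivalent (sketch): the widget classes were renamed,
# e.g. FloatSliderWidget -> FloatSlider, FloatTextWidget -> FloatText.
import ipywidgets as widgets
from ipywidgets import interact
interact(find_linear_regression_using_gradient_descent,
         iterations=widgets.BoundedIntText(min=1, max=100, value=10),
         learning_rate=widgets.FloatSlider(min=0.00001, max=0.1, value=0.0001, step=0.0001),
         sleep_time=widgets.FloatText(value=0.1))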
    
In [ ]: