In [20]:
%matplotlib inline
import time
from IPython.html import widgets # Widget definitions
from IPython import display # Used to display widgets in the notebook
from IPython.html.widgets.interaction import interact # create interactive UI for a function
import pylab as pl

In [21]:
import random
from numpy.random import randn
from numpy import *

# Line model: y = m*x + b (m = slope, b = y-intercept)
def computeErrorForLineGivenPoints(b, m, points):
    """Mean squared error of the line y = m*x + b over `points`.

    points: (N, 2) array whose columns are (x, y).
    Returns sum((y_i - (m*x_i + b))^2) / N.
    """
    n = len(points)
    squared_residuals = 0
    for row in range(n):
        x = points[row, 0]
        y = points[row, 1]
        residual = y - (m * x + b)
        squared_residuals += residual * residual
    return squared_residuals / float(n)

def stepGradient(b_current, m_current, points, learning_rate):
    """One gradient-descent step on the MSE of the line y = m*x + b.

    Accumulates the partial derivatives of the mean squared error with
    respect to b and m over all points, then moves each parameter a
    `learning_rate`-sized step against its gradient.

    Returns [new_b, new_m].
    """
    n = float(len(points))
    grad_b = 0
    grad_m = 0
    for idx in range(len(points)):
        x = points[idx, 0]
        y = points[idx, 1]
        # residual of the current line at this point
        residual = y - ((m_current * x) + b_current)
        grad_b += -(2 / n) * residual
        grad_m += -(2 / n) * x * residual
    return [b_current - (learning_rate * grad_b),
            m_current - (learning_rate * grad_m)]


def generate_random_points():
    """Generate a reproducible noisy linear dataset with y ~ 1.5 * x.

    Returns (x, y, points): x and y are length-100 arrays, and points is
    the (100, 2) array with x in column 0 and y in column 1.
    """
    # Bug fix: the original seeded Python's `random` module, but the data
    # is drawn with numpy's `randn`, so the seed had no effect and the
    # dataset was NOT reproducible. Seed numpy's global RNG instead.
    import numpy.random
    numpy.random.seed(100)
    n = 100
    x = randn(n) * 10 + 50       # x ~ N(50, 10^2)
    y = x * 1.5 + randn(n) * 10  # linear trend plus Gaussian noise
    points = array([x, y]).transpose()
    return x, y, points

In [31]:
def find_linear_regression_using_gradient_descent(iterations, learning_rate,
                                                  sleep_time, separate_plots=False):
    """Animate gradient-descent linear regression on a random dataset.

    Runs `iterations` gradient steps from b = m = 0, redrawing the scatter
    plot plus the current fitted line after every step (pausing
    `sleep_time` seconds between frames), then prints the error recorded
    every 10th iteration.

    iterations:     number of gradient-descent steps to run
    learning_rate:  step size passed to stepGradient
    sleep_time:     seconds to sleep between animation frames
    separate_plots: if True, keep every frame's plot instead of clearing
                    the previous one (one figure per iteration)
    """
    line_errors = []  # sampled (iteration, b, m, error) tuples for the report below
    x, y, points = generate_random_points()

    b = 0 # y-intercept
    m = 0 # slope
    for i in range(iterations):
        b, m = stepGradient(b, m, array(points), learning_rate=learning_rate)
        if i % 10 == 0:
            # record a snapshot of the fit every 10 iterations
            line_errors.append((i, b, m, computeErrorForLineGivenPoints(b, m, points)))

        # Re-render the animation frame: data scatter + current fitted line.
        # cf. http://stackoverflow.com/a/21361994
        pl.scatter(x, y)
        pl.plot(arange(0,100), m*arange(0,100)+b)
        if not separate_plots:
            display.clear_output(wait=True) # prints all plots separately, if removed
        display.display(pl.gcf())
        time.sleep(sleep_time)
    
    # Summary report of the sampled snapshots (Python 2 print statement).
    for i, b, m, error in line_errors:
        print "iteration {}: b = {}, m = {}, error = {}".format(i, b, m, error)

In [32]:
# Build an interactive UI for the animation: one widget per keyword argument,
# each wired to the matching parameter of the function. (IntTextWidget /
# FloatSliderWidget / FloatTextWidget are the pre-ipywidgets names from the
# IPython 2.x `IPython.html.widgets` API.)
interact(find_linear_regression_using_gradient_descent,
         iterations=widgets.IntTextWidget(min=1, max=100, value=10),
         learning_rate=widgets.FloatSliderWidget(min=0.00001, max=0.1, value=0.0001, step=0.0001),
         sleep_time=widgets.FloatTextWidget(value=0.1))


iteration 0: b = 0.0145470101107, m = 0.737070297359, error = 1484.58655741
iteration 10: b = 0.0297140492452, m = 1.47815958573, error = 112.651664898
iteration 20: b = 0.0303296177311, m = 1.47889523108, error = 112.649908144
iteration 30: b = 0.0309304646744, m = 1.47888417577, error = 112.649547005
iteration 40: b = 0.031531251468, m = 1.47887236825, error = 112.649185922
iteration 50: b = 0.0321319929023, m = 1.47886056086, error = 112.648824894
iteration 60: b = 0.0327326889957, m = 1.47884875436, error = 112.64846392
iteration 70: b = 0.0333333397517, m = 1.47883694875, error = 112.648103001
iteration 80: b = 0.0339339451737, m = 1.47882514404, error = 112.647742136
iteration 90: b = 0.034534505265, m = 1.47881334021, error = 112.647381326

In [ ]: