In [75]:
# Useful starting lines
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
%load_ext autoreload
%autoreload 2


The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload

Load the data


In [76]:
import datetime
from helpers import *

height, weight, gender = load_data(sub_sample=False, add_outlier=False)
x, mean_x, std_x = standardize(height)
y, tx = build_model_data(x, weight)

In [77]:
y.shape, tx.shape


Out[77]:
((10000,), (10000, 2))
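
The shapes confirm that tx has one row per sample and two columns: a constant 1 for the offset term and the standardized height. A minimal sketch of what standardize and build_model_data are expected to do (the real implementations live in helpers.py and may differ in detail; the _sketch names are only illustrative):

def standardize_sketch(x):
    """Center x and rescale it to unit standard deviation."""
    mean_x = np.mean(x)
    std_x = np.std(x)
    return (x - mean_x) / std_x, mean_x, std_x

def build_model_data_sketch(x, weight):
    """Return the targets y and the design matrix tx = [1, x]."""
    y = weight
    tx = np.c_[np.ones(len(y)), x]  # prepend a column of ones for the offset w0
    return y, tx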

Computing the Cost Function

Fill in the compute_loss function below:
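
For reference, with the error vector $e = y - \tilde{X}w$, the two losses implemented below are

$$\mathcal{L}_{\text{MSE}}(w) = \frac{1}{2N} \sum_{n=1}^{N} \left(y_n - \tilde{x}_n^\top w\right)^2, \qquad \mathcal{L}_{\text{MAE}}(w) = \frac{1}{N} \sum_{n=1}^{N} \left|y_n - \tilde{x}_n^\top w\right|.$$

compute_loss returns the MSE by default; swapping in calculate_mae gives the MAE instead.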


In [78]:
def calculate_mse(e):
    """Calculate the mse for vector e."""
    return 1/2*np.mean(e**2)


def calculate_mae(e):
    """Calculate the mae for vector e."""
    return np.mean(np.abs(e))


def compute_loss(y, tx, w):
    """Calculate the loss.

    You can calculate the loss using mse or mae.
    """
    e = y - tx.dot(w)
    return calculate_mse(e)

Grid Search

Fill in the function grid_search() below:


In [79]:
# from costs import *

def grid_search(y, tx, w0, w1):
    """Algorithm for grid search."""
    loss = np.zeros((len(w0), len(w1)))
    # compute loss for each combination of w0 and w1.
    for ind_row, row in enumerate(w0):
        for ind_col, col in enumerate(w1):
            w = np.array([row, col])
            loss[ind_row, ind_col] = compute_loss(y, tx, w)
    return loss
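
If the Python-level double loop becomes slow for finer grids, the same loss surface can be computed with NumPy broadcasting. A minimal sketch (grid_search_vectorized is not part of the exercise), assuming the MSE loss defined above:

def grid_search_vectorized(y, tx, w0, w1):
    """Compute the MSE for every (w0, w1) pair without explicit loops."""
    ww0, ww1 = np.meshgrid(w0, w1, indexing="ij")
    w_all = np.stack([ww0.ravel(), ww1.ravel()], axis=1)  # shape (len(w0)*len(w1), 2)
    e = y[np.newaxis, :] - w_all.dot(tx.T)                # residuals, shape (num_candidates, N)
    return (0.5 * np.mean(e**2, axis=1)).reshape(len(w0), len(w1))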

Let us play with the grid search demo now!


In [80]:
from grid_search import generate_w, get_best_parameters
from plots import grid_visualization

# Generate the grid of parameters to be swept
grid_w0, grid_w1 = generate_w(num_intervals=10)

# Start the grid search
start_time = datetime.datetime.now()
grid_losses = grid_search(y, tx, grid_w0, grid_w1)

# Select the best combination
loss_star, w0_star, w1_star = get_best_parameters(grid_w0, grid_w1, grid_losses)
end_time = datetime.datetime.now()
execution_time = (end_time - start_time).total_seconds()

# Print the results
print("Grid Search: loss*={l}, w0*={w0}, w1*={w1}, execution time={t:.3f} seconds".format(
      l=loss_star, w0=w0_star, w1=w1_star, t=execution_time))

# Plot the results
fig = grid_visualization(grid_losses, grid_w0, grid_w1, mean_x, std_x, height, weight)
fig.set_size_inches(10.0, 6.0)
fig.savefig("grid_plot")  # Optional saving


Grid Search: loss*=42.42448314678248, w0*=66.66666666666669, w1*=16.666666666666686, execution time=0.010 seconds

Gradient Descent

Again, please fill in the function compute_gradient below:
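
For the MSE loss, the gradient with respect to $w$ is

$$\nabla \mathcal{L}(w) = -\frac{1}{N} \tilde{X}^\top e, \qquad e = y - \tilde{X} w,$$

which is exactly what compute_gradient returns, together with the error vector $e$.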


In [81]:
def compute_gradient(y, tx, w):
    """Compute the gradient."""
    err = y - tx.dot(w)
    grad = -tx.T.dot(err) / len(err)
    return grad, err

Please fill in the function gradient_descent below:
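
At every iteration, the weights move against the gradient with step size $\gamma$:

$$w^{(t+1)} = w^{(t)} - \gamma \, \nabla \mathcal{L}\left(w^{(t)}\right).$$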


In [82]:
def gradient_descent(y, tx, initial_w, max_iters, gamma):
    """Gradient descent algorithm."""
    # Define parameters to store w and loss
    ws = [initial_w]
    losses = []
    w = initial_w
    for n_iter in range(max_iters):
        # compute loss, gradient
        grad, err = compute_gradient(y, tx, w)
        loss = calculate_mse(err)
        # update w by the gradient descent step
        w = w - gamma * grad
        # store w and loss
        ws.append(w)
        losses.append(loss)
        print("Gradient Descent({bi}/{ti}): loss={l}, w0={w0}, w1={w1}".format(
              bi=n_iter, ti=max_iters - 1, l=loss, w0=w[0], w1=w[1]))

    return losses, ws

Test your gradient descent function through the gradient descent demo shown below:


In [83]:
# from gradient_descent import *
from plots import gradient_descent_visualization

# Define the parameters of the algorithm.
max_iters = 50
gamma = 0.7

# Initialization
w_initial = np.array([0, 0])

# Start gradient descent.
start_time = datetime.datetime.now()
gradient_losses, gradient_ws = gradient_descent(y, tx, w_initial, max_iters, gamma)
end_time = datetime.datetime.now()

# Print result
execution_time = (end_time - start_time).total_seconds()
print("Gradient Descent: execution time={t:.3f} seconds".format(t=execution_time))


Gradient Descent(0/49): loss=2792.2367127591674, w0=51.30574540147352, w1=9.435798704492393
Gradient Descent(1/49): loss=265.302462108962, w0=66.69746902191565, w1=12.266538315840034
Gradient Descent(2/49): loss=37.87837955044161, w0=71.31498610804833, w1=13.115760199244338
Gradient Descent(3/49): loss=17.410212120174496, w0=72.70024123388814, w1=13.370526764265632
Gradient Descent(4/49): loss=15.568077051450455, w0=73.11581777164008, w1=13.446956733772023
Gradient Descent(5/49): loss=15.402284895265295, w0=73.24049073296567, w1=13.469885724623941
Gradient Descent(6/49): loss=15.38736360120863, w0=73.27789262136332, w1=13.476764421879517
Gradient Descent(7/49): loss=15.38602068474353, w0=73.28911318788263, w1=13.478828031056189
Gradient Descent(8/49): loss=15.385899822261674, w0=73.29247935783842, w1=13.47944711380919
Gradient Descent(9/49): loss=15.385888944638305, w0=73.29348920882516, w1=13.47963283863509
Gradient Descent(10/49): loss=15.3858879656522, w0=73.29379216412119, w1=13.479688556082861
Gradient Descent(11/49): loss=15.385887877543453, w0=73.29388305071, w1=13.479705271317192
Gradient Descent(12/49): loss=15.385887869613667, w0=73.29391031668663, w1=13.479710285887492
Gradient Descent(13/49): loss=15.385887868899983, w0=73.29391849647962, w1=13.479711790258582
Gradient Descent(14/49): loss=15.38588786883575, w0=73.29392095041752, w1=13.479712241569908
Gradient Descent(15/49): loss=15.385887868829974, w0=73.29392168659889, w1=13.479712376963306
Gradient Descent(16/49): loss=15.38588786882945, w0=73.2939219074533, w1=13.479712417581325
Gradient Descent(17/49): loss=15.385887868829403, w0=73.29392197370962, w1=13.479712429766732
Gradient Descent(18/49): loss=15.3858878688294, w0=73.29392199358651, w1=13.479712433422353
Gradient Descent(19/49): loss=15.385887868829398, w0=73.29392199954958, w1=13.47971243451904
Gradient Descent(20/49): loss=15.385887868829398, w0=73.29392200133852, w1=13.479712434848047
Gradient Descent(21/49): loss=15.3858878688294, w0=73.29392200187519, w1=13.479712434946748
Gradient Descent(22/49): loss=15.3858878688294, w0=73.29392200203618, w1=13.479712434976358
Gradient Descent(23/49): loss=15.3858878688294, w0=73.29392200208449, w1=13.479712434985242
Gradient Descent(24/49): loss=15.3858878688294, w0=73.29392200209898, w1=13.479712434987906
Gradient Descent(25/49): loss=15.385887868829398, w0=73.29392200210333, w1=13.479712434988706
Gradient Descent(26/49): loss=15.3858878688294, w0=73.29392200210462, w1=13.479712434988945
Gradient Descent(27/49): loss=15.3858878688294, w0=73.29392200210502, w1=13.479712434989018
Gradient Descent(28/49): loss=15.3858878688294, w0=73.29392200210515, w1=13.47971243498904
Gradient Descent(29/49): loss=15.3858878688294, w0=73.29392200210518, w1=13.479712434989047
Gradient Descent(30/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(31/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(32/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(33/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(34/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(35/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(36/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(37/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(38/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(39/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(40/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(41/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(42/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(43/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(44/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(45/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(46/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(47/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(48/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent(49/49): loss=15.3858878688294, w0=73.29392200210519, w1=13.479712434989048
Gradient Descent: execution time=0.017 seconds

In [84]:
# Time Visualization
from ipywidgets import IntSlider, interact
def plot_figure(n_iter):
    fig = gradient_descent_visualization(
        gradient_losses, gradient_ws, grid_losses, grid_w0, grid_w1, mean_x, std_x, height, weight, n_iter)
    fig.set_size_inches(10.0, 6.0)

interact(plot_figure, n_iter=IntSlider(min=1, max=len(gradient_ws)))


Stochastic gradient descent
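
SGD replaces the full gradient with a stochastic estimate computed on a small minibatch $B$ drawn from the data at every iteration:

$$\nabla \mathcal{L}_B(w) = -\frac{1}{|B|} \tilde{X}_B^\top \left(y_B - \tilde{X}_B w\right), \qquad w^{(t+1)} = w^{(t)} - \gamma \, \nabla \mathcal{L}_B\left(w^{(t)}\right).$$

With batch_size=1, each update uses a single sample, which is why the losses below fluctuate instead of decreasing monotonically.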


In [85]:
def compute_stoch_gradient(y, tx, w):
    """Compute a stochastic gradient from just few examples n and their corresponding y_n labels."""
    err = y - tx.dot(w)
    grad = -tx.T.dot(err) / len(err)
    return grad, err

def stochastic_gradient_descent(
        y, tx, initial_w, batch_size, max_iters, gamma):
    """Stochastic gradient descent."""
    # Define parameters to store w and loss
    ws = [initial_w]
    losses = []
    w = initial_w
    
    for n_iter in range(max_iters):
        for y_batch, tx_batch in batch_iter(y, tx, batch_size=batch_size, num_batches=1):
            # compute a stochastic gradient and loss
            grad, _ = compute_stoch_gradient(y_batch, tx_batch, w)
            # update w through the stochastic gradient update
            w = w - gamma * grad
            # calculate loss
            loss = compute_loss(y, tx, w)
            # store w and loss
            ws.append(w)
            losses.append(loss)

        print("SGD({bi}/{ti}): loss={l}, w0={w0}, w1={w1}".format(
              bi=n_iter, ti=max_iters - 1, l=loss, w0=w[0], w1=w[1]))
    return losses, ws
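
The loop above relies on batch_iter from helpers.py to deliver the minibatches. A minimal sketch of the behaviour it is assumed to have (shuffle the data once, then yield num_batches slices of batch_size samples); the real helper may differ in details:

def batch_iter_sketch(y, tx, batch_size, num_batches=1, shuffle=True):
    """Yield (y_batch, tx_batch) minibatches of size batch_size."""
    data_size = len(y)
    indices = np.random.permutation(data_size) if shuffle else np.arange(data_size)
    for batch_num in range(num_batches):
        start = batch_num * batch_size
        end = min(start + batch_size, data_size)
        if start != end:
            yield y[indices[start:end]], tx[indices[start:end]]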

In [86]:
# from stochastic_gradient_descent import *

# Define the parameters of the algorithm.
max_iters = 50
gamma = 0.7
batch_size = 1

# Initialization
w_initial = np.array([0, 0])

# Start SGD.
start_time = datetime.datetime.now()
sgd_losses, sgd_ws = stochastic_gradient_descent(
    y, tx, w_initial, batch_size, max_iters, gamma)
end_time = datetime.datetime.now()

# Print result
execution_time = (end_time - start_time).total_seconds()
print("SGD: execution time={t:.3f} seconds".format(t=execution_time))


SGD(0/49): loss=2171.6754532724867, w0=48.21683964517567, w1=-47.21393691377801
SGD(1/49): loss=2186.496140444089, w0=44.44584465765311, w1=-45.76561597729197
SGD(2/49): loss=737.2983981025528, w0=88.95568018941348, w1=-21.14014243110949
SGD(3/49): loss=291.97967846843784, w0=63.346378023865554, w1=-7.833052519380567
SGD(4/49): loss=79.7668142066913, w0=84.09813092089178, w1=16.948274430245334
SGD(5/49): loss=16.57675333994607, w0=71.7522489825269, w1=13.550247837005877
SGD(6/49): loss=36.080685971605824, w0=68.35298254822953, w1=9.3594316777241
SGD(7/49): loss=105.37452640280316, w0=80.32086281387237, w1=24.907721005858626
SGD(8/49): loss=112.47256368607047, w0=87.18408663806457, w1=14.591772694339058
SGD(9/49): loss=29.553407276227944, w0=68.56176461258902, w1=15.917277871166
SGD(10/49): loss=59.86542168769982, w0=63.865953396333786, w1=13.748925373626264
SGD(11/49): loss=30.68849378782939, w0=74.86748057380683, w1=8.176022657162271
SGD(12/49): loss=31.65858288316803, w0=72.95917975652453, w1=7.7846849848619755
SGD(13/49): loss=88.05447251203994, w0=84.58556734376003, w1=17.702971219944768
SGD(14/49): loss=20.768141238755277, w0=70.51474475502692, w1=11.735957701898915
SGD(15/49): loss=101.82504455484717, w0=61.52386232933915, w1=7.619336280546204
SGD(16/49): loss=32.23920266889106, w0=73.30615655138313, w1=19.285440633581132
SGD(17/49): loss=26.1707244830374, w0=76.76453825594484, w1=16.565890661695995
SGD(18/49): loss=38.261886472123265, w0=72.04707473025779, w1=20.127822624153666
SGD(19/49): loss=38.32503710743473, w0=72.25307274652928, w1=20.172613995924912
SGD(20/49): loss=30.78456081246016, w0=68.78824361816173, w1=16.71949764202456
SGD(21/49): loss=43.8518729595572, w0=74.61347843027082, w1=20.90875954847153
SGD(22/49): loss=53.414604786026175, w0=76.69469602142713, w1=21.510414141807605
SGD(23/49): loss=21.914067313231214, w0=70.02936379332002, w1=11.93083587752652
SGD(24/49): loss=27.645877475376444, w0=68.35077066271272, w1=13.187763725527066
SGD(25/49): loss=48.90071299664233, w0=65.10677406853495, w1=13.463619906835756
SGD(26/49): loss=54.85732094808015, w0=64.40899423335391, w1=13.51012135759221
SGD(27/49): loss=23.657579181784467, w0=69.51476167655515, w1=11.975940557442891
SGD(28/49): loss=26.183113722398684, w0=73.18657857092249, w1=18.125455550457282
SGD(29/49): loss=26.05230926955071, w0=77.80935727360391, w1=14.451148027474687
SGD(30/49): loss=43.41513655857207, w0=73.73858131925903, w1=20.95371908946519
SGD(31/49): loss=43.946208598434175, w0=73.919377942508, w1=21.011607702836603
SGD(32/49): loss=46.91986251798032, w0=71.30724768129153, w1=21.16873541852352
SGD(33/49): loss=41.1731622329057, w0=68.45056466101147, w1=18.78220603655295
SGD(34/49): loss=100.0912296528572, w0=80.19172199885351, w1=24.5174222868068
SGD(35/49): loss=101.61009846071978, w0=80.56817449178526, w1=24.412857997916685
SGD(36/49): loss=141.79407710074048, w0=62.066491119177044, w1=2.2208859316231795
SGD(37/49): loss=121.20325416606488, w0=81.28330366119529, w1=25.637200386859114
SGD(38/49): loss=89.70697662059416, w0=77.45968618233377, w1=24.937835592742857
SGD(39/49): loss=89.69146088698254, w0=77.5997349659997, w1=24.884585111010658
SGD(40/49): loss=28.505445124434416, w0=68.19226837038075, w1=13.019012171126715
SGD(41/49): loss=37.15004876036899, w0=72.39556532671352, w1=20.01586398363435
SGD(42/49): loss=36.21304950748181, w0=72.55982019757242, w1=19.89184295595371
SGD(43/49): loss=32.13913142988639, w0=68.04532634930892, w1=15.920763601098045
SGD(44/49): loss=27.25196531140997, w0=73.39453380662113, w1=8.609185548711428
SGD(45/49): loss=27.34154749012749, w0=68.7216361144644, w1=11.746068583364067
SGD(46/49): loss=31.366370924681828, w0=67.86483874956318, w1=11.903000302520754
SGD(47/49): loss=29.44288978496201, w0=68.5231624320618, w1=11.165872129975625
SGD(48/49): loss=37.06183295774028, w0=73.99012283356925, w1=6.932408059897687
SGD(49/49): loss=30.226138200550153, w0=71.20346205534395, w1=8.448760450083158
SGD: execution time=0.043 seconds

In [87]:
# Time Visualization
from ipywidgets import IntSlider, interact
def plot_figure(n_iter):
    fig = gradient_descent_visualization(
        sgd_losses, sgd_ws, grid_losses, grid_w0, grid_w1, mean_x, std_x, height, weight, n_iter)
    fig.set_size_inches(10.0, 6.0)

interact(plot_figure, n_iter=IntSlider(min=1, max=len(sgd_ws)))



In [ ]: