%% In [21]:
% Initial clean-up: reset workspace variables, close all figures, clear the
% command window so each run starts from a known-empty state.
clear; close all; clc
%% In [22]:
fprintf('Loading Data...\n')
%% In [23]:
%% Load Data:
% Each row of ex1data2.txt is one training example:
% column 1 = house size (sq-ft), column 2 = number of bedrooms,
% column 3 = price (target).
data = load('./data/ex1data2.txt');
%% In [24]:
% Unsuppressed on purpose: echoes the dataset dimensions as a sanity check.
size(data)
%% In [25]:
X = data(:, 1:2);   % feature matrix (size, bedrooms)
y = data(:, 3);     % target vector (price)
m = length(y);      % number of training examples
%% In [26]:
% Unsuppressed on purpose: echoes the example count as a sanity check.
length(y)
%% In [27]:
% Print the first 10 training examples.
% BUG FIX: fprintf consumes matrix arguments column-major, so the matrix
% must be transposed for each printed "x = [...], y = ..." line to
% correspond to a single example; without the transpose the printed values
% interleave columns, not rows.
fprintf('First 10 examples from the dataset: \n');
fprintf('x = [%.0f %.0f], y = %.0f \n', [X(1:10, :) y(1:10, :)]')
%% In [28]:
fprintf('Program paused. Press enter to continue.\n');
%% In [29]:
fprintf('Normalizing Features ...\n');
% Scale each feature column (featureNormalize is defined elsewhere in the
% project); mu and sigma are kept so the identical transform can be applied
% to new examples before prediction.
[X, mu, sigma] = featureNormalize(X);
% Add intercept term to X
X = [ones(m, 1) X];
%% In [30]:
fprintf('Running gradient descent ...\n');
% Gradient-descent hyperparameters: learning rate and iteration count.
alpha = 0.01;
num_iter = 400;
%% In [32]:
% Initialize theta (3 parameters: intercept + 2 features) to zeros and run
% multivariate gradient descent (defined elsewhere in the project).
theta = zeros(3, 1);
[theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iter);
%% In [33]:
% Plot the cost J at each iteration; a monotonically decreasing curve
% indicates the learning rate is small enough for convergence.
figure;
plot(1:numel(J_history), J_history, '-b', 'LineWidth', 2);
xlabel('Number of iterations');
ylabel('Cost J');
%% In [34]:
% Display gradient descent's result (one parameter per line).
fprintf('Theta computed from gradient descent: \n');
fprintf(' %f \n', theta);
fprintf('\n');
%% In [35]:
% Estimate the price of a 1650 sq-ft, 3-bedroom house.
d = [1650 3];
% Normalize with the TRAINING-set statistics (mu, sigma) — the model was fit
% on normalized features, so new inputs must get the same transform.
d = (d - mu) ./ sigma;
% Prepend the intercept term to match the trained theta.
d = [1, d];
% Predicted price is the inner product of features and parameters.
price = d * theta;
%% In [48]:
fprintf(['Predicted price of a 1650 sq-ft, 3 br house (using gradient descent):\n $%.3f\n'], price);
%% In [37]:
fprintf('Solving with normal equations...\n');
%% In [39]:
%% Load Data
% Re-read the raw (un-normalized) data: the closed-form solve does not
% require feature scaling, so we work on the original feature values.
data = csvread('./data/ex1data2.txt');
X = data(:, 1:2);
y = data(:, 3);
m = length(y);
%% In [40]:
% Add intercept term to X
X = [ones(m, 1) X];
%% In [41]:
% Closed-form least-squares solution (normalEqn is defined elsewhere in the
% project).
theta = normalEqn(X, y);
%% In [42]:
fprintf('Theta computed from the normal equations: \n');
fprintf('%f \n', theta);
fprintf('\n');
%% In [44]:
% Predict the price of a 1650 sq-ft, 3-bedroom house with the closed-form
% theta. No normalization here: this theta was fit on raw features.
% BUG FIX: the feature vector previously used 1653 sq-ft, which did not
% match the 1650 sq-ft house described in the printed message (or the
% gradient-descent prediction above).
d = [1 1650 3];
price = d * theta;
%% In [46]:
fprintf(['Predicted price of a 1650 sq-ft, 3 br house (using normal equations):\n $%.3f\n'], price);
%% In [ ]: