In [2]:
using DelimitedFiles   # readdlm moved out of Base in Julia 1.0
using Gadfly
using LinearAlgebra    # pinv
using Statistics       # mean, std
In [3]:
# Load the ex1 multivariate dataset as a numeric matrix (comma-delimited).
# Presumably the Coursera ML ex1data2 layout (size, bedrooms, price) — confirm.
data = readdlm("../../../datasets/ml-ex1/ex1data2.txt", ',');
In [4]:
# Raw (un-normalized) feature matrix: first two columns of the file.
oX = data[:,1:2];
# Target vector: third column.
y = data[:,3];
# Number of training examples.
m = length(y);
# Design matrix: prepend a column of ones for the intercept term.
X = [ones(m, 1) oX];
In [25]:
"""
    feature_normalize(X)

Return a copy of `X` with every column rescaled to zero mean and unit
(sample) standard deviation — z-score normalization. Columns are
normalized independently; `X` itself is not modified.
"""
function feature_normalize(X)
    # Column-wise statistics as 1×n row matrices; the positional-dims
    # form `mean(X, 1)` was removed in Julia 1.0 — use the `dims` keyword.
    means = mean(X, dims=1)
    stds = std(X, dims=1)
    # Broadcasting subtracts/divides each row by the per-column stats.
    return (X .- means) ./ stds
end
"""
    compute_cost(X, y, theta)

Mean-squared-error cost for linear regression,
J(θ) = ‖Xθ − y‖² / (2m) with `m = length(y)`.
`theta` may be an n-vector or an n×1 matrix.
"""
function compute_cost(X, y, theta)
    # Predictions minus targets; `.-` broadcasts across either theta shape.
    residuals = X * theta .- y
    return sum(residuals.^2) / (2 * length(y))
end
"""
    gradient_descent(X, y, theta, alpha, iterations)

Run `iterations` steps of batch gradient descent for linear regression
with learning rate `alpha`, starting from `theta` (n×1).

Return `(theta, cost_history)` where `cost_history[i]` is the cost
J(θ) = ‖Xθ − y‖² / (2m) recorded after the i-th update.
"""
function gradient_descent(X, y, theta, alpha, iterations)
    m = length(y)
    cost_history = zeros(iterations, 1)
    # Iterate over the range directly — no need to collect() it.
    for i in 1:iterations
        # Standard vectorized gradient: ∇J(θ) = Xᵀ(Xθ − y) / m.
        # (The old `sum(..., 2)` positional-dims form was removed in Julia 1.0.)
        theta = theta .- (alpha / m) .* (X' * (X * theta .- y))
        # Same quantity as compute_cost(X, y, theta), inlined so this
        # function is self-contained.
        cost_history[i] = sum((X * theta .- y).^2) / (2m)
    end
    return theta, cost_history
end
Out[25]:
In [29]:
# Rebuild the design matrix from normalized features (plus intercept column);
# without normalization the differently-scaled features slow convergence.
X = [ones(m, 1) feature_normalize(oX)]
# Column count of the design matrix — size(X, 2) avoids allocating a row
# slice just to measure it (was length(X[1,:])).
num_features = size(X, 2);
theta = zeros(num_features, 1);
iterations = 50;
alpha = 0.1;
initial_cost = compute_cost(X, y, theta);
(ptheta, cost_history) = gradient_descent(X, y, theta, alpha, iterations);
# Learning curve: cost should decrease monotonically when alpha is well chosen.
plot(x=1:iterations, y=cost_history[:,1], Geom.line)
Out[29]:
In [33]:
"""
    normal_equation(X, y)

Closed-form least-squares fit via the normal equation,
θ = (XᵀX)⁺ Xᵀ y. The pseudo-inverse keeps this well-defined even
when XᵀX is singular (e.g. redundant features).
"""
function normal_equation(X, y)
    gram = X' * X
    return pinv(gram) * (X' * y)
end
# Closed-form solution on the same (normalized) design matrix, for
# comparison with the gradient-descent estimate `ptheta`.
normal_equation(X, y)
Out[33]:
In [ ]: