In [1]:
using Gadfly
In [2]:
# Squared-error cost for linear regression:
# J(theta) = (1 / 2m) * sum((X*theta - y).^2)
function compute_cost(X, y, theta)
    return sum(((theta' * X')' - y).^2) / (2 * length(y))
end

# Batch gradient descent: repeatedly apply the vectorized update
# theta := theta - (alpha/m) * X' * (X*theta - y),
# recording the cost after every step so convergence can be plotted.
function gradient_descent(X, y, theta, alpha, iterations)
    cost_history = zeros(iterations, 1)
    for i in 1:iterations
        theta = (theta' - ((alpha / length(y)) * sum((sum((theta' .* X), 2) - y) .* X, 1)))'
        cost_history[i] = compute_cost(X, y, theta)
    end
    return theta, cost_history
end
Out[2]:
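The two functions above implement the usual univariate linear-regression objective and its batch gradient-descent update; with m = length(y), design matrix X (first column all ones), and learning rate α, they correspond to

$$J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\left(\theta^\top x^{(i)} - y^{(i)}\right)^2$$

$$\theta := \theta - \frac{\alpha}{m}\, X^\top (X\theta - y)$$

The one-liner inside the loop is just this vectorized update written out with broadcasting and transposes.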
In [8]:
# Load the ex1 training data: column 1 is the input feature, column 2 the target
data = readdlm("../../../datasets/ml-ex1/ex1data1.txt", ',');
oX = data[:,1];
m = length(oX);
X = [ones(m, 1) oX];   # prepend a column of ones for the intercept term
y = data[:,2];
plot(x=oX, y=y)        # scatter plot of the raw data
Out[8]:
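One portability note: readdlm lived in Base when this notebook was written, but from Julia 0.7 onwards it is provided by the DelimitedFiles standard library, so rerunning the cell above on a newer Julia would need an explicit import along these lines:

    using DelimitedFiles   # readdlm moved here in Julia 0.7; not needed on older versions
    data = readdlm("../../../datasets/ml-ex1/ex1data1.txt", ',');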
In [9]:
theta = zeros(2, 1);   # start from theta = [0; 0]
iterations = 1500;
alpha = 0.0105;        # learning rate
initial_cost = compute_cost(X, y, theta);
(ptheta, cost_history) = gradient_descent(X, y, theta, alpha, iterations);
plot(x=1:iterations, y=cost_history[:,1], Geom.line)   # cost should decrease monotonically
Out[9]:
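As a quick sanity check on the learned parameters (not part of the original exercise), the closed-form least-squares solution can be computed with Julia's backslash operator and compared against ptheta; theta_exact and exact_cost below are names introduced here for illustration:

    # Closed-form least-squares fit; after 1500 iterations ptheta should be very close to this
    theta_exact = X \ y
    exact_cost  = compute_cost(X, y, theta_exact)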
In [10]:
# Fitted values X * ptheta, overlaid as a red line on the training data
predicted_y = (ptheta' * X')';
plot(layer(x=oX, y=y, Geom.point),
     layer(x=oX, y=predicted_y, Theme(default_color=color("red")), Geom.line))
Out[10]:
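The fitted model can also be used for point predictions. Assuming this is the standard ex1 food-truck dataset (population in units of 10,000 against profit in units of $10,000), a sketch like the following predicts profit for populations of 35,000 and 70,000; predict1 and predict2 are names introduced here:

    # Remember the leading 1 for the intercept term; results are in the same units as y
    predict1 = ([1 3.5] * ptheta)[1]
    predict2 = ([1 7.0] * ptheta)[1]
    (predict1, predict2)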