In [23]:
function newtmin_BFGS(p, maxIts, optTol)
    # p is a Toms566 Problem: p.obj, p.grd, and p.hes evaluate the
    # objective value, gradient, and Hessian at a point x, p.x0 is
    # the starting point, and p.n is the problem dimension.
    # maxIts is the maximum number of iterations.
    # optTol is the relative optimality tolerance on the gradient norm.
    its = 1;
    Tol = 0;
    # Evaluate the Hessian at the starting point. In our case, these
    # values are calls to the functions provided by Toms566.
    x_k0 = p.x0;
    n = p.n;
    h_k0 = p.hes(x_k0);
    delta = 1;
    tolFactor = optTol*norm(p.grd(x_k0));
    # Initial guess for the Hessian approximation. If the true Hessian
    # at x0 is not positive definite, modify its spectrum so that it is.
    if !isposdef(h_k0)
        H_diag, H_eigMat = eig(h_k0);
        for j = 1:length(H_diag)
            if H_diag[j] < 0
                # Replace negative eigenvalues with their absolute value
                H_diag[j] = abs(H_diag[j]);
            elseif H_diag[j] == 0
                # Replace zero eigenvalues with delta
                H_diag[j] = delta;
            end
        end
        B_k0 = H_eigMat*diagm(H_diag)*H_eigMat';
    else
        B_k0 = h_k0;
    end
    # Start the loop
    for i = 1:maxIts
        # Stop once the gradient norm meets the relative tolerance
        g_k0 = p.grd(x_k0);
        Tol = norm(g_k0);
        if Tol < tolFactor
            break;
        end
        # Quasi-Newton search direction: solve B_k0*step_k0 = g_k0
        step_k0 = B_k0\g_k0;
        # Calculate the step length by Armijo backtracking: halve alpha
        # until the sufficient-decrease condition holds.
        alpha = 1;
        mu = 1e-4;
        while p.obj(x_k0 - alpha*step_k0) > p.obj(x_k0) - mu*alpha*dot(g_k0, step_k0)
            alpha = 0.5*alpha;
            if alpha < 1e-4
                break;
            end
        end
        x_k1 = x_k0 - alpha*step_k0;
        g_k1 = p.grd(x_k1);
        y = g_k1 - g_k0;
        s = x_k1 - x_k0;
        # BFGS update of the Hessian approximation. Skip it when the
        # curvature condition s'y > 0 fails, which keeps B_k0 positive
        # definite and avoids division by zero.
        if dot(s, y) > 0
            dB = (y*y')/dot(s,y) - (B_k0*(s*s')*B_k0)/dot(s,B_k0*s);
            B_k0 = B_k0 + dB;
        end
        x_k0 = x_k1;
        its += 1;
    end
    x = x_k0;
    f = p.obj(x);
    B = B_k0;
    return (f, its, Tol, B, x);
end
Out[23]:
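For reference, the cell above implements the standard BFGS update of the Hessian approximation. With $s_k = x_{k+1} - x_k$ and $y_k = \nabla f(x_{k+1}) - \nabla f(x_k)$, the update is

$$B_{k+1} = B_k + \frac{y_k y_k^\top}{y_k^\top s_k} - \frac{B_k s_k s_k^\top B_k}{s_k^\top B_k s_k},$$

and the line search accepts the first $\alpha$ satisfying the Armijo condition $f(x_k - \alpha\, p_k) \le f(x_k) - \mu\, \alpha\, \nabla f(x_k)^\top p_k$, where $p_k = B_k^{-1} \nabla f(x_k)$ and $\mu = 10^{-4}$. The initialization replaces any negative eigenvalue of $\nabla^2 f(x_0)$ by its absolute value (and any zero eigenvalue by $\delta = 1$) so that $B_0$ is positive definite.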
In [2]:
Pkg.test("Toms566")
In [4]:
using Toms566
In [25]:
p = Problem(2)
f,its,Tol,B,x = newtmin_BFGS(p, 2000, 1e-8)
Out[25]:
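As a usage sketch (not part of the original run), the same solver can be applied to every problem in the Toms566 collection; the range 1:18 below assumes the package's usual numbering of its 18 test problems.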
In [ ]:
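# Hypothetical sweep over the Toms566 test set; adjust the range if the
# installed package numbers its problems differently.
for k = 1:18
    p = Problem(k)
    f, its, Tol, B, x = newtmin_BFGS(p, 2000, 1e-8)
    println("problem $k: f = $f, |grad| = $Tol, iterations = $its")
end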