This notebook uses the NLopt package, which has well-tested routines for constrained optimization. (The Optim package has preliminary code for similar functionality; see the IPNewton section of its documentation.)
In [1]:
using Dates, NLopt
include("printmat.jl")
Out[1]:
In [2]:
using Plots
#pyplot(size=(600,400))
gr(size=(480,320))
default(fmt = :svg)
In the example below, we want to choose $(x,y)$ so as to minimize the objective function
$ (x-2)^2 + (4y+3)^2, $
without any constraints. The solution should be $(x,y)=(2,-3/4)$.
We use the algorithm LN_COBYLA, which does not require us to code up the derivatives but can still handle restrictions.
In [3]:
function lossfun(p,grad,A)     #grad is never used, but must be in the function definition
    (x,y) = (p[1],p[2])        #A could be data or other things that the function depends on
    L = (x-2)^2 + (4*y+3)^2
    return L
end
Out[3]:
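For comparison, a derivative-based NLopt algorithm (for instance, :LD_LBFGS) makes use of the grad argument: the callback must then fill grad in place whenever it is non-empty. A minimal sketch, assuming the same loss function (the name lossfun_grad is our own, not from the notebook):

function lossfun_grad(p,grad,A)
    (x,y) = (p[1],p[2])
    if length(grad) > 0        #NLopt signals that it wants derivatives by passing a non-empty grad
        grad[1] = 2*(x-2)      #dL/dx
        grad[2] = 8*(4*y+3)    #dL/dy
    end
    return (x-2)^2 + (4*y+3)^2
end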
In [4]:
nx = 2*41
ny = 2*61
x = range(1,stop=5,length=nx)
y = range(-1,stop=0,length=ny)
loss2d = fill(NaN,(nx,ny))     #matrix to hold the loss function values
for i = 1:nx, j = 1:ny         #evaluate the loss function on the grid
    loss2d[i,j] = lossfun([x[i];y[j]],[],0)
end
In [5]:
contour( x,y,loss2d',          #notice: loss2d'
         xlims = (1,5),
         ylims = (-1,0),
         legend = false,
         levels = 21,
         title = "Contour plot of loss function",
         xlabel = "x",
         ylabel = "y" )
scatter!([2],[-0.75],label="optimum",legend=true)
Out[5]:
Tell NLopt what to minimize as follows:

min_objective!(opt,(p,g)->lossfun(p,g,0))

Here p will be the parameters (over which we optimize), g the gradient (which LN_COBYLA does not actually use, but which must be in the call), and 0 corresponds to the extra argument A in the definition of lossfun. We use A=0 here, but in other contexts it could clearly be something else.
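For instance, if the loss function depended on a dataset, the same closure pattern would pass it along. A sketch, where data is a hypothetical array (lossfun simply ignores it, but a real loss function could use it):

data = randn(10)                               #hypothetical data for the loss function to depend on
min_objective!(opt,(p,g)->lossfun(p,g,data))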
In [6]:
opt = Opt(:LN_COBYLA, 2) #unconstrained minimization
lower_bounds!(opt,[-100,-100]) #LN_COBYLA seems to need some restrictions
min_objective!(opt,(p,g)->lossfun(p,g,0))
xtol_rel!(opt,1e-8) #set convergence tolerance to a reasonable number
(minf,minx,ret) = optimize(opt,[0.0,0.0])
println("minimum at $minx and $ret")
In [7]:
contour( x,y,loss2d',
         xlims = (1,5),
         ylims = (-1,0),
         legend = false,
         levels = 21,
         title = "Contour plot of loss function",
         xlabel = "x",
         ylabel = "y",
         annotation = (3.5,-0.7,text("somewhere here",8)) )
plot!([2.75,2.75],[-1,0.5],linecolor=:red,linewidth=2,label="2.75 < x",legend=true)
plot!([1,5],[-0.3,-0.3],linecolor=:black,linewidth=2,label="y < -0.3")
scatter!([2.75],[-0.75],markercolor=:red,label="optimum")
Out[7]:
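The figure shows the box bounds $x \geq 2.75$ and $y \leq -0.3$. Since the loss function is separable in $x$ and $y$, we can reason out the answer: the unconstrained choice $x=2$ violates $x \geq 2.75$, so that bound binds, while $y=-3/4$ already satisfies $y \leq -0.3$. The solution should therefore be $(x,y)=(2.75,-3/4)$.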
In [8]:
opt = Opt(:LN_COBYLA, 2) #bounds on the parameters
lower_bounds!(opt,[2.75, -Inf])
upper_bounds!(opt,[Inf, -0.3])
min_objective!(opt,(p,g)->lossfun(p,g,0))
xtol_rel!(opt,1e-8)
(minf,minx,ret) = optimize(opt,[3.0, -0.5])
println("minimum at $minx and $ret")
In [9]:
contour( x,y,loss2d',
         xlims = (1,5),
         ylims = (-1,0),
         legend = false,
         levels = 21,
         title = "Contour plot of loss function",
         xlabel = "x",
         ylabel = "y" )
plot!(3 .- 2*y,y,linecolor=:red,linewidth=3,label="restriction",legend=true)
scatter!([4],[-0.5],markercolor=:red,label="optimum")
Out[9]:
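The red line is the equality constraint $x + 2y = 3$ (plotted above as $x = 3 - 2y$). Substituting $x = 3 - 2y$ into the loss function gives $(1-2y)^2 + (4y+3)^2 = 20y^2 + 20y + 10$, which is minimized at $y=-1/2$. The solution should therefore be $(x,y)=(4,-1/2)$, as marked in the figure.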
In [10]:
function EqConstrfun(p,grad)   #equality constraint, the solver imposes c = 0
    (x,y) = (p[1],p[2])
    c = x + 2*y - 3            #that is, x + 2y = 3
    return c
end
opt = Opt(:LN_COBYLA, 2)
equality_constraint!(opt,(p,g) -> EqConstrfun(p,g))
min_objective!(opt,(p,g)->lossfun(p,g,0))
xtol_rel!(opt,1e-8)
(minf,minx,ret) = optimize(opt,[3.0, 0.0])
println("minimum at $minx and $ret")
In [11]:
yRestriction = -(x.-4).^2 #y should be less than this
contour( x,y,loss2d',
         legend = false,
         levels = 21,
         title = "Contour plot of loss function",
         xlabel = "x",
         ylabel = "y",
         xlims = (1,5),
         ylims = (-1,0),
         annotation = (4.0,-0.7,text("somewhere here",8)) )
plot!(x,yRestriction,linecolor=:red,linewidth=3,label="y is below this curve",legend=:topleft)
scatter!([3.1],[-0.79],markercolor=:red,label="optimum")
Out[11]:
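NLopt imposes an inequality constraint function $g(p)$ as $g(p) \leq 0$. The function below therefore returns $y + (x-4)^2$, which encodes the restriction $y \leq -(x-4)^2$ drawn above. The unconstrained solution $(2,-3/4)$ lies above the curve, so the constraint binds at the optimum, which is at roughly $(x,y) \approx (3.1,-0.79)$.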
In [12]:
function IneqConstrfun(p,grad) #inequality restriction, the solver imposes r <= 0
    (x,y) = (p[1],p[2])
    r = y + (x-4)^2            #that is, y <= -(x-4)^2
    return r
end
opt = Opt(:LN_COBYLA, 2)
inequality_constraint!(opt,(p,g) -> IneqConstrfun(p,g))
min_objective!(opt,(p,g)->lossfun(p,g,0))
xtol_rel!(opt,1e-8)
(minf,minx,ret) = optimize(opt,[4.0, -0.1])
println("minimum at $minx and $ret")