PyOptWrapper


PyOptWrapper

This notebook contains some examples on how to use pyoptwrapper. First go here: https://github.com/BYUFLOWLab/pyoptsparsewrapper for details on installation and a description of inputs/outputs.

This wrapper makes it easier to use pyoptsparse for our problems. You can use any of the optimizers defined in pyoptsparse.


In [1]:
from pyoptwrapper import optimize
from pyoptsparse import NSGA2, SNOPT, NLPQLP, ALPSO, SLSQP
import numpy as np

Unconstrained, gradients provided


In [2]:
# ---- Rosenbrock with analytic gradients; exercised with several optimizers ----


def rosen(x):
    """Rosenbrock function with analytic gradient.

    Returns the tuple (f, c, ceq, gf, gc, gceq) expected by pyoptwrapper:
    objective, inequality constraints, equality constraints, and the
    gradients of each.  The problem is unconstrained, so the constraint
    entries are all empty.
    """
    x1, x2 = x[0], x[1]
    resid = x2 - x1**2  # shared term in f and both gradient components

    f = (1 - x1)**2 + 100*resid**2
    gf = [2*(1 - x1)*-1 + 200*resid*-2*x1, 200*resid]

    # unconstrained problem: no constraints, no constraint gradients
    return f, [], [], gf, [], []


# starting point and simple bound constraints for the Rosenbrock runs
x0 = [4.0, 4.0]
lb, ub = [-5.0, -5.0], [5.0, 5.0]

In [3]:
# Solve the same problem with five different optimizers and compare.
# NSGA2 is a genetic algorithm; raising maxGen improves convergence at
# the cost of more function evaluations.
optimizer = NSGA2()
optimizer.setOption('maxGen', 200)

xopt, fopt, info = optimize(rosen, x0, lb, ub, optimizer)
print 'NSGA2:', xopt, fopt, info


# gradient-based optimizer (uses the analytic gradients rosen returns)
optimizer = SNOPT()

xopt, fopt, info = optimize(rosen, x0, lb, ub, optimizer)
print 'SNOPT:', xopt, fopt, info


# gradient-based SQP method
optimizer = NLPQLP()

xopt, fopt, info = optimize(rosen, x0, lb, ub, optimizer)
print 'NLPQLP:', xopt, fopt, info


# gradient-based SQP method
optimizer = SLSQP()

xopt, fopt, info = optimize(rosen, x0, lb, ub, optimizer)
print 'SLSQP:', xopt, fopt, info


# particle swarm (gradient-free); expect many more fcalls than the SQP methods
optimizer = ALPSO()

xopt, fopt, info = optimize(rosen, x0, lb, ub, optimizer)
print 'ALPSO:', xopt, fopt, info


NSGA2: [ 0.9558299   0.91361147] 0.00195099739493 {'max-c-vio': 0.0, 'fcalls': 19316, 'time': 4.309035062789917}
SNOPT: [ 1.  1.] 1.82925621364e-18 {'code': {'text': 'optimality conditions satisfied', 'value': array([1], dtype=int32)}, 'max-c-vio': 0.0, 'fcalls': 42, 'time': 0.03094792366027832}
NLPQLP: [ 0.9998208   0.99964098] 3.21551663371e-08 {'max-c-vio': 0.0, 'fcalls': 46, 'time': 0.033833980560302734}
SLSQP: [ 0.99990575  0.99978891] 5.99147933121e-08 {'max-c-vio': 0.0, 'fcalls': 48, 'time': 0.023919105529785156}
ALPSO: [ 1.00024784  1.00053611] 2.24314846063e-07 {'max-c-vio': 0.0, 'fcalls': 1961, 'time': 0.40456700325012207}

Unconstrained, no gradients

The wrapper automatically detects if gradients are provided or not so you don't need to do anything special.


In [4]:
# ---- Rosenbrock, objective only (no gradients supplied) ------

def rosen(x):
    """Rosenbrock objective without gradients.

    The wrapper detects the 3-tuple return (f, c, ceq) and estimates
    gradients internally.  Unconstrained, so both constraint lists are empty.
    """
    f = (1 - x[0])**2 + 100*(x[1] - x[0]**2)**2
    return f, [], []



# rerun with SNOPT; with no gradients returned, the wrapper estimates them
optimizer = SNOPT()

xopt, fopt, info = optimize(rosen, x0, lb, ub, optimizer)
print 'SNOPT:', xopt, fopt, info


SNOPT: [ 0.99969953  0.99939864] 9.03093498134e-08 {'code': {'text': 'optimality conditions satisfied', 'value': array([1], dtype=int32)}, 'max-c-vio': 0.0, 'fcalls': 122, 'time': 0.0373690128326416}

Linear constraints


In [5]:
# ---- test linear constraints ------

# quadratic-program data: minimize 0.5*x'Hx + f'x  subject to  A*x <= b
H = np.array([[1.0, -1.0], [-1.0, 2.0]])
f = np.array([-2.0, -6.0])
A = np.array([[1.0, 1.0], [-1.0, 2.0], [2.0, 1.0]])
b = np.array([2.0, 2.0, 3.0])


def obj(x):
    """Quadratic objective 0.5*x'Hx + f'x.

    The linear constraints A*x <= b are handed to the wrapper separately,
    so this function returns empty constraint lists.
    """
    quadratic = 0.5*np.dot(x, np.dot(H, x))
    linear = np.dot(f, x)
    return quadratic + linear, [], []


# start well outside the feasible region; variables bounded to [0, 100]
x0 = [4.0, 4.0]
ub = [100.0, 100.0]
lb = [0.0, 0.0]


optimizer = SNOPT()

# A and b passed positionally define the linear constraints A*x <= b
xopt, fopt, info = optimize(obj, x0, lb, ub, optimizer, A, b)
print 'linear constraints:', xopt, fopt, info


linear constraints: [ 0.66666667  1.33333333] -8.22222222222 {'code': {'text': 'optimality conditions satisfied', 'value': array([1], dtype=int32)}, 'max-c-vio': 0.0, 'fcalls': 8, 'time': 0.0033330917358398438}

Constrained problem with gradients


In [6]:
# ------ constrained problem ---------

from math import exp

def barnes(x):
    """Barnes test problem with analytic gradients.

    Two design variables, three inequality constraints, no equality
    constraints.  Returns (f, c, ceq, gf, gc, gceq); the objective and its
    gradient are both divided by 30 (a scaling — presumably for optimizer
    conditioning; the constraint values are left unscaled).
    """
    # fixed coefficients of the polynomial/exponential response surface
    a1 = 75.196
    a3 = 0.12694
    a5 = 1.0345e-5
    a7 = 0.030234
    a9 = 3.5256e-5
    a11 = 0.25645
    a13 = 1.3514e-5
    a15 = -5.2375e-6
    a17 = 7.0e-10
    a19 = -1.6638e-6
    a21 = 0.0005
    a2 = -3.8112
    a4 = -2.0567e-3
    a6 = -6.8306
    a8 = -1.28134e-3
    a10 = -2.266e-7
    a12 = -3.4604e-3
    a14 = -28.106
    a16 = -6.3e-8
    a18 = 3.4054e-4
    a20 = -2.8673

    # intermediate monomials reused throughout f and its derivatives
    x1 = x[0]
    x2 = x[1]
    y1 = x1*x2
    y2 = y1*x1
    y3 = x2**2
    y4 = x1**2

    # --- function value ---

    f = a1 + a2*x1 + a3*y4 + a4*y4*x1 + a5*y4**2 + \
        a6*x2 + a7*y1 + a8*x1*y1 + a9*y1*y4 + a10*y2*y4 + \
        a11*y3 + a12*x2*y3 + a13*y3**2 + a14/(x2+1) + \
        a15*y3*y4 + a16*y1*y4*x2 + a17*y1*y3*y4 + a18*x1*y3 + \
        a19*y1*y3 + a20*exp(a21*y1)

    # --- constraints ---
    # three inequality constraints (wrapper convention appears to be c <= 0)

    c = np.zeros(3)
    c[0] = 1 - y1/700.0
    c[1] = y4/25.0**2 - x2/5.0
    c[2] = (x1/500.0- 0.11) - (x2/50.0-1)**2


    # --- derivatives of f ---

    # chain rule w.r.t. x1: dy* hold d(y*)/dx1 for this section
    dy1 = x2
    dy2 = y1 + x1*dy1
    dy4 = 2*x1
    dfdx1 = a2 + a3*dy4 + a4*y4 + a4*x1*dy4 + a5*2*y4*dy4 + \
        a7*dy1 + a8*y1 + a8*x1*dy1 + a9*y1*dy4 + a9*y4*dy1 + a10*y2*dy4 + a10*y4*dy2 + \
        a15*y3*dy4 + a16*x2*y1*dy4 + a16*x2*y4*dy1 + a17*y3*y1*dy4 + a17*y3*y4*dy1 + a18*y3 + \
        a19*y3*dy1 + a20*exp(a21*y1)*a21*dy1

    # dy* reused with a new meaning: d(y*)/dx2 for this section
    dy1 = x1
    dy2 = x1*dy1
    dy3 = 2*x2
    dfdx2 = a6 + a7*dy1 + a8*x1*dy1 + a9*y4*dy1 + a10*y4*dy2 + \
        a11*dy3 + a12*x2*dy3 + a12*y3 + a13*2*y3*dy3 + a14*-1/(x2+1)**2 + \
        a15*y4*dy3 + a16*y4*y1 + a16*y4*x2*dy1 + a17*y4*y1*dy3 + a17*y4*y3*dy1 + a18*x1*dy3 + \
        a19*y3*dy1 + a19*y1*dy3 + a20*exp(a21*y1)*a21*dy1

    dfdx = np.array([dfdx1, dfdx2])


    # --- derivatives of c ---
    # Jacobian: dcdx[i, j] = d(c[i])/d(x[j])

    dcdx = np.zeros((3, 2))
    dcdx[0, 0] = -x2/700.0
    dcdx[0, 1] = -x1/700.0
    dcdx[1, 0] = 2*x1/25**2
    dcdx[1, 1] = -1.0/5
    dcdx[2, 0] = 1.0/500
    dcdx[2, 1] = -2*(x2/50.0-1)/50.0

    # dcdx = np.transpose(dcdx)  # matlab format

    # objective and its gradient carry the same 1/30 scaling; no equality constraints
    return f/30.0, c, [], dfdx/30.0, dcdx, []

In [7]:
# starting point and variable bounds for the Barnes problem
x0 = np.array([10.0, 10.0])
lb = np.array([0.0, 0.0])
ub = np.array([65.0, 70.0])

optimizer = SNOPT()
xopt, fopt, info = optimize(barnes, x0, lb, ub, optimizer)
print 'barnes:', xopt, fopt, info


barnes: [ 49.52545372  19.62216397] -1.05456035947 {'code': {'text': 'optimality conditions satisfied', 'value': array([1], dtype=int32)}, 'max-c-vio': 1.1140895539440976e-07, 'fcalls': 40, 'time': 0.04505300521850586}

Multiobjective GA (with a constraint just for fun)

Note that the constraint could actually be a bound constraint. It's just there to show that NSGA2 can handle constraints. Because this is a multiobjective problem, xopt contains the entire pareto front, and fopt the objective functions along that front.


In [8]:
# --------- multiobjective ----------

def multiobj(x):
    """Two competing quadratic objectives plus one inequality constraint.

    f[0] is minimized at (-2, -2) and f[1] at (2, 2), so the pareto front
    trades one off against the other.  The constraint x[0] - 1 <= 0 is
    included just to demonstrate constrained multiobjective optimization.
    """
    f = np.zeros(2)
    f[0] = (x[0]+2)**2 + (x[1]+2)**2 - 10
    f[1] = (x[0]-2)**2 + (x[1]-2)**2 + 20

    # single inequality constraint; no equality constraints
    return f, [x[0] - 1], []

x0 = [4.0, 4.0]
ub = [20.0, 20.0]
lb = [-20.0, -20.0]

# NSGA2 handles multiobjective problems natively
optimizer = NSGA2()
optimizer.setOption('maxGen', 200)

xopt, fopt, info = optimize(multiobj, x0, lb, ub, optimizer)
print 'NSGA2:'
print 'xopt =', xopt  # every point on the returned pareto front
print 'fopt =', fopt  # both objective values at each of those points
print 'info =', info


NSGA2:
xopt = [[ 0.9973342   2.009444  ]
 [-2.000297   -1.999851  ]
 [-0.8424979  -0.9744967 ]
 [ 0.02680356  0.03089153]
 [-0.5519721  -0.4703299 ]
 [ 0.2442713   0.08768983]
 [-1.153866   -1.099569  ]
 [ 0.9913403   0.8927175 ]
 [-1.138731   -0.9416537 ]
 [-0.9940932  -0.9199558 ]
 [-0.5967224  -1.017377  ]
 [-0.4262689  -0.793217  ]
 [-0.5967224  -1.017377  ]
 [ 0.01214987 -0.07410228]
 [ 0.99112     0.6686376 ]
 [ 0.9948042   0.8117054 ]
 [-1.654517   -1.423697  ]
 [ 0.414778   -0.1967281 ]
 [-1.282795   -1.533888  ]
 [-1.614406   -1.540971  ]
 [ 0.9948919   1.946815  ]
 [ 0.7902784   0.932416  ]
 [ 0.6830573   0.4643452 ]
 [ 0.9548193   0.5935744 ]
 [-0.1937757  -0.05834315]
 [-0.5519721  -0.8497754 ]
 [ 0.9948919   1.47879   ]
 [-1.303152   -1.610229  ]
 [ 0.6830573   0.6480824 ]
 [ 0.4844151   0.3555492 ]
 [ 0.9811146   1.135902  ]
 [ 0.9913403   1.015234  ]
 [-1.591907   -1.994926  ]
 [ 0.9972808   1.369168  ]
 [-1.099699   -1.20393   ]
 [ 0.9973342   1.786137  ]
 [ 0.1110038   0.4321063 ]
 [ 0.9972808   1.320684  ]
 [-1.220242   -1.165962  ]
 [-1.856053   -2.012915  ]
 [-1.614406   -1.845954  ]
 [-1.819482   -1.845954  ]
 [-0.4262689  -0.2839553 ]
 [ 0.9683428   1.123468  ]
 [ 0.9806441   1.690237  ]
 [ 0.4682138   0.2776352 ]
 [-1.573466   -1.678628  ]
 [-0.1064255   0.2776352 ]
 [ 0.9678074   1.274204  ]
 [-1.230395   -0.8044647 ]
 [-0.4325716  -0.1912461 ]
 [-1.303152   -1.318025  ]
 [-1.440802   -1.535886  ]
 [-0.4362289  -0.3061611 ]
 [ 0.9973342   1.562358  ]
 [-1.856053   -2.012915  ]
 [ 0.6455867   0.4643452 ]
 [ 0.3433307   0.08988915]
 [-1.856053   -1.897486  ]
 [ 0.6830573   0.5803916 ]
 [ 0.4682138   0.2776352 ]
 [ 0.5037756   0.3730376 ]
 [-0.4325716  -0.7966677 ]
 [ 0.5072434   1.015234  ]
 [-1.230395   -1.309264  ]
 [-1.220242   -1.520204  ]
 [ 0.666032    0.3555492 ]
 [ 0.9972808   1.870887  ]
 [-0.45138     0.03089153]
 [-0.4296123  -0.1670525 ]
 [ 0.666032    0.3555492 ]
 [ 0.9973342   1.786137  ]
 [-1.602588   -1.679427  ]
 [-1.602588   -1.776042  ]
 [-0.6307842  -0.6857076 ]
 [-0.2647143  -0.07410228]
 [-0.403543   -0.1289867 ]
 [ 0.07786064  0.5253304 ]
 [-0.5592517  -0.3843156 ]
 [-1.230395   -1.309264  ]
 [ 0.9972808   1.258003  ]
 [ 0.9972808   1.870887  ]
 [-1.856053   -1.897486  ]
 [ 0.9973342   1.574695  ]
 [-1.220242   -1.212329  ]
 [-0.45138     0.03089153]
 [-0.2647143  -0.07410228]
 [ 0.9546687   1.47879   ]
 [-1.303152   -1.318025  ]
 [-0.4993265  -0.8560301 ]
 [-0.5967224  -0.3843156 ]
 [-0.2810732   0.08768191]
 [-1.777686   -1.845954  ]
 [ 0.07786064  0.5462706 ]
 [ 0.9913403   1.015234  ]
 [-1.220242   -1.520204  ]
 [ 0.5578615   0.8343817 ]
 [ 0.07786064  0.5462706 ]
 [-1.614406   -1.845954  ]
 [ 0.5037756   0.9521957 ]]
fopt = [[ 15.05966    21.00543  ]
 [-10.         52.00118  ]
 [ -7.608532   36.92742  ]
 [ -1.767547   27.77089  ]
 [ -5.563325   32.61509  ]
 [ -0.6047975  26.73951  ]
 [ -8.473281   39.5542   ]
 [  7.315931   22.24347  ]
 [ -8.138119   38.50496  ]
 [ -7.821656   37.49074  ]
 [ -7.065264   35.84753  ]
 [ -6.067045   33.68884  ]
 [ -7.065264   35.84753  ]
 [ -2.242171   28.25345  ]
 [  6.068425   22.79036  ]
 [  6.874539   22.42246  ]
 [ -9.548517   45.0772   ]
 [ -0.9170578  27.33854  ]
 [ -9.268356   43.2651   ]
 [ -9.64061    45.60241  ]
 [ 14.54672    21.01307  ]
 [  6.384717   22.60316  ]
 [  3.271794   24.09257  ]
 [  5.457585   23.07044  ]
 [ -2.967523   29.04943  ]
 [ -6.580198   34.63378  ]
 [ 11.07136    21.2819   ]
 [ -9.362481   43.94456  ]
 [  4.211137   23.56202  ]
 [  1.72093    25.00122  ]
 [  8.720927   21.78479  ]
 [  8.03975    21.98716  ]
 [ -9.833434   48.86123  ]
 [ 10.33499    21.40339  ]
 [ -8.555732   39.8733   ]
 [ 13.31884    21.05108  ]
 [  0.371478   26.0266   ]
 [ 10.01063    21.46692  ]
 [ -8.696358   40.39327  ]
 [ -9.979112   50.97263  ]
 [ -9.827587   47.85529  ]
 [ -9.943683   49.3798   ]
 [ -4.578561   31.10323  ]
 [  8.567112   21.83262  ]
 [ 12.50209    21.13504  ]
 [  1.279701   25.31291  ]
 [ -9.714789   46.30196  ]
 [ -1.226754   27.40357  ]
 [  9.528293   21.5922   ]
 [ -7.978403   38.30047  ]
 [ -4.271578   30.71896  ]
 [ -9.049312   41.9201   ]
 [ -9.471896   44.34161  ]
 [ -4.68553    31.25359  ]
 [ 11.67441    21.19687  ]
 [ -9.979112   50.97263  ]
 [  3.072126   24.19267  ]
 [ -0.1411646  26.39308  ]
 [ -9.96877    50.05954  ]
 [  3.857218   23.74963  ]
 [  1.279701   25.31291  ]
 [  1.9002     24.88569  ]
 [ -6.09516    33.73875  ]
 [  5.377903   23.19809  ]
 [ -8.930592   41.38668  ]
 [ -9.161773   42.76179  ]
 [  2.656339   24.48369  ]
 [ 13.96746    21.02212  ]
 [ -3.477256   29.88665  ]
 [ -4.174186   30.59913  ]
 [  2.656339   24.48369  ]
 [ 13.31884    21.05108  ]
 [ -9.739297   46.51683  ]
 [ -9.791907   47.23714  ]
 [ -6.397884   34.13405  ]
 [ -3.279702   29.43083  ]
 [ -3.950634   30.3096   ]
 [  0.6947985  25.86927  ]
 [ -5.313808   32.23473  ]
 [ -8.930592   41.38668  ]
 [  9.598277   21.55601  ]
 [ 13.96746    21.02212  ]
 [ -9.96877    50.05954  ]
 [ 11.76245    21.18622  ]
 [ -8.771552   40.68902  ]
 [ -3.477256   29.88665  ]
 [ -3.279702   29.43083  ]
 [ 10.83205    21.36438  ]
 [ -9.049312   41.9201   ]
 [ -6.439312   34.40354  ]
 [ -5.420376   32.42793  ]
 [ -2.686875   28.86026  ]
 [ -9.926846   49.06227  ]
 [  0.8009987  25.80795  ]
 [  8.03975    21.98716  ]
 [ -9.161773   42.76179  ]
 [  4.576375   23.43843  ]
 [  0.8009987  25.80795  ]
 [ -9.827587   47.85529  ]
 [  4.984351   23.33658  ]]
info = {'max-c-vio': 0.0, 'fcalls': 19973, 'time': 4.510145902633667}

Passing in Extra Arguments

Although you could handle this by setting parameters in the scope outside of the function, it's often more convenient and clearer to just pass them in. The wrapper allows this. Note that the optimal $f^*$ is not zero because of the passed-in parameters.


In [9]:
def rosenarg(x, a, b):  # note two extra arguments, you can add as many as you want, and they can be whatever type
    """Rosenbrock objective shifted by the constant offset a + b.

    The extra arguments are supplied through optimize(..., args=(a, b)),
    so the minimum value is a + b rather than zero.
    """
    offset = a + b
    f = (1 - x[0])**2 + 100*(x[1] - x[0]**2)**2 + offset
    return f, [], []


x0 = [4.0, 4.0]
ub = [5.0, 5.0]
lb = [-5.0, -5.0]

optimizer = SNOPT()

xopt, fopt, info = optimize(rosenarg, x0, lb, ub, optimizer, args=(3.0, 4.0))  # pass the values for args here in a tuple
print 'args:', xopt, fopt, info


args: [ 0.99969953  0.99939864] 7.00000009031 {'code': {'text': 'optimality conditions satisfied', 'value': array([1], dtype=int32)}, 'max-c-vio': 0.0, 'fcalls': 122, 'time': 0.04348182678222656}

Equality Constraints

This example uses the same Barnes function from earlier, but adds some additional equality constraints.


In [10]:
def barneseq(x):
    """Barnes objective with two added equality constraints (no gradients).

    Reuses the barnes() objective and inequality constraints, then appends
    two equality constraints.  Returns (f, c, ceq); gradients are omitted
    so the wrapper estimates them.
    """
    # barnes returns (f, c, ceq, gf, gc, gceq).  Discard the unused pieces
    # with throwaway names instead of unpacking into empty-list literals,
    # which would raise ValueError if barnes ever returned non-empty
    # equality entries.
    f, c, _, _, _, _ = barnes(x)

    x1 = x[0]
    x2 = x[1]

    ceq = np.zeros(2)
    ceq[0] = x1**2 + x2**2 - 3000  # circle: x1^2 + x2^2 = 3000
    ceq[1] = x1 + 2*x2 - 120       # line: x1 + 2*x2 = 120

    return f, c, ceq

# same bounds and start as the earlier Barnes run
x0 = np.array([10.0, 10.0])
lb = np.array([0.0, 0.0])
ub = np.array([65.0, 70.0])

optimizer = SNOPT()
xopt, fopt, info = optimize(barneseq, x0, lb, ub, optimizer)
print 'barneseq:', xopt, fopt, info


barneseq: [ 33.79795897  43.10102051] 0.0254712599953 {'code': {'text': 'optimality conditions satisfied', 'value': array([1], dtype=int32)}, 'max-c-vio': 8.6713953351136297e-08, 'fcalls': 26, 'time': 0.011448860168457031}

Equality Constraints with Gradients

This is the same example, but with analytic gradient supplied.


In [11]:
def barneseqgrad(x):
    """Barnes objective with equality constraints and analytic gradients.

    Same problem as barneseq(), but also returns the objective and
    constraint gradients: (f, c, ceq, gf, gc, gceq).
    """
    # barnes returns (f, c, ceq, gf, gc, gceq); keep the objective and
    # inequality gradients, and discard the empty equality entries with
    # throwaway names rather than empty-list literal targets (which would
    # raise ValueError if barnes ever returned non-empty equality parts).
    f, c, _, gf, gc, _ = barnes(x)

    x1 = x[0]
    x2 = x[1]

    ceq = np.zeros(2)
    ceq[0] = x1**2 + x2**2 - 3000  # circle: x1^2 + x2^2 = 3000
    ceq[1] = x1 + 2*x2 - 120       # line: x1 + 2*x2 = 120

    # Jacobian of ceq: gceq[i, j] = d(ceq[i])/d(x[j])
    gceq = np.zeros((2, 2))
    gceq[0, 0] = 2*x1
    gceq[0, 1] = 2*x2
    gceq[1, 0] = 1.0
    gceq[1, 1] = 2.0

    return f, c, ceq, gf, gc, gceq

# identical setup; should match barneseq's optimum but with fewer fcalls
x0 = np.array([10.0, 10.0])
lb = np.array([0.0, 0.0])
ub = np.array([65.0, 70.0])

optimizer = SNOPT()
xopt, fopt, info = optimize(barneseqgrad, x0, lb, ub, optimizer)
print 'barneseqgrad:', xopt, fopt, info


barneseqgrad: [ 33.79795897  43.10102051] 0.025471259997 {'code': {'text': 'optimality conditions satisfied', 'value': array([1], dtype=int32)}, 'max-c-vio': 8.3525264926720411e-08, 'fcalls': 10, 'time': 0.010693073272705078}

In [ ]: