Programming Assignment: Оптимизация в Python — глобальная оптимизация и оптимизация негладкой функции



In [5]:
import numpy as np
import scipy.optimize as opt
from matplotlib import pyplot as plt
import math

In [6]:
def f(x):
    """Smooth 1-D objective: sin(x/5) * exp(x/10) + 5 * exp(-x/2).

    Has several local minima on [1, 30], which makes it a good demo
    for local vs. global optimization methods.
    """
    oscillating_growth = math.sin(x / 5) * math.exp(x / 10)
    fast_decay = 5 * math.exp(-x / 2)
    return oscillating_growth + fast_decay

In [9]:
# Run a local minimizer from every integer starting point in [1, 30]
# to show how the found minimum depends on the initial guess.
for start_point in range(1, 31):
    result = opt.minimize(f, start_point)
    print(start_point, result.x)


1 [ 4.13628706]
2 [ 4.13627619]
3 [ 4.13623332]
4 [ 4.13625698]
5 [ 4.13628831]
6 [ 4.13629005]
7 [ 4.13628939]
8 [ 4.13628868]
9 [ 4.13630231]
10 [ 4.13630013]
11 [ 25.88019285]
12 [ 25.88019227]
13 [ 88.71203612]
14 [ 25.88019309]
15 [ 24.67766107]
16 [ 25.88019877]
17 [ 25.8801768]
18 [ 25.88019851]
19 [ 25.88017637]
20 [ 25.88018742]
21 [ 25.88020626]
22 [ 25.88019447]
23 [ 25.88019265]
24 [ 25.88019536]
25 [ 25.88018592]
26 [ 25.88019563]
27 [ 25.88019774]
28 [ 25.88019333]
29 [ 25.88020466]
30 [ 25.880193]

In [11]:
# L-BFGS-B from x0=2 converges to the local minimum near x ≈ 4.136.
opt.minimize(f, x0=2, method='L-BFGS-B')


Out[11]:
      fun: 1.745268290332287
 hess_inv: <1x1 LbfgsInvHessProduct with dtype=float64>
      jac: array([ -2.88657986e-07])
  message: b'CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL'
     nfev: 14
      nit: 6
   status: 0
  success: True
        x: array([ 4.13628675])

In [12]:
# L-BFGS-B from x0=30 finds the global minimum near x ≈ 25.88.
opt.minimize(f, x0=30, method='L-BFGS-B')


Out[12]:
      fun: -11.898894665981299
 hess_inv: <1x1 LbfgsInvHessProduct with dtype=float64>
      jac: array([  1.77635684e-07])
  message: b'CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL'
     nfev: 14
      nit: 6
   status: 0
  success: True
        x: array([ 25.88019331])

In [13]:
%matplotlib inline
x = [x for x in range(1, 31)]
y = [f(x) for x in range(1, 31)]
plt.plot(x, y)


Out[13]:
[<matplotlib.lines.Line2D at 0x118b44780>]

In [16]:
# Global optimization: differential evolution searches the whole
# interval [1, 30] and finds the global minimum regardless of a start point.
search_bounds = [(1, 30)]
result = opt.differential_evolution(f, search_bounds)
print(result.x, result.fun)


[ 25.88019344] -11.898894665981272

In [19]:
def h(x):
    """Non-smooth, piecewise-constant variant of f.

    int() truncates f(x) toward zero, producing a step function whose
    gradient is zero almost everywhere — gradient-based methods stall on it.
    """
    smooth_value = f(x)
    return int(smooth_value)
# Plot the step function h on the integer grid 1..30.
# list(range(...)) replaces the redundant identity comprehension
# [x for x in range(1, 31)], which also shadowed the loop variable.
x = list(range(1, 31))
y = [h(v) for v in x]
plt.plot(x, y)


Out[19]:
[<matplotlib.lines.Line2D at 0x119024358>]

In [20]:
# Gradient-based L-BFGS-B stalls immediately on the piecewise-constant h:
# the numerical gradient is zero at x0=30, so it stops after 0 iterations.
opt.minimize(h, x0=30, method='L-BFGS-B')


Out[20]:
      fun: -5
 hess_inv: <1x1 LbfgsInvHessProduct with dtype=float64>
      jac: array([ 0.])
  message: b'CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL'
     nfev: 2
      nit: 0
   status: 0
  success: True
        x: array([ 30.])

In [21]:
# Differential evolution needs no gradients, so it still finds the
# global minimum of the non-smooth h on [1, 30].
search_bounds = [(1, 30)]
result = opt.differential_evolution(h, search_bounds)
print(result.x, result.fun)


[ 25.86586295] -11.0

In [ ]: