Unconstrained global optimization with Scipy

TODO:

  • Plots:
    1. error w.r.t. ... => add an option to plot the current solution or the best solution found so far
    2. error w.r.t. the number of function evaluations + error w.r.t. the total number of evaluations (i.e. including gradient and Hessian evaluations)
    3. (benchmark session! distinguish the derivative-free case from the derivative-based case) averaged version of the previous plots over several runs with random initial states (+ error bars or box plots)
    4. (benchmark session) error w.r.t. the algorithms' parameters (plot the number of iterations, the number of evaluations or the execution time needed to reach, on average, an error lower than N%, with e.g. N=99%)

Import required modules


In [ ]:
# Init matplotlib

%matplotlib inline

import matplotlib
import matplotlib.pyplot as plt    # plt is used for the subplot figures below

matplotlib.rcParams['figure.figsize'] = (8, 8)

In [ ]:
# Setup PyAI
import sys
sys.path.insert(0, '/Users/jdecock/git/pub/jdhp/pyai')

In [ ]:
import numpy as np
import time
import warnings

from scipy import optimize

In [ ]:
# Plot functions
from pyai.optimize.utils import plot_contour_2d_solution_space
from pyai.optimize.utils import plot_2d_solution_space

from pyai.optimize.utils import array_list_to_array
from pyai.optimize.utils import plot_fx_wt_iteration_number
from pyai.optimize.utils import plot_err_wt_iteration_number
from pyai.optimize.utils import plot_err_wt_execution_time
from pyai.optimize.utils import plot_err_wt_num_feval

Define the objective function


In [ ]:
## Objective function: Rosenbrock function (Scipy's implementation)
#func = optimize.rosen   # i.e. scipy.optimize.rosen

In [ ]:
# Set the objective function
#from pyai.optimize.functions import sphere as func
from pyai.optimize.functions import sphere2d as func
#from pyai.optimize.functions import additive_gaussian_noise as noise
from pyai.optimize.functions import multiplicative_gaussian_noise as noise
#from pyai.optimize.functions import additive_poisson_noise as noise

func.noise = noise      # Comment this line to use a deterministic objective function

xmin = func.bounds[0]
xmax = func.bounds[1]

In [ ]:
print(func)
print(xmin)
print(xmax)
print(func.ndim)
print(func.arg_min)
print(func(func.arg_min))

The "basin-hopping" algorithm

Basin-hopping is a stochastic global optimization algorithm: at each iteration it randomly perturbs the current point, runs a local minimization from the perturbed point, and accepts or rejects the resulting local minimum with a Metropolis-like criterion.

Official documentation: https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.basinhopping.html
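
The cell below gives a minimal, self-contained sketch of this loop, for illustration only (it is not SciPy's actual implementation); the helper name basin_hopping_sketch and its default parameters are arbitrary choices for this example.


In [ ]:
import numpy as np
from scipy import optimize

def basin_hopping_sketch(f, x0, niter=100, stepsize=0.5, temperature=1.0, seed=None):
    """Illustrative basin-hopping loop: perturb, locally minimize, accept/reject."""
    rng = np.random.default_rng(seed)
    x_curr = np.asarray(x0, dtype=float)
    f_curr = f(x_curr)
    x_best, f_best = x_curr.copy(), f_curr

    for _ in range(niter):
        # 1. Random perturbation ("hop") of the current point
        x_trial = x_curr + rng.uniform(-stepsize, stepsize, size=x_curr.shape)
        # 2. Local minimization started from the perturbed point
        res = optimize.minimize(f, x_trial)
        # 3. Metropolis-like acceptance test on the local minimum found
        if res.fun < f_curr or rng.random() < np.exp(-(res.fun - f_curr) / temperature):
            x_curr, f_curr = res.x, res.fun
        # 4. Keep track of the best point seen so far
        if f_curr < f_best:
            x_best, f_best = x_curr.copy(), f_curr

    return x_best, f_best

# Example on the Rosenbrock function
x_best, f_best = basin_hopping_sketch(optimize.rosen, np.zeros(2), seed=0)
print(x_best, f_best)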

Basic usage


In [ ]:
from scipy import optimize

x0 = np.random.uniform(-10., 10., size=2)

res = optimize.basinhopping(optimize.rosen,
                            x0,          # The initial point
                            niter=100)   # The number of basin hopping iterations

print("x* =", res.x)
print("f(x*) =", res.fun)
print("Cause of the termination:", ";".join(res.message))
print("Number of evaluations of the objective functions:", res.nfev)
print("Number of evaluations of the jacobian:", res.njev)
print("Number of iterations performed by the optimizer:", res.nit)

In [ ]:
print(res)

Performance analysis


In [ ]:
%%time

it_x_list = []
it_fx_list = []
it_time_list = []
it_num_eval_list = []

def callback(x, f, accept):
    it_x_list.append(x)
    it_fx_list.append(f)
    it_time_list.append(time.time() - init_time)
    if hasattr(func, 'num_eval'):
        it_num_eval_list.append(func.num_eval)
    print(len(it_x_list), x, f, accept, it_num_eval_list[-1] if it_num_eval_list else None)

x_init = np.random.random(func.ndim)   # draw samples in [0.0, 1.0)
min_bounds = func.bounds[0]
max_bounds = func.bounds[1]
x_init *= (max_bounds - min_bounds)
x_init += min_bounds

func.do_eval_logs = True
func.reset_eval_counters()
func.reset_eval_logs()

init_time = time.time()

with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    res = optimize.basinhopping(func,
                                x_init,            # The initial point
                                niter=100,         # The number of basin hopping iterations
                                callback=callback,
                                disp=False)        # Print status messages

func.do_eval_logs = False

eval_x_array = np.array(func.eval_logs_dict['x']).T
eval_error_array = np.array(func.eval_logs_dict['fx']) - func(func.arg_min)

it_x_array = np.array(it_x_list).T
it_error_array = np.array(it_fx_list) - func(func.arg_min)

it_time_array = np.array(it_time_list)
it_num_eval_array = np.array(it_num_eval_list)

print("x* =", res.x)
print("f(x*) =", res.fun)
print("Cause of the termination:", ";".join(res.message))
print("Number of evaluations of the objective functions:", res.nfev)
print("Number of evaluations of the jacobian:", res.njev)
print("Number of iterations performed by the optimizer:", res.nit)

In [ ]:
plot_contour_2d_solution_space(func,
                               xmin=xmin,
                               xmax=xmax,
                               xstar=res.x,
                               xvisited=it_x_array,
                               title="Basin-Hopping");

In [ ]:
plot_contour_2d_solution_space(func,
                               xmin=xmin,
                               xmax=xmax,
                               xstar=res.x,
                               xvisited=eval_x_array,
                               title="Basin-Hopping");

In [ ]:
print(eval_x_array.shape)
print(eval_error_array.shape)
print(it_x_array.shape)
print(it_error_array.shape)
print(it_time_array.shape)
print(it_num_eval_array.shape)

In [ ]:
fig, ax = plt.subplots(nrows=1, ncols=3, squeeze=True, figsize=(15, 5))

ax = ax.ravel()

plot_err_wt_iteration_number(it_error_array, ax=ax[0], x_log=True, y_log=True)
plot_err_wt_execution_time(it_error_array, it_time_array, ax=ax[1], x_log=True, y_log=True)
plot_err_wt_num_feval(it_error_array, it_num_eval_array, ax=ax[2], x_log=True, y_log=True)

plt.tight_layout();  # Avoid overlapping subplot margins

In [ ]:
plot_err_wt_num_feval(eval_error_array, x_log=True, y_log=True)

Benchmark


In [ ]:
%%time

eval_error_array_list = []

NUM_RUNS = 100

for run_index in range(NUM_RUNS):
    x_init = np.random.random(func.ndim)   # draw samples in [0.0, 1.0)
    min_bounds = func.bounds[0]
    max_bounds = func.bounds[1]
    x_init *= (max_bounds - min_bounds)
    x_init += min_bounds

    func.do_eval_logs = True
    func.reset_eval_counters()
    func.reset_eval_logs()

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        res = optimize.basinhopping(func,
                                    x_init,            # The initial point
                                    niter=100,         # The number of basin hopping iterations
                                    disp=False)        # Print status messages

    func.do_eval_logs = False

    eval_error_array = np.array(func.eval_logs_dict['fx']) - func(func.arg_min)

    print("x* =", res.x)
    print("f(x*) =", res.fun)
    #print("Cause of the termination:", ";".join(res.message))
    #print("Number of evaluations of the objective functions:", res.nfev)
    #print("Number of evaluations of the jacobian:", res.njev)
    #print("Number of iterations performed by the optimizer:", res.nit)
    
    eval_error_array_list.append(eval_error_array);

In [ ]:
plot_err_wt_num_feval(array_list_to_array(eval_error_array_list), x_log=True, y_log=True, plot_option="mean")

The "Differential Evolution" (DE) algorithm

Differential Evolution (DE) is a stochastic, population-based evolutionary algorithm which attempts to find the global minimum of a function: at each generation, candidate solutions are built by combining existing population members (mutation and crossover) and replace their parent only if they improve it (selection).

Official documentation: https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.differential_evolution.html

More information:
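
The cell below sketches the classic DE/rand/1/bin scheme, for illustration only (it is not SciPy's actual implementation); the helper name differential_evolution_sketch and its default parameters are arbitrary choices for this example.


In [ ]:
import numpy as np
from scipy import optimize

def differential_evolution_sketch(f, bounds, pop_size=20, maxiter=100,
                                  mutation=0.8, crossover=0.7, seed=None):
    """Illustrative DE loop: mutation, binomial crossover, greedy selection."""
    rng = np.random.default_rng(seed)
    bounds = np.asarray(bounds, dtype=float)
    ndim = len(bounds)

    # Initial population drawn uniformly within the bounds
    pop = rng.uniform(bounds[:, 0], bounds[:, 1], size=(pop_size, ndim))
    fitness = np.array([f(ind) for ind in pop])

    for _ in range(maxiter):
        for i in range(pop_size):
            # Mutation: combine three distinct members (all different from i)
            a, b, c = pop[rng.choice([j for j in range(pop_size) if j != i], 3, replace=False)]
            mutant = np.clip(a + mutation * (b - c), bounds[:, 0], bounds[:, 1])

            # Binomial crossover between the current member and the mutant
            mask = rng.random(ndim) < crossover
            mask[rng.integers(ndim)] = True            # keep at least one mutant component
            trial = np.where(mask, mutant, pop[i])

            # Greedy selection: keep the trial vector only if it improves the member
            f_trial = f(trial)
            if f_trial < fitness[i]:
                pop[i], fitness[i] = trial, f_trial

    i_best = np.argmin(fitness)
    return pop[i_best], fitness[i_best]

# Example on the Rosenbrock function
x_best, f_best = differential_evolution_sketch(optimize.rosen, [[-10, 10], [-10, 10]], seed=0)
print(x_best, f_best)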

Basic usage


In [ ]:
from scipy import optimize

bounds = [[-10, 10], [-10, 10]]

res = optimize.differential_evolution(optimize.rosen,
                                      bounds,              # The search bounds
                                      maxiter=100,         # The maximum number of generations
                                      polish=True)         # Polish the best member with L-BFGS-B at the end

print("x* =", res.x)
print("f(x*) =", res.fun)
print("Cause of the termination:", res.message)
print("Number of evaluations of the objective functions:", res.nfev)
print("Number of iterations performed by the optimizer:", res.nit)

In [ ]:
print(res)

Performance analysis


In [ ]:
%%time

bounds = func.bounds.T.tolist()

it_x_list = []
it_fx_list = []
it_time_list = []
it_num_eval_list = []

def callback(xk, convergence):
    it_x_list.append(xk)
    it_fx_list.append(func(xk))
    it_time_list.append(time.time() - init_time)
    if hasattr(func, 'num_eval'):
        it_num_eval_list.append(func.num_eval)
    print(len(it_x_list), xk, it_fx_list[-1], convergence, it_num_eval_list[-1] if it_num_eval_list else None)

func.do_eval_logs = True
func.reset_eval_counters()
func.reset_eval_logs()

init_time = time.time()

with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    res = optimize.differential_evolution(func,
                                          bounds,              # The search bounds
                                          maxiter=100,         # The maximum number of generations
                                          callback=callback,
                                          polish=False,
                                          disp=False)          # Print status messages

func.do_eval_logs = False

eval_x_array = np.array(func.eval_logs_dict['x']).T
eval_error_array = np.array(func.eval_logs_dict['fx']) - func(func.arg_min)

it_x_array = np.array(it_x_list).T
it_error_array = np.array(it_fx_list) - func(func.arg_min)

it_time_array = np.array(it_time_list)
it_num_eval_array = np.array(it_num_eval_list)

print("x* =", res.x)
print("f(x*) =", res.fun)
print("Cause of the termination:", res.message)
print("Number of evaluations of the objective functions:", res.nfev)
print("Number of iterations performed by the optimizer:", res.nit)

In [ ]:
plot_contour_2d_solution_space(func,
                               xmin=xmin,
                               xmax=xmax,
                               xstar=res.x,
                               xvisited=it_x_array,
                               title="Differential Evolution");

In [ ]:
plot_contour_2d_solution_space(func,
                               xmin=xmin,
                               xmax=xmax,
                               xstar=res.x,
                               xvisited=eval_x_array,
                               title="Differential Evolution");

In [ ]:
fig, ax = plt.subplots(nrows=1, ncols=3, squeeze=True, figsize=(15, 5))

ax = ax.ravel()

plot_err_wt_iteration_number(it_error_array, ax=ax[0], x_log=True, y_log=True)
plot_err_wt_execution_time(it_error_array, it_time_array, ax=ax[1], x_log=True, y_log=True)
plot_err_wt_num_feval(it_error_array, it_num_eval_array, ax=ax[2], x_log=True, y_log=True)

plt.tight_layout();  # Avoid overlapping subplot margins

In [ ]:
plot_err_wt_num_feval(eval_error_array, x_log=True, y_log=True);

Benchmark


In [ ]:
%%time

eval_error_array_list = []

NUM_RUNS = 100

for run_index in range(NUM_RUNS):
    bounds = func.bounds.T.tolist()

    func.do_eval_logs = True
    func.reset_eval_counters()
    func.reset_eval_logs()

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        res = optimize.differential_evolution(func,
                                              bounds,              # The search bounds
                                              maxiter=100,         # The maximum number of generations
                                              polish=False,
                                              disp=False)          # Print status messages

    func.do_eval_logs = False

    eval_error_array = np.array(func.eval_logs_dict['fx']) - func(func.arg_min)

    print("x* =", res.x)
    print("f(x*) =", res.fun)
    #print("Cause of the termination:", ";".join(res.message))
    #print("Number of evaluations of the objective functions:", res.nfev)
    #print("Number of evaluations of the jacobian:", res.njev)
    #print("Number of iterations performed by the optimizer:", res.nit)
    
    eval_error_array_list.append(eval_error_array);

In [ ]:
plot_err_wt_num_feval(array_list_to_array(eval_error_array_list), x_log=True, y_log=True, plot_option="mean")

The "simulated annealing" algorithm

The scipy.optimize.anneal function has been deprecated (SciPy 0.14) and later removed; the "basin-hopping" algorithm presented above is the recommended replacement.

See the official documentation for more details: https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.optimize.anneal.html.
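
As a minimal illustration (an example, not a universal drop-in replacement), an objective that used to be passed to anneal can be minimized with basinhopping instead:


In [ ]:
# Replace an old scipy.optimize.anneal call with basin-hopping;
# the objective and starting point below are just an example.
x0 = np.random.uniform(-10., 10., size=2)

res = optimize.basinhopping(optimize.rosen,
                            x0,           # The initial point
                            niter=100)    # The number of basin hopping iterations

print("x* =", res.x)
print("f(x*) =", res.fun)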