In [ ]:
# Init matplotlib
%matplotlib inline
import matplotlib
import matplotlib.pyplot as plt
matplotlib.rcParams['figure.figsize'] = (8, 8)
In [ ]:
# Setup PyAI
import sys
sys.path.insert(0, '/Users/jdecock/git/pub/jdhp/pyai')
In [ ]:
import numpy as np
import time
import warnings
from scipy import optimize
In [ ]:
# Plot functions
from pyai.optimize.utils import array_list_to_array
from pyai.optimize.utils import plot_err_wt_num_feval
In [ ]:
## Alternative objective function: the Rosenbrock function (SciPy's implementation)
#func = optimize.rosen
In [ ]:
# Set the objective function
#from pyai.optimize.functions import sphere as func
from pyai.optimize.functions import Sphere
#from pyai.optimize.functions import additive_gaussian_noise as noise
from pyai.optimize.functions import multiplicative_gaussian_noise as noise
#from pyai.optimize.functions import additive_poisson_noise as noise
func = Sphere(ndim=4)
func.noise = noise # Comment this line to use a deterministic objective function
In [ ]:
print(func)
print(func.bounds)
print(func.ndim)
print(func.arg_min)
print(func(func.arg_min))
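Because the noise wrapper is attached, repeated evaluations at the same point return different values. A quick illustrative sanity check (assuming `func` is the noisy `Sphere` instance configured above):
In [ ]:
# Evaluate the noisy objective several times at the optimum;
# with multiplicative Gaussian noise the returned values should vary between calls.
for _ in range(5):
    print(func(func.arg_min))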
In [ ]:
%%time
bh_eval_error_array_list = []

NUM_RUNS = 100

for run_index in range(NUM_RUNS):
    x_init = np.random.random(func.ndim)   # draw samples in [0.0, 1.0)
    min_bounds = func.bounds[0]
    max_bounds = func.bounds[1]
    x_init *= (max_bounds - min_bounds)
    x_init += min_bounds

    func.do_eval_logs = True
    func.reset_eval_counters()
    func.reset_eval_logs()

    init_time = time.time()

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        res = optimize.basinhopping(func,
                                    x_init,      # The initial point
                                    niter=100,   # The number of basin hopping iterations
                                    disp=False)  # Whether to print status messages

    func.do_eval_logs = False

    eval_error_array = np.array(func.eval_logs_dict['fx']) - func(func.arg_min)

    print("x* =", res.x)
    print("f(x*) =", res.fun)
    #print("Cause of the termination:", ";".join(res.message))
    #print("Number of evaluations of the objective functions:", res.nfev)
    #print("Number of evaluations of the jacobian:", res.njev)
    #print("Number of iterations performed by the optimizer:", res.nit)

    bh_eval_error_array_list.append(eval_error_array)
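As a quick, illustrative summary of the Basin Hopping runs (this sketch assumes each entry of `bh_eval_error_array_list` is a 1-D array of per-evaluation errors, as built above):
In [ ]:
# Best (minimum) logged error per Basin Hopping run, aggregated over all runs.
# Note: with a noisy objective these logged errors are themselves noisy.
bh_best_errors = np.array([err.min() for err in bh_eval_error_array_list])
print("mean best error:  ", bh_best_errors.mean())
print("median best error:", np.median(bh_best_errors))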
In [ ]:
%%time
de_eval_error_array_list = []

NUM_RUNS = 100

for run_index in range(NUM_RUNS):
    bounds = func.bounds.T.tolist()

    func.do_eval_logs = True
    func.reset_eval_counters()
    func.reset_eval_logs()

    init_time = time.time()

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        res = optimize.differential_evolution(func,
                                              bounds,        # The bounds of the search space
                                              maxiter=100,   # The maximum number of generations
                                              polish=False,
                                              disp=False)    # Whether to print status messages

    func.do_eval_logs = False

    eval_error_array = np.array(func.eval_logs_dict['fx']) - func(func.arg_min)

    print("x* =", res.x)
    print("f(x*) =", res.fun)
    #print("Cause of the termination:", ";".join(res.message))
    #print("Number of evaluations of the objective functions:", res.nfev)
    #print("Number of evaluations of the jacobian:", res.njev)
    #print("Number of iterations performed by the optimizer:", res.nit)

    de_eval_error_array_list.append(eval_error_array)
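For reference, `scipy.optimize.differential_evolution` expects its `bounds` argument as one `(min, max)` pair per dimension, which is why `func.bounds` is transposed above. A quick check (assuming `func.bounds` is a 2×ndim NumPy array, as used in the cell above):
In [ ]:
# Row 0 of func.bounds holds the lower bounds, row 1 the upper bounds;
# transposing yields one (min, max) pair per dimension, as DE expects.
print(func.bounds.shape)       # expected: (2, func.ndim)
print(func.bounds.T.tolist())  # [[min_1, max_1], ..., [min_d, max_d]]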
In [ ]:
from pyai.optimize import SAES
In [ ]:
%%time
saes_eval_error_array_list = []

NUM_RUNS = 100

for run_index in range(NUM_RUNS):
    saes = SAES()

    func.do_eval_logs = True
    func.reset_eval_counters()
    func.reset_eval_logs()

    res = saes.minimize(func, init_pop_mu=0., init_pop_sigma=6., mu=3, lmb=6)

    func.do_eval_logs = False

    eval_error_array = np.array(func.eval_logs_dict['fx']) - func(func.arg_min)

    print("x* =", res)

    saes_eval_error_array_list.append(eval_error_array)
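The three optimizers do not consume the same number of function evaluations per run, which matters when reading the convergence plot below. A rough check on the first run of each (assuming each list holds one error array per run):
In [ ]:
# Number of logged objective-function evaluations in the first run of each optimizer
print("Basin Hopping:         ", len(bh_eval_error_array_list[0]))
print("Differential Evolution:", len(de_eval_error_array_list[0]))
print("SAES:                  ", len(saes_eval_error_array_list[0]))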
In [ ]:
fig, ax = plt.subplots(figsize=(12, 8))
#plot_err_wt_num_feval(array_list_to_array(bh_eval_error_array_list), x_log=True, y_log=True, ax=ax, label="Basin Hopping", plot_option="mean")
plot_err_wt_num_feval(array_list_to_array(de_eval_error_array_list), x_log=True, y_log=True, ax=ax, label="Differential Evolution", plot_option="mean")
plot_err_wt_num_feval(array_list_to_array(saes_eval_error_array_list), x_log=True, y_log=True, ax=ax, label="SAES", plot_option="mean")
ax.legend(fontsize='x-large')
plt.show()
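Optionally, the comparison figure can be saved to disk with matplotlib's standard `savefig` (the file name below is arbitrary):
In [ ]:
# Save the convergence comparison plot (hypothetical file name)
fig.savefig("noisy_sphere_convergence.png", bbox_inches='tight')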