In [ ]:
%load_ext autoreload
%autoreload 2

In [ ]:
%matplotlib inline
%config InlineBackend.figure_format = 'svg'
import matplotlib.pyplot as plt
import seaborn as sns # prettify matplotlib

import numpy as np
import sklearn.gaussian_process as gp

In [ ]:
# local modules
import turbo as tb
import turbo.modules as tm
import turbo.plotting as tp
import turbo.gui as tg

In [ ]:
# make deterministic
np.random.seed(100)

Function to optimise:


In [ ]:
f = lambda x: x * np.cos(x)  # objective function
xmin, xmax = 0, 12
xs = np.linspace(xmin, xmax, num=200)

ys = f(xs)
best_y = np.min(ys)
best_x = xs[np.argmin(ys)]

In [ ]:
plt.figure(figsize=(12, 4))
plt.plot(xs, ys, 'g-', label='objective')
plt.plot(best_x, best_y, 'bo', label='optimum')
plt.legend(loc='upper left')
plt.margins(0.01, 0.1)
plt.xlabel(r'$x$')
plt.ylabel(r'$f(x)$')
plt.show()

In [ ]:
bounds = [
    ('x', xmin, xmax)
]

op = tb.Optimiser(f, 'min', bounds)
op.latent_space = tm.NoLatentSpace()
op.plan = tm.Plan(pre_phase_trials=3)
op.pre_phase_select = tm.random_selector()
op.maximise_acq = tm.random_quasi_newton(num_random=100, grad_restarts=5)
op.async_eval = None
op.surrogate_factory = tm.SciKitGPSurrogate.Factory(gp_params=dict(
    alpha = 1e-3, # larger => more noise. Default = 1e-10
    kernel = 1.0 * gp.kernels.RBF(),
    n_restarts_optimizer = 10,
    normalize_y = True,
    copy_X_train = True # make a copy of the training data
))
op.acq_func_factory = tm.UCB.Factory(beta=3)

rec = tp.PlottingRecorder()
op.register_listener(rec)
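
To see what the surrogate and acquisition function are doing, the same scikit-learn GP configuration can be fitted by hand to a few random samples of f. This is only an illustrative sketch (the variable names and the explicit lower-confidence-bound rule are assumptions for the example, not turbo's implementation):


In [ ]:
# illustration only (not turbo's internal code): fit a scikit-learn GP with the
# same kernel settings to a few samples of f and inspect the confidence bound
rng = np.random.RandomState(0)  # local generator so the global seed above is untouched
x_obs = rng.uniform(xmin, xmax, size=(5, 1))  # a handful of observed points
y_obs = f(x_obs).ravel()

gp_model = gp.GaussianProcessRegressor(
    kernel=1.0 * gp.kernels.RBF(),
    alpha=1e-3, normalize_y=True, n_restarts_optimizer=10)
gp_model.fit(x_obs, y_obs)

mu, sigma = gp_model.predict(xs.reshape(-1, 1), return_std=True)
beta = 3
lcb = mu - beta * sigma  # for minimisation, the most promising x has the smallest lower bound
next_x = xs[np.argmin(lcb)]

plt.figure(figsize=(12, 4))
plt.plot(xs, ys, 'g-', label='objective')
plt.plot(x_obs.ravel(), y_obs, 'ko', label='observations')
plt.plot(xs, mu, 'b-', label='GP mean')
plt.fill_between(xs, mu - beta * sigma, mu + beta * sigma, alpha=0.2, label=r'$\mu \pm \beta\sigma$')
plt.axvline(next_x, color='r', linestyle='--', label='most promising next sample')
plt.legend(loc='upper left')
plt.show()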

In [ ]:
tg.OptimiserProgressBar(op)
op.run(max_trials=20)

In [ ]:
tp.plot_error_over_time(op, true_best=best_y, log_scale=False, fig_ax=plt.subplots(figsize=(8, 3)))

In [ ]:
tp.plot_surrogate_likelihood_over_time(rec, fig_ax=plt.subplots(figsize=(8, 3)))

In [ ]:
tp.plot_surrogate_hyper_params_1D(rec, param_index=1, axes=('trial_num', 'param', 'likelihood'))

In [ ]:
tp.plot_surrogate_hyper_params_1D(rec, param_index=1, axes=('param', 'likelihood', 'trial_num'))

In [ ]:
tp.plot_surrogate_hyper_params_1D(rec, param_index=1, axes=('trial_num', 'likelihood', 'param'))

In [ ]:
tp.plot_surrogate_hyper_params_2D(rec)

In [ ]:
tp.plot_trial_1D(rec, param='x', trial_num=None, true_objective=f)

Try optimising the same function with random search


In [ ]:
ra = tb.RandomSearchOptimiser(bounds, maximise_cost=False)
ra.run_sequential(f, max_jobs=15)

In [ ]:
ra.plot_cost_over_time(true_best=best_y).set_size_inches(16, 6)

Plot the samples to compare with the Bayesian samples


In [ ]:
# sampled locations and their costs from the random-search run
r_xs = [s.config.x for s in ra.samples]
r_ys = [s.cost for s in ra.samples]

plt.figure(figsize=(16, 6))
plt.plot(xs, ys, 'g-', label='objective')
plt.plot(r_xs, r_ys, 'bo', zorder=10, label='random samples')
plt.legend(loc='upper left')
plt.margins(0.1, 0.1)
plt.xlabel(r'$x$')
plt.ylabel('cost')
plt.show()
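
For reference, the random-search baseline can also be sketched in a few lines of plain numpy, independent of any library. All of the names below are made up for this illustration:


In [ ]:
# minimal random-search sketch: sample x uniformly within the bounds and track the best cost seen
rng = np.random.RandomState(0)  # local generator so the global seed is untouched
n_trials = 15
rand_xs = rng.uniform(xmin, xmax, size=n_trials)
rand_ys = f(rand_xs)
best_so_far = np.minimum.accumulate(rand_ys)  # running minimum, analogous to cost over time

plt.figure(figsize=(8, 3))
plt.plot(np.arange(1, n_trials + 1), best_so_far, 'b.-', label='best so far')
plt.axhline(best_y, color='g', linestyle='--', label='true best')
plt.xlabel('trial')
plt.ylabel('cost')
plt.legend(loc='upper right')
plt.show()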
