In [1]:
import numpy as np
from scipy.optimize import minimize
In [2]:
import modeling
modeling.__version__
Out[2]:
In [3]:
# Simulate N=10 points from the line y = m_true * x + b_true with Gaussian
# scatter sigma_true, using a fixed seed for reproducibility.
np.random.seed(42)
m_true, b_true, sigma_true = 0.5, -0.01, 0.1
x = np.sort(np.random.uniform(-5, 5, 10))
y = m_true * x + b_true + sigma_true * np.random.randn(len(x))
In [4]:
class LineLogLike(modeling.Model):
    """Gaussian log-likelihood (up to a constant) for a line y = m*x + b.

    The noise amplitude is parameterized as log_sigma = log(sigma) so the
    optimizer works in an unconstrained space.  Parameters:

    m         : slope of the line
    b         : intercept
    log_sigma : natural log of the per-point Gaussian scatter
    """

    parameter_names = ("m", "b", "log_sigma")

    def get_value(self, x, y):
        """Return ln L = -0.5 * [sum(r^2)/sigma^2 + 2*N*log(sigma)] + const."""
        mu = self.m * x + self.b
        r2 = np.sum((y - mu)**2) * np.exp(-2*self.log_sigma)
        # FIX: the log-normalization must scale with the number of data
        # points N (each Gaussian term contributes -log sigma); without the
        # len(x) factor the MLE solves sigma^2 = sum(r^2) instead of
        # sum(r^2)/N, overestimating sigma by sqrt(N).
        norm = 2.0 * len(x) * self.log_sigma
        return -0.5 * (r2 + norm)

    def compute_gradient(self, x, y):
        """Gradient of get_value with respect to (m, b, log_sigma)."""
        sigma = np.exp(self.log_sigma)
        r = (y - (self.m * x + self.b))
        dldm = np.sum(r * x) / sigma**2
        dldb = np.sum(r) / sigma**2
        # d(lnL)/d(log_sigma); the -len(x) term matches the N*log(sigma)
        # normalization in get_value above.
        dlds = np.sum(r**2) / sigma**2 - len(x)
        return np.array([dldm, dldb, dlds])

    def log_prior(self):
        """Bounds prior from the base class, plus a prior that is flat in the
        angle of the line: p(m) proportional to (1 + m^2)^(-3/2)."""
        lp = super(LineLogLike, self).log_prior()
        if not np.isfinite(lp):
            return -np.inf
        return lp - 1.5 * np.log(1.0 + self.m**2)
# Instantiate the model at a deliberately poor starting guess, with box
# constraints on every parameter, then verify compute_gradient against a
# finite-difference estimate.
param_bounds = {
    "m": (-5, 5),
    "b": (-5, 5),
    "log_sigma": (-10, 1),
}
model = LineLogLike(m=0.1, b=0.5, log_sigma=-1.0, bounds=param_bounds)
modeling.check_gradient(model, x, y)
def nll_and_jac(params, model, x, y):
    """Negative log-likelihood and its gradient, in the (value, jac) form
    expected by scipy.optimize.minimize with jac=True.

    Side effect: updates `model`'s parameter vector to `params`.
    """
    model.set_parameter_vector(params)
    value = model.get_value(x, y)
    grad = model.get_gradient(x, y)
    return -value, -grad
In [5]:
# Maximize the log-likelihood by minimizing its negative with L-BFGS-B,
# supplying the analytic gradient (jac=True: nll_and_jac returns both the
# value and the Jacobian) and the model's box constraints.
initial_params = model.get_parameter_vector()
bounds = model.get_parameter_bounds()
soln = minimize(
    nll_and_jac,
    initial_params,
    args=(model, x, y),
    method="L-BFGS-B",
    jac=True,
    bounds=bounds,
)
# Leave the model at the best-fit parameters and display the full result.
model.set_parameter_vector(soln.x)
soln
Out[5]:
In [6]:
%timeit nll_and_jac(initial_params, model, x, y)
In [ ]: