In [1]:
import autograd.numpy as np
from scipy.optimize import minimize
In [2]:
import modeling
modeling.__version__
Out[2]:
In [3]:
np.random.seed(42)
x = np.sort(np.random.uniform(-5, 5, 10))
y = 0.5 * x - 0.01 + 0.1 * np.random.randn(len(x))
In [4]:
class LineLogLike(modeling.AutoGradModel):
    parameter_names = ("m", "b", "log_sigma")

    def compute_value(self, params, x, y):
        # Gaussian log likelihood for the line mu = m*x + b with noise
        # standard deviation sigma = exp(log_sigma).
        m, b, log_sigma = params
        mu = m * x + b
        r2 = np.sum((y - mu)**2) * np.exp(-2*log_sigma)
        # The normalization term scales with the number of data points.
        norm = 2.0 * len(x) * log_sigma
        return -0.5 * (r2 + norm)

    def log_prior(self):
        # Flat prior within the bounds, plus the standard "uninformative"
        # prior on the slope of a line.
        lp = super().log_prior()
        if not np.isfinite(lp):
            return -np.inf
        return lp - 1.5 * np.log(1.0 + self.m**2)
model = LineLogLike(
    m=0.1, b=0.5, log_sigma=-1.0,
    bounds=dict(
        m=(-5, 5), b=(-5, 5), log_sigma=(-10, 1),
    ),
)
modeling.check_gradient(model, x, y)
def nll_and_jac(params, model, x, y):
    # Negative log likelihood and its gradient, in the (value, jac) form
    # that scipy.optimize.minimize expects when jac=True.
    model.set_parameter_vector(params)
    return -model.get_value(x, y), -model.get_gradient(x, y)
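As an extra check on top of modeling.check_gradient, the jacobian returned by nll_and_jac can be compared against a finite-difference estimate. A minimal sketch, assuming the model, x, and y defined above; scipy.optimize.check_grad returns the norm of the difference between the two gradients, which should be tiny:

from scipy.optimize import check_grad

# Split nll_and_jac into objective and gradient so that check_grad can
# difference the objective numerically.
err = check_grad(
    lambda p: nll_and_jac(p, model, x, y)[0],
    lambda p: nll_and_jac(p, model, x, y)[1],
    model.get_parameter_vector(),
)
print(err)  # expect something like 1e-6 or smaller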
In [5]:
initial_params = model.get_parameter_vector()
bounds = model.get_parameter_bounds()
soln = minimize(nll_and_jac, initial_params, bounds=bounds,
                jac=True, method="L-BFGS-B",
                args=(model, x, y))
model.set_parameter_vector(soln.x)
soln
Out[5]:
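With the maximum likelihood parameters loaded back into the model, the fit can be over-plotted on the simulated data. A sketch assuming matplotlib and the parameter attribute access (model.m, etc.) used in log_prior above:

import matplotlib.pyplot as plt

x0 = np.linspace(-5, 5, 500)
plt.errorbar(x, y, yerr=np.exp(model.log_sigma), fmt=".k", capsize=0)
plt.plot(x0, model.m * x0 + model.b)
plt.xlabel("x")
plt.ylabel("y")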
In [6]:
%timeit nll_and_jac(initial_params, model, x, y)
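For context, timing a bare numpy implementation of the same negative log likelihood (without the gradient) gives a rough sense of autograd's bookkeeping overhead. A hypothetical baseline, written to match the likelihood above:

def nll_only(params, x, y):
    # Plain numpy negative log likelihood; no gradient computation.
    m, b, log_sigma = params
    r = y - (m * x + b)
    return 0.5 * (np.sum(r**2) * np.exp(-2*log_sigma) + 2.0 * len(x) * log_sigma)

%timeit nll_only(initial_params, x, y)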