In [1]:
import autograd.numpy as np

from scipy.optimize import minimize

In [2]:
import modeling
modeling.__version__


Out[2]:
'0.0.1'

In [3]:
np.random.seed(42)
# simulate a small dataset from a straight line with Gaussian noise
x = np.sort(np.random.uniform(-5, 5, 10))
y = 0.5 * x - 0.01 + 0.1 * np.random.randn(len(x))

In [4]:
class LineLogLike(modeling.AutoGradModel):
    parameter_names = ("m", "b", "log_sigma")

    def compute_value(self, params, x, y):
        # Written with autograd.numpy so the gradient with respect to the
        # parameters comes for free.
        m, b, log_sigma = params
        mu = m * x + b
        # sum of squared residuals scaled by the inverse variance exp(-2*log_sigma)
        r2 = np.sum((y - mu)**2) * np.exp(-2*log_sigma)
        # log-variance normalization term
        norm = 2.0 * log_sigma
        return -0.5 * (r2 + norm)

    def log_prior(self):
        # start from the bounded prior defined by the base class...
        lp = super().log_prior()
        if not np.isfinite(lp):
            return -np.inf
        # ...and add a prior on the slope that is flat in the angle of the line
        return lp - 1.5 * np.log(1.0 + self.m**2)
    
# initial guesses for the parameters, plus box bounds on each one
model = LineLogLike(
    m=0.1, b=0.5, log_sigma=-1.0,
    bounds=dict(
        m=(-5, 5), b=(-5, 5), log_sigma=(-10, 1),
    ),
)

# sanity check the gradient implementation
modeling.check_gradient(model, x, y)

def nll_and_jac(params, model, x, y):
    # negative log-likelihood and its gradient, packaged the way
    # scipy.optimize.minimize expects when called with jac=True
    model.set_parameter_vector(params)
    return -model.get_value(x, y), -model.get_gradient(x, y)
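
# For reference, a minimal sketch (not part of the modeling package) of how
# autograd alone would give the same gradient; this is essentially what
# get_gradient is doing behind the scenes:
from autograd import grad

def plain_log_like(params, x, y):
    m, b, log_sigma = params
    mu = m * x + b
    r2 = np.sum((y - mu)**2) * np.exp(-2*log_sigma)
    return -0.5 * (r2 + 2.0 * log_sigma)

# plain_grad(model.get_parameter_vector(), x, y) differentiates the
# log-likelihood with respect to the parameter vector
plain_grad = grad(plain_log_like)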

In [5]:
initial_params = model.get_parameter_vector()
bounds = model.get_parameter_bounds()

# minimize the negative log-likelihood (with its gradient) subject to the
# parameter bounds
soln = minimize(nll_and_jac, initial_params, bounds=bounds,
                jac=True, method="L-BFGS-B",
                args=(model, x, y))

# store the best-fit parameters back on the model
model.set_parameter_vector(soln.x)
soln


Out[5]:
      fun: -0.9306652401969896
 hess_inv: <3x3 LbfgsInvHessProduct with dtype=float64>
      jac: array([  2.78785762e-07,   4.07801687e-07,   1.20898535e-07])
  message: b'CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL'
     nfev: 21
      nit: 16
   status: 0
  success: True
        x: array([ 0.49400289, -0.06392406, -1.43066518])
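
For readability, the optimized vector can be mapped back onto the named parameters. This is a small convenience snippet, not something the modeling package is assumed to provide, and it assumes the vector is ordered like parameter_names:

dict(zip(LineLogLike.parameter_names, soln.x))
# maps 'm', 'b', 'log_sigma' to the corresponding entries of soln.x above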

In [6]:
%timeit nll_and_jac(initial_params, model, x, y)


706 µs ± 30.2 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each)
