In [3]:
    
# http://pymc-devs.github.io/pymc3/getting_started/#a-motivating-example-linear-regression
import numpy as np
import pylab as pyl
%matplotlib inline
    
In [2]:
    
# Seed NumPy's legacy global RNG so the simulated dataset below is reproducible.
# (Modern code would use np.random.default_rng, but later cells rely on the
# global np.random state, so the legacy seed is kept.)
np.random.seed(123)
    
In [10]:
    
# Ground-truth parameters used to generate the synthetic dataset.
alpha, sigma = 1, 1
beta = [1, 2.5]

# Number of simulated observations.
size = 100

# Predictor variables: evenly spaced grids over [0, 1] and [0, 0.2].
X1 = np.linspace(0, 1, size)
X2 = np.linspace(0, 0.2, size)

# Outcome: linear model in X1, X2 plus Gaussian noise with std-dev `sigma`.
noise = np.random.randn(size) * sigma
Y = alpha + beta[0] * X1 + beta[1] * X2 + noise
    
In [13]:
    
# Scatter the outcome Y against each predictor, side by side on a shared x-axis.
fig, (ax_x1, ax_x2) = pyl.subplots(1, 2, sharex=True, figsize=(10, 4))
for ax, predictor, label in ((ax_x1, X1, 'X1'), (ax_x2, X2, 'X2')):
    ax.scatter(predictor, Y)
    ax.set_xlabel(label)
ax_x1.set_ylabel('Y')
pyl.show()
    
    
In [32]:
    
from pymc3 import Model, Normal, HalfNormal 
from pymc3 import find_MAP, NUTS, sample, traceplot, summary
from scipy import optimize
    
In [16]:
    
# Bayesian linear regression: Y ~ Normal(alpha + beta[0]*X1 + beta[1]*X2, sigma).
# Local names differ from the true-parameter variables above so the simulated
# ground truth (alpha, beta, sigma) is not shadowed by the random variables.
basic_model = Model()
with basic_model:

    # Weakly-informative priors on the unknown parameters.
    intercept = Normal('alpha', mu=0, sd=10)
    coeffs = Normal('beta', mu=0, sd=10, shape=2)
    noise_sd = HalfNormal('sigma', sd=1)  # noise scale is constrained positive

    # Deterministic expected value of the outcome.
    expected = intercept + coeffs[0]*X1 + coeffs[1]*X2

    # Likelihood: the observed data enter the model here.
    Y_obs = Normal('Y_obs', mu=expected, sd=noise_sd, observed=Y)
    
In [20]:
    
# Maximum a-posteriori point estimate; find_MAP defaults to BFGS optimization.
map_estimate = find_MAP(model=basic_model) # default optimizer (BFGS)
print(map_estimate)
    
    
In [22]:
    
# MAP again with Powell's derivative-free optimizer — useful when gradient-based
# BFGS stalls; note this rebinds `map_estimate` from the previous cell.
map_estimate = find_MAP(model=basic_model, fmin=optimize.fmin_powell)
print(map_estimate)
    
    
In [24]:
    
with basic_model:

    # Obtain starting values for the sampler from the MAP estimate
    # (Powell's derivative-free optimizer, as in the cell above).
    start = find_MAP(fmin=optimize.fmin_powell)

    # Instantiate the NUTS sampler; `scaling=start` passes the MAP point
    # so NUTS can scale its mass matrix (per the pymc3 getting-started guide).
    step = NUTS(scaling=start)

    # Draw 500 posterior samples, initializing the chain at the MAP point.
    trace = sample(500, step, start=start)
    
    
    
In [29]:
    
# Peek at the last 5 posterior samples of alpha (bare last expression → rich display).
trace['alpha'][-5:]
    
    Out[29]:
In [31]:
    
# Plot marginal posterior densities and sampling traces for each parameter.
traceplot(trace)
pyl.show()
    
    
In [33]:
    
# Text summary of the posterior (per-parameter statistics and intervals).
summary(trace)
    
    
In [ ]:
    
n = 400  # NOTE(review): unused in the visible code — presumably the start of a follow-up example; confirm or remove