In [1]:
import numpy as np
from scipy.stats import multivariate_normal as mvn

import kombine

Import some cool visualization stuff.


In [2]:
from matplotlib import pyplot as plt
import corner
import prism

%matplotlib inline
%config InlineBackend.figure_format = 'retina'

prism.inline_ipynb()

2-D Gaussian Target Distribution


In [3]:
ndim = 2

Construct a pickleable, callable object to hold the target distribution.


In [4]:
class Target(object):
    def __init__(self, cov):
        self.cov = cov
        self.ndim = self.cov.shape[0]
        
    def logpdf(self, x):
        return mvn.logpdf(x, mean=np.zeros(self.ndim), cov=self.cov)
        
    def __call__(self, x):
        return self.logpdf(x)

Generate a random covariance matrix and construct the target.


In [5]:
A = np.random.rand(ndim, ndim)
cov = np.dot(A, A.T) + ndim*np.eye(ndim)  # symmetric and positive definite

lnpdf = Target(cov)
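
A quick, purely illustrative sanity check: the target defined above should survive a pickle round trip (which is what allows it to be shipped to worker processes when sampling in parallel) and return a finite log-density at its mean, the origin.


In [ ]:
import pickle

# The target round-trips through pickle, so it can be handed to parallel workers.
lnpdf_check = pickle.loads(pickle.dumps(lnpdf))

# Log-density at the target's mean (the origin, by construction).
lnpdf_check(np.zeros(ndim))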

Create a uniformly distributed ensemble and burn it in.


In [6]:
nwalkers = 500
sampler = kombine.Sampler(nwalkers, ndim, lnpdf)

p0 = np.random.uniform(-10, 10, size=(nwalkers, ndim))
p, post, q = sampler.burnin(p0)
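
Burn-in runs for as many iterations as kombine decides it needs, so the length of the stored chain (assumed here to be laid out as steps × walkers × dimensions, as the indexing later in this notebook suggests) shows how long burn-in actually took.


In [ ]:
# (steps, walkers, dimensions) -- the first entry is the number of burn-in iterations taken.
sampler.chain.shape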

See what burn-in did.


In [7]:
prism.corner(sampler.chain)


Out[7]: [interactive corner plot of the ensemble after burn-in]
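
prism's corner plot is interactive and won't show up in a static rendering of this notebook; as a fallback, flattening the chain (again assuming a steps × walkers × dimensions layout) and passing it to corner gives a static view of the same distribution.


In [ ]:
# Static fallback: pool all steps and walkers into one (N, ndim) array for corner.
corner.corner(sampler.chain.reshape(-1, ndim));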

Generate some more samples.


In [8]:
p, post, q = sampler.run_mcmc(100)

In [9]:
fig, [ax1, ax2, ax3] = plt.subplots(1, 3, figsize=(15, 5))

# Acceptance rate, averaged over walkers, at each iteration.
ax1.plot(sampler.acceptance_fraction, 'k', alpha=.5, label="Mean Acceptance Rate")

# Trace plots for each parameter, one line per walker.
for d, ax in zip(range(sampler.dim), [ax2, ax3]):
    ax.plot(sampler.chain[..., d], alpha=0.1)

ax1.legend(loc='lower right');


Estimate each walker's autocorrelation length (ACL) from its recent acceptance rate (roughly 2/acceptance − 1), thin its chain accordingly, and plot the pooled independent samples.


In [11]:
# ACL estimate per walker, from the mean acceptance rate over the last 100 steps.
acls = np.ceil(2/np.mean(sampler.acceptance[-100:], axis=0) - 1).astype(int)

# Thin each walker's last 100 samples by its ACL and pool the results.
ind_samps = np.concatenate([sampler.chain[-100::acl, c].reshape(-1, ndim)
                            for c, acl in enumerate(acls)])
print("{} independent samples collected with a mean ACL of {}.".format(len(ind_samps), np.mean(acls)))

corner.corner(ind_samps);


25000 independent samples collected with a mean ACL of 2.0.
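
Since the target's mean (zero) and covariance (cov) are known exactly, a simple moment check on the thinned samples makes an illustrative final sanity test.


In [ ]:
# The pooled independent samples should reproduce the target's first two moments.
print("sample mean:", ind_samps.mean(axis=0))            # ~ [0, 0]
print("sample cov:\n", np.cov(ind_samps, rowvar=False))  # ~ cov
print("true cov:\n", cov)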
