First, we create a simple two-dimensional normal distribution to sample from
In [1]:
import pints
import pints.toy
import numpy as np
import matplotlib.pyplot as plt
# Create log pdf
log_pdf = pints.toy.GaussianLogPDF([2, 4], [[1, 0], [0, 3]])
# Contour plot of pdf
num_points = 100
x = np.linspace(-1, 5, num_points)
y = np.linspace(0, 8, num_points)
X, Y = np.meshgrid(x, y)
Z = np.exp([[log_pdf([i, j]) for i in x] for j in y])
plt.contour(X, Y, Z)
plt.xlabel('x')
plt.ylabel('y')
plt.show()
Now we set up and run a sampling routine using MALA MCMC (the Metropolis-Adjusted Langevin Algorithm)
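MALA proposes moves by taking a small step along the gradient of the log pdf and adding Gaussian noise, then applying a Metropolis-Hastings accept/reject correction. Below is a minimal, self-contained sketch of a single MALA step for the Gaussian target above (an illustration only, not PINTS's implementation; the step size eps and the seed are arbitrary choices):

import numpy as np

# Target: the same Gaussian as above, with analytic gradient
mu = np.array([2.0, 4.0])
inv_cov = np.linalg.inv(np.array([[1.0, 0.0], [0.0, 3.0]]))

def log_pi(x):
    # Log density, up to an additive constant
    return -0.5 * (x - mu) @ inv_cov @ (x - mu)

def grad_log_pi(x):
    return -inv_cov @ (x - mu)

def mala_step(x, eps, rng):
    # Langevin proposal: drift along the gradient, plus Gaussian noise
    y = x + 0.5 * eps ** 2 * grad_log_pi(x) + eps * rng.standard_normal(len(x))

    def log_q(b, a):
        # Log density of proposing b when at a (up to a constant)
        d = b - a - 0.5 * eps ** 2 * grad_log_pi(a)
        return -(d @ d) / (2 * eps ** 2)

    # Metropolis-Hastings correction for the asymmetric proposal
    log_alpha = log_pi(y) + log_q(x, y) - log_pi(x) - log_q(y, x)
    if np.log(rng.uniform()) < log_alpha:
        return y   # accept
    return x       # reject: stay at the current point

rng = np.random.default_rng(1)
x = np.array([2.0, 1.0])
for _ in range(1000):
    x = mala_step(x, eps=0.8, rng=rng)
print(x)  # a (correlated) draw from the target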
In [2]:
# Choose starting points for 3 mcmc chains
xs = [
    [2, 1],
    [3, 3],
    [5, 4],
]
# Create mcmc routine
mcmc = pints.MCMCController(log_pdf, 3, xs, method=pints.MALAMCMC)
# Add stopping criterion
mcmc.set_max_iterations(2000)
# Set up modest logging
mcmc.set_log_to_screen(True)
mcmc.set_log_interval(100)
# Update step sizes used by individual samplers (these are then scaled by sigma0)
for sampler in mcmc.samplers():
    sampler.set_epsilon([1.5, 1.5])
# Run!
print('Running...')
full_chains = mcmc.run()
print('Done!')
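mcmc.run() returns the samples from all chains stacked in one NumPy array of shape (chains, iterations, parameters):

# Each chain contributes one row of 2000 two-dimensional samples
print(full_chains.shape)  # (3, 2000, 2)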
In [3]:
# Show traces and histograms
import pints.plot
pints.plot.trace(full_chains)
plt.show()
In [4]:
# Discard warm up
chains = full_chains[:, 1000:]
# Check convergence using rhat criterion
print('R-hat:')
print(pints.rhat_all_params(chains))
# Check Kullback-Leibler divergence of chains
print(log_pdf.kl_divergence(chains[0]))
print(log_pdf.kl_divergence(chains[1]))
print(log_pdf.kl_divergence(chains[2]))
# Look at distribution in chain 0
pints.plot.pairwise(chains[0], kde=True)
plt.show()
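For reference, R-hat compares between-chain and within-chain variance. A back-of-the-envelope version of the basic Gelman-Rubin statistic (without the split-chain or rank-normalisation refinements a library implementation may apply) fits in a few lines of numpy:

import numpy as np

def basic_rhat(chains):
    # chains: array of shape (n_chains, n_samples, n_parameters)
    m, n = chains.shape[0], chains.shape[1]
    means = chains.mean(axis=1)                     # per-chain means
    w = chains.var(axis=1, ddof=1).mean(axis=0)     # within-chain variance
    b = n * means.var(axis=0, ddof=1)               # between-chain variance
    var_hat = (n - 1) / n * w + b / n               # pooled variance estimate
    return np.sqrt(var_hat / w)                     # close to 1 when chains agree

print(basic_rhat(chains))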
In [5]:
import pints.toy as toy
# Create a wrapper around the logistic model, turning it into a 1d model
class Model(pints.ForwardModel):
    def __init__(self):
        self.model = toy.LogisticModel()
    def simulate(self, x, times):
        return self.model.simulate([x[0], 500], times)
    def simulateS1(self, x, times):
        values, gradient = self.model.simulateS1([x[0], 500], times)
        gradient = gradient[:, 0]
        return values, gradient
    def n_parameters(self):
        return 1
# Load a forward model
model = Model()
# Create some toy data
real_parameters = np.array([0.015])
times = np.linspace(0, 1000, 50)
org_values = model.simulate(real_parameters, times)
# Add noise
np.random.seed(1)
noise = 10
values = org_values + np.random.normal(0, noise, org_values.shape)
plt.figure()
plt.plot(times, values)
plt.plot(times, org_values)
plt.show()
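MALA is a gradient-based sampler, which is why the wrapper above implements simulateS1 (model values plus their sensitivities to the parameter). A quick finite-difference sanity check of the wrapper's gradient (a sketch; the step size h is an arbitrary choice):

# Compare the wrapper's sensitivities with a central finite difference
x0 = np.array([0.015])
h = 1e-7
_, grad = model.simulateS1(x0, times)
fd = (model.simulate(x0 + h, times) - model.simulate(x0 - h, times)) / (2 * h)
print(np.max(np.abs(grad - fd)))  # should be small relative to the gradient scale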
In [6]:
# Create an object with links to the model and time series
problem = pints.SingleOutputProblem(model, times, values)
# Create a log-likelihood function
log_likelihood = pints.GaussianKnownSigmaLogLikelihood(problem, noise)
# Create a uniform prior over the parameters
log_prior = pints.UniformLogPrior(
    [0.01],
    [0.02]
)
# Create a posterior log-likelihood (log(likelihood * prior))
log_posterior = pints.LogPosterior(log_likelihood, log_prior)
# Choose starting points for mcmc chains
xs = [
    real_parameters * 1.01,
    real_parameters * 0.9,
    real_parameters * 1.15,
]
# Create mcmc routine
mcmc = pints.MCMCController(log_posterior, len(xs), xs, method=pints.MALAMCMC)
# Add stopping criterion
mcmc.set_max_iterations(2000)
# Set up modest logging
mcmc.set_log_to_screen(True)
mcmc.set_log_interval(100)
# Run!
print('Running...')
chains = mcmc.run()
print('Done!')
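MALA requires the gradient of the log-posterior as well as its value; PINTS composes this from the likelihood's sensitivities (via the wrapper's simulateS1) and the prior. As a quick check (assuming PINTS's evaluateS1 convention of returning a (value, gradient) pair):

# Log-posterior and its gradient at the true parameter
f, df = log_posterior.evaluateS1(real_parameters)
print(f, df)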
In [7]:
# Show trace and histogram
pints.plot.trace(chains)
plt.show()
In [8]:
# Show predicted time series for the first chain
pints.plot.series(chains[0, 200:], problem, real_parameters)
plt.show()
In [9]:
import pints
import pints.toy as toy
import pints.plot
import numpy as np
import matplotlib.pyplot as plt
# Load a forward model
model = toy.LogisticModel()
# Create some toy data
real_parameters = np.array([0.015, 500])
times = np.linspace(0, 1000, 50)
org_values = model.simulate(real_parameters, times)
# Add noise
np.random.seed(1)
noise = 10
values = org_values + np.random.normal(0, noise, org_values.shape)
# Create an object with links to the model and time series
problem = pints.SingleOutputProblem(model, times, values)
# Create a log-likelihood function
log_likelihood = pints.GaussianKnownSigmaLogLikelihood(problem, noise)
# Create a uniform prior over the parameters
log_prior = pints.UniformLogPrior(
    [0.01, 400],
    [0.02, 600]
)
# Create a posterior log-likelihood (log(likelihood * prior))
log_posterior = pints.LogPosterior(log_likelihood, log_prior)
# Choose starting points for 3 mcmc chains
xs = [
    real_parameters * 1.01,
    real_parameters * 0.9,
    real_parameters * 1.1,
]
# Create mcmc routine
mcmc = pints.MCMCController(log_posterior, len(xs), xs, method=pints.MALAMCMC)
# Add stopping criterion
mcmc.set_max_iterations(4000)
# Set up modest logging
mcmc.set_log_to_screen(True)
mcmc.set_log_interval(100)
# Run!
print('Running...')
chains = mcmc.run()
print('Done!')
In [10]:
# Show traces and histograms
pints.plot.trace(chains)
plt.show()
Chains have converged!
In [11]:
# Discard warm up
chains = chains[:, 1000:]
# Check convergence using rhat criterion
print('R-hat:')
print(pints.rhat_all_params(chains))
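Finally, the pooled post-warm-up samples give quick posterior summaries; a plain-numpy sketch:

# Pool the three chains and summarise each parameter
pooled = chains.reshape(-1, chains.shape[2])
print('Posterior mean:', pooled.mean(axis=0))
print('Posterior std: ', pooled.std(axis=0, ddof=1))
print('95% interval:\n', np.percentile(pooled, [2.5, 97.5], axis=0))
print('True values:   ', real_parameters)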