The following series of examples covers each of the available covariance functions and demonstrates the allowed operations on them. The API will be familiar to users of GPy or GPflow, though ours is simplified. All covariance function parameters can be assigned prior distributions or hardcoded, and either MCMC or optimization methods can be used for inference; a short sketch of the prior-based approach follows the first example below.
In [1]:
import matplotlib.pyplot as plt
import matplotlib.cm as cmap
%matplotlib inline
import numpy as np
np.random.seed(206)
import theano
import theano.tensor as tt
import pymc3 as pm
In [2]:
X = np.linspace(0,2,200)[:,None]
# function to display a covariance matrix, a slice through it, and draws from the GP prior
def plot_cov(X, K, stationary=True):
    # jitter the diagonal so the matrix is numerically positive definite
    K = K + 1e-8*np.eye(X.shape[0])
    x = X.flatten()

    fig = plt.figure(figsize=(14,5))
    ax1 = fig.add_subplot(121)
    m = ax1.imshow(K, cmap="inferno",
                   interpolation='none',
                   extent=(np.min(X), np.max(X), np.max(X), np.min(X)));
    plt.colorbar(m);
    ax1.set_title("Covariance Matrix")
    ax1.set_xlabel("X")
    ax1.set_ylabel("X")

    ax2 = fig.add_subplot(122)
    if not stationary:
        # nonstationary kernel: show how the variance changes along the diagonal
        ax2.plot(x, np.diag(K), "k", lw=2, alpha=0.8)
        ax2.set_title("The Diagonal of K")
        ax2.set_ylabel("k(x,x)")
    else:
        # stationary kernel: one column shows k as a function of the distance x - x'
        ax2.plot(x, K[:,0], "k", lw=2, alpha=0.8)
        ax2.set_title("K as a function of x - x'")
        ax2.set_ylabel("k(x,x')")
    ax2.set_xlabel("X")

    fig = plt.figure(figsize=(14,4))
    ax = fig.add_subplot(111)
    samples = np.random.multivariate_normal(np.zeros(K.shape[0]), K, 5).T;
    for i in range(samples.shape[1]):
        ax.plot(x, samples[:,i], color=cmap.inferno(i*0.2), lw=2);
    ax.set_title("Samples from GP Prior")
    ax.set_xlabel("X")
In [3]:
with pm.Model() as model:
    l = 0.2
    tau = 2.0
    b = 0.5
    # a constant offset plus a scaled exponentiated quadratic kernel
    cov = b + tau * pm.gp.cov.ExpQuad(1, l)

K = theano.function([], cov(X))()
plot_cov(X, K)
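The kernel parameters above are hardcoded. As a minimal sketch of the alternative noted in the introduction (this cell is not part of the original series; the data y and the Gamma/HalfCauchy priors are illustrative assumptions), a parameter can instead be assigned a prior, and inference run with MCMC or optimization:

y = np.random.randn(200)  # hypothetical observations, for illustration only

with pm.Model() as model:
    l = pm.Gamma("l", alpha=2.0, beta=1.0)    # prior on the lengthscale
    sigma = pm.HalfCauchy("sigma", beta=1.0)  # prior on the noise scale
    cov = pm.gp.cov.ExpQuad(1, l)
    # the GP marginal likelihood, written out directly as a multivariate normal
    K = cov(X) + sigma**2 * tt.eye(200)
    y_obs = pm.MvNormal("y_obs", mu=np.zeros(200), cov=K, observed=y)
    # trace = pm.sample(1000)  # MCMC ...
    # mp = pm.find_MAP()       # ... or optimization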
In [5]:
x1, x2 = np.meshgrid(np.linspace(0,1,10), np.arange(1,4))
X2 = np.concatenate((x1.reshape((30,1)), x2.reshape((30,1))), axis=1)
with pm.Model() as model:
    # one lengthscale per input dimension (ARD)
    l = np.array([0.2, 1.0])
    cov = pm.gp.cov.ExpQuad(input_dim=2, lengthscales=l)

K = theano.function([], cov(X2))()
m = plt.imshow(K, cmap="inferno", interpolation='none'); plt.colorbar(m);
In [6]:
with pm.Model() as model:
    l = 0.2
    # active_dims masks out the second input column, so only the first is used
    cov = pm.gp.cov.ExpQuad(input_dim=2, lengthscales=l,
                            active_dims=[True, False])

K = theano.function([], cov(X2))()
m = plt.imshow(K, cmap="inferno", interpolation='none'); plt.colorbar(m);
In [7]:
with pm.Model() as model:
    l1 = 0.2
    l2 = 1.0
    # the product of two kernels, each acting on a different input dimension
    cov1 = pm.gp.cov.ExpQuad(2, l1, active_dims=[True, False])
    cov2 = pm.gp.cov.ExpQuad(2, l2, active_dims=[False, True])
    cov = cov1 * cov2

K = theano.function([], cov(X2))()
m = plt.imshow(K, cmap="inferno", interpolation='none'); plt.colorbar(m);
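Covariance functions can be added as well as multiplied. A brief sketch of the additive version of the cell above (not an original cell; it reuses X2):

with pm.Model() as model:
    cov1 = pm.gp.cov.ExpQuad(2, 0.2, active_dims=[True, False])
    cov2 = pm.gp.cov.ExpQuad(2, 1.0, active_dims=[False, True])
    cov = cov1 + cov2  # sum of kernels, one per input dimension

K = theano.function([], cov(X2))()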
In [8]:
with pm.Model() as model:
    l = 0.2
    sigma2 = 0.05
    tau = 0.95
    cov_latent = tau * pm.gp.cov.ExpQuad(1, l)
    # white noise enters as a diagonal matrix added to the latent covariance
    cov_noise = sigma2 * tt.eye(200)

K = theano.function([], cov_latent(X) + cov_noise)()
plot_cov(X, K)
In [9]:
with pm.Model() as model:
    alpha = 0.1
    l = 0.2
    tau = 2.0
    cov = tau * pm.gp.cov.RatQuad(1, l, alpha)

K = theano.function([], cov(X))()
plot_cov(X, K)
In [10]:
with pm.Model() as model:
    l = 0.2
    tau = 2.0
    cov = tau * pm.gp.cov.Exponential(1, l)

K = theano.function([], cov(X))()
plot_cov(X, K)
In [11]:
with pm.Model() as model:
    l = 0.2
    tau = 2.0
    cov = tau * pm.gp.cov.Matern52(1, l)

K = theano.function([], cov(X))()
plot_cov(X, K)
In [12]:
with pm.Model() as model:
    l = 0.2
    tau = 2.0
    cov = tau * pm.gp.cov.Matern32(1, l)

K = theano.function([], cov(X))()
plot_cov(X, K)
In [13]:
with pm.Model() as model:
    l = 0.2
    tau = 2.0
    cov = tau * pm.gp.cov.Cosine(1, l)

K = theano.function([], cov(X))()
plot_cov(X, K)