Variational GP Regression with Pyro
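
In this example, we use GPyTorch's PyroVariationalGP integration to build an approximate (variational) GP regression model and train it with Pyro's stochastic variational inference (SVI) machinery, on a simple one-dimensional toy dataset.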


In [1]:
import math
import torch
import pyro
import gpytorch
from matplotlib import pyplot as plt

%matplotlib inline
%load_ext autoreload
%autoreload 2
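
First, we generate toy training data: 21 inputs evenly spaced in [0, 1], with targets given by a quadratic normalized to [0, 1] plus a small amount of Gaussian noise.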

In [2]:
train_x = torch.linspace(0., 1., 21)
train_y = torch.pow(train_x, 2).mul_(3.7)
train_y = train_y.div_(train_y.max())  # Normalize targets to a max of 1
train_y += torch.randn_like(train_y).mul_(0.02)  # Add observation noise

fig, ax = plt.subplots(1, 1, figsize=(3, 2))
ax.plot(train_x.numpy(), train_y.numpy(), 'bo')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.legend(['Training data'])


Out[2]:
<matplotlib.legend.Legend at 0x7f61e1f8bc18>
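
Next, we define the model. PyroVariationalGP handles the variational inference plumbing; we supply a Cholesky-parameterized variational distribution over the inducing values, a variational strategy that places the inducing points at the 21 training inputs, and a standard GP prior with a constant mean and a scaled Matern-3/2 kernel (nu=1.5).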

In [3]:
class PVGPRegressionModel(gpytorch.models.PyroVariationalGP):
    def __init__(self, train_x, train_y, likelihood, name_prefix="mixture_gp"):
        # Define the variational distribution and strategy
        variational_distribution = gpytorch.variational.CholeskyVariationalDistribution(
            num_inducing_points=train_y.numel(),
        )
        variational_strategy = gpytorch.variational.VariationalStrategy(
            self, train_x, variational_distribution
        )
        
        # Standard initialization
        super(PVGPRegressionModel, self).__init__(variational_strategy, likelihood, num_data=train_y.numel())
        self.likelihood = likelihood
        
        # Mean, covar
        self.mean_module = gpytorch.means.ConstantMean()
        self.covar_module = gpytorch.kernels.ScaleKernel(
            gpytorch.kernels.MaternKernel(nu=1.5)
        )

    def forward(self, x):
        mean = self.mean_module(x)  # Returns an n_data mean vector
        covar = self.covar_module(x)  # Returns an n_data x n_data covariance
        return gpytorch.distributions.MultivariateNormal(mean, covar)

In [4]:
model = PVGPRegressionModel(train_x, train_y, gpytorch.likelihoods.GaussianLikelihood())
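
With the model instantiated, we train it using Pyro's SVI: an Adam optimizer, and a Trace_ELBO objective estimated with 256 vectorized Monte Carlo particles per step. Each call to svi.step evaluates the ELBO loss and updates the parameters; model.model and model.guide are the Pyro model/guide pair that PyroVariationalGP provides.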

In [5]:
from pyro import optim
optimizer = optim.Adam({"lr": 0.01})

def train(num_iter=400):
    elbo = pyro.infer.Trace_ELBO(num_particles=256, vectorize_particles=True)
    svi = pyro.infer.SVI(model.model, model.guide, optimizer, elbo)
    model.train()

    for i in range(num_iter):
        model.zero_grad()
        loss = svi.step(train_x, train_y)
        if not (i + 1) % 50:
            print('Iter %d/%d - Loss: %.3f   lengthscale: %.3f   noise: %.3f' % (
                i + 1, num_iter, loss,
                model.covar_module.base_kernel.lengthscale.item(),
                model.likelihood.noise.item()
            ))
        
%time train()


Iter 50/400 - Loss: 24.104   lengthscale: 0.643   noise: 0.474
Iter 100/400 - Loss: 19.331   lengthscale: 0.669   noise: 0.310
Iter 150/400 - Loss: 14.304   lengthscale: 0.709   noise: 0.193
Iter 200/400 - Loss: 6.317   lengthscale: 0.769   noise: 0.111
Iter 250/400 - Loss: -0.906   lengthscale: 0.815   noise: 0.056
Iter 300/400 - Loss: -9.677   lengthscale: 0.826   noise: 0.026
Iter 350/400 - Loss: -15.517   lengthscale: 0.855   noise: 0.013
Iter 400/400 - Loss: -19.656   lengthscale: 0.986   noise: 0.007
CPU times: user 21.1 s, sys: 28.5 s, total: 49.6 s
Wall time: 7.11 s
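
As a quick check on the fit, we can read the learned hyperparameters directly off the modules defined above. A minimal sketch (outputscale, lengthscale, and noise are the standard attributes of GPyTorch's ScaleKernel, MaternKernel, and GaussianLikelihood):

In [ ]:
# Print the fitted kernel and likelihood hyperparameters
print('outputscale: %.3f' % model.covar_module.outputscale.item())
print('lengthscale: %.3f' % model.covar_module.base_kernel.lengthscale.item())
print('noise: %.3f' % model.likelihood.noise.item())

Finally, we put the model in eval mode and plot the posterior mean and confidence region at the training inputs.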

In [6]:
fig, ax = plt.subplots(1, 1, figsize=(4, 3))
train_data, = ax.plot(train_x.cpu().numpy(), train_y.cpu().numpy(), 'bo')

model.eval()
with torch.no_grad():
    output = model.likelihood(model(train_x))
    
mean = output.mean
lower, upper = output.confidence_region()
line, = ax.plot(train_x.cpu().numpy(), mean.detach().cpu().numpy())
ax.fill_between(train_x.cpu().numpy(), lower.detach().cpu().numpy(),
                upper.detach().cpu().numpy(), color=line.get_color(), alpha=0.5)
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.legend([train_data, line], ['Train data', 'Prediction'])


Out[6]:
<matplotlib.legend.Legend at 0x7f61de116320>
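
The cell above evaluates the posterior only at the 21 training inputs. The same eval/no_grad pattern extends to a denser grid; a short sketch (test_x is new here, everything else mirrors the cell above):

In [ ]:
# Evaluate the posterior on a denser grid of 101 test points
test_x = torch.linspace(0., 1., 101)
model.eval()
with torch.no_grad():
    test_output = model.likelihood(model(test_x))
    test_mean = test_output.mean
    test_lower, test_upper = test_output.confidence_region()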
