In [10]:
# Load Biospytial modules and other dependencies.
%matplotlib inline
import sys
sys.path.append('/apps/external_plugins/spystats/')
#import django
#django.setup()
import pandas as pd
import matplotlib.pyplot as plt
## Use the ggplot style
plt.style.use('ggplot')
import numpy as np
In [11]:
## Model Specification
import pymc3 as pm
from spystats import tools
In [12]:
sigma=3.5
range_a=10.13
kappa=3.0/2.0
#ls = 0.2
#tau = 2.0
cov = sigma * pm.gp.cov.Matern32(2, range_a,active_dims=[0,1])
In [13]:
n = 10
grid = tools.createGrid(grid_sizex=n,grid_sizey=n,minx=0,miny=0,maxx=50,maxy=50)
In [14]:
K = cov(grid[['Lon','Lat']].values).eval()
sample = pm.MvNormal.dist(mu=np.zeros(K.shape[0]), cov=K).random(size=1)
grid['Z'] = sample
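A quick sanity check, assuming the objects defined above (`K` evaluated from the Matern32 kernel), is to confirm that the covariance matrix is symmetric positive definite before drawing the multivariate normal sample; `np.linalg.cholesky` raises `LinAlgError` otherwise. This is a sketch added for illustration, not part of the original run.
# Sketch: verify that K is a valid covariance matrix (symmetric positive definite).
assert np.allclose(K, K.T)
np.linalg.cholesky(K + 1e-10 * np.eye(K.shape[0]))  # small jitter guards against round-off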
In [15]:
plt.figure(figsize=(14,4))
plt.imshow(grid.Z.values.reshape(n,n), interpolation='none')  # 'none' disables pixel interpolation
Out[15]:
In [16]:
print("sigma: %s, phi: %s"%(sigma,range_a))
In [18]:
## Analysis: fit the GP covariance parameters (sigma and phi)
# The variational method is much better.
with pm.Model() as model:
    #sigma = 1.0
    sigma = pm.Uniform('sigma', 0, 4)
    phi = pm.Normal('phi', mu=8, sd=3)
    #phi = pm.Uniform('phi', 5, 10)
    cov = sigma * pm.gp.cov.Matern32(2, phi, active_dims=[0, 1])
    K = cov(grid[['Lon', 'Lat']].values)
    y_obs = pm.MvNormal('y_obs', mu=np.zeros(n*n), cov=K, observed=grid.Z)
    #gp = pm.gp.Latent(cov_func=cov, observed=sample)
    # Use elliptical slice sampling
    #ess_step = pm.EllipticalSlice(vars=[f_sample], prior_cov=K)
    #ess_step = pm.HamiltonianMC()
    #%time trace = pm.sample(5000)
    ## Variational
    %time results = pm.fit()
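The approximation returned by `pm.fit` can be sampled like a trace. As a sketch (assuming the `results` object above, not part of the original run), the approximate posteriors of `sigma` and `phi` can be summarized and plotted:
# Sketch: draw samples from the fitted variational approximation and summarize them.
approx_trace = results.sample(1000)
pm.summary(approx_trace)
pm.plot_posterior(approx_trace)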
In [19]:
from pymc3 import find_MAP
map_estimate = find_MAP(model=model)
In [20]:
np.random.seed(1234)
sigma=3.5
range_a=10.13
kappa=3.0/2.0
#ls = 0.2
alpha = 0.0
cov = sigma * pm.gp.cov.Matern32(2, range_a,active_dims=[0,1])
n = 20
grid = tools.createGrid(grid_sizex=n,grid_sizey=n,minx=0,miny=0,maxx=20,maxy=20)
K = cov(grid[['Lon','Lat']].values).eval()
pfield = pm.MvNormal.dist(mu=np.zeros(K.shape[0]), cov=K).random(size=1)
poiss_data = np.exp(alpha + pfield)
grid['Z'] = poiss_data
#grid['Z'] = pfield
plt.figure(figsize=(14,4))
plt.imshow(grid.Z.values.reshape(n,n), interpolation='none')  # 'none' disables pixel interpolation
plt.colorbar()
print("sigma: %s, phi: %s"%(sigma,range_a))
In [21]:
## Analysis: GP with only one parameter (phi) to fit
# The variational method is much better.
from pymc3.variational.callbacks import CheckParametersConvergence
with pm.Model() as model:
    sigma = 3.5
    range_a = 10.13
    #sigma = pm.Uniform('sigma', 0, 4)
    #phi = pm.HalfNormal('phi', mu=8, sd=3)
    #phi = pm.Uniform('phi', 6, 12)
    phi = pm.Uniform('phi', 5, 15)
    cov = sigma * pm.gp.cov.Matern32(2, phi, active_dims=[0, 1])
    #K = cov(grid[['Lon', 'Lat']].values)
    #phiprint = tt.printing.Print('phi')(phi)
    ## The latent function
    gp = pm.gp.Latent(cov_func=cov)
    ## Not sure why this reparameterization is needed
    f = gp.prior("latent_field", X=grid[['Lon', 'Lat']].values, reparameterize=True)
    #f_print = tt.printing.Print('latent_field')(f)
    y_obs = pm.Poisson('y_obs', mu=f, observed=grid.Z)
    #y_obs = pm.MvNormal('y_obs', mu=np.zeros(n*n), cov=K, observed=grid.Z)
    #gp = pm.gp.Latent(cov_func=cov, observed=sample)
    # Use elliptical slice sampling
    #ess_step = pm.EllipticalSlice(vars=[f_sample], prior_cov=K)
    #step = pm.HamiltonianMC()
    #step = pm.Metropolis()
    #%time trace = pm.sample(5000, step)  #,tune=0,chains=1)
    ## Variational
    %time mean_field = pm.fit(method='advi', callbacks=[CheckParametersConvergence()])
It is giving a lot of inf values in the average loss.
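The inf values in the average loss are plausibly related to the unconstrained latent Gaussian field being used directly as the Poisson rate, which must be strictly positive. Two hedged sketches for diagnosing this (assumptions, not part of the original run): plot the loss history recorded by ADVI, and consider the conventional log link.
# Sketch: inspect the ADVI loss history; inf spikes show up here.
plt.plot(mean_field.hist)
plt.ylabel('Average loss (negative ELBO)')
# Sketch: the usual log link exponentiates the latent field so the rate stays positive.
# y_obs = pm.Poisson('y_obs', mu=pm.math.exp(f), observed=grid.Z)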
In [22]:
# pm.traceplot(trace)
In [23]:
#for RV in model.basic_RVs:
# print(RV.name, RV.logp(model.test_point))
In [24]:
from pymc3 import find_MAP
map_estimate = find_MAP(model=model)
map_estimate
Out[24]:
In [87]:
plt.imshow(map_estimate['latent_field'].reshape(20,20))
Out[87]:
In [26]:
pm.plot_posterior(mean_field.sample(10), color='LightSeaGreen');