In [ ]:
from __future__ import division
import numpy as np
import numpy.random as npr
import matplotlib.pyplot as plt
from pybasicbayes.distributions import Regression
from pybasicbayes.util.text import progprint_xrange
from autoregressive.distributions import AutoRegression
import pyximport
pyximport.install()
import os
os.chdir('../pylds')
from models import LDS, DefaultLDS
from distributions import Regression_diag
npr.seed(0)
def update(model):
    # one EM iteration, returning the resulting log-likelihood
    model.EM_step()
    return model.log_likelihood()

def init_LDS_model(pars, data):
    # LDS with AR(1) latent dynamics and diagonal-covariance emissions,
    # initialised at the parameters given in `pars`
    model = LDS(
        dynamics_distn=AutoRegression(
            nu_0=n+1, S_0=n*np.eye(n), M_0=np.zeros((n, n)), K_0=n*np.eye(n),
            A=pars['A'], sigma=pars['Q']),
        emission_distn=Regression_diag(
            nu_0=p+1, S_0=p*np.eye(p), M_0=np.zeros((p, n)), K_0=p*np.eye(n),
            A=pars['C'], sigma=np.diag(pars['R'])))
    model.A = pars['A']
    model.sigma_states = pars['Q']
    model.add_data(data)
    return model

def collect_LDS_stats(model):
    # posterior statistics of the (single) states object and the current parameter estimates
    stats = {'mu_h': model.states_list[0].smoothed_mus.copy(),
             'V_h': model.states_list[0].smoothed_sigmas.copy(),
             'extxtm1': model.states_list[0].E_dynamics_stats[1].copy()}
    pars = {'A': model.A,
            'B': 0,
            'Q': model.sigma_states,
            'mu0': model.mu_init,
            'V0': model.sigma_init,
            'C': model.C,
            'd': np.zeros(model.C.shape[0]),
            'R': model.sigma_obs}
    return stats, pars
#########################
# set some parameters #
#########################
n = 2
p = 3
T = 1000
pars_true = {'A': 0.99*np.array([[np.cos(np.pi/24), -np.sin(np.pi/24)],
                                 [np.sin(np.pi/24),  np.cos(np.pi/24)]]),
             'Q': 0.01*np.eye(n),
             'B': 0,
             'mu0': np.array([0., 1.]),
             'V0': 0.01*np.eye(n),
             'C': np.random.normal(size=(p, n)),
             'd': np.zeros(p),
             'R': 0.01*np.ones(p)}
print(pars_true['B'])
pars_init = {'A': 0.99*np.eye(n),
             'B': 0,
             'Q': np.eye(n),
             'mu0': np.array([0., 1.]),
             'V0': 0.01*np.eye(n),
             'C': np.random.randn(p, n),
             'd': np.zeros(p),
             'R': 0.1*np.ones(p)}
###################
# generate data #
###################
truemodel = LDS(
    dynamics_distn=AutoRegression(A=pars_true['A'], sigma=pars_true['Q']),
    emission_distn=Regression_diag(A=pars_true['C'], sigma=pars_true['R']))
data, stateseq = truemodel.generate(T)
# smoothed state estimates from E_step(), i.e. the full-covariance-matrix version
# get E-step results for the initial parameters
model = init_LDS_model(pars_init, data) # set to initialisation
model.states_list[0].E_step()
stats_init,_ = collect_LDS_stats(model)
# get EM-step results after one iteration
print('fitting')
likes = [update(model) for _ in progprint_xrange(1)]
stats_first,pars_first = collect_LDS_stats(model)
# get EM-step results after 50 iterations
model = init_LDS_model(pars_init, data) # reset to initialisation
print('fitting')
likes = [update(model) for _ in progprint_xrange(50)]
stats_hat,pars_hat = collect_LDS_stats(model)
model = init_LDS_model(pars_true, data) # reset to true pars
model.states_list[0].E_step()
stats_true,_ = collect_LDS_stats(model)
save_file = '../../../results/pylds_debug/' + 'pylds_1x2_full'
from scipy.io import savemat # store results for comparison with Matlab code
save_file_m = {'x': model.states_list[0].stateseq,
               'y': model.states_list[0].data,
               'u': [],
               'll': likes,
               'T': model.states_list[0].T,
               'Trial': len(model.states_list),
               'elapsedTime': 0,
               'ifUseB': False,
               'ifUseA': True,
               'epsilon': 0,
               'ifRDiagonal': False,
               'covConvEps': 0,
               'truePars': pars_true,
               'initPars': pars_init,
               'firstPars': pars_first,
               'estPars': pars_hat,
               'stats_0': stats_init,
               'stats_1': stats_first,
               'stats_h': stats_hat,
               'stats_true': stats_true,
               # 'Pi': Pi, 'Pi_h': Pi_h, 'Pi_t': Pi_t, 'Pi_t_h': Pi_t_h,
               'obsScheme': []}
savemat(save_file, save_file_m)  # does the actual saving (savemat appends the .mat extension)
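In [ ]:
# Optional round-trip check (a sketch; assumes the ../../../results/pylds_debug/
# directory exists so the savemat call above succeeded): reload the .mat file
# and list the stored fields.
from scipy.io import loadmat
reloaded = loadmat(save_file)  # loadmat appends '.mat' just like savemat did
print(sorted(reloaded.keys()))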
In [ ]:
model.states_list[0].smoothed_mus.shape
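In [ ]:
# Quick diagnostic sketch using the already-imported matplotlib (assumes `likes`,
# `stats_hat` and `stateseq` from the main cell above are still in scope).
# EM should increase the log-likelihood monotonically; the smoothed latent means
# are only identified up to an invertible linear transform of the true states,
# so the two traces need not overlap exactly.
plt.figure(figsize=(10, 3))
plt.subplot(1, 2, 1)
plt.plot(likes)
plt.xlabel('EM iteration')
plt.ylabel('log-likelihood')
plt.subplot(1, 2, 2)
plt.plot(stateseq[:, 0], label='true state (dim 0)')
plt.plot(stats_hat['mu_h'][:, 0], label='smoothed mean (dim 0)')
plt.legend()
plt.tight_layout()
plt.show()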