Time series modelling using autoregressive models

Here I'll model a financial time series using the autoregressive (AR) model implemented in PyMC3.

I'm following the tutorial: http://docs.pymc.io/notebooks/AR.html
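
For reference, the AR(1) model fitted below is y_t = beta * y_{t-1} + epsilon_t with epsilon_t ~ Normal(0, 1): only the coefficient beta gets a prior, while the innovation standard deviation is fixed at 1.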


In [1]:
## Imports
%matplotlib inline
import sys
sys.path.append('/apps')
## Django setup is required by the local (biospytial) environment, not by the analysis itself
import django
django.setup()
import matplotlib.pyplot as plt
import pandas as pd
import itertools as it
import numpy as np
import pymc3 as pm



In [2]:
## Workaround for a known pandas_datareader import bug (no need to modify the installed files)
## From https://stackoverflow.com/questions/50394873/import-pandas-datareader-gives-importerror-cannot-import-name-is-list-like
from datetime import datetime
pd.core.common.is_list_like = pd.api.types.is_list_like
import pandas_datareader.data as web

In [3]:
## Extract data from the OECD API

In [4]:
## Let's look at something interesting: PPP-converted GDP from the OECD
df = web.DataReader('PPPGDP', 'oecd', end=datetime(2018, 8, 1))
## Select the values for a single country
mex_pppgdp = df.xs('Mexico', level=1, axis=1)
## Select values from another level; remember the columns are hierarchical (a MultiIndex)
ppp_Gdp_global = df.xs('Purchasing Power Parities', level=0, axis=1)
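
To get a feel for what the OECD query returned, here is a quick optional sketch (the exact column labels depend on the API response):

In [ ]:
## Optional sanity check: inspect the hierarchical columns and plot the Mexico series
print(df.columns.names)
print(mex_pppgdp.head())
mex_pppgdp.plot(figsize=(12, 5), title='PPP-converted GDP, Mexico')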

In [5]:
## Extract data from iex api

# Define the instruments to download: Tesla (TSLA), the iShares S&P 500 ETF (IVV) and Cemex (CX).
tickers = ['TSLA', 'IVV', 'CX']

# Daily data from 2015-02-09 until 2018-08-15.
start = datetime(2015, 2, 9)
end = datetime(2018, 8, 15)

# Use pandas_datareader.data.DataReader to load the desired data. As simple as that.
panel_data = web.DataReader(tickers, 'iex', start, end)



In [6]:
## panel_data is a dictionary of DataFrames, one per ticker
sp5 = panel_data['IVV']
## Convert the string index returned by IEX into a proper DatetimeIndex
sp5.index = pd.DatetimeIndex(sp5.index)

In [7]:
fig = plt.figure(figsize=(12,5));
plt.plot(sp5.close)
plt.title('Observations from the S&P 500 ETF')


[Figure: daily closing price of the S&P 500 ETF (IVV)]

In [15]:
## Select the last 100 opening prices
data = sp5[-100:].open

In [9]:
## Model the series as an AR(1) process
import pymc3 as pm

In [17]:
y = data.values
tau = 1.0  # prior standard deviation for the AR coefficient
with pm.Model() as ar1:
    # Prior on the AR(1) coefficient
    beta = pm.Normal('beta', mu=0, sd=tau)
    # AR(1) likelihood on the observed series; the innovation sd is fixed at 1.0
    cosa = pm.AR('y', beta, sd=1.0, observed=y)
    trace = pm.sample(1000, cores=3, tune=100)


Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (3 chains in 3 jobs)
NUTS: [beta]
100%|██████████| 1100/1100 [00:00<00:00, 1492.82it/s]
The acceptance probability does not match the target. It is 0.9194801549874453, but should be close to 0.8. Try to increase the number of tuning steps.
The acceptance probability does not match the target. It is 0.9304276333169894, but should be close to 0.8. Try to increase the number of tuning steps.
The acceptance probability does not match the target. It is 0.9511614285452524, but should be close to 0.8. Try to increase the number of tuning steps.
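
A quick look at the posterior of the AR(1) coefficient (a minimal sketch using PyMC3's built-in summary; the numbers will vary from run to run):

In [ ]:
## Posterior summary of the AR(1) coefficient
pm.summary(trace, varnames=['beta'])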

In [70]:
pm.traceplot(trace)


[Traceplot figure: posterior trace and density of beta]

In [21]:
## Build the GP inputs: the time index as X, the prices as Y
X = np.arange(len(data.values))
Y = data.values

In [22]:
## The GP code expects column vectors of shape (n, 1)
Y = Y.reshape(Y.shape[0], 1)
X = X.reshape(X.shape[0], 1)
X = np.arange(len(X)).reshape(X.shape[0], 1)  # redundant: X is already a 0..n-1 column

In [100]:
## Model the series as a latent Gaussian process, fitted with MCMC (NUTS)
import theano.tensor as tt
from pymc3.variational.callbacks import CheckParametersConvergence
with pm.Model() as model1:

    l = pm.Gamma("l", alpha=2, beta=1)          # lengthscale prior
    n = pm.HalfCauchy("n", beta=5)              # amplitude prior
    
    cov = n ** 2 * pm.gp.cov.Matern52(1, l)     # Matern 5/2 covariance on the 1-D index
    gp = pm.gp.Latent(cov_func=cov)
    
    f = gp.prior("f", X=X)                      # latent function values at the observed inputs
    
    sigma = pm.HalfCauchy("sigma", beta=5)      # observation noise
    v = pm.Gamma("v", alpha=2, beta=0.1)        # only used by the commented-out Student-t likelihood
    
    #y_ = pm.StudentT("y",mu=f,lam=1.0/sigma,nu=v,observed=y)
    y_ = pm.Normal("y",mu=f,sd=sigma ,observed=Y)
    trace_m1 = pm.sample(100)
    #trace_m = pm.fit(method='advi', n=150000)


Only 100 samples in chain.
Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
KeyboardInterruptTraceback (most recent call last)
<ipython-input-100-35a5a0f7bc97> in <module>()
     17     #y_ = pm.StudentT("y",mu=f,lam=1.0/sigma,nu=v,observed=y)
     18     y_ = pm.Normal("y",mu=f,sd=sigma ,observed=Y)
---> 19     trace_m1 = pm.sample(100)
     20     #trace_m = pm.fit(method='advi', n=150000)

[... long traceback through pymc3's NUTS initialisation and Theano's gradient
compilation truncated; sampling was interrupted manually ...]

KeyboardInterrupt: 
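
Sampling a latent GP over 100 points with NUTS was too slow, so the run above was interrupted. Since the likelihood is Gaussian, a cheaper route is pm.gp.Marginal, which integrates the latent function out analytically. This is a sketch of that alternative, not what was run here:

In [ ]:
## Alternative: marginalise out the latent GP (valid for a Gaussian likelihood)
with pm.Model() as model1_marginal:
    l = pm.Gamma("l", alpha=2, beta=1)
    n = pm.HalfCauchy("n", beta=5)
    cov = n ** 2 * pm.gp.cov.Matern52(1, l)
    gp = pm.gp.Marginal(cov_func=cov)
    sigma = pm.HalfCauchy("sigma", beta=5)
    ## marginal_likelihood expects a 1-D array of observations
    y_ = gp.marginal_likelihood("y", X=X, y=Y.flatten(), noise=sigma)
    trace_marginal = pm.sample(500)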

In [79]:
from pymc3.gp.util import plot_gp_dist
fig = plt.figure(figsize=(12,5)); ax = fig.gca()
plot_gp_dist(ax,trace_m["f"], X);

ax.plot(X, y, 'o', color="k", ms=10);



In [103]:
## Model as a Gaussian process with an added periodic (cosine) component, sampled with NUTS (ADVI alternative commented out)
import theano.tensor as tt
from pymc3.variational.callbacks import CheckParametersConvergence
with pm.Model() as model2:

    ## Periodic things
    # prior for periodic lengthscale, or frequency
    #l_per = pm.Uniform('l_per', lower=1e-5, upper=10)
    l_per = pm.Flat('l_per')
    # uninformative prior on the periodic amplitude
    log_s2_p = pm.Flat('log_s2_p')
    s2_p = pm.Deterministic('s2_p', tt.exp(log_s2_p))

    # the periodic "signal" covariance
    signal_cov = s2_p * pm.gp.cov.Cosine(1, l_per)    
    ######
    tau = pm.HalfNormal('tau',sd=1)

    # uninformative prior on the drift amplitude
    log_s2_d = pm.Uniform('log_s2_d', lower=-10, upper=5)
    s2_d = pm.Deterministic('s2_d', tt.exp(log_s2_d))    
    
    l = pm.Gamma("l",alpha=2,beta=1)
    n = pm.HalfCauchy("n",beta=5)
    
    cov1 = (n ** 2) * pm.gp.cov.Matern52(1,l) * s2_d + tau
    cov = signal_cov + cov1
    
    gp = pm.gp.Latent(cov_func=cov)
    
    f = gp.prior("f",X=X)
    
    sigma = pm.HalfCauchy("sigma",beta=5)
    v = pm.Gamma("v",alpha=2,beta=0.1)
    
    #y_ = pm.StudentT("y",mu=f,lam=1.0/sigma,nu=v,observed=y)
    y_ = pm.Normal("y",mu=f,sd=sigma ,observed=Y)
    trace_m1 = pm.sample(100)
    #trace_m = pm.fit(method='advi', n=25500)
    #trace = trace_m.sample(draws=5000)


Only 100 samples in chain.
Auto-assigning NUTS sampler...
Initializing NUTS using jitter+adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [v_log__, sigma_log__, f_rotated_, n_log__, l_log__, log_s2_d_interval__, tau_log__, log_s2_p, l_per]
100%|██████████| 600/600 [41:16<00:00,  4.13s/it]
The chain contains only diverging samples. The model is probably misspecified.
The acceptance probability does not match the target. It is 0.01791089223126426, but should be close to 0.8. Try to increase the number of tuning steps.
There were 4 divergences after tuning. Increase `target_accept` or reparameterize.
The acceptance probability does not match the target. It is 0.947595610209237, but should be close to 0.8. Try to increase the number of tuning steps.
The gelman-rubin statistic is larger than 1.4 for some parameters. The sampler did not converge.
The number of effective samples is smaller than 10% for some parameters.
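
The divergences and the Gelman-Rubin warning above are not surprising given the completely flat priors on l_per and log_s2_p. One possible reparameterisation, sketched with hypothetical prior choices rather than anything run here, is to swap the flat priors for weakly informative ones:

In [ ]:
## Hypothetical weakly informative priors in place of pm.Flat
with pm.Model() as model2_alt:
    l_per = pm.Gamma('l_per', alpha=2, beta=0.5)   # periodic lengthscale
    s2_p = pm.HalfNormal('s2_p', sd=2.0)           # periodic amplitude
    signal_cov = s2_p * pm.gp.cov.Cosine(1, l_per)
    ## ... rest of the model as above, with cov = signal_cov + cov1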

In [104]:
from pymc3.gp.util import plot_gp_dist
fig = plt.figure(figsize=(12,5)); ax = fig.gca()
plot_gp_dist(ax,trace_m1["f"], X);

ax.plot(X, y, 'o', color="k", ms=10);



In [29]:
pm.traceplot(trace_m1)


[Traceplot figure: posterior traces and densities of the GP model parameters]

In [66]:
## Predictions
# 200 new input locations from x=0 to x=100
n_new = 200
X_new = np.linspace(0, 100, n_new)[:,None]

# add the GP conditional to the model, given the new X values
#with model1:
#    f_pred = gp.conditional("f_pred", X_new)

In [67]:
# Sample from the GP conditional distribution
# NOTE: f_pred and trace_m2 come from earlier runs in this session; the
# gp.conditional call above and the model2 sampling below are commented out here.
with model1:
    pred_samples = pm.sample_ppc(trace_m2, vars=[f_pred], samples=100)


100%|██████████| 100/100 [00:02<00:00, 41.16it/s]

In [68]:
from pymc3.gp.util import plot_gp_dist
fig = plt.figure(figsize=(12,5)); ax = fig.gca()
plot_gp_dist(ax,pred_samples["f_pred"], X_new);

ax.plot(X, y, 'o', color="k", ms=10);



In [51]:
## Model 2
## A plain Matern52 latent GP (note: this re-uses the name model2; sampling is commented out)
import theano.tensor as tt
from pymc3.variational.callbacks import CheckParametersConvergence
with pm.Model() as model2:

    l = pm.Gamma("l",alpha=2,beta=1)
    n = pm.HalfCauchy("n",beta=5)
    
    cov = n ** 2 * pm.gp.cov.Matern52(1,l)
    gp = pm.gp.Latent(cov_func=cov)
    
    f = gp.prior("f",X=X)
    
    sigma = pm.HalfCauchy("sigma",beta=5)
    #v = pm.Gamma("v",alpha=2,beta=0.1)
    
    #y_ = pm.StudentT("y",mu=f,lam=1.0/sigma,nu=v,observed=y)
    y_ = pm.Normal("y",mu=f,sd=sigma ,observed=y)
    #trace_m2 = pm.sample(100)

In [ ]:


In [18]:
from pymc3.gp.util import plot_gp_dist
fig = plt.figure(figsize=(12,5));
ax = fig.gca()
m1 = plot_gp_dist(ax, trace, X,palette='Reds');


AttributeErrorTraceback (most recent call last)
<ipython-input-18-340e71b8d138> in <module>()
      2 fig = plt.figure(figsize=(12,5));
      3 ax = fig.gca()
----> 4 m1 = plot_gp_dist(ax, trace, X,palette='Reds');

/opt/conda/envs/biospytial/lib/python2.7/site-packages/pymc3/gp/util.pyc in plot_gp_dist(ax, samples, x, plot_samples, palette)
     79     percs = np.linspace(51, 99, 40)
     80     colors = (percs - np.min(percs)) / (np.max(percs) - np.min(percs))
---> 81     samples = samples.T
     82     x = x.flatten()
     83     for i, p in enumerate(percs[::-1]):

/opt/conda/envs/biospytial/lib/python2.7/site-packages/pymc3/backends/base.pyc in __getattr__(self, name)
    338             return self.get_sampler_stats(name)
    339         raise AttributeError("'{}' object has no attribute '{}'".format(
--> 340             type(self).__name__, name))
    341 
    342     def __len__(self):

AttributeError: 'MultiTrace' object has no attribute 'T'
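
The error happens because plot_gp_dist expects an array of posterior samples of shape (n_samples, n_points), not the MultiTrace object itself; on top of that, trace here is the AR(1) trace, which contains no latent "f". A corrected call, assuming trace_m1 from the latent-GP model above, would look like:

In [ ]:
## Pass the array of latent-GP samples, not the MultiTrace
fig = plt.figure(figsize=(12, 5))
ax = fig.gca()
plot_gp_dist(ax, trace_m1["f"], X, palette='Reds')
ax.plot(X, y, 'o', color="k", ms=10)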