In [1]:
import itertools
import matplotlib.pyplot as plt
import matplotlib as mpl
from pymc3 import Model, Normal, Slice
from pymc3 import sample
from pymc3 import traceplot
from pymc3.distributions import Interpolated
import pymc3 as mc
from theano import as_op
import theano.tensor as tt
import numpy as np
from scipy import stats
import tqdm
import pandas as pd
%matplotlib inline
%load_ext version_information
%version_information pymc3, scipy
Out[1]:
Software    Version
Python      3.6.2 64bit [GCC 4.2.1 Compatible Apple LLVM 6.0 (clang-600.0.57)]
IPython     6.1.0
OS          Darwin 15.6.0 x86_64 i386 64bit
pymc3       3.1
scipy       0.19.1
Wed Sep 20 18:40:48 2017 MDT
In [2]:
np.random.seed(8675309)
x = stats.norm.rvs(loc=0, scale=1, size=100)
x = np.append(x, stats.norm.rvs(loc=4, scale=1, size=100))
plt.hist(x, 15, normed=False);
In [15]:
model = Model()
with model:
    # Priors are posterior from previous iteration
    means = mc.Uniform('means', -10, 10, shape=2)
    weights = mc.Uniform('weights', 0, 1, shape=2)
    # the next two lines were truncated in the source; a plausible completion is:
    sds = mc.Uniform('sds', 0, 100, shape=2)
    dat = mc.NormalMixture('dat', w=weights, mu=means, sd=sds, observed=x)
    # draw 10000 posterior samples
    trace = sample(10000)
Optimization terminated successfully.
Current function value: 455.810227
Iterations: 12
Function evaluations: 18
Gradient evaluations: 18
Auto-assigning NUTS sampler...
Initializing NUTS using ADVI...
Average Loss = 604.67: 100%|██████████| 10000/10000 [00:01<00:00, 6372.00it/s]
Finished [100%]: Average Loss = 604.48
100%|██████████| 2000/2000 [00:26<00:00, 75.51it/s]
In [16]:
traceplot(trace, combined=True)
Out[16]:
array([[<matplotlib.axes._subplots.AxesSubplot object at 0x1257834e0>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12306ea20>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x129896ba8>,
<matplotlib.axes._subplots.AxesSubplot object at 0x125859400>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x120cdb668>,
<matplotlib.axes._subplots.AxesSubplot object at 0x121babbe0>]], dtype=object)
In [27]:
ppc = mc.sample_ppc(trace, samples=5000, model=model, vars='dat')
0%| | 0/5000 [00:00<?, ?it/s]
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-27-3bd6137c8f85> in <module>()
----> 1 ppc = mc.sample_ppc(trace, samples=5000, model=model, vars='dat')
~/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
537 param = trace[idx]
538 for var in vars:
--> 539 vals = var.distribution.random(point=param, size=size)
540 ppc[var.name].append(vals)
541 finally:
AttributeError: 'str' object has no attribute 'distribution'
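The error above comes from passing vars as a string: sample_ppc iterates over vars, so it loops over the characters 'd', 'a', 't' and then fails because a str has no distribution attribute. A minimal sketch of the corrected call (assuming dat is the observed NormalMixture variable defined in the model above):

# vars must be a list of variable objects, not a name string
ppc = mc.sample_ppc(trace, samples=5000, model=model, vars=[dat])
ppc['dat'].shape  # one posterior-predictive draw of the data per sample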
In [62]:
model = Model()
with model:
    # Priors are posterior from previous iteration
    mean1 = mc.Uniform('mean1', [-10, -10], [10, 10], shape=2)
    # mean2 = mc.Uniform('mean2', -10, 10)
    # weight1 = mc.Uniform('weight1', 0, 1)
    # weight2 = mc.Uniform('weight2', 0, 1)
    sd1 = mc.Uniform('sd1', [0, 0], [100, 100], shape=2)
    # sd2 = mc.Uniform('sd2', 0, 100)
    # norm1 = mc.Normal('norm1', mu=mean1, sd=sd1)
    # norm2 = mc.Normal('norm2', mu=mean2, sd=sd2)
    dat = mc.Normal('dat', mu=mean1, sd=sd1, observed=x)
    # draw 10000 posterior samples
    start = mc.find_MAP()
    trace = mc.sample(1000, start=start, njobs=5, n_init=10000, tune=1000)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-62-2eb35c9879a5> in <module>()
11 # norm2 = mc.Normal('norm2', mu=mean2, sd=sd2)
12
---> 13 dat = mc.Normal('dat', mu=mean1, sd=sd1, observed=x)
14
15 # draw 10000 posterior samples
~/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/distributions/distribution.py in __new__(cls, name, *args, **kwargs)
37 total_size = kwargs.pop('total_size', None)
38 dist = cls.dist(*args, **kwargs)
---> 39 return model.Var(name, dist, data, total_size)
40 else:
41 raise TypeError("Name needs to be a string but got: {}".format(name))
~/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/model.py in Var(self, name, dist, data, total_size)
543 var = ObservedRV(name=name, data=data,
544 distribution=dist,
--> 545 total_size=total_size, model=self)
546 self.observed_RVs.append(var)
547 if var.missing_values:
~/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/model.py in __init__(self, type, owner, index, name, data, distribution, total_size, model)
968
969 self.missing_values = data.missing_values
--> 970 self.logp_elemwiset = distribution.logp(data)
971 self.total_size = total_size
972 self.model = model
~/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/distributions/continuous.py in logp(self, value)
248 mu = self.mu
249
--> 250 return bound((-tau * (value - mu)**2 + tt.log(tau / np.pi / 2.)) / 2.,
251 sd > 0)
252
~/miniconda3/envs/python3/lib/python3.6/site-packages/theano/tensor/var.py in __sub__(self, other)
145 # and the return value in that case
146 try:
--> 147 return theano.tensor.basic.sub(self, other)
148 except (NotImplementedError, AsTensorError):
149 return NotImplemented
~/miniconda3/envs/python3/lib/python3.6/site-packages/theano/gof/op.py in __call__(self, *inputs, **kwargs)
672 thunk.outputs = [storage_map[v] for v in node.outputs]
673
--> 674 required = thunk()
675 assert not required # We provided all inputs
676
~/miniconda3/envs/python3/lib/python3.6/site-packages/theano/gof/op.py in rval()
841
842 def rval():
--> 843 fill_storage()
844 for o in node.outputs:
845 compute_map[o][0] = True
~/miniconda3/envs/python3/lib/python3.6/site-packages/theano/gof/cc.py in __call__(self)
1696 print(self.error_storage, file=sys.stderr)
1697 raise
-> 1698 reraise(exc_type, exc_value, exc_trace)
1699
1700
~/miniconda3/envs/python3/lib/python3.6/site-packages/six.py in reraise(tp, value, tb)
684 if value.__traceback__ is not tb:
685 raise value.with_traceback(tb)
--> 686 raise value
687
688 else:
ValueError: Input dimension mis-match. (input[0].shape[0] = 200, input[1].shape[0] = 2)
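The ValueError arises because mu and sd have shape (2,) while the observed x has 200 entries, so Theano cannot broadcast the two shapes elementwise. A minimal sketch of one way to line up the shapes, using a hypothetical hard assignment idx of each point to a component (the next cell does this properly with a latent Categorical instead of fixed labels):

# hypothetical labels, known here only because x was simulated: first 100 points
# came from the first component, the second 100 from the second
idx = np.zeros(len(x), dtype=int)
idx[100:] = 1
with Model() as indexed_model:
    mean1 = mc.Uniform('mean1', -10, 10, shape=2)
    sd1 = mc.Uniform('sd1', 0, 100, shape=2)
    # indexing gives mu and sd one entry per observation, so the shapes match
    dat = mc.Normal('dat', mu=mean1[idx], sd=sd1[idx], observed=x)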
In [36]:
traceplot(trace, combined=True)
Out[36]:
array([[<matplotlib.axes._subplots.AxesSubplot object at 0x11e65af60>,
<matplotlib.axes._subplots.AxesSubplot object at 0x120ce7d68>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x12d6af6d8>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12d46aa20>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x12d499b70>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12e7dc1d0>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x12eaa3470>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12ea8b978>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x12f4502e8>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12ed90eb8>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x12f71c748>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12f7516d8>]], dtype=object)
In [56]:
# setup model
model = mc.Model()
with model:
    # cluster sizes
    # p = mc.Dirichlet('p', a=np.array([1., 1.]), shape=2)
    p = mc.Uniform('p', [0, 0], [1, 1], shape=2)
    # ensure all clusters have some points
    p_min_potential = mc.Potential('p_min_potential', tt.switch(tt.min(p) < .1, -np.inf, 0))
    # cluster centers
    means = mc.Normal('means', mu=[0, 0], sd=15, shape=2)
    # break symmetry
    order_means_potential = mc.Potential('order_means_potential',
                                         tt.switch(means[1] - means[0] < 0, -np.inf, 0))
    # measurement error
    sd = mc.Uniform('sd', lower=0, upper=20)
    # latent cluster of each observation
    category = mc.Categorical('category',
                              p=p,
                              shape=len(x))
    # likelihood for each observed value
    points = mc.Normal('obs',
                       mu=means[category],
                       sd=sd,
                       observed=x)
In [57]:
with model:
    # draw 10000 posterior samples
    start = mc.find_MAP()
    trace = mc.sample(4000, start=start, njobs=5, n_init=10000, tune=1000)
Warning: Desired error not necessarily achieved due to precision loss.
Current function value: 803.168733
Iterations: 0
Function evaluations: 15
Gradient evaluations: 3
Assigned NUTS to p_interval__
Assigned NUTS to means
Assigned NUTS to sd_interval__
Assigned BinaryGibbsMetropolis to category
97%|█████████▋| 4868/5000 [05:23<00:08, 15.56it/s]/Users/balarsen/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/step_methods/hmc/nuts.py:456: UserWarning: Chain 1 contains 98 diverging samples after tuning. If increasing `target_accept` does not help try to reparameterize.
% (self._chain_id, n_diverging))
98%|█████████▊| 4893/5000 [05:25<00:06, 17.70it/s]/Users/balarsen/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/step_methods/hmc/nuts.py:456: UserWarning: Chain 2 contains 97 diverging samples after tuning. If increasing `target_accept` does not help try to reparameterize.
% (self._chain_id, n_diverging))
98%|█████████▊| 4912/5000 [05:26<00:04, 20.25it/s]/Users/balarsen/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/step_methods/hmc/nuts.py:456: UserWarning: Chain 4 contains 120 diverging samples after tuning. If increasing `target_accept` does not help try to reparameterize.
% (self._chain_id, n_diverging))
98%|█████████▊| 4915/5000 [05:26<00:04, 20.53it/s]/Users/balarsen/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/step_methods/hmc/nuts.py:456: UserWarning: Chain 3 contains 98 diverging samples after tuning. If increasing `target_accept` does not help try to reparameterize.
% (self._chain_id, n_diverging))
100%|█████████▉| 4999/5000 [05:30<00:00, 21.08it/s]/Users/balarsen/miniconda3/envs/python3/lib/python3.6/site-packages/pymc3/step_methods/hmc/nuts.py:456: UserWarning: Chain 0 contains 103 diverging samples after tuning. If increasing `target_accept` does not help try to reparameterize.
% (self._chain_id, n_diverging))
100%|██████████| 5000/5000 [05:30<00:00, 15.15it/s]
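The divergence warnings above suggest raising target_accept. A minimal sketch of how that could be tried (an assumption on my part, not something run in this notebook; NUTS here covers the continuous variables and pymc3 still auto-assigns BinaryGibbsMetropolis to category):

with model:
    step = mc.NUTS(target_accept=0.9)  # default is 0.8; higher values force smaller step sizes
    trace = mc.sample(4000, step=step, njobs=5, tune=1000)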
In [58]:
traceplot(trace, combined=True)
Out[58]:
array([[<matplotlib.axes._subplots.AxesSubplot object at 0x134304128>,
<matplotlib.axes._subplots.AxesSubplot object at 0x13479e898>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x12d696908>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12b1edba8>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x122a49240>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12d24ccc0>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x125ee5cc0>,
<matplotlib.axes._subplots.AxesSubplot object at 0x12eaf1b38>]], dtype=object)
In [59]:
traceplot(trace[1000:], combined=True)
Out[59]:
array([[<matplotlib.axes._subplots.AxesSubplot object at 0x133013d68>,
<matplotlib.axes._subplots.AxesSubplot object at 0x136339fd0>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x13636db00>,
<matplotlib.axes._subplots.AxesSubplot object at 0x1363cf860>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x136823160>,
<matplotlib.axes._subplots.AxesSubplot object at 0x1368a55c0>],
[<matplotlib.axes._subplots.AxesSubplot object at 0x1368b6438>,
<matplotlib.axes._subplots.AxesSubplot object at 0x1369722e8>]], dtype=object)
In [60]:
mc.autocorrplot(trace[:], varnames=['sd'])
Out[60]:
array([[<matplotlib.axes._subplots.AxesSubplot object at 0x136412ba8>,
<matplotlib.axes._subplots.AxesSubplot object at 0x137137b00>,
<matplotlib.axes._subplots.AxesSubplot object at 0x158d3ebe0>,
<matplotlib.axes._subplots.AxesSubplot object at 0x159043fd0>,
<matplotlib.axes._subplots.AxesSubplot object at 0x15907c8d0>]], dtype=object)
In [61]:
mc.summary(trace)
means:
Mean SD MC Error 95% HPD interval
-------------------------------------------------------------------
0.111 0.119 0.001 [-0.126, 0.341]
4.025 0.125 0.001 [3.787, 4.273]
Posterior quantiles:
2.5 25 50 75 97.5
|--------------|==============|==============|--------------|
-0.122 0.032 0.110 0.191 0.348
3.779 3.941 4.025 4.110 4.268
category:
Mean SD MC Error 95% HPD interval
-------------------------------------------------------------------
0.006 0.077 0.001 [0.000, 0.000]
0.010 0.100 0.001 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.000 0.010 0.000 [0.000, 0.000]
0.000 0.010 0.000 [0.000, 0.000]
0.000 0.020 0.000 [0.000, 0.000]
0.000 0.012 0.000 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.010 0.099 0.001 [0.000, 0.000]
0.002 0.039 0.000 [0.000, 0.000]
0.009 0.093 0.001 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.000 0.012 0.000 [0.000, 0.000]
0.005 0.073 0.000 [0.000, 0.000]
0.002 0.048 0.000 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.001 0.035 0.000 [0.000, 0.000]
0.000 0.017 0.000 [0.000, 0.000]
0.000 0.016 0.000 [0.000, 0.000]
0.017 0.130 0.001 [0.000, 0.000]
0.059 0.235 0.001 [0.000, 1.000]
0.000 0.010 0.000 [0.000, 0.000]
0.001 0.038 0.000 [0.000, 0.000]
0.017 0.129 0.001 [0.000, 0.000]
0.003 0.057 0.000 [0.000, 0.000]
0.000 0.010 0.000 [0.000, 0.000]
0.002 0.041 0.000 [0.000, 0.000]
0.127 0.332 0.002 [0.000, 1.000]
0.001 0.028 0.000 [0.000, 0.000]
0.001 0.022 0.000 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.001 0.035 0.000 [0.000, 0.000]
0.037 0.190 0.001 [0.000, 0.000]
0.005 0.074 0.001 [0.000, 0.000]
0.002 0.042 0.000 [0.000, 0.000]
0.003 0.054 0.000 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.004 0.062 0.000 [0.000, 0.000]
0.003 0.057 0.000 [0.000, 0.000]
0.165 0.371 0.002 [0.000, 1.000]
0.005 0.069 0.000 [0.000, 0.000]
0.001 0.024 0.000 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.000 0.010 0.000 [0.000, 0.000]
0.003 0.057 0.000 [0.000, 0.000]
0.095 0.293 0.002 [0.000, 1.000]
0.000 0.000 0.000 [0.000, 0.000]
0.000 0.017 0.000 [0.000, 0.000]
0.000 0.010 0.000 [0.000, 0.000]
0.038 0.190 0.001 [0.000, 0.000]
0.002 0.047 0.000 [0.000, 0.000]
0.003 0.055 0.000 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.004 0.067 0.000 [0.000, 0.000]
0.000 0.014 0.000 [0.000, 0.000]
0.001 0.023 0.000 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.001 0.027 0.000 [0.000, 0.000]
0.020 0.140 0.001 [0.000, 0.000]
0.000 0.017 0.000 [0.000, 0.000]
0.135 0.342 0.002 [0.000, 1.000]
0.001 0.037 0.000 [0.000, 0.000]
0.001 0.024 0.000 [0.000, 0.000]
0.000 0.014 0.000 [0.000, 0.000]
0.001 0.032 0.000 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.004 0.061 0.000 [0.000, 0.000]
0.004 0.066 0.000 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.015 0.120 0.001 [0.000, 0.000]
0.000 0.016 0.000 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.003 0.056 0.000 [0.000, 0.000]
0.002 0.042 0.000 [0.000, 0.000]
0.000 0.012 0.000 [0.000, 0.000]
0.009 0.095 0.001 [0.000, 0.000]
0.021 0.144 0.001 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.005 0.072 0.001 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.045 0.208 0.001 [0.000, 0.000]
0.000 0.007 0.000 [0.000, 0.000]
0.001 0.025 0.000 [0.000, 0.000]
0.000 0.019 0.000 [0.000, 0.000]
0.004 0.059 0.000 [0.000, 0.000]
0.002 0.045 0.000 [0.000, 0.000]
0.000 0.016 0.000 [0.000, 0.000]
0.278 0.448 0.002 [0.000, 1.000]
0.603 0.489 0.002 [0.000, 1.000]
0.000 0.000 0.000 [0.000, 0.000]
0.338 0.473 0.002 [0.000, 1.000]
0.014 0.116 0.001 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.008 0.088 0.001 [0.000, 0.000]
0.020 0.141 0.001 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.000 0.000 0.000 [0.000, 0.000]
0.001 0.032 0.000 [0.000, 0.000]
0.024 0.152 0.001 [0.000, 0.000]
0.999 0.032 0.000 [1.000, 1.000]
0.974 0.159 0.001 [1.000, 1.000]
0.935 0.246 0.002 [0.000, 1.000]
0.994 0.079 0.001 [1.000, 1.000]
0.995 0.073 0.000 [1.000, 1.000]
0.998 0.047 0.000 [1.000, 1.000]
0.999 0.023 0.000 [1.000, 1.000]
0.997 0.053 0.000 [1.000, 1.000]
0.872 0.334 0.002 [0.000, 1.000]
0.997 0.055 0.000 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
0.990 0.098 0.001 [1.000, 1.000]
0.991 0.093 0.001 [1.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
1.000 0.012 0.000 [1.000, 1.000]
0.999 0.023 0.000 [1.000, 1.000]
0.999 0.032 0.000 [1.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
0.890 0.313 0.002 [0.000, 1.000]
0.999 0.031 0.000 [1.000, 1.000]
1.000 0.010 0.000 [1.000, 1.000]
0.999 0.023 0.000 [1.000, 1.000]
0.999 0.035 0.000 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
0.961 0.193 0.001 [1.000, 1.000]
1.000 0.014 0.000 [1.000, 1.000]
0.997 0.055 0.000 [1.000, 1.000]
0.998 0.045 0.000 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
0.988 0.111 0.001 [1.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
0.992 0.087 0.001 [1.000, 1.000]
0.578 0.494 0.002 [0.000, 1.000]
0.999 0.028 0.000 [1.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
0.999 0.027 0.000 [1.000, 1.000]
0.609 0.488 0.002 [0.000, 1.000]
0.996 0.064 0.000 [1.000, 1.000]
0.998 0.049 0.000 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
0.929 0.257 0.002 [0.000, 1.000]
0.999 0.028 0.000 [1.000, 1.000]
0.965 0.185 0.001 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
1.000 0.010 0.000 [1.000, 1.000]
0.293 0.455 0.002 [0.000, 1.000]
0.940 0.238 0.001 [0.000, 1.000]
0.991 0.092 0.001 [1.000, 1.000]
0.991 0.093 0.001 [1.000, 1.000]
0.828 0.377 0.002 [0.000, 1.000]
0.999 0.024 0.000 [1.000, 1.000]
0.985 0.120 0.001 [1.000, 1.000]
1.000 0.010 0.000 [1.000, 1.000]
0.989 0.102 0.001 [1.000, 1.000]
0.999 0.029 0.000 [1.000, 1.000]
0.465 0.499 0.002 [0.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
0.985 0.122 0.001 [1.000, 1.000]
0.999 0.030 0.000 [1.000, 1.000]
0.999 0.039 0.000 [1.000, 1.000]
0.531 0.499 0.002 [0.000, 1.000]
0.999 0.036 0.000 [1.000, 1.000]
0.986 0.116 0.001 [1.000, 1.000]
1.000 0.010 0.000 [1.000, 1.000]
1.000 0.021 0.000 [1.000, 1.000]
0.999 0.037 0.000 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
0.995 0.069 0.001 [1.000, 1.000]
1.000 0.020 0.000 [1.000, 1.000]
1.000 0.017 0.000 [1.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
0.543 0.498 0.002 [0.000, 1.000]
0.999 0.032 0.000 [1.000, 1.000]
0.433 0.496 0.002 [0.000, 1.000]
0.806 0.395 0.002 [0.000, 1.000]
1.000 0.010 0.000 [1.000, 1.000]
0.980 0.141 0.001 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
0.995 0.072 0.001 [1.000, 1.000]
0.580 0.494 0.002 [0.000, 1.000]
0.879 0.326 0.002 [0.000, 1.000]
0.307 0.461 0.002 [0.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
1.000 0.000 0.000 [1.000, 1.000]
0.986 0.118 0.001 [1.000, 1.000]
1.000 0.021 0.000 [1.000, 1.000]
0.999 0.031 0.000 [1.000, 1.000]
1.000 0.016 0.000 [1.000, 1.000]
0.859 0.348 0.002 [0.000, 1.000]
0.469 0.499 0.002 [0.000, 1.000]
0.991 0.095 0.001 [1.000, 1.000]
0.986 0.116 0.001 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
0.996 0.064 0.000 [1.000, 1.000]
1.000 0.017 0.000 [1.000, 1.000]
0.999 0.026 0.000 [1.000, 1.000]
1.000 0.010 0.000 [1.000, 1.000]
1.000 0.007 0.000 [1.000, 1.000]
Posterior quantiles:
2.5 25 50 75 97.5
|--------------|==============|==============|--------------|
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 1.000 1.000
0.000 0.000 1.000 1.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 1.000 1.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
0.000 0.000 0.000 0.000 0.000
1.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 0.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 0.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 0.000 0.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 0.000 0.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 0.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 0.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 0.000 0.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 0.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
0.000 0.000 0.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
0.000 1.000 1.000 1.000 1.000
0.000 0.000 0.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
1.000 1.000 1.000 1.000 1.000
p:
Mean SD MC Error 95% HPD interval
-------------------------------------------------------------------
0.660 0.225 0.004 [0.250, 1.000]
0.613 0.217 0.004 [0.229, 0.999]
Posterior quantiles:
2.5 25 50 75 97.5
|--------------|==============|==============|--------------|
0.198 0.493 0.692 0.851 0.982
0.182 0.453 0.634 0.785 0.969
sd:
Mean SD MC Error 95% HPD interval
-------------------------------------------------------------------
1.047 0.061 0.001 [0.934, 1.169]
Posterior quantiles:
2.5 25 50 75 97.5
|--------------|==============|==============|--------------|
0.939 1.005 1.044 1.086 1.175
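A short sketch (assuming the trace above) of turning the posterior into point estimates; the posterior mean of each category entry is the probability that the corresponding point belongs to the second cluster:

means_est = trace['means'].mean(axis=0)         # posterior means of the two cluster centers
sd_est = trace['sd'].mean()                     # posterior mean of the measurement error
labels = trace['category'].mean(axis=0) > 0.5   # hard assignment: True for points in the second cluster
print(means_est, sd_est, labels.sum())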
Content source: balarsen/pymc_learning