In [2]:
# http://twiecki.github.io/blog/2013/09/12/bayesian-glms-1/
In [3]:
from pymc3 import *
import numpy as np
import matplotlib.pyplot as plt
import spacepy.plot as spp
In [4]:
size = 200
true_intercept = 1
true_slope = 2
x = np.linspace(0, 1, size)
# y = a + b*x
true_regression_line = true_intercept + true_slope * x
# add noise
y = true_regression_line + np.random.normal(scale=.5, size=size)
data = dict(x=x, y=y)
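As a quick sanity check (not part of the original notebook), an ordinary least-squares fit should recover values close to the true intercept of 1 and slope of 2; the Bayesian estimates below can be compared against this baseline:

slope_ols, intercept_ols = np.polyfit(x, y, 1)  # degree-1 fit returns [slope, intercept]
print('OLS intercept: {:.3f}, slope: {:.3f}'.format(intercept_ols, slope_ols))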
In [5]:
fig = plt.figure(figsize=(7, 7))
ax = fig.add_subplot(111, xlabel='x', ylabel='y', title='Generated data and underlying model')
ax.plot(x, y, 'x', label='sampled data')
ax.plot(x, true_regression_line, label='true regression line', lw=2.)
plt.legend(loc=0);
In [5]:
with Model() as model:  # model specifications in PyMC3 are wrapped in a with-statement
    # Define priors
    sigma = Uniform('sigma', 0, 20)
    intercept = Normal('Intercept', 0, sd=20)
    x_coeff = Normal('x', 0, sd=20)

    # Define likelihood
    likelihood = Normal('y', mu=intercept + x_coeff * x, sd=sigma, observed=y)

    # Inference!
    start = find_MAP()  # Find starting value by optimization
    step = NUTS(scaling=start)  # Instantiate MCMC sampling algorithm
    trace = sample(2000, step, start=start, progressbar=True)  # draw 2000 posterior samples using NUTS sampling
Applied interval-transform to sigma and added transformed sigma_interval_ to model.
[-----------------100%-----------------] 2000 of 2000 complete in 4.4 sec
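The trace now holds 2000 draws for each free parameter. A minimal way to pull point estimates out of it (a sketch; the 500-sample burn-in cutoff is an arbitrary choice) is to average the samples:

burned = trace[500:]  # discard early samples as burn-in
print('Intercept ~ {:.3f}'.format(burned['Intercept'].mean()))
print('slope     ~ {:.3f}'.format(burned['x'].mean()))
print('sigma     ~ {:.3f}'.format(burned['sigma'].mean()))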
In [6]:
with Model() as model:
    # Specify the glm and pass in the data. The resulting linear model, its
    # likelihood, and all its parameters are automatically added to our model.
    glm.glm('y ~ x', data)
    step = NUTS()  # Instantiate MCMC sampling algorithm
    trace = sample(2000, step, progressbar=True)  # draw 2000 posterior samples using NUTS sampling
Applied log-transform to sd and added transformed sd_log_ to model.
[-----------------100%-----------------] 2000 of 2000 complete in 3.3 sec
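glm.glm() parses the Patsy formula 'y ~ x', builds an Intercept and an x coefficient with default priors plus a noise term sd (the log-transform message above refers to that sd), and registers everything on the enclosing model. One way to confirm what was created (a sketch) is to list the trace's variable names:

print(trace.varnames)  # expect something like ['Intercept', 'x', 'sd_log_', 'sd']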
In [7]:
plt.figure(figsize=(7, 7))
traceplot(trace)
plt.tight_layout();
In [8]:
plt.figure(figsize=(7, 7))
plt.plot(x, y, 'x', label='data')
glm.plot_posterior_predictive(trace, samples=100,
                              label='posterior predictive regression lines')
plt.plot(x, true_regression_line, label='true regression line', lw=3., c='y')
plt.title('Posterior predictive regression lines')
plt.legend(loc=0)
plt.xlabel('x')
plt.ylabel('y');
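glm.plot_posterior_predictive works by drawing random samples from the trace and plotting the regression line each sample implies. A hand-rolled equivalent (a sketch, assuming the Intercept and x names created by the formula) makes the mechanism explicit:

plt.figure(figsize=(7, 7))
plt.plot(x, y, 'x', label='data')
for i in np.random.randint(0, len(trace), 100):
    # each posterior draw defines one candidate regression line
    plt.plot(x, trace['Intercept'][i] + trace['x'][i] * x, c='k', alpha=.05)
plt.xlabel('x')
plt.ylabel('y');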
In [8]:
with Model() as model:  # model specifications in PyMC3 are wrapped in a with-statement
    # Define priors
    sigma = Uniform('sigma', 0, 20)
    # NOTE: one intercept per data point -- deliberately overparameterized
    intercept = Normal('Intercept', 0, sd=20, shape=(len(y),))
    x_coeff = Normal('x', 0, sd=20)

    # Define likelihood
    likelihood = Normal('y', mu=intercept + x_coeff * x, sd=sigma, observed=y)

    # Inference!
    start = find_MAP()  # Find starting value by optimization
    step = NUTS(scaling=start)  # Instantiate MCMC sampling algorithm
    trace = sample(2000, step, start=start, progressbar=True)  # draw 2000 posterior samples using NUTS sampling

plt.figure(figsize=(7, 7))
traceplot(trace)
plt.tight_layout();
Applied interval-transform to sigma and added transformed sigma_interval_ to model.
[-----------------100%-----------------] 2000 of 2000 complete in 40161.2 sec
In [15]:
summary(trace)
Intercept:

  Mean             SD               MC Error         95% HPD interval
  -------------------------------------------------------------------
  1.318            0.000            0.000            [1.318, 1.318]
  0.881            0.004            0.000            [0.874, 0.887]
  1.288            0.007            0.001            [1.273, 1.299]
  ... (197 further rows, one intercept per data point, elided)

  Posterior quantiles:
  2.5            25             50             75             97.5
  |--------------|==============|==============|--------------|
  1.318          1.318          1.318          1.318          1.318
  0.874          0.880          0.882          0.883          0.887
  1.273          1.286          1.289          1.291          1.299
  ... (197 further rows elided)

x:

  Mean             SD               MC Error         95% HPD interval
  -------------------------------------------------------------------
  4.652            0.714            0.071            [3.534, 6.102]

  Posterior quantiles:
  2.5            25             50             75             97.5
  |--------------|==============|==============|--------------|
  3.526          4.309          4.556          4.843          6.102

sigma:

  Mean             SD               MC Error         95% HPD interval
  -------------------------------------------------------------------
  0.000            0.000            0.000            [0.000, 0.000]

  Posterior quantiles:
  2.5            25             50             75             97.5
  |--------------|==============|==============|--------------|
  0.000          0.000          0.000          0.000          0.000
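The summary makes the pathology of this model plain: with one intercept per observation the regression can pass through every data point exactly, so sigma collapses to (numerically) zero and the slope is poorly identified (mean 4.652 versus a true value of 2), which is also why sampling took ~40,000 seconds instead of ~4. A quick shape check (a sketch) shows the 200 free location parameters responsible:

print(trace['Intercept'].shape)  # (2000, 200): one intercept column per data point
print(trace['sigma'].max())      # effectively zero -- the noise term has collapsed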
In [10]:
plt.figure(figsize=(7, 7))
plt.plot(x, y, 'x', label='data')
plt.plot(x, true_regression_line, label='true regression line', lw=3., c='y')
plt.title('Posterior predictive regression lines')
plt.legend(loc=0)
plt.xlabel('x')
plt.ylabel('y');
In [18]:
ppc = sample_ppc(trace, samples=500, model=model, size=100)
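sample_ppc returns a dict keyed by the observed variable's name ('y' here); with samples=500 and size=100 it holds 500 x 100 replicated datasets, each of length 200. A minimal use of it (a sketch) is to compare the posterior predictive mean with the observed data:

y_ppc = ppc['y'].reshape(-1, len(x))  # flatten the sample and size axes
plt.figure(figsize=(7, 7))
plt.plot(x, y, 'x', label='data')
plt.plot(x, y_ppc.mean(axis=0), lw=2., label='posterior predictive mean')
plt.legend(loc=0);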
Content source: balarsen/pymc_learning