In [68]:
!date
In [69]:
import matplotlib.pyplot as plt
import numpy as np
import scipy
import spacepy.toolbox as tb
import spacepy.plot as spp
from pprint import pprint
import pymc
%matplotlib inline
I am looking to work out how to handle Bayesian estimation in a rate counting system.
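As a sketch of what the cells below simulate (the symbols are named after the variables used later in the model): each synthetic count is a foreground Binomial draw plus a background Binomial draw plus a constant pedestal, $y_i \sim \mathrm{Binomial}(\mathrm{foreN}, \mathrm{foreP}) + \mathrm{Binomial}(\mathrm{backN}, \mathrm{backP}) + \alpha$, with foreN=100, foreP=0.5, backN=1000, backP=0.01, and alpha=10 in the simulation below.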
In [70]:
samples = tb.logspace(1, 10000, 10).astype(int)  # ensemble sizes, log-spaced from 1 to 10000
In [71]:
fore = [np.random.binomial(100, 0.50, size=v) for v in samples]
print(samples)
for v in fore[::-1]:
    h = np.histogram(v, 20)
    plt.hist(v, 20, label='{0}'.format(len(v)))
plt.yscale('log')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.title('Foreground')
Out[71]:
In [72]:
back = [np.random.binomial(1e3, 0.01, size=v) for v in samples]
for v in back[::-1]:
    h = np.histogram(v, 20)
    plt.hist(v, 20, label='{0}'.format(len(v)))
plt.yscale('log')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.title('Background')
Out[72]:
In [73]:
alpha = 10.0  # make the same as the center of the Background, no real reason
In [74]:
syn = []
for i in range(len(fore)):
    syn.append(fore[i] + back[i] + alpha)
syn[5]
Out[74]:
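As a quick sanity check (just a sketch using the arrays already defined above), the mean synthetic count should sit near 100*0.5 + 1000*0.01 + alpha = 70.
In [ ]:
# expected level from the simulation inputs: n_fore*p_fore + n_back*p_back + alpha
expected = 100 * 0.5 + 1e3 * 0.01 + alpha
print(expected, np.mean(syn[-1]))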
Now it is time to create the Bayesian model that will estimate these parameters.
Matching the above we will have:
In [83]:
# flat priors on the number of trials and success probability for each channel
foreN = pymc.Uniform('foreN', lower=0, upper=1e6)
backN = pymc.Uniform('backN', lower=0, upper=1e6)  # , observed=True, value=[1e3], plot=True)
foreP = pymc.Uniform('foreP', lower=0, upper=1)  # , observed=True, value=[0.5, 0.4, 0.55])
backP = pymc.Uniform('backP', lower=0, upper=1)
# the observed counts from the largest ensemble, modeled as Binomial draws
foreB = pymc.Binomial('fore', n=foreN, p=foreP, observed=True, value=fore[-1])
backB = pymc.Binomial('back', n=backN, p=backP, observed=True, value=back[-1])
# @pymc.stochastic
# def alphaB(value=100):
# return value
M = pymc.MCMC([foreB, backB, foreN, backN, foreP, backP])
In [84]:
M.sample(iter=20000, burn=1000, thin=4)
pymc.Matplot.plot(M)
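The traces can also be pulled out directly and compared against the simulation inputs; this is a sketch using the standard PyMC 2 trace accessor and assumes the sampler above has already run.
In [ ]:
# posterior samples for the two success probabilities
forePtrace = M.trace('foreP')[:]
backPtrace = M.trace('backP')[:]
plt.hist(forePtrace, 30, alpha=0.5, label='foreP')
plt.hist(backPtrace, 30, alpha=0.5, label='backP')
plt.legend()
print(forePtrace.mean(), backPtrace.mean())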
In [85]:
pprint(M.stats())
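Point estimates can be pulled out of the stats dictionary as well; a minimal sketch, assuming the usual PyMC 2 keys ('mean' and '95% HPD interval').
In [ ]:
# posterior mean and 95% HPD interval for each free parameter
s = M.stats()
for name in ('foreN', 'foreP', 'backN', 'backP'):
    print(name, s[name]['mean'], s[name]['95% HPD interval'])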