In [1]:
import matplotlib
# Render matplotlib figures inline in the notebook output
%matplotlib inline
In [2]:
from threeML import *
In [3]:
# List the instrument plugins shipped with this 3ML installation,
# so we know which data types we can load below
get_available_plugins()
In [4]:
# Make the os dependency explicit instead of relying on the name leaking
# in through `from threeML import *`
import os

#The following definitions are for convenience
triggerName = 'bn090217206'
# Source position (right ascension / declination)
ra = 204.9
dec = -8.4
#Data are in the current directory
datadir = os.path.abspath('.')
In [5]:
#LAT data are under ./data.
#These files have been downloaded from the LAT public data server,
#and have been prepared for the analysis with the official Fermi software.
#In the future, both these operations will be handled by the LAT plugin
eventFile = os.path.join( datadir, 'lat', 'gll_ft1_tr_bn090217206_v00_filt.fit' )
ft2File = os.path.join( datadir, 'lat', 'gll_ft2_tr_bn090217206_v00.fit' )
#The following files have been prepared with LAT standard software. In the future,
#it will be possible to produce them directly using the plugin
expomap = os.path.join( datadir, 'lat', 'gll_ft1_tr_bn090217206_v00_filt_expomap.fit' )
ltcube = os.path.join( datadir, 'lat', 'gll_ft1_tr_bn090217206_v00_filt_ltcube.fit' )
#Let's create an instance of the plugin, if it is available
#(the original cell had lost the indentation of the if/else bodies,
#which made it a SyntaxError; restored here)
if is_plugin_available("FermiLATLike"):
    # 'unbinned' selects the unbinned likelihood analysis
    LAT = FermiLATLike( "LAT", eventFile, ft2File, ltcube, 'unbinned', expomap )
else:
    print("Plugin for Fermi/LAT is not available")
In [6]:
#The .pha, .bak and .rsp files have been prepared with the Fermi
#official software. In the future it will be possible to create
#them directly from the plugin
#(a stray "2.3" fused onto this comment in the original has been removed)

def _make_gbm_plugin(plugin_name, detector, *energy_ranges):
    """Create a FermiGBMLike plugin for one GBM detector and select its channels.

    plugin_name   : name for the plugin instance (e.g. "NaI6")
    detector      : detector tag appearing in the data file names (e.g. "n6")
    energy_ranges : one or more "emin-emax" strings (keV), forwarded to
                    setActiveMeasurements
    """
    #Data files (the {1} suffix selects the first spectrum in the PHA-II files)
    obs = os.path.join(datadir, "bn090217206_%s_srcspectra.pha{1}" % detector)
    bak = os.path.join(datadir, "bn090217206_%s_bkgspectra.bak{1}" % detector)
    rsp = os.path.join(datadir, "bn090217206_%s_weightedrsp.rsp{1}" % detector)
    plugin = FermiGBMLike(plugin_name, obs, bak, rsp)
    plugin.setActiveMeasurements(*energy_ranges)
    return plugin

#NaI detectors: exclude the energy range from 30 to 40 keV to avoid the
#k-edge, as well as anything above 950 keV, where the calibration is uncertain
NaI6 = _make_gbm_plugin("NaI6", "n6", "10.0-30.0", "40.0-950.0")
NaI9 = _make_gbm_plugin("NaI9", "n9", "10.0-30.0", "40.0-950.0")
#BGO detector: use channels from 200 keV to 10 MeV
BGO1 = _make_gbm_plugin("BGO1", "b1", "200-10000")
In [7]:
#This declares which data we want to use. In our case, all that we have already created.
#NOTE(review): LAT is only defined when the FermiLATLike plugin was available
#in the earlier cell; otherwise this line raises a NameError
data_list = DataList( NaI6, NaI9, BGO1, LAT )
In [8]:
#Let's use a Band model, a phenomenological model typically used for GRBs
#(Band comes from the threeML star import at the top of the notebook)
band = Band()
#Let's have a look at what we just created
print(band)
In [9]:
#We can modify the initial values for the parameters,
#as well as their bounds and the delta,
#like this:
band.alpha = -0.8
band.alpha.setBounds(-2,2)
#NOTE(review): the delta is presumably the initial step size the minimizer
#uses when varying this parameter -- confirm against the 3ML parameter docs
band.alpha.setDelta(0.08)
#We could also use this:
# band.alpha.fix()
#to fix a parameter
#Let's verify that the changes have been applied
print(band)
In [10]:
#The GRB is a point source. Let's create its model. We will use triggerName as
#its name, and the position declared at the beginning, as well as the band
#model we just modified as its spectrum
#(ra/dec are presumably J2000 coordinates in decimal degrees -- confirm)
GRB = PointSource( triggerName, ra, dec, band )
#Let's have a look at what we just created
print(GRB)
Now define the likelihood model. The likelihood model can have as many sources as you need to model your region of interest. In this case we use only one source (the GRB). It is important to note that the background for the different instruments is handled by the respective plugins, and must therefore not be included here.
In [11]:
#Collect our only source into the likelihood model; the instrumental
#backgrounds are handled by the respective plugins and are not listed here
model = LikelihoodModel( GRB )
#We could define as many sources (pointlike or extended) as we need, and
#add them to the model as:
# model = LikelihoodModel ( GRB, OtherSource, OtherSource2, etc ...)
In [12]:
#This will create the object which will allow to fit
#the model.
#We need to pass in the model we want to fit, as well as the
#data we want to use in the fit (through the datalist created
#before)
jl = JointLikelihood( model, data_list )
#During initialization, you might see
#messages from the plugins while they set up their
#interpretation of the model
#During initialization, you might see
#messages from the plugins while they set up their
#interpretation of the model
In [13]:
#As easy as it gets!
#NOTE(review): pre_fit=True presumably performs a preliminary coarse fit
#before the final minimization -- confirm against the JointLikelihood.fit docs
res = jl.fit(pre_fit=True)
In [14]:
#Now let's compute the errors on the best fit parameters
#(the result is stored in res for later inspection)
res = jl.get_errors()
In [15]:
#We might also want to look at the profile of the likelihood for
#each parameter.
#Arguments appear to be: source name, parameter name, scan minimum,
#scan maximum, number of steps -- confirm against the 3ML docs
res = jl.get_contours('bn090217206','alpha',-0.9,-0.7,20)
In [16]:
#Or we might want to produce a contour plot
#(the five-argument pattern from the previous cell, repeated once per
#parameter of the 2D grid: here alpha vs. beta)
res = jl.get_contours('bn090217206','alpha',-0.9,-0.7,20,'bn090217206','beta',-3.0,-2.4,20)
In [17]:
#Print the (now best-fit) spectral model of the GRB source
print(GRB.spectralModel)
In [18]:
#Set up a Bayesian analysis reusing the same model and data list
#employed in the maximum-likelihood fit above
bayes = BayesianAnalysis(model, data_list)
In [19]:
# Note that n_samples is the number of samples *per walker*, so you will get n_samples * n_walkers samples
# at the end (here: 20 * 1000 = 20000), taken after a burn-in phase of 100 steps
samples = bayes.sample(n_walkers=20,burn_in=100, n_samples=1000)
In [20]:
#Summarize the posterior as credible intervals, returned as a
#nested dictionary (see the access pattern in the next cells)
credible_intervals = bayes.get_credible_intervals()
The dictionary can be used to access the numbers for later use:
In [21]:
# Get the lower bound, upper bound and median of the credible interval
# for the alpha parameter of our source
alpha_interval = credible_intervals['bn090217206']['alpha']
alpha_lower_bound = alpha_interval['lower bound']
alpha_upper_bound = alpha_interval['upper bound']
alpha_median = alpha_interval['median']
print("Credible interval for alpha: %s - %s" % (alpha_lower_bound, alpha_upper_bound))
print("Median for alpha: %s" % alpha_median)
You can also access the samples by using a dictionary access, such as:
In [22]:
# Posterior samples for alpha, keyed by source name and then parameter name
alpha_samples = bayes.samples['bn090217206']['alpha']
or get all the samples as a matrix, useful if you want to use third-party software for further processing:
In [23]:
#All samples as one matrix; presumably one row per sample and one
#column per parameter -- the printed shape below will confirm
my_samples = bayes.raw_samples
print(my_samples.shape)
We can also easily produce a triangle plot which shows all the monodimensional and bidimensional marginal distributions, with the latter being the equivalent of contour plots in frequentist analysis:
In [24]:
# Corner plot: 1D and 2D marginal posterior distributions for the free parameters
# (red lines in the marginal distributions are the priors)
corner_figure = bayes.corner_plot()