In [1]:
from threeML import *
import matplotlib
%matplotlib inline
In [2]:
# Interrogate the 3FGL catalog for all sources within a 20.0 deg radius of the Crab.
# NOTE(review): this likely queries a remote catalog service the first time — confirm
# network access is available when re-running.
lat_catalog = FermiLATSourceCatalog()
ra, dec, table = lat_catalog.search_around_source("Crab", radius=20.0)
# Bare last expression -> rich display of the results table
table
Out[2]:
In [3]:
# This gets a 3ML model (a Model instance) from the table above, where every source
# in the 3FGL becomes a Source instance. Note that by default all parameters of all
# sources are fixed
model = lat_catalog.get_model()
# Free only the normalization parameters of point sources within 3 deg of the
# center; everything else stays fixed to keep the fit tractable
model.free_point_sources_within_radius(3.0, normalization_only=True)
model.display()
In [4]:
# But then let's fix the synchrotron and the IC components of the Crab
# (cannot fit them with just one day of data).
# The next two lines show the two equivalent ways to address a parameter:
# dictionary-style bracket access with the full path string, and attribute access.
model['Crab_IC.spectrum.main.Powerlaw.K'].fix = True
model.Crab_synch.spectrum.main.Powerlaw.K.fix = True
# However, let's free the spectral index of the Crab pulsar component
model.PSR_J0534p2200.spectrum.main.Cutoff_powerlaw.index.free = True
model.display()
In [5]:
# Download data from Jan 01 2010 to Feb 01 2010 (one month)
tstart = '2010-01-01 00:00:00'
tstop = '2010-02-01 00:00:00'
# Note that this will understand if you already downloaded these files, and will
# not do it twice unless you change your selection or the outdir
evfile, scfile = download_LAT_data(ra, dec, 20.0,
tstart, tstop, time_type='Gregorian',
destination_directory='Crab_data')
In [6]:
# Build a basic Fermipy configuration from the downloaded files and the
# search position; this is a starting point that can be customized below
config = FermipyLike.get_basic_config(evfile=evfile, scfile=scfile, ra=ra, dec=dec)
# See what we just got
config.display()
In [7]:
# You can of course modify the configuration as a dictionary;
# here we raise the maximum energy of the selection to 300 GeV (in MeV)
config['selection']['emax'] = 300000.0
# and even add sections: disable the energy-dispersion correction in gtlike
config['gtlike'] = {'edisp': False}
config.display()
In [8]:
# Let's create an instance of the plugin.
# Note that here no processing is made, because fermipy still doesn't know
# about the model you want to use
LAT = FermipyLike("LAT", config)
# The plugin modifies the configuration as needed to get the output files
# in a unique place, which will stay the same as long as your selection does not change
config.display()
In [9]:
# Wrap the plugin in a DataList (3ML supports multiple datasets at once)
data = DataList(LAT)
# Here is where the fermipy processing happens (the .setup method);
# this step can take a while because it runs the event selection and binning
jl = JointLikelihood(model, data)
In [10]:
# Use the ROOT minimizer for the maximum-likelihood fit
jl.set_minimizer("ROOT")
res = jl.fit()
In [11]:
# Now let's compute the errors on the best fit parameters
res = jl.get_errors()
In [12]:
# We might also want to look at the profile of the likelihood for each parameter:
# scan the Crab pulsar spectral index over [-2.2, -1.4] with 30 steps
res = jl.get_contours(model.PSR_J0534p2200.spectrum.main.Cutoff_powerlaw.index,-2.2,-1.4,30)
In [14]:
# The last element of the returned tuple is the profile figure — display it
res[-1]
Out[14]:
In [15]:
#Or we might want to produce a contour plot
%prun jl.get_contours('PSR_J0534p2200.spectrum.main.Cutoff_powerlaw.K',0.3e-12,1e-12, 10, 'PSR_J0534p2200.spectrum.main.Cutoff_powerlaw.index',-2.3,-1.3, 10)
In [12]:
# Display the figure from the most recent contour computation
res[-1]
Out[12]:
In [ ]:
# We can also do a Bayesian analysis.
# This will set priors based on the current defined min-max of each parameter
# (log-uniform for normalizations, uniform for the index)
model.PSR_J0534p2200.spectrum.main.Cutoff_powerlaw.K.set_uninformative_prior(Log_uniform_prior)
model.PSR_J0534p2200.spectrum.main.Cutoff_powerlaw.index.set_uninformative_prior(Uniform_prior)
# Catalog source names starting with a digit get a leading underscore in 3ML
model._3FGL_J0526d4p2247.spectrum.main.Powerlaw.K.set_uninformative_prior(Log_uniform_prior)
model._3FGL_J0544d7p2239.spectrum.main.Powerlaw.K.set_uninformative_prior(Log_uniform_prior)
In [15]:
bayes = BayesianAnalysis(model, data)
# Sample with emcee.
# Note that n_samples is the number of samples *per walker*, so you will get
# n_samples * n_walkers samples at the end
samples = bayes.sample(n_walkers=20,burn_in=100, n_samples=1000)
In [16]:
# Summarize the posterior with credible intervals for each free parameter
credible_intervals = bayes.get_credible_intervals()
In [17]:
# Corner plot of the marginal and joint posterior distributions
# (red lines in the marginal distributions are the priors)
corner_figure = bayes.corner_plot()
In [ ]: