Here we verify that the algorithm used for dead time filtering behaves as expected.
We also compare the results with those of the paralyzable dead time algorithm, for reference.
In [1]:
%load_ext autoreload
%autoreload 2
%matplotlib notebook
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib.gridspec import GridSpec
import matplotlib as mpl
sns.set_context('talk')
sns.set_style("whitegrid")
sns.set_palette("colorblind")
mpl.rcParams['font.size'] = 18.0
mpl.rcParams['xtick.labelsize'] = 18.0
mpl.rcParams['ytick.labelsize'] = 18.0
mpl.rcParams['axes.labelsize'] = 18.0
from hendrics.fake import filter_for_deadtime
import numpy as np
np.random.seed(1209432)
In [2]:
def simulate_events(rate, length, deadtime=2.5e-3, **filter_kwargs):
    # Simulate a uniform (Poisson) event list, then filter it for dead time
    events = np.random.uniform(0, length, int(rate * length))
    events = np.sort(events)
    events_dt = filter_for_deadtime(events, deadtime, **filter_kwargs)
    return events, events_dt
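For reference, the behavior under test can be summarized by the following pure-NumPy sketch. This is only an illustration of the expected logic, not the actual hendrics.fake.filter_for_deadtime implementation:
def filter_deadtime_sketch(events, deadtime, paralyzable=False):
    # Illustrative reference logic for dead time filtering (a sketch)
    events = np.asarray(events)
    if paralyzable:
        # Every arriving event restarts the dead time: an event is recorded
        # only if it comes more than `deadtime` after the previous arrival,
        # whether that arrival was recorded or not
        mask = np.concatenate(([True], np.diff(events) > deadtime))
        return events[mask]
    # Non-paralyzable: only *recorded* events open a dead time window
    filtered = []
    last_recorded = -np.inf
    for t in events:
        if t - last_recorded > deadtime:
            filtered.append(t)
            last_recorded = t
    return np.asarray(filtered)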
In [3]:
rate = 1000
length = 1000
events, events_dt = simulate_events(rate, length)
diff = np.diff(events)
diff_dt = np.diff(events_dt)
In [4]:
dt = 2.5e-3/20 # an exact fraction of deadtime
bins = np.arange(0, np.max(diff), dt)
hist = np.histogram(diff, bins=bins, density=True)[0]
hist_dt = np.histogram(diff_dt, bins=bins, density=True)[0]
bins_mean = bins[:-1] + dt/2
plt.figure()
plt.title('Non-Paralyzable dead time')
plt.fill_between(bins_mean, 0, hist, alpha=0.5, label='No dead time');
plt.fill_between(bins_mean, 0, hist_dt, alpha=0.5, label='With dead time');
plt.xlim([0, 0.02]);
# plt.ylim([0, 100]);
plt.axvline(2.5e-3, color='r', ls='--')
plt.xlabel(r'Time between subsequent photons $T_{i+1} - T_{i}$')
plt.ylabel('Probability density')
plt.legend();
As expected, the distribution of the time between consecutive detected events follows an exponential distribution cut at the 2.5 ms dead time.
The measured rate is expected to go as $$r_{det} = \frac{r_{in}}{1 + r_{in}\tau_d}$$ (Zhang+95, eq. 29). Let's check it.
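For example, with $r_{in} = 1000$ ct/s and $\tau_d = 2.5$ ms, the formula gives $r_{det} = 1000 / (1 + 2.5) \approx 286$ ct/s.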
In [5]:
plt.figure()
plt.title('Non-Paralyzable dead time - input rate {} ct/s'.format(rate))
deadtimes = np.arange(0, 0.015, 0.0005)
deadtimes_plot = np.arange(0, 0.015, 0.0001)
for d in deadtimes:
    events_dt = filter_for_deadtime(events, d)
    new_rate = len(events_dt) / length
    plt.scatter(d, new_rate, color='b')
plt.plot(deadtimes_plot, rate / (1 + rate * deadtimes_plot),
         label=r'$\frac{r_{in}}{1 + r_{in}\tau_d}$')
plt.xlim([0, None])
plt.xlabel('Dead time')
plt.ylabel('Output rate')
plt.semilogy()
plt.legend();
In [6]:
rate = 1000
length = 1000
events, events_dt = simulate_events(rate, length, paralyzable=True)
diff = np.diff(events)
diff_dt = np.diff(events_dt)
In [7]:
dt = 2.5e-3/20 # an exact fraction of deadtime
bins = np.arange(0, np.max(diff_dt), dt)
hist = np.histogram(diff, bins=bins, density=True)[0]
hist_dt = np.histogram(diff_dt, bins=bins, density=True)[0]
bins_mean = bins[:-1] + dt/2
plt.figure()
plt.title('Paralyzable dead time')
plt.fill_between(bins_mean, 0, hist, alpha=0.5, label='No dead time');
plt.fill_between(bins_mean, 0, hist_dt, alpha=0.5, label='With dead time');
plt.xlim([0, 0.02]);
# plt.ylim([0, 100]);
plt.axvline(2.5e-3, color='r', ls='--')
plt.xlabel(r'Time between subsequent photons $T_{i+1} - T_{i}$')
plt.ylabel('Probability density')
plt.legend();
Paralyzable dead time, instead, produces a distribution of the time between consecutive counts that plateaus between $\tau_d$ and $2\tau_d$, then decreases. The exact form is complicated.
The measured rate is expected to go as $$r_{det} = r_{in}e^{-r_{in}\tau_d}$$ (Zhang+95, eq. 16). Let's check it.
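For example, with $r_{in} = 1000$ ct/s and $\tau_d = 2.5$ ms, the formula gives $r_{det} = 1000\,e^{-2.5} \approx 82$ ct/s, much lower than the $\approx 286$ ct/s of the non-paralyzable case.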
In [8]:
plt.figure()
plt.title('Paralyzable dead time - input rate {} ct/s'.format(rate))
deadtimes = np.arange(0, 0.008, 0.0005)
deadtimes_plot = np.arange(0, 0.008, 0.0001)
for d in deadtimes:
    events_dt = filter_for_deadtime(events, d, paralyzable=True)
    new_rate = len(events_dt) / length
    plt.scatter(d, new_rate, color='b')
plt.plot(deadtimes_plot, rate * np.exp(-rate * deadtimes_plot),
         label=r'$r_{in}e^{-r_{in}\tau_d}$')
plt.xlim([0, None])
plt.xlabel('Dead time')
plt.ylabel('Output rate')
plt.semilogy()
plt.legend();
Perfect.
In [9]:
from stingray.lightcurve import Lightcurve
from stingray.powerspectrum import AveragedPowerspectrum
import tqdm
import deadtime_model_zhang as dz
nevents = 200000
rates = np.logspace(2, np.log10(3000), 6)
bintime = 0.001
deadtime = 2.5e-3
plt.figure()
plt.title(f'bin time = 1 ms; dead time = 2.5 ms')
for r in tqdm.tqdm(rates):
    label = f'{r:.0f} ct/s'
    length = nevents / r
    events, events_dt = simulate_events(r, length)
    # lc = Lightcurve.make_lightcurve(events, 1/4096, tstart=0, tseg=length)
    lc_dt = Lightcurve.make_lightcurve(events_dt, bintime, tstart=0, tseg=length)
    pds = AveragedPowerspectrum(lc_dt, 2, norm='leahy')
    plt.plot(pds.freq, pds.power, label=label)
    zh_f, zh_p = dz.pds_model_zhang(1000, r, deadtime, bintime)
    # Label the model curve only on the first iteration, to avoid
    # duplicate legend entries
    plt.plot(zh_f, zh_p, color='b',
             label='Zhang+95 prediction' if r == rates[0] else None)
plt.axhline(2, ls='--')
plt.xlabel('Frequency (Hz)')
plt.ylabel('Power (Leahy)')
plt.legend();
In [16]:
from stingray.lightcurve import Lightcurve
from stingray.powerspectrum import AveragedPowerspectrum
import tqdm
import deadtime_model_zhang as dz
nevents = 200000
rates = np.logspace(2, 3, 5)
deadtime = 2.5e-3
bintime = 2 * deadtime
plt.figure()
plt.title(f'bin time = 5 ms; dead time = 2.5 ms')
for r in tqdm.tqdm(rates):
    label = f'{r:.0f} ct/s'
    length = nevents / r
    events, events_dt = simulate_events(r, length)
    # lc = Lightcurve.make_lightcurve(events, 1/4096, tstart=0, tseg=length)
    lc_dt = Lightcurve.make_lightcurve(events_dt, bintime, tstart=0, tseg=length)
    pds = AveragedPowerspectrum(lc_dt, 2, norm='leahy')
    plt.plot(pds.freq, pds.power, label=label)
    zh_f, zh_p = dz.pds_model_zhang(2000, r, deadtime, bintime)
    # Label the model curve only on the first iteration, to avoid
    # duplicate legend entries
    plt.plot(zh_f, zh_p, color='b',
             label='Zhang+95 prediction' if r == rates[0] else None)
plt.axhline(2, ls='--')
plt.xlabel('Frequency (Hz)')
plt.ylabel('Power (Leahy)')
plt.legend();
In [17]:
from stingray.lightcurve import Lightcurve
from stingray.powerspectrum import AveragedPowerspectrum
import tqdm
import deadtime_model_zhang as dz
bintime = 1e-6
deadtime = 1e-5
length = 40
fftlen = 0.01
plt.figure()
plt.title(f'bin time = 1 us; dead time = 10 us')
r = 20000
label = f'{r} ct/s'
events, events_dt = simulate_events(r, length, deadtime=deadtime)
# lc = Lightcurve.make_lightcurve(events, 1/4096, tstart=0, tseg=length)
lc_dt = Lightcurve.make_lightcurve(events_dt, bintime, tstart=0, tseg=length)
pds = AveragedPowerspectrum(lc_dt, fftlen, norm='leahy')
plt.plot(pds.freq / 1000, pds.power, label=label, drawstyle='steps-mid')
zh_f, zh_p = dz.pds_model_zhang(2000, r, deadtime, bintime)
plt.plot(zh_f / 1000, zh_p, color='r', label='Zhang+95 prediction', zorder=10)
plt.axhline(2, ls='--')
plt.xlabel('Frequency (kHz)')
plt.ylabel('Power (Leahy)')
plt.legend();
Ok.
An additional note on the Zhang model: it is a numerical model, with multiple nested summations that are prone to numerical errors. The assumptions made in the Zhang paper (along the lines of "in practice the number of terms needed is very small…") hold for the RXTE case, where the incident rate was low compared to 1/dead time. This is true in the simulation in figure 4 of Zhang+95: 20,000 ct/s incident rate versus 1/dead time = 100,000 ct/s. However, it is not true for NuSTAR, depicted in the simulation below, where the incident rate (2,000 ct/s) is much larger than 1/dead time (400 ct/s). A thorough study of the trade-off between the required precision (which implies summing more terms) and the growth of numerical errors would be needed; this is a long procedure, and I did not go into such detail. This is the reason for the "wiggles" visible in the red model curve in the plot below.
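A quick back-of-the-envelope way to compare the two regimes is the dimensionless product $r_{in}\tau_d$; the numbers below are just this illustrative check, not part of the simulation:
# The Zhang+95 summations converge quickly when r_in * tau_d << 1
# and slowly when it is of order unity or larger
print(20000 * 1e-5)   # RXTE-like case of Zhang+95 fig. 4: 0.2
print(2000 * 2.5e-3)  # NuSTAR-like case simulated below: 5.0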
In [25]:
bintime = 1/4096
deadtime = 2.5e-3
length = 8000
fftlen = 5
r = 2000
plt.figure()
plt.title(f'bin time = {bintime} s; dead time = {deadtime} s')
label = f'{r} ct/s'
events, events_dt = simulate_events(r, length, deadtime=deadtime)
# lc = Lightcurve.make_lightcurve(events, 1/4096, tstart=0, tseg=length)
lc_dt = Lightcurve.make_lightcurve(events_dt, bintime, tstart=0, tseg=length)
pds = AveragedPowerspectrum(lc_dt, fftlen, norm='leahy')
plt.plot(pds.freq / 1000, pds.power, label=label, drawstyle='steps-mid')
zh_f, zh_p = dz.pds_model_zhang(1000, r, deadtime, bintime)
plt.plot(zh_f / 1000, zh_p, color='r', label='Zhang+95 prediction', zorder=10)
plt.axhline(2, ls='--')
plt.xlabel('Frequency (kHz)')
plt.ylabel('Power (Leahy)')
plt.legend();
The function check_A visually checks how many $A_k$ terms need to be calculated before switching to the approximate value r0**2 * tb**2. The default is 60, but in this case the additional modulation still present at k=60 tells us that we need to push the number of calculated $A_k$ terms to at least 150.
In [28]:
from deadtime_model_zhang import A, check_A
def safe_A(k, r0, td, tb, tau, limit=60):
    # Beyond the limit, approximate A_k with its asymptotic value r0**2 * tb**2
    if k > limit:
        return r0 ** 2 * tb ** 2
    return A(k, r0, td, tb, tau)
check_A(r, deadtime, bintime, max_k=200)
So we had better repeat the procedure, this time using limit_k=150.
In [29]:
bintime = 1/4096
deadtime = 2.5e-3
length = 8000
fftlen = 5
r = 2000
plt.figure()
plt.title(f'bin time = {bintime} s; dead time = {deadtime} s')
label = f'{r} ct/s'
events, events_dt = simulate_events(r, length, deadtime=deadtime)
# lc = Lightcurve.make_lightcurve(events, 1/4096, tstart=0, tseg=length)
lc_dt = Lightcurve.make_lightcurve(events_dt, bintime, tstart=0, tseg=length)
pds = AveragedPowerspectrum(lc_dt, fftlen, norm='leahy')
plt.plot(pds.freq / 1000, pds.power, label=label, drawstyle='steps-mid')
zh_f, zh_p = dz.pds_model_zhang(1000, r, deadtime, bintime, limit_k=150)
plt.plot(zh_f / 1000, zh_p, color='r', label='Zhang+95 prediction', zorder=10)
plt.axhline(2, ls='--')
plt.xlabel('Frequency (kHz)')
plt.ylabel('Power (Leahy)')
plt.legend();