In [1]:
# PyABC imports
from pyabc import (ABCSMC, Distribution, RV,
                   History, MedianEpsilon)
from pyabc.populationstrategy import ConstantPopulationSize, AdaptivePopulationSize
from pyabc.sampler import MulticoreEvalParallelSampler
In [2]:
# Custom imports
from ionchannelABC import (ion_channel_sum_stats_calculator,
                           IonChannelAcceptor,
                           IonChannelDistance,
                           EfficientMultivariateNormalTransition,
                           calculate_parameter_sensitivity,
                           plot_parameter_sensitivity,
                           plot_regression_fit,
                           plot_parameters_kde,
                           plot_sim_results,
                           plot_distance_weights)
In [3]:
# Other necessary imports
import numpy as np
import subprocess
import pandas as pd
import io
import os
import tempfile
In [4]:
# Plotting imports
import seaborn as sns
import matplotlib.pyplot as plt
%matplotlib inline
%config InlineBackend.figure_format = 'retina'
In [5]:
from channels.ikr_generic import ikr as model
test = model.sample({})
with pd.option_context('display.max_rows', None):
    print(test)
In [6]:
measurements = model.get_experiment_data()
meas_dict = measurements.to_dict()
obs = meas_dict['y']
exp = meas_dict['exp']
errs = meas_dict['errs']
In [7]:
limits = dict(g_Kr=(0, 10),
              Vhalf_x=(-100, 0),
              k_x=(0, 50),
              c_bxf=(0, 10),
              c_axf=(0, 1000),
              c_bxs=(0, 100),
              c_axs=(0, 2000),
              Vmax_x=(-100, 100),
              sigma_x=(0, 100),
              Vhalf_A=(-100, 100),
              k_A=(-500, 0),
              Vhalf_r=(-100, 0),
              k_r=(-50, 0),
              c_br=(0, 10),
              c_ar=(0, 100),
              sigma_r=(0, 100),
              Vmax_r=(-100, 100))
prior = Distribution(**{key: RV("uniform", a, b - a)
                        for key, (a, b) in limits.items()})
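In [ ]:
# Optional sanity check: draw a few prior samples and confirm each parameter
# stays within its stated limits (uses only prior.rvs() and the limits dict
# defined above).
for _ in range(5):
    draw = prior.rvs()
    assert all(limits[k][0] <= v <= limits[k][1] for k, v in draw.items())
print("prior samples fall within the specified limits")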
In [8]:
len(prior)
Out[8]:
17
In [9]:
test = model.sample(prior.rvs())
with pd.option_context('display.max_rows', None):
    print(test)
In [11]:
parameters = ['ikr.'+k for k in limits.keys()]
In [12]:
distance_fn = IonChannelDistance(
    obs=obs,
    exp_map=exp,
    err_bars=errs,
    err_th=0.1)
In [13]:
sns.set_context('talk')
g = plot_distance_weights(model, distance_fn)
In [12]:
g.savefig('results/ikr-generic/dist_weights.pdf')
In [13]:
fitted, regression_fit, r2 = calculate_parameter_sensitivity(
    model,
    parameters,
    distance_fn,
    sigma=0.05,
    n_samples=1000)
In [14]:
sns.set_context('talk')
grid1 = plot_parameter_sensitivity(fitted, plot_cutoff=0.05)
In [15]:
grid2 = plot_regression_fit(regression_fit, r2)
In [14]:
grid1.savefig('results/ikr-generic/sensitivity.pdf')
grid2.savefig('results/ikr-generic/sensitivity_fit.pdf')
In [19]:
# Finding insensitive parameters
cutoff = 0.05
fitted_pivot = fitted.pivot(index='param', columns='exp')
insensitive_params = fitted_pivot[(abs(fitted_pivot['beta'][0]) < cutoff) &
                                  (abs(fitted_pivot['beta'][1]) < cutoff) &
                                  (abs(fitted_pivot['beta'][2]) < cutoff) &
                                  (abs(fitted_pivot['beta'][3]) < cutoff) &
                                  (abs(fitted_pivot['beta'][4]) < cutoff)].index.values
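In [ ]:
# Equivalent, more compact version of the filter above: keep parameters whose
# absolute sensitivity coefficient is below the cutoff for every experiment.
insensitive_params = fitted_pivot[
    (fitted_pivot['beta'].abs() < cutoff).all(axis=1)].index.values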
In [20]:
insensitive_limits = dict((k, limits[k[4:]]) for k in insensitive_params)
insensitive_prior = Distribution(**{key: RV("uniform", a, b - a)
                                    for key, (a, b) in insensitive_limits.items()})
In [21]:
# Generate random samples for insensitive parameters
def generate_sample(insensitive_prior, n):
    samples = [dict() for i in range(n)]
    for i in range(n):
        parameters = insensitive_prior.rvs()
        sample = {key: value for key, value in parameters.items()}
        samples[i].update(sample)
    return samples
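In [ ]:
# The same sampling written more compactly (equivalent, assuming
# insensitive_prior.rvs() returns a dict-like set of parameters):
def generate_sample(insensitive_prior, n):
    return [dict(insensitive_prior.rvs()) for _ in range(n)]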
In [22]:
samples = generate_sample(insensitive_prior, 1000)
In [23]:
model.add_external_par_samples(samples)
In [24]:
limits = dict((k, limits[k]) for k in limits if 'ikr.' + k not in insensitive_params)
In [9]:
prior = Distribution(**{key: RV("uniform", a, b - a)
                        for key, (a, b) in limits.items()})
In [10]:
db_path = ('sqlite:///' +
           os.path.join(tempfile.gettempdir(), "hl-1_ikr-generic.db"))
print(db_path)
In [11]:
# Enable debug logging for the ABC run
import logging
logging.basicConfig()
abc_logger = logging.getLogger('ABC')
abc_logger.setLevel(logging.DEBUG)
eps_logger = logging.getLogger('Epsilon')
eps_logger.setLevel(logging.DEBUG)
cv_logger = logging.getLogger('CV Estimation')
cv_logger.setLevel(logging.DEBUG)
In [13]:
abc = ABCSMC(models=model,
             parameter_priors=prior,
             distance_function=IonChannelDistance(
                 obs=obs,
                 exp_map=exp,
                 err_bars=errs,
                 err_th=0.1),
             population_size=ConstantPopulationSize(5000),
             #population_size=AdaptivePopulationSize(
             #    start_nr_particles=1000,
             #    mean_cv=0.2,
             #    max_population_size=1000,
             #    min_population_size=100),
             summary_statistics=ion_channel_sum_stats_calculator,
             transitions=EfficientMultivariateNormalTransition(),
             eps=MedianEpsilon(),
             sampler=MulticoreEvalParallelSampler(n_procs=12),
             acceptor=IonChannelAcceptor())
In [14]:
abc_id = abc.new(db_path, obs)
In [ ]:
history = abc.run(minimum_epsilon=0.05, max_nr_populations=30, min_acceptance_rate=0.01)
In [ ]:
history = abc.run(minimum_epsilon=0.05, max_nr_populations=30, min_acceptance_rate=0.001)
In [16]:
#db_path = 'sqlite:////scratch/cph211/ion-channel-ABC/docs/examples/results/ikr-generic/hl-1_ikr-generic.db'
db_path = 'sqlite:////scratch/cph211/tmp/hl-1_ikr-generic.db'
history = History(db_path)
history.all_runs()
Out[16]:
In [17]:
history.id = 6
In [18]:
sns.set_context('talk')
evolution = history.get_all_populations()
grid = sns.relplot(x='t', y='epsilon', size='samples', data=evolution[evolution.t>=0])
#grid.savefig('results/ikr-generic/eps_evolution.pdf')
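In [ ]:
# Rough acceptance rate per generation, assuming the constant population size
# of 5000 configured in the ABCSMC call above (accepted particles divided by
# the total simulations recorded in the 'samples' column).
gen = evolution[evolution.t >= 0]
print(5000 / gen['samples'])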
In [33]:
df, w = history.get_distribution(m=0)
In [34]:
df.describe()
Out[34]:
In [35]:
g = plot_parameters_kde(df, w, limits, aspect=12, height=0.6)
In [52]:
g.savefig('results/ikr-generic/parameters_kde.pdf')
In [36]:
# Generate parameter samples
n_samples = 100
df, w = history.get_distribution(m=0)
th_samples = df.sample(n=n_samples, weights=w, replace=True).to_dict(orient='records')
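In [ ]:
# Effective sample size of the posterior weights, using the standard
# 1 / sum(w^2) estimate; a quick check that drawing n_samples = 100 posterior
# samples is reasonable for this weighted particle population.
ess = 1.0 / np.sum(np.array(w)**2)
print("effective sample size: {:.1f}".format(ess))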
In [37]:
# Generate sim results samples
outputs = []
for i, th in enumerate(th_samples):
    output = model.sample(pars=th, n_x=20)
    output['sample'] = i
    output['distribution'] = 'post'
    outputs.append(output)
samples = pd.concat(outputs, ignore_index=True)
In [38]:
from ionchannelABC import plot_sim_results
sns.set_context('talk')
g = plot_sim_results(samples, obs=measurements)
# Set axis labels
xlabels = ["voltage, mV", "voltage, mV", "voltage, mV", "voltage, mV",
           "voltage, mV", "voltage, mV"]
ylabels = ["current density, pA/pF", "activation", "activation time constant, ms",
           "inactivation time constant, ms", "inactivation"]
for ax, xl in zip(g.axes.flatten(), xlabels):
    ax.set_xlabel(xl)
for ax, yl in zip(g.axes.flatten(), ylabels):
    ax.set_ylabel(yl)
In [39]:
g.savefig('results/ikr-generic/ikr_sim_results.pdf')
In [84]:
def plot_sim_results_all(samples: pd.DataFrame):
    with sns.color_palette("gray"):
        grid = sns.relplot(x='x', y='y',
                           col='exp',
                           units='sample',
                           kind='line',
                           data=samples,
                           estimator=None, lw=0.5,
                           alpha=0.5,
                           #estimator=np.median,
                           facet_kws={'sharex': 'col',
                                      'sharey': 'col'})
    return grid
In [85]:
grid2 = plot_sim_results_all(samples)
In [42]:
grid2.savefig('results/ikr-generic/ikr_sim_results-all.pdf')
In [40]:
# Activation fit to Boltzmann equation
from scipy.optimize import curve_fit
grouped = samples[samples['exp']==1].groupby('sample')
def fit_boltzmann(group):
    def boltzmann(V, Vhalf, K):
        return 1/(1+np.exp((Vhalf-V)/K))
    guess = (-30, 10)
    popt, _ = curve_fit(boltzmann, group.x, group.y, p0=guess)
    return popt
output = grouped.apply(fit_boltzmann).apply(pd.Series)
In [41]:
import scipy.stats as st
Vhalf = output[0].tolist()
rv = st.rv_discrete(values=(Vhalf, [1/len(Vhalf),]*len(Vhalf)))
print("median: {}".format(rv.median()))
print("95% CI: {}".format(rv.interval(0.95)))
In [42]:
slope = output[1].tolist()
rv = st.rv_discrete(values=(slope, [1/len(slope),]*len(slope)))
print("median: {}".format(rv.median()))
print("95% CI: {}".format(rv.interval(0.95)))
In [43]:
# Inactivation fit to Boltzmann equation
from scipy.optimize import curve_fit
grouped = samples[samples['exp']==2].groupby('sample')
def fit_boltzmann(group):
    def boltzmann(V, Vhalf, K):
        return 1-1/(1+np.exp((Vhalf-V)/K))
    guess = (-40, 30)
    popt, _ = curve_fit(boltzmann, group.x, group.y, p0=guess)
    return popt
output = grouped.apply(fit_boltzmann).apply(pd.Series)
In [44]:
Vhalf = output[0].tolist()
rv = st.rv_discrete(values=(Vhalf, [1/len(Vhalf),]*len(Vhalf)))
print("median: {}".format(rv.median()))
print("95% CI: {}".format(rv.interval(0.95)))
In [45]:
slope = output[1].tolist()
rv = st.rv_discrete(values=(slope, [1/len(slope),]*len(slope)))
print("median: {}".format(rv.median()))
print("95% CI: {}".format(rv.interval(0.95)))