This notebook shows how to perform maximum-likelihood parameter estimation for a single-neuron Poisson GLM.


In [1]:
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import random
%matplotlib inline

In [2]:
import sys
import os
sys.path.append(os.path.join(os.getcwd(),".."))
sys.path.append(os.path.join(os.getcwd(),"..","code"))
data_path = os.path.join(os.getcwd(),"..",'data')
sys.path.append(data_path)

In [3]:
import filters
import likelihood_functions as lk
import PoissonProcessClasses as PP
import auxiliary_functions as auxfun

In [4]:
# Reloading modules which are in development
import importlib
importlib.reload(filters)
importlib.reload(auxfun)
importlib.reload(lk)
importlib.reload(PP)


Out[4]:
<module 'PoissonProcessClasses' from '/Users/val/MEGAsync/GLM_PythonModules/notebooks/../code/PoissonProcessClasses.py'>

Reading input-output data:


In [5]:
# reading the stimulus (single column, no header)
Stim = np.array(pd.read_csv(os.path.join(data_path,'Stim.csv'),header = None))
# reading the spike times and flattening them to a 1D array
tsp = np.hstack(np.array(pd.read_csv(os.path.join(data_path,'tsp.csv'),header = None)))

Extracting a spike train from spike positions:


In [6]:
dt = 0.01
# convert spike times to (1-based) time-bin indices at resolution dt
tsp_int = np.ceil((tsp - dt*0.001)/dt)
tsp_int = np.reshape(tsp_int,(tsp_int.shape[0],1))
tsp_int = tsp_int.astype(int)
# binary spike train: 1 where a bin contains a spike, 0 elsewhere
y = np.array([item in tsp_int for item in np.arange(Stim.shape[0]/dt)+1]).astype(int)
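
As a quick sanity check (a hypothetical snippet; it assumes every spike time falls within the stimulus window):


In [ ]:
# every occupied time bin should be marked exactly once in y
assert y.sum() == len(np.unique(tsp_int))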

Displaying a subset of the spike train:


In [7]:
fig, ax = plt.subplots(figsize=(16, 2))
# show the first 1000 bins as a one-row raster
ax.matshow(np.reshape(y[:1000],(1,len(y[:1000]))),cmap = 'Greys',aspect = 15)


Creating filters:


In [8]:
# create a stimulus filter
kpeaks = np.array([0,round(20/3)])
pars_k = {'neye':5,'n':5,'kpeaks':kpeaks,'b':3}
K,K_orth,kt_domain = filters.createStimulusBasis(pars_k, nkt = 20)

In [9]:
# create a post-spike filter
hpeaks = np.array([0.1,2])
pars_h = {'n':5,'hpeaks':hpeaks,'b':.4,'absref':0.}
H,H_orth,ht_domain = filters.createPostSpikeBasis(pars_h,dt)

In [10]:
# Interpolate Post Spike Filter
MSP = auxfun.makeInterpMatrix(len(ht_domain),1)
MSP[0,0] = 0
H_orth = np.dot(MSP,H_orth)



In [11]:
MSP.shape


Out[11]:
(486, 486)

Conditional Intensity (spike rate):

$$\lambda_{\beta} = \exp(K(\beta_k)*Stim + H(\beta_h)*y + dc)$$

($\beta_k$ and $\beta_h$ are the unknown coefficients of the filters, $*$ denotes convolution, and $dc$ is a constant (DC) offset.)

Since convolution is a linear operation, the intensity can be rewritten as

$$\lambda_{\beta} = \exp(M_k \beta_k + M_h\beta_h + \textbf{1}dc),$$

where $M_k$ and $M_h$ are matrices depending on the stimulus and on the spike history, respectively, and $\textbf{1}$ is a vector of ones.
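
To make the linearization concrete, here is a minimal sketch of how a causal convolution with a filter basis unrolls into such a design matrix (illustrative only: `build_design_matrix` and the toy sizes are assumptions, not the library's `lk.construct_M_k`):


In [ ]:
def build_design_matrix(stim, basis):
    # rows are time bins; columns are the basis filters convolved with the stimulus.
    # Causal convolution with zero padding, so row t depends only on stim[:t+1].
    T = len(stim)
    nkt, nb = basis.shape
    padded = np.concatenate((np.zeros(nkt - 1), stim))
    lags = np.array([padded[t:t + nkt] for t in range(T)])  # (T, nkt) lagged windows
    return np.dot(lags, basis)                              # (T, nb), playing the role of M_k

# toy usage
M_toy = build_design_matrix(np.random.randn(100), np.random.randn(20, 5))
print(M_toy.shape)  # (100, 5)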

Creating a matrix of covariates:


In [12]:
M_k = lk.construct_M_k(Stim,K,dt)

In [13]:
M_h = lk.construct_M_h(tsp,H_orth,dt,Stim)

Combining $M_k$, $M_h$ and $\textbf{1}$ into one covariate matrix:


In [14]:
M = np.hstack((M_k,M_h,np.ones((M_h.shape[0],1))))

The conditional intensity becomes: $$ \lambda_{\beta} = \exp(M\beta) $$

($\beta$ stacks all the unknown parameters: $\beta_k$, $\beta_h$ and $dc$.)
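
The fit below maximizes the corresponding Poisson likelihood. For orientation, a minimal sketch of the discretized negative log-likelihood and its gradient (illustrative, not the internals of `PP.PPModel`; only the exponential nonlinearity is taken from the formula above):


In [ ]:
def negLogL(beta, M, y, dt):
    # with lam = exp(M beta):  -log L = dt * sum(lam) - sum over spike bins of log(lam)
    u = np.dot(M, beta)
    return dt * np.exp(u).sum() - u[y == 1].sum()

def gradNegLogL(beta, M, y, dt):
    # gradient: M^T (dt * lam - y)
    lam = np.exp(np.dot(M, beta))
    return np.dot(M.T, dt * lam - y)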

Creating a Poisson process model with this intensity:


In [15]:
model = PP.PPModel(M.T,dt = dt/100)

Setting initial parameters for optimization:


In [16]:
coeff_k0 = np.array([-0.02304,
   0.12903,
   0.35945,
   0.39631,
   0.27189,
   0.22003,
  -0.17457,
   0.00482,
  -0.09811,
   0.04823])
coeff_h0 = np.zeros((5,))
dc0 = 0

pars0 = np.hstack((coeff_k0,coeff_h0,dc0))

In [17]:
# pars0 = np.hstack((np.zeros((10,)),np.ones((5,)),0))

Fitting the model by maximizing the likelihood (here using the limited-memory BFGS method, L-BFGS-B, capped at 500 iterations):
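
(The warnings in the output below show that `model.fit` delegates to `scipy.optimize.minimize`. A standalone equivalent using the sketch functions defined earlier would look roughly as follows; the `dt/100` matches the resolution passed to `PP.PPModel` above:)


In [ ]:
from scipy import optimize

# sketch of the fit: minimize the negative log-likelihood with its analytic
# gradient; L-BFGS-B is a quasi-Newton method and does not need the Hessian
res_sketch = optimize.minimize(negLogL, pars0, args=(M, y, dt/100),
                               jac=gradNegLogL, method='L-BFGS-B',
                               options={'maxiter': 500})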


In [18]:
res = model.fit(y,start_coef = pars0,maxiter = 500,method = 'L-BFGS-B')


-389.09828112
-1371.83034726
...
-2622.05317807
-2622.05318078
(objective value, the negative log-likelihood, printed at each function evaluation; intermediate lines omitted)
/Users/val/MEGAsync/GLM_PythonModules/notebooks/../code/PoissonProcessClasses.py:169: FutureWarning: comparison to `None` will result in an elementwise object comparison in the future.
  if start_coef==None:
/Users/val/anaconda/lib/python3.4/site-packages/scipy/optimize/_minimize.py:362: RuntimeWarning: Method L-BFGS-B does not use Hessian information (hess).
  RuntimeWarning)
/Users/val/MEGAsync/GLM_PythonModules/notebooks/../code/PoissonProcessClasses.py:171: OptimizeWarning: Unknown solver options: maxfev
  res = optimize.minimize(self.negLogL,start_coef,jac = self.gradNegLogL,hess = self.hessNegLogL, args = y, options = opts, method = method)

Optimization results:


In [19]:
print(res)


     nit: 303
  status: 0
 message: b'CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH'
     fun: -2622.0531807765924
     jac: array([ 0.02454943, -0.02685258, -0.02488868, -0.00412308, -0.01858206,
        0.03924595,  0.16784614,  0.21752727,  0.13450727,  0.00195742,
       -0.01022792, -0.00494685, -0.0183314 ,  0.02328966,  0.01133687,
       -0.00944875])
    nfev: 329
       x: array([  4.37068505e-02,   2.57996778e-01,   8.88368183e-01,
         1.24323609e+00,   9.56581074e-01,   4.63212194e-01,
        -8.40953345e-01,  -2.09171826e-03,   7.39851459e-02,
        -1.23367823e-01,  -1.82937408e+01,   6.08852195e+01,
        -1.02165778e+02,  -3.89551012e+01,  -1.01257541e+01,
         2.91769697e+00])
 success: True

Creating the predicted filters:


In [20]:
k_coeff_predicted = res.x[:10]    # stimulus-filter coefficients
h_coeff_predicted = res.x[10:15]  # post-spike-filter coefficients (res.x[15] is dc)

In [21]:
kfilter_predicted = np.dot(K,k_coeff_predicted)
hfilter_predicted = np.dot(H_orth,h_coeff_predicted)

In [22]:
# coefficients of the true filters
k_coeff = np.array([ 0.061453,0.284916,0.860335,1.256983,0.910615,0.488660,-0.887091,0.097441,0.026607,-0.090147])
h_coeff = np.array([-15.18,38.24,-67.58,-14.06,-3.36])

kfilter_true = np.dot(K,k_coeff)
hfilter_true = np.dot(H_orth,h_coeff)

In [23]:
plt.plot(-kt_domain[::-1],kfilter_predicted,color = "r",label = 'predicted')
plt.plot(-kt_domain[::-1],kfilter_true,color = "b",label = 'true')
plt.plot(-kt_domain[::-1],np.dot(K,coeff_k0),color = "g",label = 'initial')
plt.legend(loc = 'upper left')


Out[23]:
<matplotlib.legend.Legend at 0x10c10f0b8>

In [24]:
plt.plot(ht_domain,hfilter_predicted,color = "r",label = 'predicted')
plt.plot(ht_domain,hfilter_true,color = "b",label = 'true')
plt.plot(ht_domain,np.dot(H_orth,coeff_h0),color = "g",label = 'initial')
plt.legend(loc = 'lower right')


Out[24]:
<matplotlib.legend.Legend at 0x10c115780>
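
Beyond the visual comparison, a quick numerical summary (a sketch; the relative L2 error is just one reasonable choice of metric):


In [ ]:
# relative L2 error of the recovered filters
k_err = np.linalg.norm(kfilter_predicted - kfilter_true) / np.linalg.norm(kfilter_true)
h_err = np.linalg.norm(hfilter_predicted - hfilter_true) / np.linalg.norm(hfilter_true)
print(k_err, h_err)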
