This notebook shows how to perform maximum-likelihood parameter estimation for multiple coupled neurons.

To test the concept we use two neurons whose responses to a given stimulus have been stored.


In [1]:
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import random
import csv
%matplotlib inline

In [2]:
import sys
import os
sys.path.append(os.path.join(os.getcwd(),".."))
sys.path.append(os.path.join(os.getcwd(),"..","code"))

In [3]:
import filters
import likelihood_functions as lk
import PoissonProcessClasses as PP
import auxiliary_functions as auxfun

In [4]:
import importlib
importlib.reload(filters)
importlib.reload(lk)
importlib.reload(auxfun)
importlib.reload(PP)


Out[4]:
<module 'PoissonProcessClasses' from '/Users/val/MEGAsync/GLM_PythonModules/notebooks/../code/PoissonProcessClasses.py'>

In [5]:
# Number of neurons
nofCells = 2

Reading input-output data:


In [6]:
# creating the path to the data
data_path = os.path.join(os.getcwd(),'..','data')

# reading stimulus
Stim = np.array(pd.read_csv(os.path.join(data_path,'Stim2.csv'),header = None))

# reading location of spikes
# (lengths of tsp sequences are not equal so reading them line by line)
tsp_list = []
with open(os.path.join(data_path,'tsp2.csv')) as csvfile:
    tspreader = csv.reader(csvfile)
    for row in tspreader:
        tsp_list.append(row)

Extracting a spike train from spike positions:


In [7]:
dt = 0.01
y_list = []
for tsp in tsp_list:
    tsp = np.array(tsp).astype(float)              # spike times in seconds
    # map each spike time to a 1-based bin index of width dt
    tsp_int = np.ceil((tsp - dt*0.001)/dt)
    tsp_int = np.reshape(tsp_int,(tsp_int.shape[0],1)).astype(int)
    # binary spike train: 1 where a bin contains a spike, 0 elsewhere
    y_list.append(np.array([idx in tsp_int for idx in np.arange(Stim.shape[0]/dt)+1]).astype(int))
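The membership test above scans every bin for every spike. As a sketch only (same binning convention; `n_bins`, `bins`, and `y_fast` are names chosen here for illustration), the train for a single float array of spike times `tsp` can be built in one pass:

In [ ]:
# Hypothetical vectorized alternative to the loop above: a spike at time t
# falls into the 0-based bin ceil((t - dt*0.001)/dt) - 1.
n_bins = int(Stim.shape[0]/dt)
bins = (np.ceil((tsp - dt*0.001)/dt) - 1).astype(int)
y_fast = np.zeros(n_bins, dtype=int)
y_fast[bins[(bins >= 0) & (bins < n_bins)]] = 1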

Creating filters:


In [8]:
# create a stimulus filter
kpeaks = np.array([0,round(20/3)])
pars_k = {'neye':5,'n':5,'kpeaks':kpeaks,'b':3}
K,K_orth,kt_domain = filters.createStimulusBasis(pars_k, nkt = 20)
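`filters.createStimulusBasis` is project code; purely as an illustration (the exact parametrization here is an assumption), stimulus bases of this kind are typically built from raised-cosine bumps in the style of the Pillow GLM papers:

In [ ]:
# Illustrative raised-cosine bump; the actual basis may differ in details
# (log-scaled time axis, bump spacing set by kpeaks and b, identity columns
# from neye, etc.).
def raised_cosine(t, center, width):
    arg = np.clip((t - center)*np.pi/(width/2.0), -np.pi, np.pi)
    return (np.cos(arg) + 1)/2    # peaks at `center`, zero outside the bump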

In [9]:
# create a post-spike filter
hpeaks = np.array([0.1,2])
pars_h = {'n':5,'hpeaks':hpeaks,'b':.4}
H,H_orth,ht_domain = filters.createPostSpikeBasis(pars_h,dt)

In [10]:
# Interpolate Post Spike Filter
MSP = auxfun.makeInterpMatrix(len(ht_domain),1)
MSP[0,0] = 0
H_orth = np.dot(MSP,H_orth)



Conditional Intensity (spike rate):

$$\lambda_{\beta}(i) = \exp\Big(K(\beta_k)\ast Stim + H(\beta_h)\ast y_i + \sum_{j\ne i} I(\beta_{I_j})\ast y_j + dc\Big)$$

$I(\beta_{I_j})$ is an interpolating (coupling) filter with basis $I$ and coefficients $\beta_{I_j}$, associated with the output $y_j$ of neuron $j$. We assume that the post-spike basis and the interpolating basis are the same, $H = I$. The intensity can then be rewritten as

$$\lambda_{\beta}(i) = \exp\left(M_k \beta_k + \sum_{j=1}^{n} M_{h_j} \beta_{h_j} + \mathbf{1}\,dc\right)$$

where $M_{h_j}$ is the convolution matrix built from the response $y_j$.
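Stacking the blocks gives the covariate matrix and parameter vector (10 stimulus coefficients, 5 post-spike coefficients per neuron, and the constant $dc$; the block sizes follow from the bases created above):

$$M = \big[\, M_k \;\big|\; M_{h_1} \;\big|\; \cdots \;\big|\; M_{h_n} \;\big|\; \mathbf{1} \,\big], \qquad \beta = \big(\beta_k,\ \beta_{h_1},\ \ldots,\ \beta_{h_n},\ dc\big)^{\top}$$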

Creating a matrix of covariates:


In [11]:
M_k = lk.construct_M_k(Stim,K,dt)

In [12]:
# creating a H-matrix for each response and merging them in one covariate matrix M_h
M_h_list = []
for tsp in tsp_list:
    tsp = np.array(tsp).astype(float)
    M_h_list.append(lk.construct_M_h(tsp,H_orth,dt,Stim))

M_h = np.hstack(tuple(M_h_list))

In [13]:
# combining all covariate matrices
M = np.hstack((M_k,M_h,np.ones((M_h.shape[0],1))))
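A quick sanity check on this layout (assuming `construct_M_k` and `construct_M_h` produce one column per basis vector, which is what the block structure above implies):

In [ ]:
# hypothetical shape check: stimulus block + one post-spike block per neuron + dc
assert M.shape[1] == K.shape[1] + H_orth.shape[1]*nofCells + 1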

Conditional intensity as a function of the covariates: $$ \lambda_{\beta} = \exp(M\beta) $$

Create a Poisson process model with this intensity:


In [14]:
model = PP.PPModel(M.T,dt = dt/100)
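`PP.PPModel` is project code; as a minimal sketch of the objective it minimizes (assuming the standard discrete-time Poisson likelihood, with which the traces printed below are consistent):

In [ ]:
# Sketch of a Poisson-process negative log-likelihood (assumption: the
# project's PPModel.negLogL has this standard form, up to the finer time
# resolution implied by dt/100).
def neg_log_likelihood(beta, M, y, dt):
    log_rate = np.dot(M, beta)            # log conditional intensity per bin
    return dt*np.sum(np.exp(log_rate)) - np.sum(log_rate[y == 1])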

Setting initial parameters:


In [15]:
coeff_k0 = np.array([ 0.061453,0.284916,0.860335,1.256983,0.910615,0.488660,-0.887091,0.097441,0.026607,-0.090147])

coeff_h0 = np.zeros((5*nofCells,))

dc0 = 3

pars0 = np.hstack((coeff_k0,coeff_h0,dc0))

In [16]:
print(pars0)

# start instead from an uninformative initial guess: all coefficients and dc zero
pars0 = np.zeros((21,))
print(pars0)


[ 0.061453  0.284916  0.860335  1.256983  0.910615  0.48866  -0.887091
  0.097441  0.026607 -0.090147  0.        0.        0.        0.        0.
  0.        0.        0.        0.        0.        3.      ]
[ 0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.]

Fitting the model to each neuron's response by maximizing the likelihood:


In [17]:
res_list = []
for y in y_list:
    print('Initial negative log-likelihood: '+str(model.negLogL(pars0,y)))
    res_list.append(model.fit(y,start_coef = pars0, maxiter = 500, method = 'L-BFGS-B'))


25.0
Initial negative log-likelihood: 25.0
25.0
-2373.00563403
-2653.92086857
-2590.11509921
[... per-evaluation objective trace truncated ...]
-5252.95682233
-5252.97668998
25.0
Initial negative log-likelihood: 25.0
25.0
-2143.80282953
-2310.32872545
-2592.63663461
[... per-evaluation objective trace truncated ...]
-5182.83549987
-5182.83550587
/Users/val/MEGAsync/GLM_PythonModules/notebooks/../code/PoissonProcessClasses.py:169: FutureWarning: comparison to `None` will result in an elementwise object comparison in the future.
  if start_coef==None:
/Users/val/anaconda/lib/python3.4/site-packages/scipy/optimize/_minimize.py:362: RuntimeWarning: Method L-BFGS-B does not use Hessian information (hess).
  RuntimeWarning)
/Users/val/MEGAsync/GLM_PythonModules/notebooks/../code/PoissonProcessClasses.py:171: OptimizeWarning: Unknown solver options: maxfev
  res = optimize.minimize(self.negLogL,start_coef,jac = self.gradNegLogL,hess = self.hessNegLogL, args = y, options = opts, method = method)
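The FutureWarning comes from comparing an array to `None` with `==`; the standard fix is an identity check, sketched here as a hypothetical helper rather than the project's actual code:

In [ ]:
# Hypothetical fix for PoissonProcessClasses.py line 169: `is None` avoids
# NumPy's elementwise comparison to None.
def default_start(start_coef, n):
    if start_coef is None:               # instead of: start_coef == None
        start_coef = np.zeros(n)
    return start_coef

The OptimizeWarning indicates that `maxfev` is not an L-BFGS-B option; for this solver the evaluation budget is passed as `maxfun` and the iteration cap as `maxiter`.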

In [18]:
for res in res_list:
    print('\n Optimization results:\n'+str(res))


 Optimization results:
       x: array([  5.13824038e-02,   2.82712959e-01,   8.43214908e-01,
         1.20712875e+00,   9.33004017e-01,   5.04131619e-01,
        -9.15673292e-01,   1.41984213e-01,  -3.36716503e-03,
        -6.68070198e-02,  -1.51408914e+01,   3.89451066e+01,
        -6.75304761e+01,  -1.36291294e+01,  -4.66911403e+00,
        -1.33811029e-01,  -7.35490093e-01,   1.47097688e+00,
        -9.19007644e-01,   7.46556190e-01,   3.04875795e+00])
    nfev: 542
     fun: -5252.9766899764727
     jac: array([ 0.32796353, -0.81771162, -1.17829772,  0.14903547, -1.43539642,
        0.45543686,  0.38345457, -2.1846063 , -4.37447712, -0.72562013,
       -0.09158658,  0.15053873,  0.09126471,  0.02359388,  0.15416549,
       -0.03900187, -0.08243471,  0.17292723, -0.25657332, -0.30810721,
       -2.16429064])
  status: 1
 success: False
     nit: 501
 message: b'STOP: TOTAL NO. of ITERATIONS EXCEEDS LIMIT'

 Optimization results:
       x: array([  7.05663468e-02,   3.09214315e-01,   7.48535703e-01,
         1.19580872e+00,   8.48865009e-01,   4.65371190e-01,
        -8.34789235e-01,   8.81069024e-02,   1.64451241e-02,
        -7.25376058e-02,   4.01178596e-01,   5.02015076e-01,
         2.68784182e-01,   1.02937190e-01,   4.41811513e-01,
        -1.43834240e+01,   3.43168747e+01,  -6.14282054e+01,
        -1.14956396e+01,  -2.46805947e+00,   3.12273698e+00])
    nfev: 520
     fun: -5182.8355058743446
     jac: array([ 0.02387189,  0.05413722,  0.08830876,  0.13407165,  0.07064581,
        0.12238842, -0.10064176,  0.11851582, -0.32808238, -0.94011208,
       -0.04523677,  0.00435645, -0.02635262, -0.02550182, -0.03167762,
       -0.0027051 , -0.00626003,  0.04022574, -0.05762839,  0.04629709,
       -0.06713251])
  status: 0
 success: True
     nit: 487
 message: b'CONVERGENCE: REL_REDUCTION_OF_F_<=_FACTR*EPSMCH'

In [19]:
# true parameters used to generate the stored responses
k_coeff = np.array([0.061453, 0.284916, 0.860335, 1.256983, 0.910615, 0.488660, -0.887091, 0.097441, 0.026607, -0.090147])
h_coeff = np.array([-15.18,38.24,-67.58,-14.06,-3.36])

In [20]:
for i in range(len(res_list)):
    k_coeff_predicted = res_list[i].x[:10]
    h_coeff_predicted = np.reshape(res_list[i].x[10:-1],(H.shape[1],nofCells),order = 'F')
    print('Estimated dc for neuron '+str(i)+': '+str(res_list[i].x[-1]))
    fig,axs = plt.subplots(1,2,figsize = (10,5))
    fig.suptitle('Neuron%d'%(i+1))
    axs[0].plot(-kt_domain[::-1],np.dot(K,k_coeff_predicted),'r',label = 'predicted')
    axs[0].set_title('Stimulus Filter')
    axs[0].plot(-kt_domain[::-1],np.dot(K,k_coeff),'b',label = 'true')
    axs[0].plot(-kt_domain[::-1],np.dot(K,pars0[:10]),'g',label = 'initial')
    axs[0].set_xlabel('Time')
    axs[0].legend(loc = 'upper left')
    axs[1].plot(ht_domain,np.dot(H_orth,h_coeff_predicted),'r',label = 'predicted')
    axs[1].plot(ht_domain,np.dot(H_orth,h_coeff),'b',label = 'true')
    axs[1].plot(ht_domain,np.dot(H_orth,coeff_h0[:H_orth.shape[1]]),'g',label = 'initial')
    axs[1].set_title('Post-Spike Filter')
    axs[1].set_xlabel('Time')
    axs[1].legend(loc = 'upper right')


Estimated dc for neuron 0: 3.04875795424
Estimated dc for neuron 1: 3.12273698411
