Let's first import the modules we need.


In [1]:
%matplotlib inline
import sklearn
import scipy.io as sio
import matplotlib.pylab as plt
import matplotlib as mp
import numpy as np
import scipy as sp
import scipy.ndimage
import scipy.signal

And load the data.


In [2]:
ft=sio.loadmat("firingTimes.mat")

print ft.keys()

allSpikes=ft['allSpikes']
totalTime=ft['totalTime']*1.0
neuronsWithInput=ft['neuronsWithInput']
neuronsWithInput_array=np.zeros(160)
neuronsWithInput_array[neuronsWithInput]=1   # indicator vector: 1 for neurons that receive external input


['totalTime', 'spikes', 'all_v_mat', 'allSpikes', 'firings2', '__header__', '__globals__', 'allFirings', 'firings', 'neuronsWithInput', 'spikes2', '__version__', 'v_mat', 'v_mat2']

Let's make a basic raster plot of spikes, using spike times from spike trains generated with a Poisson process.


In [3]:
# generating Poisson-like data: each time bin spikes independently with probability 0.1
num_neurons=10
max_time=100
spike_array=(np.random.random([num_neurons,max_time]) <0.1)
neurons,time=np.where(spike_array==1)

In [4]:
print(spike_array.shape)
print(neurons.shape)
print(time.shape)


(10, 100)
(100,)
(100,)

In [5]:
# plotting rasters in two ways
plt.figure(figsize=(8, 8))
plt.subplot(211)
plt.imshow(spike_array,aspect=4,interpolation='none',cmap='Greys')
plt.subplot(212)
plt.vlines(time,neurons,neurons+1)
plt.xlabel('time')
plt.ylabel('neuron')
plt.show()


In the cell below, make the raster plots for the data in the array allSpikes (same format as spike_array above).


In [6]:
print(allSpikes)


[[0 0 0 ..., 0 0 0]
 [0 0 0 ..., 0 0 0]
 [0 0 0 ..., 0 0 0]
 ..., 
 [0 0 0 ..., 0 0 0]
 [0 0 0 ..., 0 0 0]
 [0 0 0 ..., 0 0 0]]

In [7]:
all_neurons,all_time=np.where(allSpikes==1)

# plotting rasters in two ways
plt.figure(figsize=(8, 8))
plt.subplot(211)
plt.imshow(allSpikes, aspect=32,interpolation='none',cmap='Greys')
plt.subplot(212)
plt.vlines(all_time, all_neurons,all_neurons+1)
plt.xlabel('time')
plt.ylabel('neuron')
plt.show()


Let's make a histogram of the average spiking rate.


In [13]:
print(spike_array.shape)
print(spike_array)


(10, 100)
[[ True False False False  True False False  True False False False False
  False False False False False False False False False False False False
  False False False False False False False False False False False False
  False False  True  True False False False False False  True False False
  False False False False False  True False False False False False  True
  False  True False False False False False  True False False False  True
  False False False False False False False False False False False False
  False False  True  True  True False False False False False False False
  False False False False]
 [False False False  True False False False False False False False False
  False False False False False False False False False False False False
  False False False False False False False False False False False False
  False False False False False False False False False False False False
  False False False False False False False False False False False False
  False False False False False  True False False False False  True False
  False False False False False False False False False False False False
  False False False False False False False False False  True False False
  False False False False]
 [False False False False False False False False False False False False
  False False False False False False False False False False False False
  False False False False False  True False False False False False False
  False False False  True False False False False False  True False False
  False False False False False False False False False False False  True
  False False False False False False False False  True False False False
  False False False False  True  True False False False False False False
  False False False  True False False False False False False  True False
  False False False False]
 [False False False False False False False False False False False False
  False False False False False False False False False False False False
   True False False  True False False False False False False False False
   True False False False False False False False False False False False
  False False False False False False False False False False False False
  False False False  True False False False False False False False False
   True False False False  True False False False False False False  True
  False False False  True False False False False False False False False
  False  True False False]
 [False False False False False False False False False False False False
  False False False False  True False False False False False False  True
  False False False False  True False False False False False False False
  False False False False False False False  True False False  True False
  False False False False False False  True False False False False False
  False False False False False False False False False False False False
  False False  True False False False False False False False  True False
  False False False False False False False False False False False False
  False  True False False]
 [False  True False False False False  True False False False False False
  False False False False False False False False False False False False
  False  True False  True  True False False  True False False False False
  False False False  True False False False False False False False False
  False False False False False False False  True False False False False
  False False False False False False False False False False False False
  False False False False False False False False False False False False
  False False False False False False False False False False False False
   True  True False False]
 [False False False False False False False False False False False False
  False False  True False False False False False False False False False
  False False False False False False False  True False False False False
  False False False False False False False False False False False  True
  False False False False False False False False False False False False
  False False False False False  True False False False False False False
  False False False False False False False False False False False False
  False  True False False False False False False False  True False False
  False False  True  True]
 [False False False False False False False False False False False False
  False  True False False False False  True  True False False False False
  False  True False False False False False False  True False  True False
  False  True  True False False False  True False  True False False False
  False False  True False False False  True False False False False False
  False False False False False False False False False False False False
  False False False False False False False False False  True False False
  False False False False False False False False False False False False
  False False  True False]
 [False False False False False  True False  True False False False False
  False False False  True False False False False  True False  True False
  False False False False False False False False False False False False
  False False False False False False False False False False False False
  False False False  True False False False  True False False False False
  False False False False False False False False False False False False
  False False False False False False False False False False False False
  False False  True False  True False False False False False False False
   True False False False]
 [False False False False False  True  True False  True False False False
  False  True False False False False False False False False False False
  False False  True False False False False  True False False False  True
  False False False False False False False False False False  True False
  False False False False False False False False  True False False False
  False False False False False  True False False False False False False
  False False False False False False False False False False False False
  False  True False False False False False  True False False  True False
  False False False False]]

In [15]:
print(spike_array.shape)
spikes_per_sec = np.mean(spike_array,1)*1000
print(spikes_per_sec)
plt.figure(figsize=[20,8])
plt.subplot(121)
plt.hist(spikes_per_sec,bins=10)
plt.subplot(122)
plt.hist(spikes_per_sec,bins=range(0,200,10))
plt.show()


(10, 100)
[ 140.   40.   90.   90.   90.  100.   80.  140.  100.  130.]

In the cell below, make a histogram for the allSpikes data. Find a good bin size to get a nice-looking plot.


In [9]:
spikes_per_sec = np.mean(allSpikes,1)*1000
plt.figure(figsize=[20,8])
plt.subplot(121)
plt.hist(spikes_per_sec,bins=10)
plt.subplot(122)
plt.hist(spikes_per_sec,bins=range(4,10,1))
plt.show()


Let us see how the instantaneous firing rate of the neurons changes over time.

To do this, we will combine every 5 data points into 1 so that we get an average.


In [9]:
combine_points=5
mean_rate_over_time=np.zeros([spike_array.shape[0],spike_array.shape[1]//combine_points])
for i in range(spike_array.shape[1]//combine_points):
    mean_rate_over_time[:,i]=np.mean(spike_array[:,combine_points*i:combine_points*(i+1)],1)*1000
plt.imshow(mean_rate_over_time,interpolation='none',cmap='Greys')
plt.colorbar()
plt.show()


In the cell below, implement this for the allSpikes data, and then see the effect of changing the number of points we combine.


In [10]:
combine_points=5
mean_rate_over_time=np.zeros([allSpikes.shape[0],allSpikes.shape[1]//combine_points])
for i in range(allSpikes.shape[1]//combine_points):
    mean_rate_over_time[:,i]=np.mean(allSpikes[:,combine_points*i:combine_points*(i+1)],1)*1000
plt.imshow(mean_rate_over_time,interpolation='none',cmap='Greys', aspect='auto')
plt.colorbar()
plt.show()



In [11]:
plt.imshow(mean_rate_over_time[:10,:20],interpolation='none',cmap='Greys', aspect='auto')
plt.colorbar()
plt.show()


A way to get smoother data is to use filters on the spike train.

Let's use the built-in uniform filter to understand what is happening.


In [12]:
unif_filtered=np.zeros(spike_array.shape)
size_of_filter=10   # width (in bins) of the moving-average (boxcar) window
for i in range(spike_array.shape[0]):
    unif_filtered[i,:]=sp.ndimage.filters.uniform_filter1d(spike_array[i,:]*1000,size_of_filter,mode='wrap')

In [13]:
plt.figure(figsize=(8, 8))
i_th_neuron=3
plt.plot(unif_filtered[i_th_neuron,:])
plt.vlines(np.where(spike_array[i_th_neuron,:]==1)[0],0,100)


Out[13]:
<matplotlib.collections.LineCollection at 0x7f37f1296d10>

In [14]:
plt.figure(figsize=(8, 6))
plt.imshow(unif_filtered,aspect=3,cmap='Greys',interpolation='none')
plt.colorbar()
plt.show()


In the cell below, apply the same kind of filtering to allSpikes. This time, instead of

sp.ndimage.filters.uniform_filter1d

use

sp.ndimage.filters.gaussian_filter1d .

Experiment with what happens when you change the filter size.


In [ ]:
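
If you want a starting point, here is one possible sketch (not the only way to do it; the names gauss_filtered and sigma_of_filter are just illustrative, and it assumes allSpikes holds 0/1 spike indicators per time bin as above):

gauss_filtered=np.zeros(allSpikes.shape)
sigma_of_filter=10   # try e.g. 2, 10 and 50 and compare how smooth the traces become
for i in range(allSpikes.shape[0]):
    gauss_filtered[i,:]=sp.ndimage.filters.gaussian_filter1d(allSpikes[i,:]*1000.0,sigma_of_filter,mode='constant')

plt.figure(figsize=(8, 6))
plt.imshow(gauss_filtered,aspect=32,cmap='Greys',interpolation='none')
plt.colorbar()
plt.show()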

Now we will try to use a custom filter instead of the standard ones.

First, let's make an array with an exponential shape. This array will be used as the filter.


In [15]:
filter_size=30
tau=5.0
arr=np.linspace(0,filter_size,filter_size+1)
filter_exp=np.exp(-arr/tau)


filter_exp=filter_exp/sum(filter_exp) #normalize the filter so that its area is 1
plt.plot(filter_exp)
plt.show()


Now we can apply this using the function sp.ndimage.filters.convolve1d


In [16]:
exp_filtered=np.zeros(spike_array.shape)
for i in range(spike_array.shape[0]):
    # shift the kernel with origin so that the filter is causal (only past spikes contribute);
    # -(filter_size//2) stays within the range of origins that convolve1d accepts
    exp_filtered[i,:]=sp.ndimage.filters.convolve1d(spike_array[i,:]*1000,filter_exp,mode='constant',origin=-(filter_size//2))

In [17]:
plt.figure(figsize=(8, 4))
i_th_neuron=3
plt.plot(exp_filtered[i_th_neuron,:])
plt.vlines(np.where(spike_array[i_th_neuron,:]==1)[0],0,100)
plt.show()



In [18]:
plt.figure(figsize=(8, 4))
plt.imshow(exp_filtered,aspect=5,cmap='Greys',interpolation='none')
plt.colorbar()
plt.show()


In the cell below, implement this for the allSpikes data. Then try changing the size and the tau of the filter.


In [ ]:
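
One possible sketch for this cell (the name exp_filtered_all is just illustrative; the kernel is rebuilt here so that filter_size and tau are easy to vary):

filter_size=30   # try e.g. 10, 30 or 100
tau=5.0          # try e.g. 2.0, 5.0 or 20.0
arr=np.linspace(0,filter_size,filter_size+1)
filter_exp=np.exp(-arr/tau)
filter_exp=filter_exp/sum(filter_exp)

exp_filtered_all=np.zeros(allSpikes.shape)
for i in range(allSpikes.shape[0]):
    exp_filtered_all[i,:]=sp.ndimage.filters.convolve1d(allSpikes[i,:]*1000.0,filter_exp,mode='constant',origin=-(filter_size//2))

plt.figure(figsize=(8, 6))
plt.imshow(exp_filtered_all,aspect=32,cmap='Greys',interpolation='none')
plt.colorbar()
plt.show()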

Now, in the cell below, try to use a triangular filter (shaped like a pyramid).


In [ ]:
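
A possible sketch for a triangular (pyramid-shaped) filter, here built with np.bartlett and applied to spike_array for illustration; the names filter_tri and tri_filtered are just illustrative:

filter_size=20
filter_tri=np.bartlett(filter_size+1)    # triangular window, peak in the middle
filter_tri=filter_tri/sum(filter_tri)    # normalize the filter so that its area is 1
plt.plot(filter_tri)
plt.show()

tri_filtered=np.zeros(spike_array.shape)
for i in range(spike_array.shape[0]):
    tri_filtered[i,:]=sp.ndimage.filters.convolve1d(spike_array[i,:]*1000.0,filter_tri,mode='constant')

plt.imshow(tri_filtered,aspect=3,cmap='Greys',interpolation='none')
plt.colorbar()
plt.show()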

PSTH

Let's make a peristimulus time histogram (PSTH). To do this, find all the times at which neurons spike, then plot a histogram of those times.


In [19]:
neurons,time=np.where(spike_array==1)
plt.hist(time,bins=10)
plt.show()


In the cell below, do the same for the allSpikes data.


In [ ]:
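
One way to do it (a sketch; the bin count of 50 is arbitrary, so adjust it until the histogram looks reasonable):

all_neurons,all_time=np.where(allSpikes==1)
plt.hist(all_time,bins=50)
plt.xlabel('time')
plt.ylabel('spike count')
plt.show()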

Machine learning

Now that we have done some basic analysis on spike train data, let's do some machine learning.

We will start with K-means clustering of the data.


In [20]:
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs

n_samples = 150
random_state = 170
X_test, y_test = make_blobs(n_samples=n_samples, random_state=random_state)

In [21]:
km=KMeans(n_clusters=2, random_state=random_state)
y_pred = km.fit_predict(X_test)
labels=km.labels_
cluster_centers=km.cluster_centers_
plt.figure(figsize=(8, 6))
# plt.subplot(221)
plt.scatter(X_test[:, 0], X_test[:, 1], c=y_pred,cmap='Set1')
plt.show()



In [22]:
D_average = km.inertia_   # sum of squared distances from each sample to its closest cluster center
print D_average


1428.10889712

In [23]:
def calc_dist(labels,X,cluster_centers):
    # distance of each sample to the center of the cluster it was assigned to
    Y=np.zeros(X.shape[0])
    for i,j in enumerate(labels):
        Y[i]=np.linalg.norm(X[i,:]-cluster_centers[j])
    return Y
dist = calc_dist(labels,X_test,cluster_centers)
plt.figure(figsize=(8, 6))
plt.scatter(X_test[:, 0], X_test[:, 1], c=dist,cmap='hot')
plt.colorbar()
plt.show()


Let's apply K-means to neural data.

First, load and visualize the data.


In [24]:
ft1=sio.loadmat("forclustering.mat")
new_spiketrain=ft1['new_spiketrain']

In [25]:
print new_spiketrain.shape
plt.figure(figsize=[10,10])
plt.imshow(new_spiketrain,aspect=10,cmap='hot',interpolation='none')
plt.show()


(250, 10000)

We need to preprocess the data before we can apply machine learning to it. Let's filter it appropriately.


In [26]:
filtered_new=np.zeros(new_spiketrain.shape)
size_of_filter=100  # used as the Gaussian sigma (in time bins); set an appropriate width
for i in range(new_spiketrain.shape[0]):
    filtered_new[i,:]=sp.ndimage.filters.gaussian_filter1d(new_spiketrain[i,:]*1000,size_of_filter,mode='constant')

In [29]:
print(new_spiketrain.shape)
print(filtered_new.shape)


(250, 10000)
(250, 10000)

Generally, machine learning works better on features extracted from the raw data than on the raw data itself.

We'll extract some features using Principal Component Analysis (PCA).


In [49]:
from sklearn.decomposition import PCA
reduced_data=PCA(n_components=2).fit_transform(filtered_new - np.mean(filtered_new))
plt.scatter(reduced_data[:,0],reduced_data[:,1],cmap='Set3')
plt.show()


As it turns out, in this case, the principal components are essentially the mean firing rate and the variance.


In [50]:
x1=np.mean(filtered_new,axis=1)
x2=np.std(filtered_new,axis=1)
X=np.array([x1,x2]).T
plt.scatter(X[:, 0], X[:, 1],c=range(X.shape[0]),cmap='jet')
plt.show()
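
As a quick sanity check (a sketch, assuming reduced_data, x1 and x2 from the cells above are still in memory), we can correlate the two principal components with the per-neuron mean and standard deviation; correlations close to ±1 would support the claim:

print(np.corrcoef(reduced_data[:,0],x1)[0,1])   # PC1 vs mean rate
print(np.corrcoef(reduced_data[:,0],x2)[0,1])   # PC1 vs standard deviation
print(np.corrcoef(reduced_data[:,1],x1)[0,1])   # PC2 vs mean rate
print(np.corrcoef(reduced_data[:,1],x2)[0,1])   # PC2 vs standard deviation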



In [51]:
km=KMeans(n_clusters=5, random_state=random_state) # play with cluster size
y_pred = km.fit_predict(X)
labels=km.labels_
cluster_centers=km.cluster_centers_
plt.scatter(X[:, 0], X[:, 1], c=y_pred,cmap='jet')
plt.show()
D_original = km.inertia_
print D_original


4736.57576425

Permutation test! To check whether the clustering reflects real structure, we shuffle one feature relative to the other and compare the K-means inertia on the shuffled data with the inertia on the original pairing.


In [ ]:
x1=np.mean(filtered_new,axis=1)
x2=np.std(filtered_new,axis=1)
n_repeat=100
D_permute=np.zeros(n_repeat)
D_non_permute=np.zeros(n_repeat)
for i in range(n_repeat):
    permutation= np.random.permutation(range(x1.shape[0]))
    X_permuted=np.array([x1[permutation],x2]).T   # shuffling x1 relative to x2 breaks their joint structure
    X_non_permuted=np.array([x1,x2]).T            # original pairing, for comparison
        
    km=KMeans(n_clusters=5, random_state=random_state)
    y_pred = km.fit_predict(X_permuted)
    D_permute[i]=km.inertia_
    
    km=KMeans(n_clusters=5, random_state=random_state)
    y_pred = km.fit_predict(X_non_permuted)
    D_non_permute[i]=km.inertia_
plt.hist(D_permute)
plt.hist(D_non_permute)
plt.show()

Bonus question: in the cell below, try to cluster the allSpikes data using K-means.


In [ ]:
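
One possible sketch for the bonus (all the choices here, the Gaussian width of 100, the two rate features and the 3 clusters, are just guesses to experiment with):

filtered_all=np.zeros(allSpikes.shape)
for i in range(allSpikes.shape[0]):
    filtered_all[i,:]=sp.ndimage.filters.gaussian_filter1d(allSpikes[i,:]*1000.0,100,mode='constant')

f1=np.mean(filtered_all,axis=1)   # mean firing rate per neuron
f2=np.std(filtered_all,axis=1)    # variability of the filtered rate
F=np.array([f1,f2]).T

km_all=KMeans(n_clusters=3, random_state=random_state)   # try different numbers of clusters
y_pred_all=km_all.fit_predict(F)
plt.scatter(F[:, 0], F[:, 1], c=y_pred_all, cmap='jet')
plt.show()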

Supervised Learning: Generalized Linear Models (GLM)

Let's generate some toy data.


In [ ]:
X_lin=np.random.random([10,100])            # 10 predictors, 100 samples
c=np.random.random(10)                      # true weights
Y_lin=c.dot(X_lin)+np.random.random(100)    # linear combination of the predictors plus noise

In [ ]:
from sklearn import linear_model
clf = linear_model.LinearRegression()
clf.fit(X_lin.T,Y_lin)
y2=clf.predict(X_lin.T)
plt.scatter(Y_lin,y2)
plt.xlim([0,10])
plt.ylim([0,10])
plt.plot(np.arange(10),np.arange(10),"r")
plt.show()

Now let us apply this to some neural data.


In [ ]:
filtered_new1=np.zeros(allSpikes.shape)
size_of_filter=100
for i in range(allSpikes.shape[0]):
    filtered_new1[i,:]=sp.ndimage.filters.gaussian_filter1d(allSpikes[i,:]*1000,size_of_filter,mode='constant')

In [ ]:
new_allsp=filtered_new1[:,:6000]
newtarget=np.zeros(6000)
newtarget[500:3500]=1
from sklearn import linear_model
clf = linear_model.LogisticRegression()
clf.fit(new_allsp.T,newtarget)
arr=np.abs(clf.coef_).argsort()[0,:]   # neuron indices sorted by coefficient magnitude
print np.intersect1d(arr[-50:], neuronsWithInput).shape   # how many of the top 50 actually received input
plt.plot(clf.predict(new_allsp.T),"r.")
plt.plot(newtarget,"b,")
plt.ylim([-1,1.5])
plt.show()

In [ ]:
new_allsp=new_spiketrain
newtarget=np.zeros(new_spiketrain.shape[1])
newtarget[0:5000]=1
from sklearn import linear_model
clf = linear_model.LogisticRegression()
clf.fit(new_allsp.T,newtarget)
arr=np.abs(clf.coef_).argsort()[0,:]   # neuron indices sorted by coefficient magnitude
print clf.coef_[0,arr[-100:]]   # coefficients of the 100 most influential neurons
print labels[arr[-100:]]        # their cluster labels from the K-means clustering above
plt.plot(clf.predict(new_allsp.T),"r.")
plt.plot(newtarget,"b,")
plt.ylim([-1,1.5])
plt.show()

In [ ]:
# where do the 100 most influential neurons sit in the (mean, std) feature space?
print X.shape
print arr[-100:]
plt.scatter(X[arr[-100:],0],X[arr[-100:],1])
plt.show()