In [1]:
#init before start:
%qtconsole
%pylab inline
In [2]:
import sys
sys.version
Out[2]:
In [3]:
import pandas as pd
import os
from IPython.display import Image
In [4]:
whos
In [18]:
# Build a Gaussian wave packet; the next cell plots it and its FFT.
# Change peakWidth and rerun to see the time/frequency width trade-off.
timeLength = 128 #data length (number of samples)
amplitude = 1 #peak amplitude
#peakWidth = 172.0 #alternative width; W > 0, and the .0 keeps it float (avoids Python 2 integer division)
peakWidth = 3.0
timeIndex = arange(-timeLength/2,timeLength/2) #symmetric time axis centered on 0 (names from %pylab)
data = amplitude*exp(-timeIndex**2/peakWidth) #Gaussian envelope exp(-t^2/W)
In [19]:
# Plot the wave packet (time domain) and the magnitude of its FFT.
fig, axes = plt.subplots(2, 1, figsize=(6, 4))
axes[0].plot(timeIndex, data)
axes[0].set_title("Time Domain")
# fftshift moves the zero-frequency bin to the center of the x axis
axes[1].plot(timeIndex, fftshift(abs(fft.fft(data))),'r')
axes[1].set_title("Frequency Domain(FFT)")
fig.tight_layout()
# plt.show() instead of fig.show(): Figure.show only works with a GUI
# backend and warns/raises otherwise; plt.show() is backend-safe.
plt.show()
In [ ]:
In [23]:
# Build a time-shifted Gaussian wave packet; the next cell plots it and its FFT.
# Change tau and rerun — a pure time shift should leave the FFT magnitude unchanged
# (shift theorem), which is what this experiment presumably demonstrates.
timeLength = 128 #data length (number of samples)
amplitude = 1 #peak amplitude
peakWidth = 72.0 #Width (float — avoids Python 2 integer division)
timeIndex = arange(-timeLength/2,timeLength/2) #symmetric time axis centered on 0
tau = 40 #time shift; change this and rerun
data = amplitude*exp(-(timeIndex-tau)**2/peakWidth) #Gaussian centered at t = tau
In [24]:
# Plot the shifted wave packet and the magnitude of its FFT.
fig, axes = plt.subplots(2, 1, figsize=(6, 6))
axes[0].plot(timeIndex, data)
axes[0].set_title("Time Domain")
# fftshift moves the zero-frequency bin to the center of the x axis
axes[1].plot(timeIndex, fftshift(abs(fft.fft(data))),'r')
axes[1].set_title("Frequency Domain(FFT)")
fig.tight_layout()
# plt.show() instead of fig.show(): Figure.show only works with a GUI
# backend and warns/raises otherwise; plt.show() is backend-safe.
plt.show()
In [30]:
#A Gaussian plus noise: accumulator setup for the manual-loop cell below.
N = 160 #data length (number of samples)
A = 1 #Gaussian amplitude
W = 72.0 #Width, W > 0, and .0 is required (float, avoids Python 2 integer division)
noise_rate = 1 #noise standard-deviation multiplier, the magic part
#Let's sample it several times, this time with a time shift per sample:
sample_time = 30 #number of samples to accumulate
accumulated_dirty_data = zeros(N) #running sum, filled by rerunning the next cell
i = 0 #manual loop counter — incremented by the next cell on each rerun
In [53]:
##manual loop: rerun this cell sample_time times (i is incremented at the bottom)
##equivalent to: for i in arange(sample_time): ...
#time shift varies with the sample index:
tau = 50*sin(i)
noise_data = noise_rate * randn(N)
t = arange(-N/2,N/2)
#clean (noise-free) Gaussian centered at tau
clean_data = A*exp(-(t-tau)**2/W)
#add noise to generate the dirty data ('+' instead of the raw __add__ dunder call)
dirty_data = clean_data + noise_data
#and accumulate the running sum
accumulated_dirty_data += dirty_data
figure(1)
#row 1: clean data
subplot(3,1,1)
plot(clean_data)
legend(["clean data"])
#row 2: this sample with noise
subplot(3,1,2)
plot(dirty_data)
legend(["dirty data"])
#row 3: accumulated sum so far
subplot(3,1,3)
plot(accumulated_dirty_data)
legend(["Dirty accumulate data"])
#pause(0.3)
i += 1
#NOTE(review): "everage" is a typo for "average"; name kept since later cells may use it
everage_dirty_data = accumulated_dirty_data/sample_time
In [97]:
#A Gaussian plus noise: re-initialization for an FFT-accumulation variant of the experiment.
N = 160 #data length (number of samples)
A = 1 #Gaussian amplitude
W = 72.0 #Width, W > 0, and .0 is required (float, avoids Python 2 integer division)
noise_rate = 1 #noise standard-deviation multiplier, the magic part
#Let's sample it several times, this time with a time shift:
sample_time = 30 #number of samples to accumulate
accumulated_dirty_data = zeros(N) #running sum in the time domain
#NOTE(review): accumulated_dirty_data_fft is never filled in any visible cell — confirm whether the loop cell was deleted
accumulated_dirty_data_fft = zeros(N)
#loop counter, incremented manually
i = 0
In [96]:
In [ ]:
In [9]:
#0.2.4 filter — FCS (fluorescence correlation spectroscopy) simulated-data section
In [100]:
# NOTE(review): hardcoded absolute local path — breaks on any other machine;
# consider a configurable data directory instead.
os.chdir('/Users/chenchen/Documents/Git/ipynotebook/dataDemo20140109/FCSSimData')
currentDir = '/Users/chenchen/Documents/Git/ipynotebook/dataDemo20140109/FCSSimData'
currentDir
Out[100]:
In [102]:
ls
In [104]:
# Load the simulated FCS intensity trace: no header row in the file,
# two columns named explicitly.
fcsFrame = pd.read_csv(
    "bleach5e-08__dg500to550_dr600to700__D30__cAB5_cA2.5_cB2.5__red_detection_bleach0_bts.dat",
    header=None,
    names=["time", "Intensity"],
)
In [105]:
fcsFrame.describe()
Out[105]:
In [107]:
# Quick look at the raw intensity trace versus time (pylab plot/show).
plot(fcsFrame.time, fcsFrame.Intensity)
show()
In [109]:
#data point to use
k = 2 ** 21 + 2**19 + 2**18 + 2**15
print "Total lenth in Frame is :", fcsFrame.time.count()
print "No. of data points to play with :", k
print "ratio of data usage :", double(k)/fcsFrame.time.count()
In [110]:
Image("FCS_Jan.png")
Out[110]:
In [114]:
fcsTarget = fcsFrame.head(k) #restrict to the first k samples (see data-usage ratio above)
In [116]:
fcsTarget.describe()
Out[116]:
In [117]:
#autocorrelation of the intensity trace via the acf_fft helper defined later in this notebook
fcsAcf=acf_fft(fcsTarget.Intensity.values)
In [118]:
#NOTE(review): exact duplicate of the previous cell — rerun leftover, can be deleted
fcsAcf=acf_fft(fcsTarget.Intensity.values)
In [120]:
#log-x plot: FCS correlations decay over several decades of lag time
semilogx(fcsAcf)
show()
In [ ]:
#NOTE(review): unexecuted duplicate cell — leftover, can be deleted
fcsAcf=acf_fft(fcsTarget.Intensity.values)
In [9]:
# NOTE(review): hardcoded absolute local path — breaks on any other machine.
currentDir = '/Users/chenchen/Documents/Git/ipynotebook/dataDemo20140109/spFRETTestData/t3r'
os.chdir(currentDir)
# shell out to confirm the working directory actually changed
!pwd
In [9]:
In [9]:
In [9]:
In [9]:
In [ ]:
Others
In [3]:
pwd
Out[3]:
In [5]:
import sys, os
In [6]:
# NOTE(review): os.chdir("") raises OSError/FileNotFoundError — this looks like
# a placeholder whose target path was never filled in.
os.chdir("")
In [7]:
whos
1. ACF_fft
In [111]:
#Calculate the auto-correlation function via FFT (Wiener-Khinchin theorem)
#only supports 1D input; cross-correlation variant is a possible extension
#input should be a numpy.ndarray (1d) or pandas.core.series.Series
def acf_fft(data):
    """ Series or 1darray ---> 1darray

    Calculate the normalized fluctuation autocorrelation function
    g(tau) = <I(t) I(t+tau)> / <I>**2 - 1 of the input signal, using
    FFT -> power spectrum -> inverse FFT.

    Note: the FFT-based estimate is circular (treats the signal as
    periodic); only the first half of the lags is returned to limit
    wrap-around artifacts.
    """
    data_mean = data.mean()
    #Step 1: FFT of the signal
    fft_data = fft.fft(data)
    #Step 2: power spectrum S(f) = F(f) * conj(F(f)) = |F(f)|^2
    #('*' operator instead of the raw __mul__ dunder call)
    S_f = fft_data * fft_data.conj()
    #Step 3: inverse FFT of the power spectrum is the (unnormalized)
    #circular autocorrelation. fft_data[0] == N * data_mean, so dividing by
    #fft_data[0] * data_mean normalizes by N * <I>^2; subtracting 1 removes
    #the DC baseline. fft.ifft used (not bare pylab ifft) for consistency.
    acf = fft.ifft(S_f) / fft_data[0] / data_mean - 1
    #Keep the real part only (imaginary part is numerical round-off) and
    #return the first half of the lags.
    return acf.real[:len(acf) // 2]
In [ ]: