The following methods were used in this study:
Author: Julio Cárdenas-Rodríguez, Ph.D.
email: cardenaj@email.arizona.edu
A total of XX (TODO: fill in the final count) mice were used in this study. Each mouse was infected as follows:
Both thighs can be seen in each image, and a total of five imaging slices were collected around the center of infection. The average signal for each of the following regions of interest (ROIs) was collected for all slices:
In [233]:
# Import Python Modules
import numpy as np
#import seaborn as sn
import matplotlib.pyplot as plt
%matplotlib inline
# NOTE(review): star imports pollute the namespace and hide where names used
# later (e.g. `array`, `title`, `xlim`, `Lscale`, `fitT2`) actually come from;
# prefer explicit imports.
from pylab import *
import pandas as pd
# Import LOCAL functions written by me
from mylocal_functions import *
In [2]:
# Make list of all T2.txt files (shell capture via IPython)
T2_list = get_ipython().getoutput('ls ../Study_03_CBA/*T2.txt')
# Echo times (s) used for the T2 fit: 12 points, 12 ms apart
TR = np.linspace(.012, .012 * 12, 12)
# One column per region of interest, in acquisition order
ROI_COLUMNS = ["Infected", "Healthy_Right", "Sterile_Inflammation", "Healthy_Left"]

# Fit T2 for all ROIs, slices and mice, then build one dataframe.
# Collect per-file frames in a list and concatenate ONCE at the end:
# DataFrame.append was removed in pandas 2.0, and growing a frame per
# iteration is quadratic in the number of files.
frames = []
for names in T2_list:
    # Convert txt file to array
    YDataMatrix = txt_2_array(names)
    # Estimate T2 (one value per ROI per slice)
    T2time = fitT2(TR, YDataMatrix)
    # Fitted T2 values side-by-side with metadata parsed from the filename
    df_T2 = pd.DataFrame(T2time.T, columns=ROI_COLUMNS)
    df_info = name_2_df(names)
    frames.append(pd.concat([df_T2, df_info], axis=1))
T2DF = pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()
In [3]:
# Plot the distribution of estimated T2 values for each ROI in one slice.
# This cell used to be five copy-pasted lines (four commented out), one per
# slice; a parameterized helper replaces them.
def plot_slice_density(slice_number):
    """Density plot of the four ROI T2 estimates for one imaging slice."""
    T2DF[T2DF.Slice == slice_number].iloc[:, :4].plot.density()
    title("Slice %02d" % slice_number)
    xlim((0.025, .15))

plot_slice_density(5)
Out[3]:
In [113]:
# list of files (shell capture via IPython)
CEST_list=get_ipython().getoutput('ls ../Study_03_CBA/*CEST.txt')
# NOTE(review): CEST_DF is never used below — candidate for deletion.
CEST_DF=pd.DataFrame()
# Zero seed block for the concatenate-in-a-loop below; it is stripped
# afterwards via Z[4::, ...].
Z=np.zeros((4,110))
def normalize_data(DataMatrix):
    """Normalize each row of DataMatrix by its own reference value at column 8.

    Parameters
    ----------
    DataMatrix : 2-D array, shape (n_traces, n_offsets) with n_offsets > 8.

    Returns
    -------
    Array of the same shape where every row has value 1.0 at column 8.

    Notes
    -----
    Broadcasting replaces the original per-row Python loop. It also fixes a
    latent defect: the old loop wrote float ratios into `np.zeros_like`,
    which silently truncated results for integer input dtypes.
    """
    # DataMatrix[:, 8:9] keeps a (rows, 1) column so division broadcasts row-wise.
    return DataMatrix / DataMatrix[:, 8:9]
# Normalize every CEST file and stack all runs into one matrix.
# Collect rows in a list and stack ONCE: np.concatenate inside a loop
# re-allocates the whole array each iteration (quadratic cost) and required
# a dummy zero block that had to be sliced off afterwards.
Z_rows = []
for names in CEST_list:
    # Convert txt file to array; transpose so each row is one trace
    D = txt_2_array(names)
    Z_rows.append(normalize_data(D.T))
# Drop the first 9 columns (reference/dummy offsets), matching the original
# Z[4::, 9::] trim; the zero-seed rows no longer exist so only columns are cut.
Z = np.vstack(Z_rows)[:, 9:] if Z_rows else np.zeros((0, 101))
# define offsets in ppm
a1 = np.linspace(-55, -50, 9)
ppm = np.linspace(-8, 8, 101)
full_ppm = np.concatenate((a1, ppm))
In [223]:
# Fit data
# NOTE(review): imports belong in the single import cell at the top of the
# notebook; scattering them here (including a repeated star import) hides
# dependencies during a Restart & Run All.
from scipy.optimize import curve_fit
import seaborn as sn
from mylocal_functions import *
def Lorentzian(sat_offset, Amp, Width, Center):
    """Single Lorentzian lineshape.

    Evaluates Amp * (W**2/4) / (W**2/4 + (sat_offset - Center)**2), i.e. a
    peak of height Amp at `Center` with full width `Width`.
    """
    half_width_sq = (Width ** 2) / 4  # (FWHM/2)^2 term of the lineshape
    shifted_sq = (sat_offset - Center) ** 2
    return (Amp * half_width_sq) / (half_width_sq + shifted_sq)
def Lorentzian2(sat_offset, a1, w1, c1, a2, w2, c2):
    """Two-pool model: the sum of two Lorentzian lineshapes.

    Each pool is described by an (amplitude, width, center) triple.
    """
    pool_1 = Lorentzian(sat_offset, a1, w1, c1)
    pool_2 = Lorentzian(sat_offset, a2, w2, c2)
    return pool_1 + pool_2
# Fit a scaled two-Lorentzian model to one normalized Z-spectrum.
# Z-spectrum signal: 1 - normalized intensity of trace 12
Signal = 1 - Z[12, :]
# Re-center the offset axis on the signal maximum (direct water saturation)
xdata = ppm - ppm[Signal.argmax()]
# Initial guesses and bounds: (amplitude, width, center) per pool + a scale term
A10, W10, C10 = 0.90, 1, 0
A20, W20, C20 = .1, 1, -4
A1L, W1L, C1L = 0.5, .1, -.1
A2L, W2L, C2L = 0, .1, -6
A1U, W1U, C1U = 1.0, 5, +.1
A2U, W2U, C2U = 1.0, 5, -1.0
scale0, scaleL, scaleU = 0, -1, +1
initial_guess = [A10, W10, C10, A20, W20, C20, scale0]
lb = [A1L, W1L, C1L, A2L, W2L, C2L, scaleL]
ub = [A1U, W1U, C1U, A2U, W2U, C2U, scaleU]
# Lscale comes from the mylocal_functions star import — TODO confirm its signature
p, cov = curve_fit(Lscale, xdata, Signal, p0=initial_guess, bounds=(lb, ub))
# BUG FIX: was print(pars_hat) — `pars_hat` was never defined (NameError).
print(p)
Yhat = Lscale(xdata, *p)  # unpack fitted parameters instead of p[0]..p[6]
plt.figure(figsize=(10, 5))
plt.plot(xdata, Signal, 'o', label='Signal')
# BUG FIX: fit line was also labeled 'Signal', making any legend ambiguous
plt.plot(xdata, Yhat, '-', label='Fit')
plt.legend();
In [232]:
# Inspect the docstring of the local fitting helper.
# BUG FIX: `mylocal_functions.fit_L2_scale?` raised NameError — only
# `from mylocal_functions import *` had been run, so the module name itself
# was never bound. Bind the module and use help(), which also works outside
# IPython.
import mylocal_functions
from mylocal_functions import *
help(mylocal_functions.fit_L2_scale)
In [193]:
# Preview the model curve at the initial-guess parameters, and sanity-check
# the shape of a nested parameter list.
plt.plot(ppm,Lscale(ppm,A10, W10, C10, A20, W20, C20, scale0));
initial_guess = [A10, W10, C10, A20, W20, C20, scale0];
lb = [A1L, W1L, C1L, A2L, W2L, C2L, scaleL];
ub = [A1U, W1U, C1U, A2U, W2U, C2U, scaleU];
# `array` comes from the pylab star import; array(A).shape is (2, 1, 7)
A=[[initial_guess],[initial_guess]]
array(A).shape
Out[193]:
In [147]:
# Offset (ppm) at which the Z-spectrum signal 1 - Z[12,:] is largest.
ppm[Signal.argmax()]
Out[147]:
In [131]:
# Quick shape check of a single Lorentzian (Amp=1, Width=1, Center=1),
# plotted against sample index rather than ppm.
L= Lorentzian(ppm,1,1,1); plt.plot(L)
Out[131]:
In [64]:
# Scatter of all normalized Z-spectra against saturation offset.
plt.plot(ppm,Z.T,'.'); plt.xlim(-10,10)
Out[64]:
In [68]:
# Number of CEST files found.
len(CEST_list)
Out[68]:
In [52]:
# Leftover interactive help lookup (IPython `?`); remove before publishing.
Z=np.zeros?
In [ ]:
# NOTE(review): bug — this binds the *function* np.zeros to Z (the call and
# shape, e.g. np.zeros((4, 110)), are missing), clobbering the Z data matrix
# built earlier. Confirm the intended shape or delete this scratch cell.
Z=np.zeros
In [44]:
In [45]:
# Dashed plot of Z against offset.
# NOTE(review): fails if the preceding `Z=np.zeros` cell was run (Z would be
# a function, not an array).
plt.plot(ppm,Z,'--'); plt.xlim(-10,10)
Out[45]:
In [ ]:
# NOTE(review): dead duplicate of the loop body from the In [2] cell, left
# outside any loop — `names` and `YDataMatrix` are stale leftovers from the
# last iteration of the earlier loop, and DataFrame.append was removed in
# pandas 2.0. Consider deleting this cell.
#Estimate T2
T2time=fitT2(TR,YDataMatrix)
#convert to data frame
df_T2=pd.DataFrame(T2time.T,columns=["Infected","Healthy_Right","Sterile_Inflammation","Healthy_Left"])
#df_T2=pd.DataFrame(T2time.T,columns=["ROI-1","ROI-2","ROI-3","ROI-4"])
df_info=name_2_df(names)
df_final=pd.concat([df_T2,df_info], axis=1)
T2DF=T2DF.append(df_final,ignore_index=True)
In [18]:
# Parse metadata out of the most recent filename and display it.
# NOTE(review): relies on `names` leaking from an earlier loop's last iteration.
df_info=name_2_df(names)
df_info
Out[18]:
In [16]:
# Make list of all T2.txt files.
# NOTE(review): this rebinds CEST_list to *T2* files — the name is misleading
# and shadows the CEST file list built earlier; later cells iterating over
# CEST_list will now see T2 files.
CEST_list = get_ipython().getoutput('ls ../Study_03_CBA/*T2.txt')
for names in CEST_list:
    Ydata = txt_2_array(names)
    # Print a compact per-file summary instead of dumping the full array
    print(names, Ydata.shape)
df_info = name_2_df(names)
In [7]:
def scale(y, index):
    """Normalize `y` by its element at position `index`.

    BUG FIX: the original `return y/y[index` was missing the closing
    bracket (SyntaxError).
    """
    return y / y[index]
# Scratch check: echo each filename in the list.
for names in CEST_list:
print(names)
In [11]:
In [5]:
# Normalize each column trace of the last-loaded file and integrate it.
Ydata = txt_2_array(names)
rows, cols = Ydata.shape
integrals = []
for i in range(cols):
    ydata = Ydata[:, i]
    ydata = ydata / ydata[9]  # normalize by the reference sample
    ydata = ydata[9:]         # drop samples before the reference point
    # BUG FIX: was np.sum(yd) — `yd` was never defined (NameError); also
    # collect the per-column integral instead of discarding it each pass.
    integrals.append(np.sum(ydata))
In [ ]:
# NOTE(review): this cell duplicates the T2-fitting loop from the In [2]
# cell above; re-running it appends the same rows to T2DF again. Kept for
# reference — consider deleting it.
# Fit T2 for all ROIs, slices and mice; construct dataframe.
frames = []
for names in T2_list:
    # Convert txt file to array
    YDataMatrix = txt_2_array(names)
    # Estimate T2
    T2time = fitT2(TR, YDataMatrix)
    # Fitted T2 per ROI plus metadata parsed from the filename
    df_T2 = pd.DataFrame(T2time.T, columns=["Infected", "Healthy_Right", "Sterile_Inflammation", "Healthy_Left"])
    df_info = name_2_df(names)
    frames.append(pd.concat([df_T2, df_info], axis=1))
# DataFrame.append was removed in pandas 2.0; concatenate once instead.
T2DF = pd.concat([T2DF] + frames, ignore_index=True)
In [ ]: