For a good introduction to the Jupyter notebook interface and to Python in general, check out the following web tutorial: https://miykael.github.io/nipype_tutorial/
In [ ]:
# We are going to use data from the ADHD-200 dataset.
import os
## edit this section if you are running this on your own laptop
tutorial_dir = '/home/USERNAME/Documents/ss2018_pythonmri/' ## point this to the location where you want the data
nilearn_data = os.path.join(tutorial_dir, 'data', 'nilearn_data')
In [ ]:
! mkdir -p ~/.conda/envs
! ln -s /scinet/course/ss2018/3_bm/1_mripython/conda_envs/coss2018_mri ~/.conda/envs/coss2018_mri
In [ ]:
# We are going to use data from the ADHD-200 dataset.
import os
## use these commands if you are running this on SciNet
my_home = os.environ.get('HOME')
my_scratch = my_home.replace('/home/', '/scratch/')
tutorial_dir = os.path.join(my_scratch, 'coss2018_mripython')
# make the tutorial dir if it does not exist
if not os.path.exists(tutorial_dir):
    os.makedirs(tutorial_dir)
# we are all going to read from the data in the courses folder (on the burst buffer!)
nilearn_data = '/scinet/course/ss2018/3_bm/1_mripython/data/nilearn_data'
In [ ]:
cc200_nii = os.path.join(os.path.dirname(os.getcwd()), 'rois', 'cc200_roi_atlas.nii.gz')
cc200_nii = os.path.join(os.path.dirname(os.getcwd()), 'rois', 'cc200_roi_atlas.nii.gz')
## if this directory doesn't exist, make it
rois_dir = os.path.join(tutorial_dir, 'data', 'rois')
if not os.path.exists(rois_dir):
    os.makedirs(rois_dir)
We are grabbing data from the ADHD-200 dataset, a data sharing initiative with close to 1000 scans. Check out the references included in the description of the fetched adhd object for more info.
Thanks to the work of the Preprocessed Connectomes Project, a lot of the data processing work has been done for us! We are going to grab the preprocessed but unfiltered resting state fMRI data.
In [ ]:
from nilearn import datasets
# We specify the data directory and the number of subjects we want to download
adhd = datasets.fetch_adhd(data_dir=nilearn_data,
                           n_subjects=3)
print(adhd.description)
In [ ]:
print('{}'.format(adhd.description.decode()))
In [ ]:
adhd.phenotypic[0]
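The phenotypic record is a structured array. A quick way to see which measures are available (a minimal sketch, assuming the structured-array layout that fetch_adhd returns) is to list its field names:
In [ ]:
# list the available phenotypic fields (assumes a numpy structured array)
print(adhd.phenotypic.dtype.names)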
We need: an ROI atlas, a functional scan, the scan's confounds file, and a seed ROI number.
In [ ]:
rois = cc200_nii                    # the ROI atlas
func = adhd.func[0]                 # the functional scan of the first subject
func_confounds = adhd.confounds[0]  # the matching confounds file
roi = 174                           # the seed ROI number
In [ ]:
# this makes the plots show up..
import matplotlib.pyplot as plt
%matplotlib inline
In [ ]:
from nilearn import plotting
plotting.plot_roi(cc200_nii)
In [ ]:
from nilearn import image as img
mean_img = img.mean_img(func)
plotting.plot_epi(mean_img)
In [ ]:
plotting.plot_roi(cc200_nii, bg_img=mean_img, alpha = 0.5, title = "cc200 atlas")
In [ ]:
# this is for interacting with NIFTI files
import nibabel as nib
func_nib = nib.load(func)
print(func_nib)
In [ ]:
affine = func_nib.affine
affine
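The affine maps voxel indices (i, j, k) to world coordinates in mm. As a sanity check, nibabel's apply_affine lets us convert a voxel index to mm coordinates (a minimal sketch; the voxels chosen here are arbitrary):
In [ ]:
import numpy as np
from nibabel.affines import apply_affine
# world (mm) coordinates of the corner voxel and the centre voxel
print(apply_affine(affine, [0, 0, 0]))
print(apply_affine(affine, np.array(func_nib.shape[:3]) // 2))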
In [ ]:
header = func_nib.header
print(header.keys())
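Individual header fields can be read like dictionary entries, and get_zooms() is a handy shortcut for the voxel sizes (for a 4D image the fourth value is the TR):
In [ ]:
# voxel sizes in mm; for 4D images the 4th value is the TR in seconds
print(header.get_zooms())
# the raw pixdim field holds the same information
print(header['pixdim'])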
In [ ]:
# get the dimensions for the fMRI file
dims = func_nib.shape
dims
In [ ]:
# use get_data to extract the data array (newer nibabel versions use get_fdata instead)
func_data = func_nib.get_data()
func_data.shape
In [ ]:
# now do the same thing for the ROI atlas
rois_data = nib.load(cc200_nii).get_data()
print(rois_data.shape)
if rois_data.shape[0:3] == dims[0:3]:
    print("ROIs and func file dimensions match, Hooray!!")
else:
    print("FAIL, they do not match")
In [ ]:
resampled_cc200 = img.resample_to_img(cc200_nii, mean_img, interpolation = 'nearest')
plotting.plot_roi(resampled_cc200, mean_img)
resampled_cc200.to_filename(os.path.join(rois_dir, "resample_cc200.nii.gz"))
In [ ]:
# reload the resampled ROI atlas and check the dimensions again
new_rois = os.path.join(tutorial_dir, 'data', 'rois', "resample_cc200.nii.gz")
rois_data = nib.load(new_rois).get_data()
print(rois_data.shape)
if rois_data.shape[0:3] == dims[0:3]:
    print("ROIs and func file dimensions match, Hooray!!")
else:
    print("FAIL, they do not match")
In [ ]:
# reshape to voxels * timepoints (4D --> 2D)
func_data = func_data.reshape(dims[0]*dims[1]*dims[2], dims[3])
In [ ]:
rois_data = rois_data.reshape(dims[0]*dims[1]*dims[2], 1)
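Both arrays were flattened with the same default (C) ordering, so row i of func_data and row i of rois_data still refer to the same voxel. A quick round-trip check (just a sketch, not required for the analysis) confirms the reshape is lossless:
In [ ]:
import numpy as np
# reshaping back to 4D should reproduce the original data exactly
assert np.array_equal(func_data.reshape(dims), func_nib.get_data())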
In [ ]:
import numpy as np
import seaborn as sns
# get the indices of the voxels in the seed ROI
idx = np.where(rois_data == roi)[0]
# look at the individual voxel time series within the seed ROI
sns.tsplot(func_data[idx, :], err_style="unit_traces")
In [ ]:
ts = np.mean(func_data[idx, :], axis=0)
# look at the timeseries
sns.tsplot(ts)
len(ts) # looks like it's the right length (i.e., our axis is correct)
In [ ]:
# make an output matrix
output = np.zeros(dims[0]*dims[1]*dims[2])
# correlate the seed time series against every voxel (this loop is slow)
for i in range(dims[0]*dims[1]*dims[2]):
    output[i] = np.corrcoef(ts, func_data[i, :])[0][1]
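That Python loop is easy to read but slow when run over hundreds of thousands of voxels. The same Pearson correlations can be computed in one vectorised step by z-scoring and taking a dot product (a sketch equivalent to the loop above; voxels with zero variance come out as NaN either way):
In [ ]:
# z-score the seed time series and every voxel time series along time
ts_z = (ts - ts.mean()) / ts.std()
func_z = (func_data - func_data.mean(axis=1, keepdims=True)) / func_data.std(axis=1, keepdims=True)
# Pearson r is the mean product of z-scores across timepoints
output_fast = func_z.dot(ts_z) / dims[3]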
In [ ]:
mean_img_reshaped = mean_img.get_data().reshape(dims[0]*dims[1]*dims[2], 1)
# peek at the smallest non-zero mean intensities to pick a mask threshold
np.unique(mean_img_reshaped)[1:5]
In [ ]:
## add a mask to get rid of the weird bits
# indices of voxels whose mean EPI intensity is above threshold
idx_mask = np.where(mean_img_reshaped[:, 0] > 1)[0]
## recalculate the ts taking the mask into account
idx_masked = np.intersect1d(idx, idx_mask)
ts = np.mean(func_data[idx_masked, :], axis=0)
# make an output matrix
output = np.zeros(dims[0]*dims[1]*dims[2])
# correlate seed against the in-mask voxels only
for i in np.arange(len(idx_mask)):
    output[idx_mask[i]] = np.corrcoef(ts, func_data[idx_mask[i], :])[0][1]
In [ ]:
# reshape back to image dimensions (x, y, z, 1)
output_filename = os.path.join(tutorial_dir, 'seed_correlation.nii.gz')
output = np.reshape(output, (dims[0], dims[1], dims[2], 1))
# write the results into a NIFTI file
output = nib.nifti1.Nifti1Image(output, affine)
output.to_filename(output_filename)
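As an optional sanity check, we can reload the file we just wrote and confirm it has the expected shape and affine:
In [ ]:
# reload the written file and check it round-tripped correctly
written = nib.load(output_filename)
print(written.shape)
print(written.affine)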
In [ ]:
plotting.plot_glass_brain(output_filename, threshold = 0.1,
colorbar = True)
In [ ]:
plotting.plot_stat_map(output_filename, threshold = 0.3,
colorbar = True)
In [ ]:
# create the output matrix (one row per ROI)
output_ts = np.zeros((len(np.unique(rois_data))-1, dims[3]))
# extract the mean time series of every ROI in the atlas (label 0 is background)
for i, roi in enumerate(np.unique(rois_data)):
    if roi > 0:
        idx = np.where(rois_data == roi)[0]
        ts = np.mean(func_data[idx, :], axis=0)
        output_ts[i-1, :] = ts
# correlation matrix
cc200_correl = np.corrcoef(output_ts)
# view output
plt.imshow(cc200_correl, interpolation='nearest', cmap=plt.cm.RdBu_r)
plt.colorbar()
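If your nilearn version has it (plotting.plot_matrix arrived around nilearn 0.4), the same matrix can be drawn with nilearn's own matrix plotter; this is an optional extra, not required for the tutorial:
In [ ]:
# alternative view using nilearn's matrix plotter (nilearn >= 0.4)
plotting.plot_matrix(cc200_correl, colorbar=True, vmin=-1, vmax=1)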
In [ ]:
import seaborn
seaborn.clustermap(cc200_correl)
nilearn was built by people doing machine learning on resting state fMRI data using scikit-learn.
It has A LOT of functionality beyond what we have time to teach here, including decoding, brain parcellation, and machine learning on connectivity measures.
They also did a beautiful job of building a large set of IPython notebook tutorials to teach you about all these methods.
Go to the nilearn website for more info.
Let's use another example to see some of the more complex things nilearn can do...
We'll start by using the data fetcher to grab a different atlas. This one (dosenbach_2010) is a set of coordinates in the brain to sample from.
In [ ]:
from nilearn import datasets
dosenbach = datasets.fetch_coords_dosenbach_2010()
print(dosenbach.description.decode())
In [ ]:
from nilearn import input_data
spheres_masker = input_data.NiftiSpheresMasker(
    seeds=dosenbach.rois,  # the seeds are the dosenbach roi coordinates
    smoothing_fwhm=4, radius=4.5,  # the radius of the sphere around each seed to extract
    standardize=True,  # the time series are centered and normed (mean 0, variance 1 in the time dimension)
    detrend=True, low_pass=0.1, high_pass=0.01, t_r=2.5)  # additional signal cleaning and filtering params
timeseries = spheres_masker.fit_transform(func)
print("the shape of the timeseries is {}".format(timeseries.shape))
Available options are "correlation", "partial correlation", "tangent", "covariance", or "precision"; other utilities from SciPy could also be plugged in (see the nilearn documentation for an example).
Let's do partial correlation this time.
In [ ]:
from nilearn.connectome import ConnectivityMeasure
from nilearn import plotting
correlation_measure = ConnectivityMeasure(kind='partial correlation')
dosenbach_matrix = correlation_measure.fit_transform([timeseries])[0]
In [ ]:
seaborn.clustermap(dosenbach_matrix)
In [ ]:
from nilearn import plotting
coords = np.vstack((
    dosenbach.rois['x'],
    dosenbach.rois['y'],
    dosenbach.rois['z'],
)).T
plotting.plot_connectome(dosenbach_matrix, node_coords=coords, edge_threshold='98%')
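Newer nilearn releases (0.5 and later) also include an interactive, rotatable version of this plot. If your installed version has it, you can try (purely optional):
In [ ]:
# interactive connectome viewer (only available in nilearn >= 0.5)
view = plotting.view_connectome(dosenbach_matrix, coords)
view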
In [ ]:
from nilearn import input_data
spheres_masker = input_data.NiftiSpheresMasker(
    seeds=dosenbach.rois,  # the seeds are the dosenbach roi coordinates
    smoothing_fwhm=4, radius=4.5,  # the radius of the sphere around each seed to extract
    standardize=True,  # the time series are centered and normed (mean 0, variance 1 in the time dimension)
    detrend=True, low_pass=0.1, high_pass=0.01, t_r=2.5)  # additional signal cleaning and filtering params
# this time we also regress out the confounds during extraction
timeseries = spheres_masker.fit_transform(func, confounds=func_confounds)
print("the shape of the timeseries is {}".format(timeseries.shape))
In [ ]:
correlation_measure = ConnectivityMeasure(kind='partial correlation')
dosenbach_matrix = correlation_measure.fit_transform([timeseries])[0]
seaborn.clustermap(dosenbach_matrix)
In [ ]:
## now let's try cleaning the whole image with image.clean_img
In [ ]:
# image cleaning inside nilearn
cleaned_func = img.clean_img(func, confounds=func_confounds, low_pass=0.1, high_pass=0.01, t_r=2.0)
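Since cleaned_func has already been detrended, filtered, and confound-regressed, we can extract the sphere time series from it with a plain masker (standardize only, no further cleaning) and should get results close to the confound-cleaned matrix above. A minimal sketch:
In [ ]:
# extract from the pre-cleaned image; no extra cleaning arguments needed
clean_masker = input_data.NiftiSpheresMasker(seeds=dosenbach.rois, radius=4.5,
                                             standardize=True)
timeseries_clean = clean_masker.fit_transform(cleaned_func)
print("the shape of the timeseries is {}".format(timeseries_clean.shape))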