This code loads the model information, generates the model definition, and runs the model estimation using FSL.


In [5]:
import nipype.algorithms.modelgen as model   # model generation
from nipype.interfaces import fsl, ants
from nipype.interfaces.base import Bunch
import os,json,glob,sys
import numpy
import nibabel
import nilearn.plotting
sys.path.append('../utils/')
from compute_fd_dvars import compute_fd,compute_dvars

from make_event_files_from_json import MakeEventFilesFromJSON
%matplotlib inline
import matplotlib.pyplot as plt


datadir = os.environ.get('FMRIDATADIR', '')
if not datadir:
    datadir = '/Users/poldrack/data_unsynced/myconnectome/sub00001'
    
results_dir = os.path.abspath("../../results")
if not os.path.exists(results_dir):
    os.mkdir(results_dir)

from nipype.caching import Memory
mem = Memory(base_dir='.')

print('Using data from',datadir)


Using data from /home/vagrant/data

Load the scan and model information from model.json; the event files for FSL will be generated later by Level1Design.


In [14]:
subject='sub00001'
session='ses014'  
# note: we have to use the anatomy from a different session
subdir=os.path.join(datadir,'ds031', subject, session)

tasknum=2 # n-back
bolddir=os.path.join(datadir,'ds031/sub00001',session,
        'functional')
boldfile=os.path.join(bolddir,
    'sub00001_ses014_task002_run001_bold.nii.gz')

preprocessed_epi = os.path.join(results_dir, 
                    "sub00001_ses014_task002_run001_bold_mcf_brain.nii.gz")

scaninfo=json.load(open(os.path.join(subdir,
        'functional/sub00001_ses014_task002_run001_bold.json')))
tr=scaninfo['RepetitionTime']

modelfile=os.path.join(subdir,'model.json')
modelinfo=json.load(open(modelfile))
taskinfo=modelinfo['task%03d'%tasknum]['model001']
evs=taskinfo['Variables']
contrasts=taskinfo['Contrasts']

# get the response onsets
response_onsets=[]

for v in evs.keys():

    if evs[v]['VariableName'].find('_target_ons')>-1:
        for ons in evs[v]['onsets']:
            response_onsets.append(ons[0])
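
The code above assumes a particular layout for model.json. A minimal illustrative fragment consistent with the access pattern (the field names match what is indexed above; the variable key and event values are hypothetical) might look like this:

modelinfo_example = {
    'task002': {
        'model001': {
            'Variables': {
                'var001': {                               # hypothetical key
                    'VariableName': 'faces_target_ons',   # hypothetical name
                    'onsets': [[60.024], [62.024]]        # each event: [onset_in_seconds, ...]
                }
            },
            'Contrasts': {}
        }
    }
}

Each entry in 'onsets' is assumed to be a list whose first element is the onset time in seconds, which is why the loop appends ons[0].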

Load the motion parameters that we created during preprocessing, so we can use them as regressors. Also generate the framewise displacement.


In [15]:
# load the motion parameters that were saved to the results directory during preprocessing
mcpars=numpy.loadtxt(os.path.join(results_dir, "motion.par"))
fd=compute_fd(mcpars)
plt.plot(fd,color='g')


Out[15]:
[<matplotlib.lines.Line2D at 0x7f28b1b6ddd8>]
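
compute_fd comes from the course utilities; its details are not shown here, but a minimal sketch of the standard Power-style framewise displacement computation, assuming the MCFLIRT .par convention of three rotations (radians) followed by three translations (mm) and a 50 mm head radius, would be:

import numpy

def compute_fd_sketch(motion_params, radius=50.0):
    # framewise displacement (Power et al., 2012) from a (T, 6) motion matrix;
    # assumes columns 0-2 are rotations in radians and 3-5 are translations in mm
    deriv = numpy.diff(motion_params, axis=0)   # frame-to-frame parameter change
    deriv[:, 0:3] = deriv[:, 0:3] * radius      # convert rotations to arc length in mm
    fd = numpy.abs(deriv).sum(axis=1)           # sum of absolute displacements
    return numpy.concatenate([[0], fd])         # FD is defined as 0 for the first frame

# fd_sketch = compute_fd_sketch(mcpars)  # should closely resemble the fd plotted above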

If the necessary files don't exist, rerun preprocessing (motion correction and brain extraction) on the BOLD file.


In [19]:
if not os.path.exists(preprocessed_epi):
    mcflirt = mem.cache(fsl.MCFLIRT)
    mcflirt_results = mcflirt(in_file=boldfile,
                              mean_vol=True)

    boldbet = mem.cache(fsl.BET)
    bet_results = boldbet(functional=True,
                  in_file=mcflirt_results.outputs.out_file,
                  out_file=preprocessed_epi,
                  mask=True)


161014-13:56:34,334 workflow INFO:
	 Executing node fbfa00c218746e802e49f0bde8aa0174 in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-MCFLIRT/fbfa00c218746e802e49f0bde8aa0174
161014-13:56:34,337 workflow INFO:
	 Collecting precomputed outputs
161014-13:56:34,344 workflow INFO:
	 Executing node 969038877c9f2d833607ae0287f263a8 in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-BET/969038877c9f2d833607ae0287f263a8
161014-13:56:34,350 workflow INFO:
	 Running: bet /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-MCFLIRT/fbfa00c218746e802e49f0bde8aa0174/sub00001_ses014_task002_run001_bold_mcf.nii.gz /home/vagrant/fmri-analysis-vm/results/sub00001_ses014_task002_run001_bold_mcf_brain.nii.gz -F -m
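
Because the interfaces are wrapped with nipype.caching.Memory, each call is hashed on its inputs, and re-running with identical inputs just collects the precomputed outputs (that is what the "Collecting precomputed outputs" message above indicates). For example, calling the cached MCFLIRT node a second time should be essentially free:

# a sketch: the cached node looks up the hash directory under ./nipype_mem
# instead of re-running mcflirt
mcflirt = mem.cache(fsl.MCFLIRT)
cached_results = mcflirt(in_file=boldfile, mean_vol=True)
print(cached_results.outputs.out_file)  # same path as the original run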

Specify the model. For the sake of speed we will use a simplified model that treats the study as a blocked design rather than modeling each item separately, but we also model instructions and motor responses; thus, it is a hybrid block/event-related design.


In [20]:
instruction_onsets=list(numpy.array([68,176,372,2,154,416,24,220,350,112,198,328,46,264,394,90,242,306])-2.0)

info = [Bunch(conditions=['faces-1back',
                          'faces-2back',
                          'scenes-1back',
                          'scenes-2back',
                          'chars-1back',
                          'chars-2back',
                          'instructions',
                          'responses'],
              onsets=[[68,176,372],
                      [2,154,416],
                      [24,220,350],
                      [112,198,328],
                      [46,264,394],
                      [90,242,306],
                      instruction_onsets,
                      response_onsets],
              durations=[[20],
                         [20],
                         [20],
                         [20],
                         [20],
                         [20],
                         [2],
                         [1]],
            regressors=[fd],
            regressor_names=['FD'])
       ]

s = model.SpecifyModel()
s.inputs.input_units = 'secs'
s.inputs.functional_runs = preprocessed_epi
s.inputs.time_repetition = tr
s.inputs.high_pass_filter_cutoff = 128.
s.inputs.subject_info = info
specify_model_results = s.run()
s.inputs


Out[20]:
event_files = <undefined>
functional_runs = ['/home/vagrant/fmri-analysis-vm/results/sub00001_ses014_task002_run001_bold_mcf_brain.nii.gz']
high_pass_filter_cutoff = 128.0
ignore_exception = False
input_units = secs
outlier_files = <undefined>
realignment_parameters = <undefined>
subject_info = [Bunch(conditions=['faces-1back', 'faces-2back', 'scenes-1back', 'scenes-2back', 'chars-1back', 'chars-2back', 'instructions', 'responses'], durations=[[20], [20], [20], [20], [20], [20], [2], [1]], onsets=[[68, 176, 372], [2, 154, 416], [24, 220, 350], [112, 198, 328], [46, 264, 394], [90, 242, 306], [66.0, 174.0, 370.0, 0.0, 152.0, 414.0, 22.0, 218.0, 348.0, 110.0, 196.0, 326.0, 44.0, 262.0, 392.0, 88.0, 240.0, 304.0], [60.024, 62.024, 98.023, 270.027, 400.013, 14.021, 70.011, 74.013, 160.015, 170.019, 178.022, 186.025, 374.019, 384.024, 432.027, 34.013, 36.014, 122.016, 214.02, 334.02, 362.014]], regressor_names=['FD'], regressors=[array([ 0.        ,  0.07236592,  0.06300166,  0.11626191,  0.11167607,
        0.09921674,  0.04116566,  0.10523709,  0.11246583,  0.0899106 ,
        0.0497155 ,  0.09389955,  0.08240785,  0.0384268 ,  0.08819755,
        0.04770173,  0.02822161,  0.06832792,  0.11623336,  0.04567599,
        0.10513914,  0.14890597,  0.05369501,  0.03833644,  0.10724255,
        0.10080339,  0.07316389,  0.08075565,  0.0900043 ,  0.13351365,
        0.05495496,  0.12196236,  0.1140008 ,  0.06849365,  0.04833765,
        0.0437612 ,  0.09096273,  0.15253988,  0.07526705,  0.11393353,
        0.14427679,  0.05346435,  0.03688195,  0.15669057,  0.1542385 ,
        0.08893105,  0.0413989 ,  0.1171556 ,  0.15333441,  0.05882251,
        0.15391713,  0.14882398,  0.03186287,  0.02319445,  0.1029761 ,
        0.11336535,  0.0571468 ,  0.01975229,  0.13814034,  0.12472347,
        0.03655572,  0.07188381,  0.07147366,  0.06903687,  0.05264666,
        0.20921653,  0.16199707,  0.07067971,  0.17937853,  0.08064511,
        0.1350757 ,  0.07453749,  0.08782699,  0.08711443,  0.06185535,
        0.04708813,  0.13793215,  0.19141211,  0.0393395 ,  0.04090892,
        0.03240783,  0.0278938 ,  0.20028221,  0.29491158,  0.10366045,
        0.03900514,  0.01931544,  0.18995761,  0.1496618 ,  0.09466242,
        0.01815427,  0.17670622,  0.07226226,  0.11132935,  0.05560501,
        0.16705189,  0.24049361,  0.04666386,  0.20952853,  0.24338331,
        0.08897406,  0.16717461,  0.06127831,  0.17919228,  0.07814586,
        0.11428767,  0.0762034 ,  0.74191873,  0.3026518 ,  0.24416695,
        0.13887295,  0.09797841,  0.09099526,  0.05997169,  0.1654566 ,
        0.13642765,  0.09190757,  0.12327556,  0.07366418,  0.0600268 ,
        0.02472008,  0.07659428,  0.04939321,  0.0661983 ,  0.19270955,
        0.15550182,  0.01892088,  0.14072429,  0.08406414,  0.08452798,
        0.07979473,  0.02482773,  0.12524362,  0.03421194,  0.15759142,
        0.13524818,  0.05074183,  0.13635572,  0.14353265,  0.08336552,
        0.14005451,  0.05370438,  0.10305597,  0.03528759,  0.1122349 ,
        0.21577028,  0.0311832 ,  0.15382741,  0.13960741,  0.04153863,
        0.04713224,  0.10343026,  0.13727429,  0.04317867,  0.03929225,
        0.04306015,  0.11724517,  0.07478719,  0.05162421,  0.03478745,
        0.13206166,  0.06417389,  0.03824411,  0.15304904,  0.14009934,
        0.05443136,  0.04320403,  0.14428093,  0.16857785,  0.05117237,
        0.04362433,  0.09453799,  0.07200695,  0.11381838,  0.15020469,
        0.09758238,  0.0612845 ,  0.05305019,  0.10682224,  0.21147008,
        0.08789438,  0.0541588 ,  0.14615858,  0.21267998,  0.09231235,
        0.04007173,  0.12698531,  0.18087871,  0.0797536 ,  0.03836664])])]
time_repetition = 2.32
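
Downstream, each condition will be turned into a regressor by placing a boxcar at its onsets (with the specified duration) and convolving it with a hemodynamic response function. The following is only a rough sketch of that construction for the 'faces-1back' blocks using an approximate double-gamma HRF; FSL's actual dgamma basis, temporal derivative, and filtering differ in detail:

import numpy
import matplotlib.pyplot as plt
from scipy.stats import gamma

dt = 0.1                             # fine time grid, in seconds
tr = 2.32                            # RepetitionTime from the scan metadata
n_vols = 190                         # volumes in this run (sizeTS=190 in the FILMGLS log below)
onsets = [68, 176, 372]              # 'faces-1back' block onsets (s), as specified above
duration = 20.0                      # block duration (s), as specified above

t = numpy.arange(0, n_vols * tr, dt)
boxcar = numpy.zeros_like(t)
for ons in onsets:
    boxcar[(t >= ons) & (t < ons + duration)] = 1.0

hrf_t = numpy.arange(0, 32, dt)
hrf = gamma.pdf(hrf_t, 6) - gamma.pdf(hrf_t, 16) / 6.0   # rough double-gamma shape

regressor = numpy.convolve(boxcar, hrf)[:len(t)] * dt
frame_times = numpy.arange(n_vols) * tr
regressor_at_trs = numpy.interp(frame_times, t, regressor)  # one value per volume

plt.plot(t, regressor)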

Generate the fsf and ev files using Level1Design.

Exercise: add an additional T contrast for the FD variable.


In [21]:
contrasts=[['faces>Baseline','T', 
            ['faces-1back','faces-2back'],[0.5,0.5]],
           ['scenes>Baseline','T', 
            ['scenes-1back','scenes-2back'],[0.5,0.5]],
           ['chars>Baseline','T', 
            ['chars-1back','chars-2back'],[0.5,0.5]],
           ['2back>1back','T', 
            ['faces-1back','faces-2back','scenes-1back','scenes-2back','chars-1back','chars-2back'],[-1,1,-1,1,-1,1]],
          ['response>Baseline','T',
           ['responses'],[1]],
          ['instructions>Baseline','T',
           ['instructions'],[1]]]
           
level1design = mem.cache(fsl.model.Level1Design)
level1design_results = level1design(interscan_interval = tr,
                                    bases = {'dgamma':{'derivs': True}},
                                    session_info = specify_model_results.outputs.session_info,
                                    model_serial_correlations=True,
                                    contrasts=contrasts)

level1design_results.outputs


161014-13:57:11,302 workflow INFO:
	 Executing node 5e8bf8747177d64252f8403f0098baef in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef
Out[21]:
ev_files = ['/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_faces-1back_0_1.txt', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_faces-2back_0_3.txt', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_scenes-1back_0_5.txt', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_scenes-2back_0_7.txt', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_chars-1back_0_9.txt', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_chars-2back_0_11.txt', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_instructions_0_13.txt', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_responses_0_15.txt', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/ev_FD_0_17.txt']
fsf_files = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-Level1Design/5e8bf8747177d64252f8403f0098baef/run0.fsf

Generate the full set of model files using FEATModel


In [22]:
modelgen = mem.cache(fsl.model.FEATModel)
modelgen_results = modelgen(fsf_file=level1design_results.outputs.fsf_files,
                            ev_files=level1design_results.outputs.ev_files)
modelgen_results.outputs


161014-13:57:14,755 workflow INFO:
	 Executing node 4aab0385c7ccc4ed92462e1ba5c09d84 in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FEATModel/4aab0385c7ccc4ed92462e1ba5c09d84
161014-13:57:14,760 workflow INFO:
	 Running: feat_model run0 
Out[22]:
con_file = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FEATModel/4aab0385c7ccc4ed92462e1ba5c09d84/run0.con
design_cov = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FEATModel/4aab0385c7ccc4ed92462e1ba5c09d84/run0_cov.png
design_file = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FEATModel/4aab0385c7ccc4ed92462e1ba5c09d84/run0.mat
design_image = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FEATModel/4aab0385c7ccc4ed92462e1ba5c09d84/run0.png
fcon_file = <undefined>

Visualize the design matrix


In [23]:
desmtx=numpy.loadtxt(modelgen_results.outputs.design_file,skiprows=5)
plt.imshow(desmtx,aspect='auto',interpolation='nearest',cmap='gray')


Out[23]:
<matplotlib.image.AxesImage at 0x7f28b4bccbe0>
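
The design matrix has 17 columns: with derivs=True each of the 8 conditions contributes its regressor plus a temporal derivative, and FD adds one more. A sketch that labels the columns under that assumed interleaved ordering (real EVs in the odd columns, matching the _0_1, _0_3, ... suffixes of the ev_files above):

condition_names = ['faces-1back', 'faces-2back', 'scenes-1back', 'scenes-2back',
                   'chars-1back', 'chars-2back', 'instructions', 'responses']
column_labels = []
for name in condition_names:
    column_labels += [name, name + ' (deriv)']   # assumed: each EV followed by its derivative
column_labels.append('FD')

plt.figure(figsize=(8, 6))
plt.imshow(desmtx, aspect='auto', interpolation='nearest', cmap='gray')
plt.xticks(range(len(column_labels)), column_labels, rotation=90)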

Show the correlation matrix for the design matrix


In [24]:
cc=numpy.corrcoef(desmtx.T)
plt.imshow(cc,aspect='auto',interpolation='nearest', cmap=plt.cm.cubehelix_r)
plt.colorbar()


Out[24]:
<matplotlib.colorbar.Colorbar at 0x7f28b518cba8>
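
High off-diagonal correlations flag regressors that the GLM will have trouble separating. A quick way to summarize the matrix computed above (assuming cc is nonsingular so the variance inflation factors can be computed):

# most strongly correlated pair of columns, excluding the diagonal
cc_offdiag = cc - numpy.eye(cc.shape[0])
i, j = numpy.unravel_index(numpy.abs(cc_offdiag).argmax(), cc.shape)
print('max |r| = %.3f between columns %d and %d' % (cc_offdiag[i, j], i, j))

# variance inflation factors (VIF well above ~5-10 is a warning sign)
vif = numpy.diag(numpy.linalg.inv(cc))
print('VIF per column:', numpy.round(vif, 2))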

Estimate the model using FILMGLS - this will take a few minutes.


In [25]:
mask = mem.cache(fsl.maths.ApplyMask)
mask_results = mask(in_file=preprocessed_epi,
                    mask_file=os.path.join(results_dir, "mask.nii.gz"))
mask_results.outputs


161014-13:57:30,65 workflow INFO:
	 Executing node e530d7a11bc64b26d31904692901b4ed in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-maths-ApplyMask/e530d7a11bc64b26d31904692901b4ed
161014-13:57:30,71 workflow INFO:
	 Running: fslmaths /home/vagrant/fmri-analysis-vm/results/sub00001_ses014_task002_run001_bold_mcf_brain.nii.gz -mas /home/vagrant/fmri-analysis-vm/results/mask.nii.gz /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-maths-ApplyMask/e530d7a11bc64b26d31904692901b4ed/sub00001_ses014_task002_run001_bold_mcf_brain_masked.nii.gz
Out[25]:
out_file = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-maths-ApplyMask/e530d7a11bc64b26d31904692901b4ed/sub00001_ses014_task002_run001_bold_mcf_brain_masked.nii.gz

In [26]:
filmgls= mem.cache(fsl.FILMGLS)
filmgls_results = filmgls(in_file=mask_results.outputs.out_file,
                          design_file = modelgen_results.outputs.design_file,
                          tcon_file = modelgen_results.outputs.con_file,
                          autocorr_noestimate = True)
filmgls_results.outputs


161014-13:57:44,605 workflow INFO:
	 Executing node 3ea0667ddda4e718f21ed2b703584699 in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699
161014-13:57:44,610 workflow INFO:
	 Running: film_gls --noest --rn=results --con=/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FEATModel/4aab0385c7ccc4ed92462e1ba5c09d84/run0.con --in=/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-maths-ApplyMask/e530d7a11bc64b26d31904692901b4ed/sub00001_ses014_task002_run001_bold_mcf_brain_masked.nii.gz --pd=/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FEATModel/4aab0385c7ccc4ed92462e1ba5c09d84/run0.mat --thr=0.000000
161014-13:57:44,658 interface INFO:
	 stdout 2016-10-14T13:57:44.658724:Log directory is: results
161014-13:57:50,295 interface INFO:
	 stdout 2016-10-14T13:57:50.295648:paradigm.getDesignMatrix().Nrows()=190
161014-13:57:50,296 interface INFO:
	 stdout 2016-10-14T13:57:50.295648:paradigm.getDesignMatrix().Ncols()=17
161014-13:57:50,297 interface INFO:
	 stdout 2016-10-14T13:57:50.295648:sizeTS=190
161014-13:57:50,297 interface INFO:
	 stdout 2016-10-14T13:57:50.295648:numTS=149810
161014-13:57:50,298 interface INFO:
	 stdout 2016-10-14T13:57:50.295648:Completed
161014-13:57:50,298 interface INFO:
	 stdout 2016-10-14T13:57:50.295648:Prewhitening and Computing PEs...
161014-13:57:50,299 interface INFO:
	 stdout 2016-10-14T13:57:50.295648:Percentage done:
161014-14:02:38,430 interface INFO:
	 stdout 2016-10-14T14:02:38.430331:1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,Completed
161014-14:02:38,438 interface INFO:
	 stdout 2016-10-14T14:02:38.430331:Saving results... 
161014-14:02:49,168 interface INFO:
	 stdout 2016-10-14T14:02:49.168197:Completed
Out[26]:
copes = ['/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/cope1.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/cope2.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/cope3.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/cope4.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/cope5.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/cope6.nii.gz']
dof_file = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/dof
fstats = <undefined>
logfile = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/logfile
param_estimates = ['/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe1.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe2.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe3.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe4.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe5.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe6.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe7.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe8.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe9.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe10.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe11.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe12.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe13.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe14.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe15.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe16.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/pe17.nii.gz']
residual4d = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/res4d.nii.gz
results_dir = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results
sigmasquareds = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/sigmasquareds.nii.gz
thresholdac = /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/threshac1.nii.gz
tstats = ['/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/tstat1.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/tstat2.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/tstat3.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/tstat4.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/tstat5.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/tstat6.nii.gz']
varcopes = ['/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/varcope1.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/varcope2.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/varcope3.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/varcope4.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/varcope5.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/varcope6.nii.gz']
zfstats = <undefined>
zstats = ['/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/zstat1.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/zstat2.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/zstat3.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/zstat4.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/zstat5.nii.gz', '/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/zstat6.nii.gz']
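
As a sanity check, the t statistics are just the contrast estimates divided by their standard errors, so cope/sqrt(varcope) should reproduce tstat (up to numerical precision) inside the analysis mask. A quick voxelwise check, assuming the copes, varcopes, and tstats lists are in matching order:

import numpy
import nibabel

cope = nibabel.load(filmgls_results.outputs.copes[0]).get_data()
varcope = nibabel.load(filmgls_results.outputs.varcopes[0]).get_data()
tstat = nibabel.load(filmgls_results.outputs.tstats[0]).get_data()

inmask = varcope > 0
t_recomputed = numpy.zeros_like(tstat)
t_recomputed[inmask] = cope[inmask] / numpy.sqrt(varcope[inmask])
print('max |t difference| inside mask: %g' % numpy.abs(t_recomputed - tstat)[inmask].max())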

Exercise: Visualize the statistical map for the contrast that tests for effects of the motion variable (FD).

Move copes, varcopes, and the mask into MNI space

For the group level analysis we need to move results from all subjects into one common MNI space. Let's start with the EPI-derived mask (we will use it later as the group-level mask).


In [27]:
mask_file = os.path.join(results_dir, "mask.nii.gz")
epi_to_t1_warp = os.path.join(results_dir, "epi_to_t1_warp.nii.gz")
t1_to_mni_warp = os.path.join(results_dir, "t1_to_mni_warp.h5")
in_file = mask_file
anat_subject='ses018'
anatomydir=os.path.join(datadir,'ds031/sub00001',anat_subject,
        'anatomy')
t1_file = os.path.join(anatomydir,'sub00001_ses018_T1w_001.nii.gz')

epi_to_t1 = mem.cache(fsl.ApplyWarp)
epi_to_t1_mask_results = epi_to_t1(in_file=in_file,
                                   ref_file=t1_file,
                                   field_file=epi_to_t1_warp,
                                   interp="nn")
nilearn.plotting.plot_roi(epi_to_t1_mask_results.outputs.out_file, title="EPI mask in subject T1 space")

t1_to_mni = mem.cache(ants.ApplyTransforms)
t1_to_mni_mask_results = t1_to_mni(input_image=epi_to_t1_mask_results.outputs.out_file,
                                   reference_image=os.path.join(os.getenv('FSLDIR'),'data/standard/MNI152_T1_2mm_brain.nii.gz'),
                                   transforms=t1_to_mni_warp,
                                   interpolation="NearestNeighbor")
t1_to_mni_mask_results.outputs
nilearn.plotting.plot_roi(t1_to_mni_mask_results.outputs.output_image, title="EPI mask in MNI")


161014-14:12:39,675 workflow INFO:
	 Executing node f802a0a932632113f31ad55c8ddb2ad8 in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-ApplyWarp/f802a0a932632113f31ad55c8ddb2ad8
161014-14:12:39,680 workflow INFO:
	 Running: applywarp --in=/home/vagrant/fmri-analysis-vm/results/mask.nii.gz --ref=/home/vagrant/data/ds031/sub00001/ses018/anatomy/sub00001_ses018_T1w_001.nii.gz --out=/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-ApplyWarp/f802a0a932632113f31ad55c8ddb2ad8/mask_warp.nii.gz --warp=/home/vagrant/fmri-analysis-vm/results/epi_to_t1_warp.nii.gz --interp=nn
161014-14:12:54,214 workflow INFO:
	 Executing node 8cb1b0e25748e06ca05ce54265ed37ea in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-ants-resampling-ApplyTransforms/8cb1b0e25748e06ca05ce54265ed37ea
161014-14:12:54,220 workflow INFO:
	 Running: antsApplyTransforms --default-value 0 --input /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-ApplyWarp/f802a0a932632113f31ad55c8ddb2ad8/mask_warp.nii.gz --interpolation NearestNeighbor --output mask_warp_trans.nii.gz --reference-image /usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz --transform /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5
161014-14:12:54,779 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:Using double precision for computations.
161014-14:12:54,780 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:Input scalar image: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-ApplyWarp/f802a0a932632113f31ad55c8ddb2ad8/mask_warp.nii.gz
161014-14:12:54,781 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:Reference image: /usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz
161014-14:12:54,782 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:=============================================================================
161014-14:12:54,782 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:The composite transform comprises the following transforms (in order): 
161014-14:12:54,783 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:  1. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[0] (type = Euler3DTransform)
161014-14:12:54,783 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:  2. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[1] (type = TranslationTransform)
161014-14:12:54,784 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:  3. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[2] (type = Euler3DTransform)
161014-14:12:54,785 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:  4. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[3] (type = AffineTransform)
161014-14:12:54,786 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:  5. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[4] (type = DisplacementFieldTransform)
161014-14:12:54,786 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:=============================================================================
161014-14:12:54,787 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:Default pixel value: 0
161014-14:12:54,787 interface INFO:
	 stdout 2016-10-14T14:12:54.779151:Interpolation type: NearestNeighborInterpolateImageFunction
161014-14:12:55,290 interface INFO:
	 stdout 2016-10-14T14:12:55.290757:Output warped image: mask_warp_trans.nii.gz
Out[27]:
<nilearn.plotting.displays.OrthoSlicer at 0x7f28b0ecc128>

Now we can use the same procedure for the contrast and variance images; here we warp the first cope as a demonstration (a sketch that loops over all of them follows the output).


In [28]:
image=filmgls_results.outputs.copes[0]
_, fname = os.path.split(image)
epi_to_t1_results = epi_to_t1(in_file=filmgls_results.outputs.copes[0],
                                   ref_file=t1_file,
                                   field_file=epi_to_t1_warp,
                                   interp="spline")

t1_to_mni_results = t1_to_mni(input_image=epi_to_t1_results.outputs.out_file,
                              reference_image=os.path.join(os.getenv('FSLDIR'),'data/standard/MNI152_T1_2mm_brain.nii.gz'),
                              transforms=t1_to_mni_warp,
                              interpolation="BSpline")

nilearn.plotting.plot_stat_map(t1_to_mni_results.outputs.output_image, 
                        title="%s in MNI"%fname, threshold='auto')


161014-14:12:56,875 workflow INFO:
	 Executing node f3d7bff3dd6a93cdf38bd35750889c05 in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-ApplyWarp/f3d7bff3dd6a93cdf38bd35750889c05
161014-14:12:56,880 workflow INFO:
	 Running: applywarp --in=/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-model-FILMGLS/3ea0667ddda4e718f21ed2b703584699/results/cope1.nii.gz --ref=/home/vagrant/data/ds031/sub00001/ses018/anatomy/sub00001_ses018_T1w_001.nii.gz --out=/home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-ApplyWarp/f3d7bff3dd6a93cdf38bd35750889c05/cope1_warp.nii.gz --warp=/home/vagrant/fmri-analysis-vm/results/epi_to_t1_warp.nii.gz --interp=spline
161014-14:13:12,764 workflow INFO:
	 Executing node 933a4a87016c6c73fbee1e1dcb536519 in dir: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-ants-resampling-ApplyTransforms/933a4a87016c6c73fbee1e1dcb536519
161014-14:13:12,770 workflow INFO:
	 Running: antsApplyTransforms --default-value 0 --input /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-ApplyWarp/f3d7bff3dd6a93cdf38bd35750889c05/cope1_warp.nii.gz --interpolation BSpline --output cope1_warp_trans.nii.gz --reference-image /usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz --transform /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5
161014-14:13:13,318 interface INFO:
	 stdout 2016-10-14T14:13:13.318196:Using double precision for computations.
161014-14:13:13,323 interface INFO:
	 stdout 2016-10-14T14:13:13.318196:Input scalar image: /home/vagrant/fmri-analysis-vm/analysis/firstlevel/nipype_mem/nipype-interfaces-fsl-preprocess-ApplyWarp/f3d7bff3dd6a93cdf38bd35750889c05/cope1_warp.nii.gz
161014-14:13:13,825 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:Reference image: /usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz
161014-14:13:13,826 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:=============================================================================
161014-14:13:13,827 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:The composite transform comprises the following transforms (in order): 
161014-14:13:13,827 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:  1. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[0] (type = Euler3DTransform)
161014-14:13:13,827 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:  2. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[1] (type = TranslationTransform)
161014-14:13:13,828 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:  3. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[2] (type = Euler3DTransform)
161014-14:13:13,828 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:  4. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[3] (type = AffineTransform)
161014-14:13:13,837 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:  5. /home/vagrant/fmri-analysis-vm/results/t1_to_mni_warp.h5[4] (type = DisplacementFieldTransform)
161014-14:13:13,838 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:=============================================================================
161014-14:13:13,838 interface INFO:
	 stdout 2016-10-14T14:13:13.825122:Default pixel value: 0
161014-14:13:15,891 interface INFO:
	 stdout 2016-10-14T14:13:15.890079:Interpolation type: BSplineInterpolateImageFunction
161014-14:13:16,907 interface INFO:
	 stdout 2016-10-14T14:13:16.907362:Output warped image: cope1_warp_trans.nii.gz
Out[28]:
<nilearn.plotting.displays.OrthoSlicer at 0x7f28b0dd5898>
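
The cell above warps only the first cope. A sketch of applying the same two-step warp (EPI to T1 with the FNIRT field, then T1 to MNI with the ANTs composite transform) to every cope and varcope, reusing the cached nodes defined above:

mni_template = os.path.join(os.getenv('FSLDIR'),
                            'data/standard/MNI152_T1_2mm_brain.nii.gz')

for image in list(filmgls_results.outputs.copes) + list(filmgls_results.outputs.varcopes):
    _, fname = os.path.split(image)
    epi_to_t1_out = epi_to_t1(in_file=image,
                              ref_file=t1_file,
                              field_file=epi_to_t1_warp,
                              interp="spline")
    t1_to_mni_out = t1_to_mni(input_image=epi_to_t1_out.outputs.out_file,
                              reference_image=mni_template,
                              transforms=t1_to_mni_warp,
                              interpolation="BSpline")
    print(fname, '->', t1_to_mni_out.outputs.output_image)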
