Example #1
    def __init__(self, in_file='path', **options):
        from nipype.interfaces.nipy.preprocess import Trim
        trim = Trim()
        trim.inputs.in_file = in_file
        for ef in options:
            setattr(trim.inputs, ef, options[ef])
        self.res = trim.run()
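This `__init__` is a fragment of a larger class; assuming a hypothetical wrapper class (say `TrimRunner`) around it, usage could look like the following sketch:

# Hypothetical usage; 'TrimRunner' and the input file name are illustrative assumptions.
runner = TrimRunner(in_file='epi.nii.gz', begin_index=4)
print(runner.res.outputs.out_file)  # path to the trimmed 4D image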
def check_mask_coverage(epi, brainmask):
    from os.path import abspath
    from nipype import config, logging
    config.enable_debug_mode()
    logging.update_logging(config)
    from nilearn import plotting
    from nipype.interfaces.nipy.preprocess import Trim

    trim = Trim()
    trim.inputs.in_file = epi
    trim.inputs.end_index = 1
    trim.inputs.out_file = 'epi_vol1.nii.gz'
    trim.run()
    epi_vol = abspath('epi_vol1.nii.gz')

    maskcheck_filename = 'maskcheck.png'
    display = plotting.plot_anat(epi_vol, display_mode='ortho',
                                 draw_cross=False,
                                 title='brainmask coverage')
    display.add_contours(brainmask, levels=[.5], colors='r')
    display.savefig(maskcheck_filename)
    display.close()
    maskcheck_file = abspath(maskcheck_filename)

    return maskcheck_file
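A usage sketch for `check_mask_coverage`; the input file names here are hypothetical:

# Hypothetical inputs, for illustration only.
png = check_mask_coverage('epi.nii.gz', 'brainmask.nii.gz')
print(png)  # absolute path to maskcheck.png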
Example #3
def test_Trim_outputs():
    output_map = dict(out_file=dict(), )
    outputs = Trim.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #4
    def __repr__(self):
        return "<Subject Dir: sub{:0>3d}>".format(self._subject_code)

    def remove_volumes_from_bold(self, model_num, bold_suffix, task_to_trim, num_of_volumes):
        # `os` and `glob` are assumed to be imported at module level.
        from nipype.interfaces.nipy.preprocess import Trim
        for run_dir in self.dir_tree('functional')[task_to_trim]:
            file = glob(run_dir + "/bold{}.nii.gz".format(bold_suffix))[0]
            out_file = file.replace('.nii.gz', '_{}trim.nii.gz'.format(num_of_volumes))
            if os.path.isfile(out_file):
                print("removing volumes already ran!")
                return
            trim = Trim()
            trim.inputs.in_file = file
            trim.inputs.begin_index = num_of_volumes  # remove the first X volumes
            trim.inputs.out_file = out_file
            res = trim.run()
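A usage sketch; the subject object and argument values below are assumptions for illustration:

# Hypothetical call on a subject-directory object.
subject.remove_volumes_from_bold(model_num=1, bold_suffix='',
                                 task_to_trim='rest', num_of_volumes=4)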
Example #5
def test_Trim_outputs():
    output_map = dict(out_file=dict())
    outputs = Trim.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #6
def trim(begin_idx=6):
    """Simple interface to trim a few volumes from a 4d fmri nifti file.

    :param begin_idx: int
        Remove first `begin_idx` volumes.

    Notes
    -----
    http://www.mit.edu/~satra/nipype-nightly/interfaces/generated/nipype.interfaces.nipy.preprocess.html#trim
    """
    from nipype.interfaces.nipy.preprocess import Trim  # assumed import, not shown in the original snippet
    trim = Trim()
    #trim.inputs.in_file = 'functional.nii'
    trim.inputs.begin_index = begin_idx
    return trim
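A usage sketch: the returned interface still needs an input file; `functional.nii` is hypothetical:

# Configure and run the returned interface on a (hypothetical) file.
interface = trim(begin_idx=6)
interface.inputs.in_file = 'functional.nii'
result = interface.run()
print(result.outputs.out_file)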
Example #7
def test_Trim_inputs():
    input_map = dict(
        begin_index=dict(usedefault=True, ),
        end_index=dict(usedefault=True, ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(mandatory=True, ),
        out_file=dict(),
        suffix=dict(usedefault=True, ),
    )
    inputs = Trim.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #8
def test_Trim_inputs():
    input_map = dict(
        begin_index=dict(usedefault=True, ),
        end_index=dict(usedefault=True, ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(mandatory=True, ),
        out_file=dict(),
        suffix=dict(usedefault=True, ),
    )
    inputs = Trim.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def prepro_func(i):
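    # Assumed context (not shown in this snippet): module-level imports such as
    # os, glob, traceback, nipype.pipeline.engine as pe, nipype interfaces
    # (fsl, afni, utility as util), Trim, SpaceTimeRealigner and Function,
    # plus configuration globals (data_path, anat_path, results_path,
    # acquisitions, n_dummies, mytr, myminf, mymaxf, censor_thr, overwrite,
    # global_reg, MNI_brain, ventricle_mask) and the helper functions
    # referenced below.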
    try:
        subj = i
        for s in ['session2']:

            # Define input files: 2xfMRI + 1xMPRAGE
            func1 = data_path + subj + '/Functional_scans/' + s[:-2] + s[
                -1] + '_a/epi.nii.gz'  #choose this for patients
            func2 = data_path + subj + '/Functional_scans/' + s[:-2] + s[
                -1] + '_b/epi.nii.gz'  #choose this for patients
            #anat = glob.glob(anat_path + subj +'/'+ s + '/anat/reorient/anat_*.nii.gz') #choose this for session 1
            lesion_mask_file = anat_path + subj + '/session1/anat/reorient/lesion_seg.nii.gz'
            old_lesion_mask_file = glob.glob(
                anat_path + subj +
                '/session1/anat/reorient/old_lesion_seg.nii.gz'
            )  #choose this for ones with no old lesion
            #old_lesion_mask_file = anat_path + subj +'/session1/anat/reorient/old_lesion_seg.nii.gz' #choose this for ones with old lesion
            anat = glob.glob(anat_path + subj + '/' + s +
                             '/anat/anat2hr/anat_*.nii.gz'
                             )  #choose this for sessions 2 and 3
            anat_CSF = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_0.nii.gz'
            )  # don't change, same for all sessions
            anat_WM = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_2.nii.gz'
            )  # don't change, same for all sessions
            anat_GM = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_1.nii.gz'
            )  # don't change, same for all sessions
            anat2MNI_fieldwarp = glob.glob(
                anat_path + subj +
                '/session1/anat/nonlinear_reg/anat_*_fieldwarp.nii.gz'
            )  # don't change, same for all sessions

            if not os.path.isdir(data_path + subj + '/' + s):  # No data exists
                continue

            if not os.path.isfile(func1):
                print('1. functional file ' + func1 + ' not found. Skipping!')
                continue

            if not os.path.isfile(func2):
                print('2. functional file ' + func2 + ' not found. Skipping!')
                continue

            if not anat:
                print('Preprocessed anatomical file not found. Skipping!')
                continue
            if len(anat) > 1:
                print('WARNING: found multiple files of preprocessed anatomical image!')
                continue
            anat = anat[0]

            if not anat2MNI_fieldwarp:
                print('Anatomical registration to MNI152-space field file not found. Skipping!')
                continue
            if len(anat2MNI_fieldwarp) > 1:
                print('WARNING: found multiple files of anat2MNI fieldwarp!')
                continue
            anat2MNI_fieldwarp = anat2MNI_fieldwarp[0]

            if not anat_CSF:
                anat_CSF = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_0.nii.gz')
                if not anat_CSF:
                    print('Anatomical segmentation CSF file not found. Skipping!')
                    continue
            if len(anat_CSF) > 1:
                print('WARNING: found multiple files of anatomical CSF file!')
                continue
            anat_CSF = anat_CSF[0]

            if not anat_WM:
                anat_WM = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_2.nii.gz')
                if not anat_WM:
                    print('Anatomical segmentation WM file not found. Skipping!')
                    continue
            if len(anat_WM) > 1:
                print('WARNING: found multiple files of anatomical WM file!')
                continue
            anat_WM = anat_WM[0]

            if not anat_GM:
                anat_GM = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_1.nii.gz')
                if not anat_GM:
                    print('Anatomical segmentation GM file not found. Skipping!')
                    continue
            if len(anat_GM) > 1:
                print('WARNING: found multiple files of anatomical GM file!')
                continue
            anat_GM = anat_GM[0]

            if not os.path.isdir(results_path + subj):
                os.mkdir(results_path + subj)

            if not os.path.isdir(results_path + subj + '/' + s):
                os.mkdir(results_path + subj + '/' + s)

            for data in acquisitions:

                os.chdir(results_path + subj + '/' + s)
                print "Currently processing subject: " + subj + '/' + s + ' ' + data

                #Initialize workflows
                workflow = pe.Workflow(name=data)

                workflow.base_dir = '.'
                inputnode = pe.Node(
                    interface=util.IdentityInterface(fields=['source_file']),
                    name='inputspec')
                outputnode = pe.Node(
                    interface=util.IdentityInterface(fields=['result_func']),
                    name='outputspec')

                if data == 'func1':
                    inputnode.inputs.source_file = func1
                else:
                    inputnode.inputs.source_file = func2

                # Remove n_dummies first volumes
                trim = pe.Node(interface=Trim(begin_index=n_dummies),
                               name='trim')
                workflow.connect(inputnode, 'source_file', trim, 'in_file')

                # Motion correction + slice timing correction
                realign4d = pe.Node(interface=SpaceTimeRealigner(),
                                    name='realign4d')
                #realign4d.inputs.ignore_exception=True
                realign4d.inputs.slice_times = 'asc_alt_siemens'

                realign4d.inputs.slice_info = 2  # horizontal slices
                realign4d.inputs.tr = mytr  # TR in seconds
                workflow.connect(trim, 'out_file', realign4d, 'in_file')

                # Reorient
                #deoblique = pe.Node(interface=afni.Warp(deoblique=True, outputtype='NIFTI_GZ'), name='deoblique') #leave out if you don't need this
                #workflow.connect(realign4d, 'out_file', deoblique, 'in_file')
                reorient = pe.Node(
                    interface=fsl.Reorient2Std(output_type='NIFTI_GZ'),
                    name='reorient')
                workflow.connect(realign4d, 'out_file', reorient, 'in_file')

                # AFNI skullstrip and mean image skullstrip
                tstat1 = pe.Node(interface=afni.TStat(args='-mean',
                                                      outputtype="NIFTI_GZ"),
                                 name='tstat1')
                automask = pe.Node(interface=afni.Automask(
                    dilate=1, outputtype="NIFTI_GZ"),
                                   name='automask')
                skullstrip = pe.Node(interface=afni.Calc(
                    expr='a*b', outputtype="NIFTI_GZ"),
                                     name='skullstrip')
                tstat2 = pe.Node(interface=afni.TStat(args='-mean',
                                                      outputtype="NIFTI_GZ"),
                                 name='tstat2')

                workflow.connect(reorient, 'out_file', tstat1, 'in_file')
                workflow.connect(tstat1, 'out_file', automask, 'in_file')
                workflow.connect(automask, 'out_file', skullstrip, 'in_file_b')
                workflow.connect(reorient, 'out_file', skullstrip, 'in_file_a')
                workflow.connect(skullstrip, 'out_file', tstat2, 'in_file')

                # Register to anatomical space #can be changed
                #mean2anat = pe.Node(fsl.FLIRT(bins=40, cost='normmi', dof=7, interp='nearestneighbour', searchr_x=[-180,180], searchr_y=[-180,180], searchr_z=[-180,180]), name='mean2anat')
                mean2anat = pe.Node(fsl.FLIRT(bins=40,
                                              cost='normmi',
                                              dof=7,
                                              interp='nearestneighbour'),
                                    name='mean2anat')
                #mean2anat = pe.Node(fsl.FLIRT(no_search=True), name='mean2anat')
                mean2anat.inputs.reference = anat
                workflow.connect(tstat2, 'out_file', mean2anat, 'in_file')

                # Transform mean functional image
                warpmean = pe.Node(interface=fsl.ApplyWarp(), name='warpmean')
                warpmean.inputs.ref_file = MNI_brain
                warpmean.inputs.field_file = anat2MNI_fieldwarp
                workflow.connect(mean2anat, 'out_matrix_file', warpmean,
                                 'premat')
                workflow.connect(tstat2, 'out_file', warpmean, 'in_file')

                # ----- inversion matrix and eroded brain mask for regression -----

                # create inverse matrix from mean2anat registration
                invmat = pe.Node(fsl.ConvertXFM(), name='invmat')
                invmat.inputs.invert_xfm = True
                workflow.connect(mean2anat, 'out_matrix_file', invmat,
                                 'in_file')

                # erode functional brain mask
                erode_brain = pe.Node(fsl.ImageMaths(), name='erode_brain')
                erode_brain.inputs.args = '-kernel boxv 3 -ero'
                workflow.connect(automask, 'out_file', erode_brain, 'in_file')

                # register GM mask to functional image space, this is done for quality control
                reg_GM = pe.Node(fsl.preprocess.ApplyXFM(), name='register_GM')
                reg_GM.inputs.apply_xfm = True
                reg_GM.inputs.in_file = anat_GM
                workflow.connect(tstat2, 'out_file', reg_GM, 'reference')
                workflow.connect(invmat, 'out_file', reg_GM, 'in_matrix_file')

                # --------- motion regression and censor signals ------------------

                # normalize motion parameters
                norm_motion = pe.Node(interface=Function(
                    input_names=['in_file'],
                    output_names=['out_file'],
                    function=normalize_motion_data),
                                      name='normalize_motion')
                workflow.connect(realign4d, 'par_file', norm_motion, 'in_file')

                # create censor file, for censoring motion
                get_censor = pe.Node(afni.OneDToolPy(), name='motion_censors')
                get_censor.inputs.set_nruns = 1
                get_censor.inputs.censor_motion = (censor_thr, 'motion')
                get_censor.inputs.show_censor_count = True
                if overwrite: get_censor.inputs.args = '-overwrite'
                workflow.connect(norm_motion, 'out_file', get_censor,
                                 'in_file')

                # compute motion parameter derivatives (for use in regression)
                deriv_motion = pe.Node(afni.OneDToolPy(), name='deriv_motion')
                deriv_motion.inputs.set_nruns = 1
                deriv_motion.inputs.derivative = True
                if overwrite: deriv_motion.inputs.args = '-overwrite'
                deriv_motion.inputs.out_file = 'motion_derivatives.txt'
                workflow.connect(norm_motion, 'out_file', deriv_motion,
                                 'in_file')

                # scale motion parameters and get quadratures
                quadr_motion = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                       name='quadr_motion')
                quadr_motion.inputs.multicol = True
                workflow.connect(norm_motion, 'out_file', quadr_motion,
                                 'in_file')

                # scale motion derivatives and get quadratures
                quadr_motion_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                             name='quadr_motion_deriv')
                quadr_motion_deriv.inputs.multicol = True
                workflow.connect(deriv_motion, 'out_file', quadr_motion_deriv,
                                 'in_file')

                # -------- CSF regression signals ---------------

                # threshold and erode CSF mask
                erode_CSF_mask = pe.Node(fsl.ImageMaths(),
                                         name='erode_CSF_mask')
                erode_CSF_mask.inputs.args = '-thr 0.5 -kernel boxv 3 -ero'
                erode_CSF_mask.inputs.in_file = anat_CSF

                # register CSF mask to functional image space
                reg_CSF_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                       name='register_CSF_mask')
                reg_CSF_mask.inputs.apply_xfm = True
                workflow.connect(tstat2, 'out_file', reg_CSF_mask, 'reference')
                workflow.connect(invmat, 'out_file', reg_CSF_mask,
                                 'in_matrix_file')

                # inverse lesion mask and remove it from CSF mask #remove this if you don't have a lesion mask
                inverse_lesion_mask = pe.Node(fsl.ImageMaths(),
                                              name='inverse_lesion_mask')
                inverse_lesion_mask.inputs.args = '-add 1 -rem 2'
                inverse_lesion_mask.inputs.in_file = lesion_mask_file
                rem_lesion = pe.Node(fsl.ImageMaths(), name='remove_lesion')
                workflow.connect(erode_CSF_mask, 'out_file', rem_lesion,
                                 'in_file')
                workflow.connect(inverse_lesion_mask, 'out_file', rem_lesion,
                                 'mask_file')
                '''
                # Transform lesion mask to MNI152 space #remove if lesion masks are already in MNI152 space
                warp_lesion = pe.Node(interface=fsl.ApplyWarp(), name='warp_lesion')
                warp_lesion.inputs.ref_file = MNI_brain
                warp_lesion.inputs.field_file = anat2MNI_fieldwarp
                warp_lesion.inputs.in_file = lesion_mask_file
                warp_lesion.inputs.out_file = anat_path + subj + '/' + s + '/anat/nonlinear_reg/lesion_seg_warp.nii.gz'
                warp_lesion.run()
                '''

                # inverse old lesion mask and remove it from CSF mask #remove this if you don't have a lesion mask
                if old_lesion_mask_file:
                    inverse_old_lesion_mask = pe.Node(
                        fsl.ImageMaths(), name='inverse_old_lesion_mask')
                    inverse_old_lesion_mask.inputs.args = '-add 1 -rem 3'
                    #inverse_old_lesion_mask.inputs.in_file = old_lesion_mask_file[0]
                    inverse_old_lesion_mask.inputs.in_file = old_lesion_mask_file
                    rem_old_lesion = pe.Node(fsl.ImageMaths(),
                                             name='remove_old_lesion')
                    workflow.connect(rem_lesion, 'out_file', rem_old_lesion,
                                     'in_file')
                    workflow.connect(inverse_old_lesion_mask, 'out_file',
                                     rem_old_lesion, 'mask_file')
                    workflow.connect(rem_old_lesion, 'out_file', reg_CSF_mask,
                                     'in_file')
                    '''
                    # Transform old lesion mask to MNI152 space #remove if lesion masks are already in MNI152 space
                    warp_old_lesion = pe.Node(interface=fsl.ApplyWarp(), name='warp_old_lesion')
                    warp_old_lesion.inputs.ref_file = MNI_brain
                    warp_old_lesion.inputs.field_file = anat2MNI_fieldwarp
                    warp_old_lesion.inputs.in_file = old_lesion_mask_file
                    warp_old_lesion.inputs.out_file = anat_path + subj + '/' + s + '/anat/nonlinear_reg/old_lesion_seg_warp.nii.gz'
                    warp_old_lesion.run()
                    '''

                else:
                    workflow.connect(rem_lesion, 'out_file', reg_CSF_mask,
                                     'in_file')

                # threshold CSF mask and intersect with functional brain mask
                thr_CSF_mask = pe.Node(fsl.ImageMaths(),
                                       name='threshold_CSF_mask')
                thr_CSF_mask.inputs.args = '-thr 0.25'
                workflow.connect(reg_CSF_mask, 'out_file', thr_CSF_mask,
                                 'in_file')
                workflow.connect(erode_brain, 'out_file', thr_CSF_mask,
                                 'mask_file')

                # extract CSF values
                get_CSF_noise = pe.Node(fsl.ImageMeants(),
                                        name='get_CSF_noise')
                workflow.connect(skullstrip, 'out_file', get_CSF_noise,
                                 'in_file')
                workflow.connect(thr_CSF_mask, 'out_file', get_CSF_noise,
                                 'mask')

                # compute CSF noise derivatives
                deriv_CSF = pe.Node(afni.OneDToolPy(), name='deriv_CSF')
                deriv_CSF.inputs.set_nruns = 1
                deriv_CSF.inputs.derivative = True
                if overwrite: deriv_CSF.inputs.args = '-overwrite'
                deriv_CSF.inputs.out_file = 'CSF_derivatives.txt'
                workflow.connect(get_CSF_noise, 'out_file', deriv_CSF,
                                 'in_file')

                # scale CSF noise and get quadratures
                quadr_CSF = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                    name='quadr_CSF')
                quadr_CSF.inputs.multicol = False
                workflow.connect(get_CSF_noise, 'out_file', quadr_CSF,
                                 'in_file')

                # scale CSF noise derivatives and get quadratures
                quadr_CSF_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                          name='quadr_CSF_deriv')
                quadr_CSF_deriv.inputs.multicol = False
                workflow.connect(deriv_CSF, 'out_file', quadr_CSF_deriv,
                                 'in_file')

                # -------- WM regression signals -----------------

                # threshold and erode WM mask
                erode_WM_mask = pe.Node(fsl.ImageMaths(), name='erode_WM_mask')
                erode_WM_mask.inputs.args = '-thr 0.5 -kernel boxv 7 -ero'
                erode_WM_mask.inputs.in_file = anat_WM

                # register WM mask to functional image space
                reg_WM_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                      name='register_WM_mask')
                reg_WM_mask.inputs.apply_xfm = True
                workflow.connect(tstat2, 'out_file', reg_WM_mask, 'reference')
                workflow.connect(invmat, 'out_file', reg_WM_mask,
                                 'in_matrix_file')
                workflow.connect(erode_WM_mask, 'out_file', reg_WM_mask,
                                 'in_file')

                # create inverse nonlinear registration MNI2anat
                invwarp = pe.Node(fsl.InvWarp(output_type='NIFTI_GZ'),
                                  name='invwarp')
                invwarp.inputs.warp = anat2MNI_fieldwarp
                invwarp.inputs.reference = anat

                # transform ventricle mask to functional space
                reg_ventricles = pe.Node(fsl.ApplyWarp(),
                                         name='register_ventricle_mask')
                reg_ventricles.inputs.in_file = ventricle_mask
                workflow.connect(tstat2, 'out_file', reg_ventricles,
                                 'ref_file')
                workflow.connect(invwarp, 'inverse_warp', reg_ventricles,
                                 'field_file')
                workflow.connect(invmat, 'out_file', reg_ventricles, 'postmat')

                # threshold WM mask and intersect with functional brain mask
                thr_WM_mask = pe.Node(fsl.ImageMaths(),
                                      name='threshold_WM_mask')
                thr_WM_mask.inputs.args = '-thr 0.25'
                workflow.connect(reg_WM_mask, 'out_file', thr_WM_mask,
                                 'in_file')
                workflow.connect(erode_brain, 'out_file', thr_WM_mask,
                                 'mask_file')

                # remove ventricles from WM mask
                exclude_ventricles = pe.Node(fsl.ImageMaths(),
                                             name='exclude_ventricles')
                workflow.connect(thr_WM_mask, 'out_file', exclude_ventricles,
                                 'in_file')
                workflow.connect(reg_ventricles, 'out_file',
                                 exclude_ventricles, 'mask_file')

                # check that WM is collected from both hemispheres
                check_WM_bilat = pe.Node(interface=Function(
                    input_names=['in_file'],
                    output_names=['errors'],
                    function=check_bilateralism),
                                         name='check_WM_bilateralism')
                workflow.connect(exclude_ventricles, 'out_file',
                                 check_WM_bilat, 'in_file')

                # extract WM values
                get_WM_noise = pe.Node(fsl.ImageMeants(), name='get_WM_noise')
                workflow.connect(skullstrip, 'out_file', get_WM_noise,
                                 'in_file')
                workflow.connect(exclude_ventricles, 'out_file', get_WM_noise,
                                 'mask')

                # compute WM noise derivatives
                deriv_WM = pe.Node(afni.OneDToolPy(), name='deriv_WM')
                deriv_WM.inputs.set_nruns = 1
                deriv_WM.inputs.derivative = True
                if overwrite: deriv_WM.inputs.args = '-overwrite'
                deriv_WM.inputs.out_file = 'WM_derivatives.txt'
                workflow.connect(get_WM_noise, 'out_file', deriv_WM, 'in_file')

                # scale WM noise and get quadratures
                quadr_WM = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                   name='quadr_WM')
                quadr_WM.inputs.multicol = False
                workflow.connect(get_WM_noise, 'out_file', quadr_WM, 'in_file')

                # scale WM noise derivatives and get quadratures
                quadr_WM_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                         name='quadr_WM_deriv')
                quadr_WM_deriv.inputs.multicol = False
                workflow.connect(deriv_WM, 'out_file', quadr_WM_deriv,
                                 'in_file')

                # ---------- global regression signals ----------------

                if global_reg:
                    # register anatomical whole brain mask to functional image space
                    reg_glob_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                            name='register_global_mask')
                    reg_glob_mask.inputs.apply_xfm = True
                    reg_glob_mask.inputs.in_file = anat
                    workflow.connect(tstat2, 'out_file', reg_glob_mask,
                                     'reference')
                    workflow.connect(invmat, 'out_file', reg_glob_mask,
                                     'in_matrix_file')

                    # threshold anatomical brain mask and intersect with functional brain mask
                    thr_glob_mask = pe.Node(fsl.ImageMaths(),
                                            name='threshold_global_mask')
                    thr_glob_mask.inputs.args = '-thr -0.1'
                    workflow.connect(reg_glob_mask, 'out_file', thr_glob_mask,
                                     'in_file')
                    workflow.connect(erode_brain, 'out_file', thr_glob_mask,
                                     'mask_file')

                    # extract global signal values
                    get_glob_noise = pe.Node(fsl.ImageMeants(),
                                             name='get_global_noise')
                    workflow.connect(skullstrip, 'out_file', get_glob_noise,
                                     'in_file')
                    workflow.connect(thr_glob_mask, 'out_file', get_glob_noise,
                                     'mask')

                    # compute global noise derivative
                    deriv_glob = pe.Node(afni.OneDToolPy(),
                                         name='deriv_global')
                    deriv_glob.inputs.set_nruns = 1
                    deriv_glob.inputs.derivative = True
                    if overwrite: deriv_glob.inputs.args = '-overwrite'
                    deriv_glob.inputs.out_file = 'global_derivatives.txt'
                    workflow.connect(get_glob_noise, 'out_file', deriv_glob,
                                     'in_file')

                    # scale global noise and get quadratures
                    quadr_glob = pe.Node(interface=Function(
                        input_names=['in_file', 'multicol'],
                        output_names=['out_file', 'out_quadr_file'],
                        function=scale_and_quadrature),
                                         name='quadr_glob')
                    quadr_glob.inputs.multicol = False
                    workflow.connect(get_glob_noise, 'out_file', quadr_glob,
                                     'in_file')

                    # scale global noise derivatives and get quadratures
                    quadr_glob_deriv = pe.Node(interface=Function(
                        input_names=['in_file', 'multicol'],
                        output_names=['out_file', 'out_quadr_file'],
                        function=scale_and_quadrature),
                                               name='quadr_glob_deriv')
                    quadr_glob_deriv.inputs.multicol = False
                    workflow.connect(deriv_glob, 'out_file', quadr_glob_deriv,
                                     'in_file')

                # ---------- regression matrix ----------

                # create bandpass regressors; this cannot easily be implemented as a workflow node
                get_bandpass = pe.Node(interface=Function(
                    input_names=['minf', 'maxf', 'example_file', 'tr'],
                    output_names=['out_tuple'],
                    function=bandpass),
                                       name='bandpass_regressors')
                get_bandpass.inputs.minf = myminf
                get_bandpass.inputs.maxf = mymaxf
                get_bandpass.inputs.tr = mytr
                workflow.connect(norm_motion, 'out_file', get_bandpass,
                                 'example_file')

                # concatenate regressor time series
                cat_reg_name = 'cat_regressors'
                if global_reg: cat_reg_name = cat_reg_name + '_global'
                cat_reg = pe.Node(interface=Function(
                    input_names=[
                        'mot', 'motd', 'motq', 'motdq', 'CSF', 'CSFd', 'CSFq',
                        'CSFdq', 'WM', 'WMd', 'WMq', 'WMdq', 'include_global',
                        'glob', 'globd', 'globq', 'globdq'
                    ],
                    output_names=['reg_file_args'],
                    function=concatenate_regressors),
                                  name=cat_reg_name)
                cat_reg.inputs.include_global = global_reg
                workflow.connect(quadr_motion, 'out_file', cat_reg, 'mot')
                workflow.connect(quadr_motion_deriv, 'out_file', cat_reg,
                                 'motd')
                workflow.connect(quadr_motion, 'out_quadr_file', cat_reg,
                                 'motq')
                workflow.connect(quadr_motion_deriv, 'out_quadr_file', cat_reg,
                                 'motdq')
                workflow.connect(quadr_CSF, 'out_file', cat_reg, 'CSF')
                workflow.connect(quadr_CSF_deriv, 'out_file', cat_reg, 'CSFd')
                workflow.connect(quadr_CSF, 'out_quadr_file', cat_reg, 'CSFq')
                workflow.connect(quadr_CSF_deriv, 'out_quadr_file', cat_reg,
                                 'CSFdq')
                workflow.connect(quadr_WM, 'out_file', cat_reg, 'WM')
                workflow.connect(quadr_WM_deriv, 'out_file', cat_reg, 'WMd')
                workflow.connect(quadr_WM, 'out_quadr_file', cat_reg, 'WMq')
                workflow.connect(quadr_WM_deriv, 'out_quadr_file', cat_reg,
                                 'WMdq')
                if global_reg:
                    workflow.connect(quadr_glob, 'out_file', cat_reg, 'glob')
                    workflow.connect(quadr_glob_deriv, 'out_file', cat_reg,
                                     'globd')
                    workflow.connect(quadr_glob, 'out_quadr_file', cat_reg,
                                     'globq')
                    workflow.connect(quadr_glob_deriv, 'out_quadr_file',
                                     cat_reg, 'globdq')
                else:
                    cat_reg.inputs.glob = None
                    cat_reg.inputs.globd = None
                    cat_reg.inputs.globq = None
                    cat_reg.inputs.globdq = None

                # create regression matrix
                deconvolve_name = 'deconvolve'
                if global_reg: deconvolve_name = deconvolve_name + '_global'
                deconvolve = pe.Node(afni.Deconvolve(), name=deconvolve_name)
                deconvolve.inputs.polort = 2  # constant, linear and quadratic background signals removed
                deconvolve.inputs.fout = True
                deconvolve.inputs.tout = True
                deconvolve.inputs.x1D_stop = True
                deconvolve.inputs.force_TR = mytr
                workflow.connect(cat_reg, 'reg_file_args', deconvolve, 'args')
                workflow.connect(get_bandpass, 'out_tuple', deconvolve,
                                 'ortvec')
                workflow.connect([(skullstrip, deconvolve,
                                   [(('out_file', str2list), 'in_files')])])

                # regress out motion and other unwanted signals
                tproject_name = 'tproject'
                if global_reg: tproject_name = tproject_name + '_global'
                tproject = pe.Node(afni.TProject(outputtype="NIFTI_GZ"),
                                   name=tproject_name)
                tproject.inputs.TR = mytr
                tproject.inputs.polort = 0  # use matrix created with 3dDeconvolve, higher order polynomials not needed
                tproject.inputs.cenmode = 'NTRP'  # interpolate removed time points
                workflow.connect(get_censor, 'out_file', tproject, 'censor')
                workflow.connect(skullstrip, 'out_file', tproject, 'in_file')
                workflow.connect(automask, 'out_file', tproject, 'mask')
                workflow.connect(deconvolve, 'x1D', tproject, 'ort')

                # Transform all images
                warpall_name = 'warpall'
                if global_reg: warpall_name = warpall_name + '_global'
                warpall = pe.Node(interface=fsl.ApplyWarp(), name=warpall_name)
                warpall.inputs.ref_file = MNI_brain
                warpall.inputs.field_file = anat2MNI_fieldwarp
                workflow.connect(mean2anat, 'out_matrix_file', warpall,
                                 'premat')
                workflow.connect(tproject, 'out_file', warpall, 'in_file')
                workflow.connect(warpall, 'out_file', outputnode,
                                 'result_func')

                # Run workflow
                workflow.write_graph()
                workflow.run()

        print "FUNCTIONAL PREPROCESSING DONE! Results in ", results_path + subj + '/' + s
    except:
        print "Error with patient: ", subj
        traceback.print_exc()
Example #10
                                               outputtype="NIFTI_GZ"),
                       name='automask')
    skullstrip = pe.Node(interface=afni.Calc(expr='a*b',
                                             outputtype="NIFTI_GZ"),
                         name='skullstrip')
    tstat2 = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"),
                     name='tstat2')

    workflow2.connect(inputnode2, 'drifter_result', tstat1, 'in_file')
    workflow2.connect(tstat1, 'out_file', automask, 'in_file')
    workflow2.connect(automask, 'out_file', skullstrip, 'in_file_b')
    workflow2.connect(inputnode2, 'drifter_result', skullstrip, 'in_file_a')
    workflow2.connect(skullstrip, 'out_file', tstat2, 'in_file')

    # Remove the first n (3) volumes
    trim = pe.Node(interface=Trim(begin_index=3), name='trim')
    workflow2.connect(skullstrip, 'out_file', trim, 'in_file')

    # Spatial smoothing, kernel sigma 2.00 mm (5 mm is too much)
    #smooth = pe.Node(interface=fsl.maths.SpatialFilter(operation='mean', terminal_output='stream', kernel_shape='gauss', kernel_size=1.5, nan2zeros=True), name='smooth')

    #workflow2.connect(trim,'out_file', smooth, 'in_file')

    # Normalize the median value of each run to 10000
    intnorm = pe.Node(interface=fsl.ImageMaths(op_string='-ing 10000',
                                               suffix='_intnorm'),
                      name='intnorm')
    workflow2.connect(trim, 'out_file', intnorm, 'in_file')

    # Register to standard space
    #mean2anat = pe.Node(fsl.FLIRT(bins=40, cost='normmi', dof=12, interp='nearestneighbour'), name='mean2anat')
Example #11
def TV_Preproc_Pipeline(base_dir=None,
                        output_dir=None,
                        subject_id=None,
                        spm_path=None):
    """ Create a preprocessing workflow for the Couples Conflict Study using nipype

    Args:
        base_dir: path to data folder where raw subject folder is located
        output_dir: path to where key output files should be saved
        subject_id: subject_id (str)
        spm_path: path to spm folder

    Returns:
        workflow: a nipype workflow that can be run
        
    """

    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as util
    from nipype.interfaces.utility import Merge as Merge_List
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.fsl.maths import UnaryMaths
    from nipype.interfaces.nipy.preprocess import Trim
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces import spm
    from nipype.interfaces.spm import Normalize12
    from nipype.algorithms.misc import Gunzip
    from nipype.interfaces.nipy.preprocess import ComputeMask
    import nipype.interfaces.matlab as mlab
    from nltools.utils import get_resource_path, get_vox_dims, get_n_volumes
    from nltools.interfaces import Plot_Coregistration_Montage, PlotRealignmentParameters, Create_Covariates, Plot_Quality_Control
    import os
    import glob

    ########################################
    ## Setup Paths and Nodes
    ########################################

    # Specify Paths
    canonical_file = os.path.join(spm_path, 'canonical', 'single_subj_T1.nii')
    template_file = os.path.join(spm_path, 'tpm', 'TPM.nii')

    # Set the way matlab should be called
    mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
    mlab.MatlabCommand.set_default_paths(spm_path)

    # Get File Names for different types of scans.  Parse into separate processing streams
    datasource = Node(interface=nio.DataGrabber(infields=['subject_id'],
                                                outfields=['struct', 'func']),
                      name='datasource')
    datasource.inputs.base_directory = base_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {
        'struct': '%s/T1.nii.gz',
        'func': '%s/*ep*.nii.gz'
    }
    datasource.inputs.template_args = {
        'struct': [['subject_id']],
        'func': [['subject_id']]
    }
    datasource.inputs.subject_id = subject_id
    datasource.inputs.sort_filelist = True

    # iterate over functional scans to define paths
    func_source = Node(interface=util.IdentityInterface(fields=['scan']),
                       name="func_source")
    func_source.iterables = ('scan',
                             glob.glob(
                                 os.path.join(base_dir, subject_id,
                                              '*ep*nii.gz')))

    ########################################
    ## Preprocessing
    ########################################

    # Trim - remove first 5 TRs
    n_vols = 5
    trim = Node(interface=Trim(), name='trim')
    trim.inputs.begin_index = n_vols

    #Realignment - 6 parameters - realign to first image of very first series.
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    #Coregister - 12 parameters
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estwrite'

    #Plot Realignment
    plot_realign = Node(interface=PlotRealignmentParameters(),
                        name="plot_realign")

    #Artifact Detection
    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    # Gunzip - unzip the functional and structural images
    gunzip_struc = Node(Gunzip(), name="gunzip_struc")
    gunzip_func = Node(Gunzip(), name="gunzip_func")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize = Node(interface=Normalize12(jobtype='estwrite',
                                           tpm=template_file),
                     name="normalize")

    #Plot normalization Check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    #Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    #remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    #Smooth
    #implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = 6

    #Create Covariate matrix
    make_cov = Node(interface=Create_Covariates(), name="make_cov")

    #Plot Quality Control Check
    quality_control = Node(interface=Plot_Quality_Control(),
                           name='quality_control')

    # Create a datasink to clean up output files
    datasink = Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id

    ########################################
    # Create Workflow
    ########################################

    workflow = Workflow(name='Preprocessed')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    workflow.connect([
        (datasource, gunzip_struc, [('struct', 'in_file')]),
        (func_source, trim, [('scan', 'in_file')]),
        (trim, gunzip_func, [('out_file', 'in_file')]),
        (gunzip_func, realign, [('out_file', 'in_files')]),
        (realign, quality_control, [('realigned_files', 'dat_img')]),
        (gunzip_struc, coregister, [('out_file', 'source')]),
        (coregister, normalize, [('coregistered_source', 'image_to_align')]),
        (realign, coregister, [('mean_image', 'target'),
                               ('realigned_files', 'apply_to_files')]),
        (realign, normalize, [(('mean_image', get_vox_dims),
                               'write_voxel_sizes')]),
        (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
        (normalize, smooth, [('normalized_files', 'in_files')]),
        (realign, compute_mask, [('mean_image', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters'),
                        ('realigned_files', 'realigned_files')]),
        (realign, plot_realign, [('realignment_parameters',
                                  'realignment_parameters')]),
        (normalize, plot_normalization_check, [('normalized_files', 'wra_img')
                                               ]),
        (realign, make_cov, [('realignment_parameters',
                              'realignment_parameters')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (normalize, datasink, [('normalized_files', 'structural.@normalize')]),
        (coregister, datasink, [('coregistered_source', 'structural.@struct')
                                ]),
        (smooth, datasink, [('smoothed_files', 'functional.@smooth')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_normalization_check, datasink,
         [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')]),
        (quality_control, datasink, [('plot', 'functional.@quality_control')])
    ])
    return workflow
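A usage sketch for the returned workflow; the paths and subject ID below are hypothetical:

# Hypothetical arguments, for illustration only.
wf = TV_Preproc_Pipeline(base_dir='/data/raw',
                         output_dir='/data/derivatives',
                         subject_id='sub001',
                         spm_path='/opt/spm12')
wf.run()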
Example #12
def init_clean_wf(
    in_files,
    parcellation,
    out_dir,
    out_path_base,
    space,
    parcellation_space,
    source_file,
    work_dir,
    save_clean_dtseries=False,
    save_clean_ptseries=False,
    save_clean_pconn=False,
    save_clean_covariance=False,
    polort=-1,
    passband=None,
    stopband=None,
    desc_entity='concatenated',
    name='clean_wf'
):
    """
    Generates a parcellated connectome.

    Example:
        wf = init_clean_wf(
            in_files = {'cifti': ['sub-001_task-rest_run-1_space-fsLR32k_bold.nii.gz',
                                  'sub-001_task-rest_run-2_space-fsLR32k_bold.nii.gz'],
                        'source_files': ['sub-001_task-rest_run-1_bold.nii.gz',
                                         'sub-001_task-rest_run-2_bold.nii.gz'],
                        'censor': ['sub-001_task-rest_run-1_censor.1D',
                                   'sub-001_task-rest_run-2_censor.1D'],
                        'ort': ['sub-001_task-rest_run-1_ort.1D',
                                'sub-001_task-rest_run-2_ort.1D'],
                        'dt': [0.8, 0.8],
                        'trim': [2, 3]},
            parcellation = 'glasser_parcellation.dlabel.nii',
            out_dir = '/path/to/out_dir',
            out_path_base = 'some_folder_name',
            space = 'fsLR32k',
            parcellation_space = 'glasser',
            source_file = 'sub-EC1008_task-rest_bold.nii.gz',
            work_dir = '/path/to/work_dir',
            save_clean_dtseries = False,
            save_clean_ptseries = False,
            save_clean_pconn = False,
            save_clean_covariance = False,
            polort=2,
            passband=[0.01, 0.1],
            stopband=None,
            desc_entity='concatenated',
            name='really_cool_name'
        )

    Parameters
    ----------

    in_files : dict
        keys - cifti, source_files, censor, ort, dt
            cifti is a list of preprocessed cifti files
            source_files are used for naming output modified bold files
            censor is a list of afni censor 1D files or None
            ort is a list of ort 1D files or None
            dt is a list of floats for the repetition times
            trim removes this number of volumes from the beginning before any
                preprocessing. The number of volumes after trimming must match
                the censor and ort volumes. This number is zero-based. Use 0
                if you do not want to trim any beginning frames.
    parcellation : str
        path to cifti parcellation
    out_dir : str
        the output directory
    out_path_base : str
        the new directory for the output, to be created within out_dir
    space : str
        the space for the cifti files
    parcellation_space : str    
        the space alias for the parcellation, so keep it terse
    source_file : str
        a filename for output naming purposes
    work_dir : str
        the working directory for the workflow
    save_clean_dtseries : bool
        save the clean dtseries for each in cifti
    save_clean_ptseries : bool
        save the clean ptseries for each in cifti
    save_clean_pconn : bool
        save the pconn for each in cifti
    save_clean_covariance : bool
        save the covariance for each in cifti
    polort : int
        remove polynomials up to and including this degree
    passband : list
        fbot, ftop
    stopband : list
        sbot, stop
    desc_entity : str
        desc entity value in final output file names
    name : str
        workflow name

    Returns
    -------

    workflow : nipype workflow
        Here are the workflow steps:
            1. Convert cifti to nifti.
            2. Remove nuisance regressors with 3dTproject.
            3. Convert 'cleaned' nifti back to cifti.
            4. Parcellate cifti.
                a) Calculate task connectome.
            5. Merge all task ciftis.
            6. Calculate subject connectome.
    """

    cifti = in_files['cifti']
    source_files = in_files['source_files']
    censor = in_files['censor']
    ort = in_files['ort']
    dt = in_files['dt']
    trim = in_files['trim']

    DerivativesDataSink = bids.DerivativesDataSink
    DerivativesDataSink.out_path_base = out_path_base

    write_verbose = (ort is not None 
                     or polort > 0 
                     or passband is not None
                     or stopband is not None)

    run_tproject = (censor is not None
                    or write_verbose)

    tproject_iterfields = ['in_file', 'TR']
    if censor is not None:
        tproject_iterfields.append('censor')
    if ort is not None:
        tproject_iterfields.append('ort')

    # start workflow now
    clean_wf = Workflow(name=name, base_dir=work_dir)

    cifti_to_nifti = MapNode(CiftiConvertToNifti(
        out_file='fakenifti.nii.gz'),
        name='cifti_to_nifti', iterfield=['in_file'])
    cifti_to_nifti.inputs.in_file = cifti

    trim_begin = MapNode(Trim(),
        name='trim_begin', iterfield=['in_file', 'begin_index'])
    trim_begin.inputs.begin_index = trim

    tproject = MapNode(TProject(
        out_file='clean.nii.gz',
        polort=polort,
        verb=True), name='tproject', iterfield=tproject_iterfields)
    tproject.inputs.TR = dt
    if 'censor' in tproject_iterfields:
        tproject.inputs.censor = censor
    if 'ort' in tproject_iterfields:
        tproject.inputs.ort = ort

    nifti_to_cifti = MapNode(CiftiConvertFromNifti(
        out_file='converted.dtseries.nii'),
        name='nifti_to_cifti', 
        iterfield=['in_file', 'cifti_template', 'reset_timepoints'])
    nifti_to_cifti.inputs.cifti_template = cifti
    nifti_to_cifti.inputs.reset_timepoints = [(x, 0) for x in dt]

    cifti_parcellate = MapNode(CiftiParcellate(
        cifti_label=parcellation,
        direction='COLUMN',
        out_file='parcellated.ptseries.nii'),
        name='cifti_parcellate', iterfield=['in_file'])

    task_rvals = MapNode(CiftiCorrelation(
        out_file='task_rvals.pconn.nii'),
        name='task_rvals', iterfield=['in_file'])

    task_zvals = MapNode(CiftiCorrelation(
        out_file='task_zvals.pconn.nii',
        fisher_z=True),
        name='task_zvals', iterfield=['in_file'])

    task_cov = MapNode(CiftiCorrelation(
        out_file='task_cov.pconn.nii',
        covariance=True),
        name='task_cov', iterfield=['in_file'])

    task_merge = Node(CiftiMerge(
        out_file='merge.ptseries.nii'),
        name='task_merge')

    merge_rvals = Node(CiftiCorrelation(
        out_file='merge_rvals.pconn.nii'),
        name='merge_rvals')

    merge_zvals = Node(CiftiCorrelation(
        out_file='merge_zvals.pconn.nii',
        fisher_z=True),
        name='merge_zvals')

    merge_cov = Node(CiftiCorrelation(
        out_file='merge_cov.pconn.nii',
        covariance=True),
        name='merge_cov')

    # derivatives
    ds_clean_dtseries = MapNode(DerivativesDataSink(
        base_directory=out_dir,
        desc='clean',
        space=space,
        suffix='bold.dtseries'),
        iterfield=['in_file', 'source_file'],
        name='ds_clean_dtseries', 
        run_without_submitting=True)
    ds_clean_dtseries.inputs.source_file = source_files

    ds_task_ptseries = MapNode(DerivativesDataSink(
        base_directory=out_dir,
        desc='clean',
        space=parcellation_space,
        suffix='bold.ptseries'),
        iterfield=['in_file', 'source_file'],
        name='ds_task_ptseries', 
        run_without_submitting=True)
    ds_task_ptseries.inputs.source_file = source_files

    ds_task_zvals = MapNode(DerivativesDataSink(
        base_directory=out_dir,
        space=parcellation_space,
        suffix='zvals.pconn'),
        iterfield=['in_file', 'source_file'],
        name='ds_task_zvals', 
        run_without_submitting=True)
    ds_task_zvals.inputs.source_file = source_files
    
    ds_task_rvals = MapNode(DerivativesDataSink(
        base_directory=out_dir,
        space=parcellation_space,
        suffix='rvals.pconn'),
        iterfield=['in_file', 'source_file'],
        name='ds_task_rvals', 
        run_without_submitting=True)
    ds_task_rvals.inputs.source_file = source_files

    ds_task_cov = MapNode(DerivativesDataSink(
        base_directory=out_dir,
        space=parcellation_space,
        suffix='cov.pconn'),
        iterfield=['in_file', 'source_file'],
        name='ds_task_cov', 
        run_without_submitting=True)
    ds_task_cov.inputs.source_file = source_files

    ds_subject_zvals = Node(DerivativesDataSink(
        base_directory=out_dir,
        desc=desc_entity,
        space=parcellation_space,
        suffix='zvals.pconn',
        source_file=source_file),
        name='ds_subject_zvals', run_without_submitting=True)

    ds_subject_rvals = Node(DerivativesDataSink(
        base_directory=out_dir,
        desc=desc_entity,
        space=parcellation_space,
        suffix='rvals.pconn',
        source_file=source_file),
        name='ds_subject_rvals', run_without_submitting=True)

    ds_subject_cov = Node(DerivativesDataSink(
        base_directory=out_dir,
        desc=desc_entity,
        space=parcellation_space,
        suffix='cov.pconn',
        source_file=source_file),
        name='ds_subject_cov', run_without_submitting=True)

    ds_ort = MapNode(DerivativesDataSink(
        base_directory=out_dir,
        desc='confounds',
        suffix='tproject'),
        iterfield=['in_file', 'source_file'],
        name='ds_ort',
        run_without_submitting=True)
    ds_ort.inputs.source_file = source_files

    ds_sval = MapNode(DerivativesDataSink(
        base_directory=out_dir,
        desc='sval',
        suffix='tproject'),
        iterfield=['in_file', 'source_file'],
        name='ds_sval',
        run_without_submitting=True)
    ds_sval.inputs.source_file = source_files

    ds_psinv = MapNode(DerivativesDataSink(
        base_directory=out_dir,
        desc='psinv',
        suffix='tproject'),
        iterfield=['in_file', 'source_file'],
        name='ds_psinv',
        run_without_submitting=True)
    ds_psinv.inputs.source_file = source_files

    if run_tproject:

        clean_wf.connect([
            (cifti_to_nifti, trim_begin, [('out_file', 'in_file')]),
            (trim_begin, tproject, [('out_file', 'in_file')]),
            (tproject, nifti_to_cifti, [('out_file', 'in_file')]),
            (nifti_to_cifti, cifti_parcellate, [('out_file', 'in_file')]),
        ])

        if write_verbose:
            clean_wf.connect([
                (tproject, ds_ort, [('matrix', 'in_file')]),
                (tproject, ds_sval, [('singular_values', 'in_file')]),
                (tproject, ds_psinv, [('pseudo_inv', 'in_file')]),
            ])
    else:

        clean_wf.connect([
            (cifti_to_nifti, trim_begin, [('out_file', 'in_file')]),
            (trim_begin, nifti_to_cifti, [('out_file', 'in_file')]),
            (nifti_to_cifti, cifti_parcellate, [('out_file', 'in_file')]),
        ])
        
    clean_wf.connect([
        (cifti_parcellate, task_merge, [('out_file', 'in_files')]),
        (task_merge, merge_rvals, [('out_file', 'in_file')]),
        (task_merge, merge_zvals, [('out_file', 'in_file')]),
        (task_merge, merge_cov, [('out_file', 'in_file')]),
        # derivatives
        (merge_rvals, ds_subject_rvals, [('out_file', 'in_file')]),
        (merge_zvals, ds_subject_zvals, [('out_file', 'in_file')]),
        (merge_cov, ds_subject_cov, [('out_file', 'in_file')]),
    ])

    if save_clean_dtseries:
        clean_wf.connect([
            (nifti_to_cifti, ds_clean_dtseries, [('out_file', 'in_file')]),
        ])

    if save_clean_ptseries:
        clean_wf.connect([
            (cifti_parcellate, ds_task_ptseries, [('out_file', 'in_file')]),
        ])

    if save_clean_pconn:
        clean_wf.connect([
            (cifti_parcellate, task_rvals, [('out_file', 'in_file')]),
            (cifti_parcellate, task_zvals, [('out_file', 'in_file')]),
            # derivatives
            (task_rvals, ds_task_rvals, [('out_file', 'in_file')]),
            (task_zvals, ds_task_zvals, [('out_file', 'in_file')]),
        ])

    if save_clean_covariance:
        clean_wf.connect([
            (cifti_parcellate, task_cov, [('out_file', 'in_file')]),
            # derivatives
            (task_cov, ds_task_cov, [('out_file', 'in_file')]),
        ])

    return clean_wf
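
# The pattern above in isolation: a MapNode wraps DerivativesDataSink so one
# sink is expanded per (in_file, source_file) pair, and run_without_submitting
# keeps these lightweight file-copy jobs off the execution plugin's queue.
# A minimal sketch with hypothetical paths (the DerivativesDataSink import
# path may vary with the niworkflows version):
from nipype.pipeline.engine import MapNode
from niworkflows.interfaces.bids import DerivativesDataSink

ds_sketch = MapNode(
    DerivativesDataSink(base_directory='/data/derivatives',  # hypothetical
                        desc='clean',
                        suffix='bold'),
    iterfield=['in_file', 'source_file'],
    name='ds_sketch',
    run_without_submitting=True)
ds_sketch.inputs.source_file = ['/data/sub-01/func/sub-01_task-rest_bold.nii.gz']
ds_sketch.inputs.in_file = ['/tmp/sub-01_task-rest_desc-clean_bold.nii.gz']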
Beispiel #13
0
def Couple_Preproc_Pipeline(base_dir=None,
                            output_dir=None,
                            subject_id=None,
                            spm_path=None):
    """ Create a preprocessing workflow for the Couples Conflict Study using nipype

    Args:
        base_dir: path to data folder where raw subject folder is located
        output_dir: path to where key output files should be saved
        subject_id: subject_id (str)
        spm_path: path to spm folder

    Returns:
        workflow: a nipype workflow that can be run
        
    """

    from nipype.interfaces.dcm2nii import Dcm2nii
    from nipype.interfaces.fsl import Merge, TOPUP, ApplyTOPUP
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as util
    from nipype.interfaces.utility import Merge as Merge_List
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.fsl.maths import UnaryMaths
    from nipype.interfaces.nipy.preprocess import Trim
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces import spm
    from nipype.interfaces.spm import Normalize12
    from nipype.algorithms.misc import Gunzip
    from nipype.interfaces.nipy.preprocess import ComputeMask
    import nipype.interfaces.matlab as mlab
    from nltools.utils import get_resource_path, get_vox_dims, get_n_volumes
    from nltools.interfaces import Plot_Coregistration_Montage, PlotRealignmentParameters, Create_Covariates
    import os
    import glob

    ########################################
    ## Setup Paths and Nodes
    ########################################

    # Specify Paths
    canonical_file = os.path.join(spm_path, 'canonical', 'single_subj_T1.nii')
    template_file = os.path.join(spm_path, 'tpm', 'TPM.nii')

    # Set the way matlab should be called
    mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
    mlab.MatlabCommand.set_default_paths(spm_path)

    # Get File Names for different types of scans.  Parse into separate processing streams
    datasource = Node(interface=nio.DataGrabber(
        infields=['subject_id'], outfields=['struct', 'ap', 'pa']),
                      name='datasource')
    datasource.inputs.base_directory = base_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {
        'struct': '%s/Study*/t1w_32ch_mpr_08mm*',
        'ap': '%s/Study*/distortion_corr_32ch_ap*',
        'pa': '%s/Study*/distortion_corr_32ch_pa*'
    }
    datasource.inputs.template_args = {
        'struct': [['subject_id']],
        'ap': [['subject_id']],
        'pa': [['subject_id']]
    }
    datasource.inputs.subject_id = subject_id
    datasource.inputs.sort_filelist = True

    # iterate over functional scans to define paths
    scan_file_list = glob.glob(
        os.path.join(base_dir, subject_id, 'Study*', '*'))
    func_list = [s for s in scan_file_list if "romcon_ap_32ch_mb8" in s]
    func_list = [s for s in func_list
                 if "SBRef" not in s]  # Exclude sbref for now.
    func_source = Node(interface=util.IdentityInterface(fields=['scan']),
                       name="func_source")
    func_source.iterables = ('scan', func_list)

    # Create Separate Converter Nodes for each different type of file. (dist corr scans need to be done before functional)
    ap_dcm2nii = Node(interface=Dcm2nii(), name='ap_dcm2nii')
    ap_dcm2nii.inputs.gzip_output = True
    ap_dcm2nii.inputs.output_dir = '.'
    ap_dcm2nii.inputs.date_in_filename = False

    pa_dcm2nii = Node(interface=Dcm2nii(), name='pa_dcm2nii')
    pa_dcm2nii.inputs.gzip_output = True
    pa_dcm2nii.inputs.output_dir = '.'
    pa_dcm2nii.inputs.date_in_filename = False

    f_dcm2nii = Node(interface=Dcm2nii(), name='f_dcm2nii')
    f_dcm2nii.inputs.gzip_output = True
    f_dcm2nii.inputs.output_dir = '.'
    f_dcm2nii.inputs.date_in_filename = False

    s_dcm2nii = Node(interface=Dcm2nii(), name='s_dcm2nii')
    s_dcm2nii.inputs.gzip_output = True
    s_dcm2nii.inputs.output_dir = '.'
    s_dcm2nii.inputs.date_in_filename = False

    ########################################
    ## Setup Nodes for distortion correction
    ########################################

    # merge output files into list
    merge_to_file_list = Node(interface=Merge_List(2),
                              infields=['in1', 'in2'],
                              name='merge_to_file_list')

    # fsl merge AP + PA files (depends on direction)
    merger = Node(interface=Merge(dimension='t'), name='merger')
    merger.inputs.output_type = 'NIFTI_GZ'

    # use topup to create distortion correction map
    topup = Node(interface=TOPUP(), name='topup')
    topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                              'epi_params_APPA_MB8.txt')
    topup.inputs.output_type = "NIFTI_GZ"
    topup.inputs.config = 'b02b0.cnf'

    # apply topup to all functional images
    apply_topup = Node(interface=ApplyTOPUP(), name='apply_topup')
    apply_topup.inputs.in_index = [1]
    apply_topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                                    'epi_params_APPA_MB8.txt')
    apply_topup.inputs.output_type = "NIFTI_GZ"
    apply_topup.inputs.method = 'jac'
    apply_topup.inputs.interp = 'spline'

    # Clear out Zeros from spline interpolation using absolute value.
    abs_maths = Node(interface=UnaryMaths(), name='abs_maths')
    abs_maths.inputs.operation = 'abs'

    ########################################
    ## Preprocessing
    ########################################

    # Trim - remove first 10 TRs
    n_vols = 10
    trim = Node(interface=Trim(), name='trim')
    trim.inputs.begin_index = n_vols

    #Realignment - 6 parameters - two-pass, registering to the mean image.
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    #Coregister - rigid-body (6 parameter) registration
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estwrite'

    #Plot Realignment
    plot_realign = Node(interface=PlotRealignmentParameters(),
                        name="plot_realign")

    #Artifact Detection
    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    # Gunzip - unzip the functional and structural images
    gunzip_struc = Node(Gunzip(), name="gunzip_struc")
    gunzip_func = Node(Gunzip(), name="gunzip_func")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize = Node(interface=Normalize12(jobtype='estwrite',
                                           tpm=template_file),
                     name="normalize")

    #Plot normalization Check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    #Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    #remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    #Smooth
    #implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = 6

    #Create Covariate matrix
    make_cov = Node(interface=Create_Covariates(), name="make_cov")

    # Create a datasink to clean up output files
    datasink = Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id

    ########################################
    # Create Workflow
    ########################################

    workflow = Workflow(name='Preprocessed')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    workflow.connect([
        (datasource, ap_dcm2nii, [('ap', 'source_dir')]),
        (datasource, pa_dcm2nii, [('pa', 'source_dir')]),
        (datasource, s_dcm2nii, [('struct', 'source_dir')]),
        (func_source, f_dcm2nii, [('scan', 'source_dir')]),
        (ap_dcm2nii, merge_to_file_list, [('converted_files', 'in1')]),
        (pa_dcm2nii, merge_to_file_list, [('converted_files', 'in2')]),
        (merge_to_file_list, merger, [('out', 'in_files')]),
        (merger, topup, [('merged_file', 'in_file')]),
        (topup, apply_topup, [('out_fieldcoef', 'in_topup_fieldcoef'),
                              ('out_movpar', 'in_topup_movpar')]),
        (f_dcm2nii, trim, [('converted_files', 'in_file')]),
        (trim, apply_topup, [('out_file', 'in_files')]),
        (apply_topup, abs_maths, [('out_corrected', 'in_file')]),
        (abs_maths, gunzip_func, [('out_file', 'in_file')]),
        (gunzip_func, realign, [('out_file', 'in_files')]),
        (s_dcm2nii, gunzip_struc, [('converted_files', 'in_file')]),
        (gunzip_struc, coregister, [('out_file', 'source')]),
        (coregister, normalize, [('coregistered_source', 'image_to_align')]),
        (realign, coregister, [('mean_image', 'target'),
                               ('realigned_files', 'apply_to_files')]),
        (realign, normalize, [(('mean_image', get_vox_dims),
                               'write_voxel_sizes')]),
        (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
        (normalize, smooth, [('normalized_files', 'in_files')]),
        (realign, compute_mask, [('mean_image', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters'),
                        ('realigned_files', 'realigned_files')]),
        (realign, plot_realign, [('realignment_parameters',
                                  'realignment_parameters')]),
        (normalize, plot_normalization_check, [('normalized_files', 'wra_img')
                                               ]),
        (realign, make_cov, [('realignment_parameters',
                              'realignment_parameters')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (normalize, datasink, [('normalized_files', 'structural.@normalize')]),
        (coregister, datasink, [('coregistered_source', 'structural.@struct')
                                ]),
        (topup, datasink, [('out_fieldcoef', 'distortion.@fieldcoef')]),
        (topup, datasink, [('out_movpar', 'distortion.@movpar')]),
        (smooth, datasink, [('smoothed_files', 'functional.@smooth')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_normalization_check, datasink,
         [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')])
    ])
    return workflow
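
# Hedged usage sketch for Couple_Preproc_Pipeline; all paths are hypothetical
# placeholders and the MultiProc settings are one reasonable choice, not a
# prescribed configuration.
wf = Couple_Preproc_Pipeline(base_dir='/data/couples_conflict',
                             output_dir='/data/couples_conflict/output',
                             subject_id='subject001',
                             spm_path='/opt/spm12')
wf.write_graph(dotfilename='preproc_graph', format='png')  # optional visual check
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})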
Beispiel #14
0
def fmri_cleanup_wf(wf_name="fmri_cleanup"):
    """ Run the resting-state fMRI pre-processing workflow against the rest files in `data_dir`.

    Tasks:
    - Trim first 6 volumes of the rs-fMRI file.
    - Slice Timing correction.
    - Motion and nuisance correction.
    - Calculate brain mask in fMRI space.
    - Bandpass frequency filtering for resting-state fMRI.
    - Smoothing.
    - Tissue maps co-registration to fMRI space.

    Parameters
    ----------
    wf_name: str

    Nipype Inputs
    -------------
    rest_input.in_file: traits.File
        The resting-state fMRI file.

    rest_input.anat: traits.File
        Path to the high-contrast anatomical image.

    rest_input.tissues: list of traits.File
        Paths to the tissue segmentations in anatomical space.
        Expected to have this order: GM, WM and CSF.

    rest_input.highpass_freq: traits.Float
        Band-pass timeseries filter lower bound in Hz.

    rest_input.lowpass_freq: traits.Float
        Band-pass timeseries filter upper bound in Hz.

    Nipype Outputs
    --------------
    rest_output.smooth: traits.File
        The isotropically smoothed time filtered nuisance corrected image.

    rest_output.nuis_corrected: traits.File
        The nuisance corrected fMRI file.

    rest_output.motion_params: traits.File
        The affine transformation file.

    rest_output.time_filtered: traits.File
        The bandpass time filtered fMRI file.

    rest_output.epi_brain_mask: traits.File
        An estimated brain mask from mean EPI volume.

    rest_output.tissues_brain_mask: traits.File
        A brain mask calculated from the addition of coregistered
        GM, WM and CSF segmentation volumes from the anatomical
        segmentation.

    rest_output.tissues: list of traits.File
        The tissues segmentation volume in fMRI space.
        Expected to have this order: GM, WM and CSF.

    rest_output.anat: traits.File
        The T1w image in fMRI space.

    rest_output.avg_epi: traits.File
        The average EPI image in fMRI space after slice-time and motion correction.

    rest_output.motion_regressors: traits.File

    rest_output.compcor_regressors: traits.File

    rest_output.art_displacement_files
        One image file containing the voxel-displacement timeseries.

    rest_output.art_intensity_files
        One file containing the global intensity values determined from the brainmask.

    rest_output.art_norm_files
        One file containing the composite norm.

    rest_output.art_outlier_files
         One file containing a list of 0-based indices corresponding to outlier volumes.

    rest_output.art_plot_files
        One image file containing the detected outliers.

    rest_output.art_statistic_files
        One file containing information about the different types of artifacts and, if design info is provided,
        details of stimulus-correlated motion and a listing of artifacts by event type.

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = [
        "in_file",
        "anat",
        "atlas_anat",
        "coreg_target",
        "tissues",
        "lowpass_freq",
        "highpass_freq",
    ]

    out_fields = [
        "motion_corrected",
        "motion_params",
        "tissues",
        "anat",
        "avg_epi",
        "time_filtered",
        "smooth",
        "tsnr_file",
        "epi_brain_mask",
        "tissues_brain_mask",
        "motion_regressors",
        "compcor_regressors",
        "gsr_regressors",
        "nuis_corrected",
        "art_displacement_files",
        "art_intensity_files",
        "art_norm_files",
        "art_outlier_files",
        "art_plot_files",
        "art_statistic_files",
    ]

    # input identities
    rest_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True),
                            name="rest_input")

    # rs-fMRI preprocessing nodes
    trim = setup_node(Trim(), name="trim")

    stc_wf = auto_spm_slicetime()
    realign = setup_node(nipy_motion_correction(), name='realign')

    # average
    average = setup_node(
        Function(
            function=mean_img,
            input_names=["in_file"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']
        ),
        name='average_epi'
    )

    mean_gunzip = setup_node(Gunzip(), name="mean_gunzip")

    # co-registration nodes
    coreg = setup_node(spm_coregister(cost_function="mi"), name="coreg_fmri")
    brain_sel = setup_node(Select(index=[0, 1, 2]), name="brain_sel")

    # brain mask made with EPI
    epi_mask = setup_node(ComputeMask(), name='epi_mask')

    # brain mask made with the merge of the tissue segmentations
    tissue_mask = setup_node(fsl.MultiImageMaths(), name='tissue_mask')
    tissue_mask.inputs.op_string = "-add %s -add %s -abs -kernel gauss 4 -dilM -ero -kernel gauss 1 -dilM -bin"
    tissue_mask.inputs.out_file = "tissue_brain_mask.nii.gz"

    # select tissues
    gm_select = setup_node(Select(index=[0]), name="gm_sel")
    wmcsf_select = setup_node(Select(index=[1, 2]), name="wmcsf_sel")

    # noise filter
    noise_wf = rest_noise_filter_wf()
    wm_select = setup_node(Select(index=[1]), name="wm_sel")
    csf_select = setup_node(Select(index=[2]), name="csf_sel")

    # bandpass filtering
    bandpass = setup_node(
        Function(
            input_names=['files', 'lowpass_freq', 'highpass_freq', 'tr'],
            output_names=['out_files'],
            function=bandpass_filter
        ),
        name='bandpass'
    )

    # smooth
    smooth = setup_node(
        Function(
            function=smooth_img,
            input_names=["in_file", "fwhm"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']
        ),
        name="smooth"
    )
    smooth.inputs.fwhm = get_config_setting('fmri_smooth.fwhm', default=8)
    smooth.inputs.out_file = "smooth_std_{}.nii.gz".format(wf_name)

    # output identities
    rest_output = setup_node(IdentityInterface(fields=out_fields), name="rest_output")

    # Connect the nodes
    wf.connect([
        # trim
        (rest_input, trim, [("in_file", "in_file")]),

        # slice time correction
        (trim, stc_wf, [("out_file", "stc_input.in_file")]),

        # motion correction
        (stc_wf, realign, [("stc_output.timecorrected_files", "in_file")]),

        # coregistration target
        (realign, average, [("out_file", "in_file")]),
        (average, mean_gunzip, [("out_file", "in_file")]),
        (mean_gunzip, coreg, [("out_file", "target")]),

        # epi brain mask
        (average, epi_mask, [("out_file", "mean_volume")]),

        # coregistration
        (rest_input, coreg, [("anat", "source")]),
        (rest_input, brain_sel, [("tissues", "inlist")]),
        (brain_sel, coreg, [(("out", flatten_list), "apply_to_files")]),

        # tissue brain mask
        (coreg, gm_select, [("coregistered_files", "inlist")]),
        (coreg, wmcsf_select, [("coregistered_files", "inlist")]),
        (gm_select, tissue_mask, [(("out", flatten_list), "in_file")]),
        (wmcsf_select, tissue_mask, [(("out", flatten_list), "operand_files")]),

        # nuisance correction
        (coreg, wm_select, [("coregistered_files", "inlist",)]),
        (coreg, csf_select, [("coregistered_files", "inlist",)]),
        (realign, noise_wf, [("out_file", "rest_noise_input.in_file",)]),
        (tissue_mask, noise_wf, [("out_file", "rest_noise_input.brain_mask")]),
        (wm_select, noise_wf, [(("out", flatten_list), "rest_noise_input.wm_mask")]),
        (csf_select, noise_wf, [(("out", flatten_list), "rest_noise_input.csf_mask")]),

        (realign, noise_wf, [("par_file", "rest_noise_input.motion_params",)]),

        # temporal filtering
        (noise_wf, bandpass, [("rest_noise_output.nuis_corrected", "files")]),
        # (realign,     bandpass,    [("out_file", "files")]),
        (stc_wf, bandpass, [("stc_output.time_repetition", "tr")]),
        (rest_input, bandpass, [
            ("lowpass_freq", "lowpass_freq"),
            ("highpass_freq", "highpass_freq"),
        ]),
        (bandpass, smooth, [("out_files", "in_file")]),

        # output
        (epi_mask, rest_output, [("brain_mask", "epi_brain_mask")]),
        (tissue_mask, rest_output, [("out_file", "tissues_brain_mask")]),
        (realign, rest_output, [
            ("out_file", "motion_corrected"),
            ("par_file", "motion_params"),
        ]),
        (coreg, rest_output, [
            ("coregistered_files", "tissues"),
            ("coregistered_source", "anat"),
        ]),
        (noise_wf, rest_output, [
            ("rest_noise_output.motion_regressors", "motion_regressors"),
            ("rest_noise_output.compcor_regressors", "compcor_regressors"),
            ("rest_noise_output.gsr_regressors", "gsr_regressors"),
            ("rest_noise_output.nuis_corrected", "nuis_corrected"),
            ("rest_noise_output.tsnr_file", "tsnr_file"),
            ("rest_noise_output.art_displacement_files", "art_displacement_files"),
            ("rest_noise_output.art_intensity_files", "art_intensity_files"),
            ("rest_noise_output.art_norm_files", "art_norm_files"),
            ("rest_noise_output.art_outlier_files", "art_outlier_files"),
            ("rest_noise_output.art_plot_files", "art_plot_files"),
            ("rest_noise_output.art_statistic_files", "art_statistic_files"),
        ]),
        (average, rest_output, [("out_file", "avg_epi")]),
        (bandpass, rest_output, [("out_files", "time_filtered")]),
        (smooth, rest_output, [("out_file", "smooth")]),
    ])

    return wf
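
# Hedged usage sketch for fmri_cleanup_wf: inputs are fed through the
# rest_input identity node; every path below is a hypothetical placeholder.
cleanup = fmri_cleanup_wf()
cleanup.inputs.rest_input.in_file = '/data/sub-01/func/rest.nii.gz'
cleanup.inputs.rest_input.anat = '/data/sub-01/anat/T1w.nii.gz'
cleanup.inputs.rest_input.tissues = ['/data/sub-01/anat/gm.nii.gz',   # GM
                                     '/data/sub-01/anat/wm.nii.gz',   # WM
                                     '/data/sub-01/anat/csf.nii.gz']  # CSF
cleanup.inputs.rest_input.highpass_freq = 0.01  # band lower bound, Hz
cleanup.inputs.rest_input.lowpass_freq = 0.1    # band upper bound, Hz
cleanup.base_dir = '/tmp/work'  # where intermediate results are written
cleanup.run()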
Beispiel #15
0
def init_ica_aroma_wf(
    dt,
    aroma_melodic_dim=-200,
    err_on_aroma_warn=False,
    susan_fwhm=6.0,
    name='ica_aroma_wf',
):
    """
    Build a workflow that runs `ICA-AROMA`_.

    This workflow wraps `ICA-AROMA`_ to identify and remove motion-related
    independent components from a BOLD time series.

    The following steps are performed:

    #. Remove non-steady-state volumes from the BOLD series.
    #. Smooth data using FSL `susan`, with a kernel width FWHM=6.0mm.
    #. Run FSL `melodic` outside of ICA-AROMA to generate the report
    #. Run ICA-AROMA
    #. Aggregate identified motion components (aggressive) to TSV
    #. Return ``classified_motion_ICs`` and ``melodic_mix`` for user to complete
       non-aggressive denoising in T1w space
    #. Calculate ICA-AROMA-identified noise components
       (columns named ``AROMAAggrCompXX``)

    There is a current discussion on whether other confounds should be extracted
    before or after denoising `here
    <http://nbviewer.jupyter.org/github/nipreps/fmriprep-notebooks/blob/922e436429b879271fa13e76767a6e73443e74d9/issue-817_aroma_confounds.ipynb>`__.

    .. _ICA-AROMA: https://github.com/maartenmennes/ICA-AROMA

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from ecp.workflows.confounds import init_ica_aroma_wf
            wf = init_ica_aroma_wf(
                dt=1.0)

    Parameters
    ----------
    dt : :obj:`float`
        bold repetition time
    aroma_melodic_dim : :obj:`int`
        Set the dimensionality of the MELODIC ICA decomposition.
        Negative numbers set a maximum on automatic dimensionality estimation.
        Positive numbers set an exact number of components to extract.
        (default: -200, i.e., estimate <=200 components)
    err_on_aroma_warn : :obj:`bool`
        Fail on ICA-AROMA errors instead of issuing a warning
    susan_fwhm : :obj:`float`
        Kernel width (FWHM in mm) for the smoothing step with
        FSL ``susan`` (default: 6.0mm)
    name : :obj:`str`
        Name of workflow (default: ``ica_aroma_wf``)

    Inputs
    ------
    bold_std
        BOLD series NIfTI file in MNI152NLin6Asym space
    bold_mask_std
        BOLD mask for MNI152NLin6Asym space
    movpar_file
        movement parameter file
    skip_vols
        number of non-steady-state volumes
        
    Outputs
    -------
    aroma_confounds
        TSV of confounds identified as noise by ICA-AROMA
    aroma_noise_ics
        CSV of noise components identified by ICA-AROMA
    melodic_mix
        FSL MELODIC mixing matrix
    aroma_metadata
        ICA-AROMA component metadata
    out_report
        ICA-AROMA report

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.segmentation import ICA_AROMARPT
    from niworkflows.interfaces.utility import KeySelect
    from niworkflows.interfaces.utils import TSV2JSON

    workflow = Workflow(name=name)
    workflow.__postdesc__ = """\
Automatic removal of motion artifacts using independent component analysis
[ICA-AROMA, @aroma] was performed on the *preprocessed BOLD on MNI space*
time-series after removal of non-steady state volumes and spatial smoothing
with an isotropic, Gaussian kernel of 6mm FWHM (full-width half-maximum).
The "aggressive" noise-regressors were collected and placed
in the corresponding confounds file.
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_std',
        'bold_mask_std',
        'movpar_file',
        'skip_vols',
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'aroma_confounds', 'aroma_noise_ics', 'melodic_mix', 'aroma_metadata',
        'out_report'
    ]),
                         name='outputnode')

    # extract out to BOLD base
    rm_non_steady_state = pe.Node(Trim(), name='rm_nonsteady')
    trim_movement = pe.Node(TrimMovement(), name='trim_movement')

    calc_median_val = pe.Node(fsl.ImageStats(op_string='-k %s -p 50'),
                              name='calc_median_val')
    calc_bold_mean = pe.Node(fsl.MeanImage(), name='calc_bold_mean')

    def _getusans_func(image, thresh):
        return [tuple([image, thresh])]

    getusans = pe.Node(niu.Function(function=_getusans_func,
                                    output_names=['usans']),
                       name='getusans',
                       mem_gb=0.01)

    smooth = pe.Node(fsl.SUSAN(fwhm=susan_fwhm), name='smooth')

    # melodic node
    melodic = pe.Node(fsl.MELODIC(no_bet=True,
                                  tr_sec=dt,
                                  mm_thresh=0.5,
                                  out_stats=True,
                                  dim=aroma_melodic_dim),
                      name="melodic")

    # ica_aroma node
    ica_aroma = pe.Node(ICA_AROMARPT(denoise_type='no',
                                     generate_report=True,
                                     TR=dt,
                                     args='-np'),
                        name='ica_aroma')

    # extract the confound ICs from the results
    ica_aroma_confound_extraction = pe.Node(
        ICAConfounds(err_on_aroma_warn=err_on_aroma_warn),
        name='ica_aroma_confound_extraction')

    ica_aroma_metadata_fmt = pe.Node(TSV2JSON(index_column='IC',
                                              output=None,
                                              enforce_case=True,
                                              additional_metadata={
                                                  'Method': {
                                                      'Name':
                                                      'ICA-AROMA',
                                                      'Version':
                                                      getenv(
                                                          'AROMA_VERSION',
                                                          'n/a')
                                                  }
                                              }),
                                     name='ica_aroma_metadata_fmt')

    def _getbtthresh(medianval):
        return 0.75 * medianval

    # connect the nodes
    workflow.connect([
        (inputnode, ica_aroma, [('movpar_file', 'motion_parameters')]),
        (inputnode, rm_non_steady_state, [('skip_vols', 'begin_index')]),
        (inputnode, rm_non_steady_state, [('bold_std', 'in_file')]),
        (inputnode, calc_median_val, [('bold_mask_std', 'mask_file')]),
        (inputnode, trim_movement, [('movpar_file', 'movpar_file')]),
        (inputnode, trim_movement, [('skip_vols', 'skip_vols')]),
        (rm_non_steady_state, calc_median_val, [('out_file', 'in_file')]),
        (rm_non_steady_state, calc_bold_mean, [('out_file', 'in_file')]),
        (calc_bold_mean, getusans, [('out_file', 'image')]),
        (calc_median_val, getusans, [('out_stat', 'thresh')]),
        # Connect input nodes to complete smoothing
        (rm_non_steady_state, smooth, [('out_file', 'in_file')]),
        (getusans, smooth, [('usans', 'usans')]),
        (calc_median_val, smooth, [(('out_stat', _getbtthresh),
                                    'brightness_threshold')]),
        # connect smooth to melodic
        (smooth, melodic, [('smoothed_file', 'in_files')]),
        (inputnode, melodic, [('bold_mask_std', 'mask')]),
        # connect nodes to ICA-AROMA
        (smooth, ica_aroma, [('smoothed_file', 'in_file')]),
        (inputnode, ica_aroma, [('bold_mask_std', 'report_mask'),
                                ('bold_mask_std', 'mask')]),
        (melodic, ica_aroma, [('out_dir', 'melodic_dir')]),
        # generate tsvs from ICA-AROMA
        (ica_aroma, ica_aroma_confound_extraction, [('out_dir', 'in_directory')
                                                    ]),
        (inputnode, ica_aroma_confound_extraction, [('skip_vols', 'skip_vols')
                                                    ]),
        (ica_aroma_confound_extraction, ica_aroma_metadata_fmt,
         [('aroma_metadata', 'in_file')]),
        # output for processing and reporting
        (ica_aroma_confound_extraction,
         outputnode, [('aroma_confounds', 'aroma_confounds'),
                      ('aroma_noise_ics', 'aroma_noise_ics'),
                      ('melodic_mix', 'melodic_mix')]),
        (ica_aroma_metadata_fmt, outputnode, [('output', 'aroma_metadata')]),
        (ica_aroma, outputnode, [('out_report', 'out_report')]),
    ])

    return workflow
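
# Hedged usage sketch for init_ica_aroma_wf; file names are hypothetical
# placeholders and dt must match the BOLD repetition time in seconds.
aroma_wf = init_ica_aroma_wf(dt=2.0, susan_fwhm=6.0)
aroma_wf.inputs.inputnode.bold_std = '/data/sub-01_space-MNI152NLin6Asym_desc-preproc_bold.nii.gz'
aroma_wf.inputs.inputnode.bold_mask_std = '/data/sub-01_space-MNI152NLin6Asym_desc-brain_mask.nii.gz'
aroma_wf.inputs.inputnode.movpar_file = '/data/sub-01_movpar.txt'
aroma_wf.inputs.inputnode.skip_vols = 4  # non-steady-state volumes to drop
aroma_wf.base_dir = '/tmp/work'
aroma_wf.run()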
Beispiel #16
0
def builder(subject_id,
            subId,
            project_dir,
            data_dir,
            output_dir,
            output_final_dir,
            output_interm_dir,
            layout,
            anat=None,
            funcs=None,
            fmaps=None,
            task_name='',
            session=None,
            apply_trim=False,
            apply_dist_corr=False,
            apply_smooth=False,
            apply_filter=False,
            mni_template='2mm',
            apply_n4=True,
            ants_threads=8,
            readable_crash_files=False,
            write_logs=True):
    """
    Core function that returns a workflow. See wfmaker for more details.

    Args:
        subject_id: name of subject folder for the final output sub-folder name
        subId: abbreviated subject name for the intermediate output sub-folder name
        project_dir: full path to the project root
        data_dir: full path to raw data files
        output_dir: upper-level output dir (others will be nested within this)
        output_final_dir: final preprocessed sub-dir name
        output_interm_dir: intermediate preprocess sub-dir name
        layout: BIDS layout instance
    """

    ##################
    ### PATH SETUP ###
    ##################
    if session is not None:
        # zero-pad single-digit sessions, e.g. 3 -> '03'
        session = str(int(session)).zfill(2)

    # Set MNI template
    MNItemplate = os.path.join(get_resource_path(),
                               'MNI152_T1_' + mni_template + '_brain.nii.gz')
    MNImask = os.path.join(get_resource_path(),
                           'MNI152_T1_' + mni_template + '_brain_mask.nii.gz')
    MNItemplatehasskull = os.path.join(get_resource_path(),
                                       'MNI152_T1_' + mni_template + '.nii.gz')

    # Set ANTs files
    bet_ants_template = os.path.join(get_resource_path(),
                                     'OASIS_template.nii.gz')
    bet_ants_prob_mask = os.path.join(
        get_resource_path(), 'OASIS_BrainCerebellumProbabilityMask.nii.gz')
    bet_ants_registration_mask = os.path.join(
        get_resource_path(), 'OASIS_BrainCerebellumRegistrationMask.nii.gz')

    #################################
    ### NIPYPE IMPORTS AND CONFIG ###
    #################################
    # Update the nipype global config because setting workflow.config[] = ... doesn't seem to work
    # We can't store a nipype config/rc file in the container anyway, so set the options globally before importing and setting up the workflow, as suggested here: http://nipype.readthedocs.io/en/latest/users/config_file.html#config-file

    # Create subject's intermediate directory before configuring nipype and the workflow because that's where we'll save log files in addition to intermediate files
    if not os.path.exists(os.path.join(output_interm_dir, subId, 'logs')):
        os.makedirs(os.path.join(output_interm_dir, subId, 'logs'))
    log_dir = os.path.join(output_interm_dir, subId, 'logs')
    from nipype import config
    if readable_crash_files:
        cfg = dict(execution={'crashfile_format': 'txt'})
        config.update_config(cfg)
    config.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': write_logs
        },
        'execution': {
            'crashdump_dir': log_dir
        }
    })
    from nipype import logging
    logging.update_logging(config)

    # Now import everything else
    from nipype.interfaces.io import DataSink
    from nipype.interfaces.utility import Merge, IdentityInterface
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.nipy.preprocess import ComputeMask
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces.ants.segmentation import BrainExtraction, N4BiasFieldCorrection
    from nipype.interfaces.ants import Registration, ApplyTransforms
    from nipype.interfaces.fsl import MCFLIRT, TOPUP, ApplyTOPUP
    from nipype.interfaces.fsl.maths import MeanImage
    from nipype.interfaces.fsl import Merge as MERGE
    from nipype.interfaces.fsl.utils import Smooth
    from nipype.interfaces.nipy.preprocess import Trim
    from .interfaces import Plot_Coregistration_Montage, Plot_Quality_Control, Plot_Realignment_Parameters, Create_Covariates, Down_Sample_Precision, Create_Encoding_File, Filter_In_Mask

    ##################
    ### INPUT NODE ###
    ##################

    # Turn functional file list into interable Node
    func_scans = Node(IdentityInterface(fields=['scan']), name='func_scans')
    func_scans.iterables = ('scan', funcs)

    # Get TR for use in filtering below; we're assuming all BOLD runs have the same TR
    tr_length = layout.get_metadata(funcs[0])['RepetitionTime']

    #####################################
    ## TRIM ##
    #####################################
    if apply_trim:
        trim = Node(Trim(), name='trim')
        trim.inputs.begin_index = apply_trim
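        # Note: apply_trim doubles as the number of leading volumes to drop,
        # so e.g. apply_trim=5 both enables trimming and removes the first 5 TRs.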

    #####################################
    ## DISTORTION CORRECTION ##
    #####################################

    if apply_dist_corr:
        # Get fmap file locations
        fmaps = [
            f.filename for f in layout.get(
                subject=subId, modality='fmap', extensions='.nii.gz')
        ]
        if not fmaps:
            raise IOError(
                "Distortion Correction requested but field map scans not found..."
            )

        # Get fmap metadata
        totalReadoutTimes, measurements, fmap_pes = [], [], []

        for i, fmap in enumerate(fmaps):
            # Grab total readout time for each fmap
            totalReadoutTimes.append(
                layout.get_metadata(fmap)['TotalReadoutTime'])

            # Grab measurements (for some reason pyBIDS doesn't grab dcm_meta... fields from the side-car json file, and json.load doesn't either; so instead just read the header using nibabel to determine the number of scans)
            measurements.append(nib.load(fmap).header['dim'][4])

            # Get phase encoding direction
            fmap_pe = layout.get_metadata(fmap)["PhaseEncodingDirection"]
            fmap_pes.append(fmap_pe)

        encoding_file_writer = Node(interface=Create_Encoding_File(),
                                    name='create_encoding')
        encoding_file_writer.inputs.totalReadoutTimes = totalReadoutTimes
        encoding_file_writer.inputs.fmaps = fmaps
        encoding_file_writer.inputs.fmap_pes = fmap_pes
        encoding_file_writer.inputs.measurements = measurements
        encoding_file_writer.inputs.file_name = 'encoding_file.txt'

        merge_to_file_list = Node(interface=Merge(2),
                                  infields=['in1', 'in2'],
                                  name='merge_to_file_list')
        merge_to_file_list.inputs.in1 = fmaps[0]
        merge_to_file_list.inputs.in2 = fmaps[1]

        # Merge AP and PA distortion correction scans
        merger = Node(interface=MERGE(dimension='t'), name='merger')
        merger.inputs.output_type = 'NIFTI_GZ'
        merger.inputs.in_files = fmaps
        merger.inputs.merged_file = 'merged_epi.nii.gz'

        # Create distortion correction map
        topup = Node(interface=TOPUP(), name='topup')
        topup.inputs.output_type = 'NIFTI_GZ'

        # Apply distortion correction to other scans
        apply_topup = Node(interface=ApplyTOPUP(), name='apply_topup')
        apply_topup.inputs.output_type = 'NIFTI_GZ'
        apply_topup.inputs.method = 'jac'
        apply_topup.inputs.interp = 'spline'

    ###################################
    ### REALIGN ###
    ###################################
    realign_fsl = Node(MCFLIRT(), name="realign")
    realign_fsl.inputs.cost = 'mutualinfo'
    realign_fsl.inputs.mean_vol = True
    realign_fsl.inputs.output_type = 'NIFTI_GZ'
    realign_fsl.inputs.save_mats = True
    realign_fsl.inputs.save_rms = True
    realign_fsl.inputs.save_plots = True

    ###################################
    ### MEAN EPIs ###
    ###################################
    # For coregistration after realignment
    mean_epi = Node(MeanImage(), name='mean_epi')
    mean_epi.inputs.dimension = 'T'

    # For after normalization is done to plot checks
    mean_norm_epi = Node(MeanImage(), name='mean_norm_epi')
    mean_norm_epi.inputs.dimension = 'T'

    ###################################
    ### MASK, ART, COV CREATION ###
    ###################################
    compute_mask = Node(ComputeMask(), name='compute_mask')
    compute_mask.inputs.m = .05

    art = Node(ArtifactDetect(), name='art')
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'FSL'

    make_cov = Node(Create_Covariates(), name='make_cov')

    ################################
    ### N4 BIAS FIELD CORRECTION ###
    ################################
    if apply_n4:
        n4_correction = Node(N4BiasFieldCorrection(), name='n4_correction')
        n4_correction.inputs.copy_header = True
        n4_correction.inputs.save_bias = False
        n4_correction.inputs.num_threads = ants_threads
        n4_correction.inputs.input_image = anat

    ###################################
    ### BRAIN EXTRACTION ###
    ###################################
    brain_extraction_ants = Node(BrainExtraction(), name='brain_extraction')
    brain_extraction_ants.inputs.dimension = 3
    brain_extraction_ants.inputs.use_floatingpoint_precision = 1
    brain_extraction_ants.inputs.num_threads = ants_threads
    brain_extraction_ants.inputs.brain_probability_mask = bet_ants_prob_mask
    brain_extraction_ants.inputs.keep_temporary_files = 1
    brain_extraction_ants.inputs.brain_template = bet_ants_template
    brain_extraction_ants.inputs.extraction_registration_mask = bet_ants_registration_mask
    brain_extraction_ants.inputs.out_prefix = 'bet'

    ###################################
    ### COREGISTRATION ###
    ###################################
    coregistration = Node(Registration(), name='coregistration')
    coregistration.inputs.float = False
    coregistration.inputs.output_transform_prefix = "meanEpi2highres"
    coregistration.inputs.transforms = ['Rigid']
    coregistration.inputs.transform_parameters = [(0.1, )]  # one tuple per transform ('Rigid')
    coregistration.inputs.number_of_iterations = [[1000, 500, 250, 100]]
    coregistration.inputs.dimension = 3
    coregistration.inputs.num_threads = ants_threads
    coregistration.inputs.write_composite_transform = True
    coregistration.inputs.collapse_output_transforms = True
    coregistration.inputs.metric = ['MI']
    coregistration.inputs.metric_weight = [1]
    coregistration.inputs.radius_or_number_of_bins = [32]
    coregistration.inputs.sampling_strategy = ['Regular']
    coregistration.inputs.sampling_percentage = [0.25]
    coregistration.inputs.convergence_threshold = [1e-08]
    coregistration.inputs.convergence_window_size = [10]
    coregistration.inputs.smoothing_sigmas = [[3, 2, 1, 0]]
    coregistration.inputs.sigma_units = ['mm']
    coregistration.inputs.shrink_factors = [[4, 3, 2, 1]]
    coregistration.inputs.use_estimate_learning_rate_once = [True]
    coregistration.inputs.use_histogram_matching = [False]
    coregistration.inputs.initial_moving_transform_com = True
    coregistration.inputs.output_warped_image = True
    coregistration.inputs.winsorize_lower_quantile = 0.01
    coregistration.inputs.winsorize_upper_quantile = 0.99

    ###################################
    ### NORMALIZATION ###
    ###################################
    # Settings Explanations
    # Only a few key settings are worth adjusting and most others relate to how ANTs optimizer starts or iterates and won't make a ton of difference
    # Brian Avants referred to these settings as the last "best tested" when he was aligning fMRI data: https://github.com/ANTsX/ANTsRCore/blob/master/R/antsRegistration.R#L275
    # Things that matter the most:
    # smoothing_sigmas:
    # how much gaussian smoothing to apply when performing registration, probably want the upper limit of this to match the resolution that the data is collected at e.g. 3mm
    # Old settings [[3,2,1,0]]*3
    # shrink_factors
    # The coarseness with which to do registration
    # Old settings [[8,4,2,1]] * 3
    # >= 8 may result in some problems, causing big chunks of cortex with little fine-grained spatial structure to be moved to other parts of cortex
    # Other settings
    # transform_parameters:
    # how much regularization to do for fitting that transformation
    # for syn this pertains to both the gradient regularization term, and the flow, and elastic terms. Leave the syn settings alone as they seem to be the most well tested across published data sets
    # radius_or_number_of_bins
    # This is the bin size for MI metrics and 32 is probably adequate for most use cases. Increasing this might increase precision (e.g. to 64) but takes exponentially longer
    # use_histogram_matching
    # Use image intensity distribution to guide registration
    # Leave it on for within modality registration (e.g. T1 -> MNI), but off for between modality registration (e.g. EPI -> T1)
    # convergence_threshold
    # threshold for optimizer
    # convergence_window_size
    # how many samples should optimizer average to compute threshold?
    # sampling_strategy
    # what strategy should ANTs use to initialize the transform. Regular here refers to approximately random sampling around the center of the image mass

    normalization = Node(Registration(), name='normalization')
    normalization.inputs.float = False
    normalization.inputs.collapse_output_transforms = True
    normalization.inputs.convergence_threshold = [1e-06]
    normalization.inputs.convergence_window_size = [10]
    normalization.inputs.dimension = 3
    normalization.inputs.fixed_image = MNItemplate
    normalization.inputs.initial_moving_transform_com = True
    normalization.inputs.metric = ['MI', 'MI', 'CC']
    normalization.inputs.metric_weight = [1.0] * 3
    normalization.inputs.number_of_iterations = [[1000, 500, 250, 100],
                                                 [1000, 500, 250, 100],
                                                 [100, 70, 50, 20]]
    normalization.inputs.num_threads = ants_threads
    normalization.inputs.output_transform_prefix = 'anat2template'
    normalization.inputs.output_inverse_warped_image = True
    normalization.inputs.output_warped_image = True
    normalization.inputs.radius_or_number_of_bins = [32, 32, 4]
    normalization.inputs.sampling_percentage = [0.25, 0.25, 1]
    normalization.inputs.sampling_strategy = ['Regular', 'Regular', 'None']
    normalization.inputs.shrink_factors = [[8, 4, 2, 1]] * 3
    normalization.inputs.sigma_units = ['vox'] * 3
    normalization.inputs.smoothing_sigmas = [[3, 2, 1, 0]] * 3
    normalization.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    normalization.inputs.transform_parameters = [(0.1, ), (0.1, ),
                                                 (0.1, 3.0, 0.0)]
    normalization.inputs.use_histogram_matching = True
    normalization.inputs.winsorize_lower_quantile = 0.005
    normalization.inputs.winsorize_upper_quantile = 0.995
    normalization.inputs.write_composite_transform = True

    # NEW SETTINGS (need to be adjusted; specifically shrink_factors and smoothing_sigmas need to be the same length)
    # normalization = Node(Registration(), name='normalization')
    # normalization.inputs.float = False
    # normalization.inputs.collapse_output_transforms = True
    # normalization.inputs.convergence_threshold = [1e-06, 1e-06, 1e-07]
    # normalization.inputs.convergence_window_size = [10]
    # normalization.inputs.dimension = 3
    # normalization.inputs.fixed_image = MNItemplate
    # normalization.inputs.initial_moving_transform_com = True
    # normalization.inputs.metric = ['MI', 'MI', 'CC']
    # normalization.inputs.metric_weight = [1.0]*3
    # normalization.inputs.number_of_iterations = [[1000, 500, 250, 100],
    #                                              [1000, 500, 250, 100],
    #                                              [100, 70, 50, 20]]
    # normalization.inputs.num_threads = ants_threads
    # normalization.inputs.output_transform_prefix = 'anat2template'
    # normalization.inputs.output_inverse_warped_image = True
    # normalization.inputs.output_warped_image = True
    # normalization.inputs.radius_or_number_of_bins = [32, 32, 4]
    # normalization.inputs.sampling_percentage = [0.25, 0.25, 1]
    # normalization.inputs.sampling_strategy = ['Regular',
    #                                           'Regular',
    #                                           'None']
    # normalization.inputs.shrink_factors = [[4, 3, 2, 1]]*3
    # normalization.inputs.sigma_units = ['vox']*3
    # normalization.inputs.smoothing_sigmas = [[2, 1], [2, 1], [3, 2, 1, 0]]
    # normalization.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    # normalization.inputs.transform_parameters = [(0.1,),
    #                                              (0.1,),
    #                                              (0.1, 3.0, 0.0)]
    # normalization.inputs.use_histogram_matching = True
    # normalization.inputs.winsorize_lower_quantile = 0.005
    # normalization.inputs.winsorize_upper_quantile = 0.995
    # normalization.inputs.write_composite_transform = True

    ###################################
    ### APPLY TRANSFORMS AND SMOOTH ###
    ###################################
    merge_transforms = Node(Merge(2),
                            iterfield=['in2'],
                            name='merge_transforms')

    # Used for epi -> mni, via (coreg + norm)
    apply_transforms = Node(ApplyTransforms(),
                            iterfield=['input_image'],
                            name='apply_transforms')
    apply_transforms.inputs.input_image_type = 3
    apply_transforms.inputs.float = False
    apply_transforms.inputs.num_threads = ants_threads
    apply_transforms.inputs.environ = {}
    apply_transforms.inputs.interpolation = 'BSpline'
    apply_transforms.inputs.invert_transform_flags = [False, False]
    apply_transforms.inputs.reference_image = MNItemplate

    # Used for t1 segmented -> mni, via (norm)
    apply_transform_seg = Node(ApplyTransforms(), name='apply_transform_seg')
    apply_transform_seg.inputs.input_image_type = 3
    apply_transform_seg.inputs.float = False
    apply_transform_seg.inputs.num_threads = ants_threads
    apply_transform_seg.inputs.environ = {}
    apply_transform_seg.inputs.interpolation = 'MultiLabel'
    apply_transform_seg.inputs.invert_transform_flags = [False]
    apply_transform_seg.inputs.reference_image = MNItemplate

    ###################################
    ### PLOTS ###
    ###################################
    plot_realign = Node(Plot_Realignment_Parameters(), name="plot_realign")
    plot_qa = Node(Plot_Quality_Control(), name="plot_qa")
    plot_normalization_check = Node(Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = MNItemplatehasskull

    ############################################
    ### FILTER, SMOOTH, DOWNSAMPLE PRECISION ###
    ############################################
    # Use cosanlab_preproc for down sampling
    down_samp = Node(Down_Sample_Precision(), name="down_samp")

    # Use FSL for smoothing
    if apply_smooth:
        smooth = Node(Smooth(), name='smooth')
        if isinstance(apply_smooth, list):
            smooth.iterables = ("fwhm", apply_smooth)
        elif isinstance(apply_smooth, int) or isinstance(apply_smooth, float):
            smooth.inputs.fwhm = apply_smooth
        else:
            raise ValueError("apply_smooth must be a list or int/float")

    # Use cosanlab_preproc for low-pass filtering
    if apply_filter:
        lp_filter = Node(Filter_In_Mask(), name='lp_filter')
        lp_filter.inputs.mask = MNImask
        lp_filter.inputs.sampling_rate = tr_length
        lp_filter.inputs.high_pass_cutoff = 0
        if isinstance(apply_filter, list):
            lp_filter.iterables = ("low_pass_cutoff", apply_filter)
        elif isinstance(apply_filter, int) or isinstance(apply_filter, float):
            lp_filter.inputs.low_pass_cutoff = apply_filter
        else:
            raise ValueError("apply_filter must be a list or int/float")

    ###################
    ### OUTPUT NODE ###
    ###################
    # Collect all final outputs in the output dir and get rid of file name additions
    datasink = Node(DataSink(), name='datasink')
    if session:
        datasink.inputs.base_directory = os.path.join(output_final_dir,
                                                      subject_id)
        datasink.inputs.container = 'ses-' + session
    else:
        datasink.inputs.base_directory = output_final_dir
        datasink.inputs.container = subject_id

    # Remove substitutions
    data_dir_parts = data_dir.split('/')[1:]
    if session:
        prefix = ['_scan_'] + data_dir_parts + [subject_id] + [
            'ses-' + session
        ] + ['func']
    else:
        prefix = ['_scan_'] + data_dir_parts + [subject_id] + ['func']
    func_scan_names = [os.path.split(elem)[-1] for elem in funcs]
    to_replace = []
    for elem in func_scan_names:
        bold_name = elem.split(subject_id + '_')[-1]
        bold_name = bold_name.split('.nii.gz')[0]
        to_replace.append(('..'.join(prefix + [elem]), bold_name))
    datasink.inputs.substitutions = to_replace
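    # Illustrative example (hypothetical values): with data_dir='/data/bids' and
    # subject_id='sub-01', a flattened sink name like
    #   '_scan_..data..bids..sub-01..func..sub-01_task-rest_bold.nii.gz'
    # is substituted with the short name 'task-rest_bold'.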

    #####################
    ### INIT WORKFLOW ###
    #####################
    # If we have sessions provide the full path to the subject's intermediate directory
    # and only rely on workflow init to create the session container *within* that directory
    # Otherwise just point to the intermediate directory and let the workflow init create the subject container within the intermediate directory
    if session:
        workflow = Workflow(name='ses_' + session)
        workflow.base_dir = os.path.join(output_interm_dir, subId)
    else:
        workflow = Workflow(name=subId)
        workflow.base_dir = output_interm_dir

    ############################
    ######### PART (1a) #########
    # func -> discorr -> trim -> realign
    # OR
    # func -> trim -> realign
    # OR
    # func -> discorr -> realign
    # OR
    # func -> realign
    ############################
    if apply_dist_corr:
        workflow.connect([(encoding_file_writer, topup, [('encoding_file',
                                                          'encoding_file')]),
                          (encoding_file_writer, apply_topup,
                           [('encoding_file', 'encoding_file')]),
                          (merger, topup, [('merged_file', 'in_file')]),
                          (func_scans, apply_topup, [('scan', 'in_files')]),
                          (topup, apply_topup,
                           [('out_fieldcoef', 'in_topup_fieldcoef'),
                            ('out_movpar', 'in_topup_movpar')])])
        if apply_trim:
            # Dist Corr + Trim
            workflow.connect([(apply_topup, trim, [('out_corrected', 'in_file')
                                                   ]),
                              (trim, realign_fsl, [('out_file', 'in_file')])])
        else:
            # Dist Corr + No Trim
            workflow.connect([(apply_topup, realign_fsl, [('out_corrected',
                                                           'in_file')])])
    else:
        if apply_trim:
            # No Dist Corr + Trim
            workflow.connect([(func_scans, trim, [('scan', 'in_file')]),
                              (trim, realign_fsl, [('out_file', 'in_file')])])
        else:
            # No Dist Corr + No Trim
            workflow.connect([
                (func_scans, realign_fsl, [('scan', 'in_file')]),
            ])

    ############################
    ######### PART (1n) #########
    # anat -> N4 -> bet
    # OR
    # anat -> bet
    ############################
    if apply_n4:
        workflow.connect([(n4_correction, brain_extraction_ants,
                           [('output_image', 'anatomical_image')])])
    else:
        brain_extraction_ants.inputs.anatomical_image = anat

    ##########################################
    ############### PART (2) #################
    # realign -> coreg -> mni (via t1)
    # t1 -> mni
    # covariate creation
    # plot creation
    ###########################################

    workflow.connect([
        (realign_fsl, plot_realign, [('par_file', 'realignment_parameters')]),
        (realign_fsl, plot_qa, [('out_file', 'dat_img')]),
        (realign_fsl, art, [('out_file', 'realigned_files'),
                            ('par_file', 'realignment_parameters')]),
        (realign_fsl, mean_epi, [('out_file', 'in_file')]),
        (realign_fsl, make_cov, [('par_file', 'realignment_parameters')]),
        (mean_epi, compute_mask, [('out_file', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (art, plot_realign, [('outlier_files', 'outliers')]),
        (plot_qa, make_cov, [('fd_outliers', 'fd_outliers')]),
        (brain_extraction_ants, coregistration, [('BrainExtractionBrain',
                                                  'fixed_image')]),
        (mean_epi, coregistration, [('out_file', 'moving_image')]),
        (brain_extraction_ants, normalization, [('BrainExtractionBrain',
                                                 'moving_image')]),
        (coregistration, merge_transforms, [('composite_transform', 'in2')]),
        (normalization, merge_transforms, [('composite_transform', 'in1')]),
        (merge_transforms, apply_transforms, [('out', 'transforms')]),
        (realign_fsl, apply_transforms, [('out_file', 'input_image')]),
        (apply_transforms, mean_norm_epi, [('output_image', 'in_file')]),
        (normalization, apply_transform_seg, [('composite_transform',
                                               'transforms')]),
        (brain_extraction_ants, apply_transform_seg,
         [('BrainExtractionSegmentation', 'input_image')]),
        (mean_norm_epi, plot_normalization_check, [('out_file', 'wra_img')])
    ])

    ##################################################
    ################### PART (3) #####################
    # epi (in mni) -> filter -> smooth -> down sample
    # OR
    # epi (in mni) -> filter -> down sample
    # OR
    # epi (in mni) -> smooth -> down sample
    # OR
    # epi (in mni) -> down sample
    ###################################################

    if apply_filter:
        workflow.connect([(apply_transforms, lp_filter, [('output_image',
                                                          'in_file')])])

        if apply_smooth:
            # Filtering + Smoothing
            workflow.connect([(lp_filter, smooth, [('out_file', 'in_file')]),
                              (smooth, down_samp, [('smoothed_file', 'in_file')
                                                   ])])
        else:
            # Filtering + No Smoothing
            workflow.connect([(lp_filter, down_samp, [('out_file', 'in_file')])
                              ])
    else:
        if apply_smooth:
            # No Filtering + Smoothing
            workflow.connect([
                (apply_transforms, smooth, [('output_image', 'in_file')]),
                (smooth, down_samp, [('smoothed_file', 'in_file')])
            ])
        else:
            # No Filtering + No Smoothing
            workflow.connect([(apply_transforms, down_samp, [('output_image',
                                                              'in_file')])])

    ##########################################
    ############### PART (4) #################
    # down sample -> save
    # plots -> save
    # covs -> save
    # t1 (in mni) -> save
    # t1 segmented masks (in mni) -> save
    # realignment params -> save
    ##########################################

    workflow.connect([
        (down_samp, datasink, [('out_file', 'functional.@down_samp')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_qa, datasink, [('plot', 'functional.@plot_qa')]),
        (plot_normalization_check, datasink,
         [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')]),
        (normalization, datasink, [('warped_image', 'structural.@normanat')]),
        (apply_transform_seg, datasink, [('output_image',
                                          'structural.@normanatseg')]),
        (realign_fsl, datasink, [('par_file', 'functional.@motionparams')])
    ])

    if not os.path.exists(os.path.join(output_dir, 'pipeline.png')):
        workflow.write_graph(dotfilename=os.path.join(output_dir, 'pipeline'),
                             format='png')

    print(f"Creating workflow for subject: {subject_id}")
    if ants_threads != 8:
        print(
            f"ANTs will utilize the user-requested {ants_threads} threads for parallel processing."
        )
    return workflow
def wfmaker(project_dir,
            raw_dir,
            subject_id,
            task_name='',
            apply_trim=False,
            apply_dist_corr=False,
            apply_smooth=False,
            apply_filter=False,
            mni_template='2mm',
            apply_n4=True,
            ants_threads=8,
            readable_crash_files=False):
    """
    This function returns a "standard" workflow based on requested settings. Assumes data is in the following directory structure in BIDS format:

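    An illustrative layout (a sketch; exact file and folder names depend on your dataset)::

        project_dir/
        └── raw_dir/
            ├── sub-0001/
            │   ├── anat/sub-0001_T1w.nii.gz
            │   ├── func/sub-0001_task-<task>_bold.nii.gz
            │   └── fmap/  (fmap scans; only required if apply_dist_corr=True)
            └── sub-0002/
                └── ...
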
    *Workflow steps*:

    1) EPI Distortion Correction (FSL; optional)
    2) Trimming (nipy)
    3) Realignment/Motion Correction (FSL)
    4) Artifact Detection (rapidART/python)
    5) Brain Extraction + N4 Bias Correction (ANTs)
    6) Coregistration (rigid) (ANTs)
    7) Normalization to MNI (non-linear) (ANTs)
    8) Low-pass filtering (nilearn; optional)
    9) Smoothing (FSL; optional)
    10) Downsampling to INT16 precision to save space (nibabel)

    Args:
        project_dir (str): full path to the root of the project folder, e.g. /my/data/myproject. All preprocessed data will be placed under this folder, and the raw_dir folder will be searched for within it
        raw_dir (str): folder name for raw data, e.g. 'raw' which would be automatically converted to /my/data/myproject/raw
        subject_id (str/int): subject ID to process. Can be either a full subject ID string, e.g. 'sub-0001', or an integer to index the list of subjects in raw_dir, e.g. 0, which would process the first subject
        task_name (str; optional): which functional task runs to process; default is all runs
        apply_trim (int/bool; optional): number of volumes to trim from the beginning of each functional run; default False
        apply_dist_corr (bool; optional): look for fmap files and perform distortion correction; default False
        apply_smooth (int/float/list; optional): smoothing to perform in FWHM mm; if a list is provided, outputs are created for each smoothing kernel separately; default False
        apply_filter (float/list; optional): low-pass filtering cut-off in Hz; if a list is provided, outputs are created for each filter cut-off separately. With high temporal resolution scans, .25Hz is a decent value to capture respiratory artifacts; default False
        mni_template (str; optional): which mm resolution template to use, e.g. '3mm'; default '2mm'
        apply_n4 (bool; optional): perform N4 Bias Field correction on the anatomical image; default true
        ants_threads (int; optional): number of threads ANTs should use for its processes; default 8
        readable_crash_files (bool; optional): should nipype crash files be saved as txt? This makes them easily readable, but sometimes interferes with nipype's ability to use cached results of successfully run nodes (i.e. picking up where it left off after bugs are fixed); default False

    Examples:

        >>> from cosanlab_preproc.wfmaker import wfmaker
        >>> # Create workflow that performs no distortion correction, trims first 5 TRs, no filtering, 6mm smoothing, and normalizes to 2mm MNI space. Run it with 16 cores.
        >>>
        >>> workflow = wfmaker(
                        project_dir = '/data/project',
                        raw_dir = 'raw',
                        subject_id = 0,
                        apply_trim = 5,
                        apply_smooth = 6.0)
        >>>
        >>> workflow.run('MultiProc', plugin_args = {'n_procs': 16})
        >>>
        >>> # Create workflow that performs distortion correction, trims first 25 TRs, no filtering and filtering .25hz, 6mm and 8mm smoothing, and normalizes to 3mm MNI space. Run it serially (will be super slow!).
        >>>
        >>> workflow = wfmaker(
                        project_dir = '/data/project',
                        raw_dir = 'raw',
                        subject_id = 0,
                        apply_trim = 25,
                        apply_dist_corr = True,
                        apply_filter = [0, .25],
                        apply_smooth = [6.0, 8.0],
                        mni_template = '3mm')
        >>>
        >>> workflow.run()

    """

    ##################
    ### PATH SETUP ###
    ##################
    if mni_template not in ['1mm', '2mm', '3mm']:
        raise ValueError("MNI template must be: 1mm, 2mm, or 3mm")

    data_dir = os.path.join(project_dir, raw_dir)
    output_dir = os.path.join(project_dir, 'preprocessed')
    output_final_dir = os.path.join(output_dir, 'final')
    output_interm_dir = os.path.join(output_dir, 'intermediate')
    log_dir = os.path.join(project_dir, 'logs', 'nipype')

    if not os.path.exists(output_final_dir):
        os.makedirs(output_final_dir)
    if not os.path.exists(output_interm_dir):
        os.makedirs(output_interm_dir)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    # Set MNI template
    MNItemplate = os.path.join(get_resource_path(),
                               'MNI152_T1_' + mni_template + '_brain.nii.gz')
    MNImask = os.path.join(get_resource_path(),
                           'MNI152_T1_' + mni_template + '_brain_mask.nii.gz')
    MNItemplatehasskull = os.path.join(get_resource_path(),
                                       'MNI152_T1_' + mni_template + '.nii.gz')

    # Set ANTs files
    bet_ants_template = os.path.join(get_resource_path(),
                                     'OASIS_template.nii.gz')
    bet_ants_prob_mask = os.path.join(
        get_resource_path(), 'OASIS_BrainCerebellumProbabilityMask.nii.gz')
    bet_ants_registration_mask = os.path.join(
        get_resource_path(), 'OASIS_BrainCerebellumRegistrationMask.nii.gz')

    #################################
    ### NIPYPE IMPORTS AND CONFIG ###
    #################################
    # Update nipype global config because workflow.config[] = ..., doesn't seem to work
    # Can't store a nipype config/rc file in the container anyway, so set them globally before importing and setting up the workflow, as suggested here: http://nipype.readthedocs.io/en/latest/users/config_file.html#config-file
    from nipype import config
    if readable_crash_files:
        cfg = dict(execution={'crashfile_format': 'txt'})
        config.update_config(cfg)
    config.update_config(
        {'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        }})
    from nipype import logging
    logging.update_logging(config)

    # Now import everything else
    from nipype.interfaces.io import DataSink
    from nipype.interfaces.utility import Merge, IdentityInterface
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.nipy.preprocess import ComputeMask
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces.ants.segmentation import BrainExtraction, N4BiasFieldCorrection
    from nipype.interfaces.ants import Registration, ApplyTransforms
    from nipype.interfaces.fsl import MCFLIRT, TOPUP, ApplyTOPUP
    from nipype.interfaces.fsl.maths import MeanImage
    from nipype.interfaces.fsl import Merge as MERGE
    from nipype.interfaces.fsl.utils import Smooth
    from nipype.interfaces.nipy.preprocess import Trim
    from .interfaces import Plot_Coregistration_Montage, Plot_Quality_Control, Plot_Realignment_Parameters, Create_Covariates, Down_Sample_Precision, Create_Encoding_File, Filter_In_Mask

    ##################
    ### INPUT NODE ###
    ##################

    layout = BIDSLayout(data_dir)
    # Dartmouth subjects are named with the sub- prefix; handle whether we receive an integer identifier for indexing or the full subject id with prefix
    if isinstance(subject_id, six.string_types):
        subId = subject_id[4:]
    elif isinstance(subject_id, int):
        subId = layout.get_subjects()[subject_id]
        subject_id = 'sub-' + subId
    else:
        raise TypeError("subject_id should be a string or integer")

    #Get anat file location
    anat = layout.get(subject=subId, type='T1w',
                      extensions='.nii.gz')[0].filename

    #Get functional file locations
    if task_name:
        funcs = [
            f.filename for f in layout.get(subject=subId,
                                           type='bold',
                                           task=task_name,
                                           extensions='.nii.gz')
        ]
    else:
        funcs = [
            f.filename for f in layout.get(
                subject=subId, type='bold', extensions='.nii.gz')
        ]

    #Turn functional file list into interable Node
    func_scans = Node(IdentityInterface(fields=['scan']), name='func_scans')
    func_scans.iterables = ('scan', funcs)

    #Get TR for use in filtering below; we're assuming all BOLD runs have the same TR
    tr_length = layout.get_metadata(funcs[0])['RepetitionTime']
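    # A minimal sanity check for that assumption (optional sketch; uncomment to
    # enforce): fail fast if runs report different TRs rather than filtering at
    # the wrong sampling rate.
    # all_trs = {layout.get_metadata(f)['RepetitionTime'] for f in funcs}
    # assert len(all_trs) == 1, "BOLD runs have differing TRs: {}".format(all_trs)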

    #####################################
    ## TRIM ##
    #####################################
    if apply_trim:
        trim = Node(Trim(), name='trim')
        trim.inputs.begin_index = apply_trim

    #####################################
    ## DISTORTION CORRECTION ##
    #####################################

    if apply_dist_corr:
        #Get fmap file locations
        fmaps = [
            f.filename for f in layout.get(
                subject=subId, modality='fmap', extensions='.nii.gz')
        ]
        if not fmaps:
            raise IOError(
                "Distortion Correction requested but field map scans not found..."
            )

        #Get fmap metadata
        totalReadoutTimes, measurements, fmap_pes = [], [], []

        for i, fmap in enumerate(fmaps):
            # Grab total readout time for each fmap
            totalReadoutTimes.append(
                layout.get_metadata(fmap)['TotalReadoutTime'])

            # Grab number of volumes (for some reason pyBIDS doesn't expose the dcm_meta... fields from the side-car JSON, and neither does json.load; so instead read the header with nibabel to count scans)
            measurements.append(nib.load(fmap).header['dim'][4])

            # Get phase encoding direction
            fmap_pe = layout.get_metadata(fmap)["PhaseEncodingDirection"]
            fmap_pes.append(fmap_pe)

        encoding_file_writer = Node(interface=Create_Encoding_File(),
                                    name='create_encoding')
        encoding_file_writer.inputs.totalReadoutTimes = totalReadoutTimes
        encoding_file_writer.inputs.fmaps = fmaps
        encoding_file_writer.inputs.fmap_pes = fmap_pes
        encoding_file_writer.inputs.measurements = measurements
        encoding_file_writer.inputs.file_name = 'encoding_file.txt'

        merge_to_file_list = Node(interface=Merge(2),
                                  infields=['in1', 'in2'],
                                  name='merge_to_file_list')
        merge_to_file_list.inputs.in1 = fmaps[0]
        merge_to_file_list.inputs.in2 = fmaps[1]

        #Merge AP and PA distortion correction scans
        merger = Node(interface=MERGE(dimension='t'), name='merger')
        merger.inputs.output_type = 'NIFTI_GZ'
        merger.inputs.in_files = fmaps
        merger.inputs.merged_file = 'merged_epi.nii.gz'

        #Create distortion correction map
        topup = Node(interface=TOPUP(), name='topup')
        topup.inputs.output_type = 'NIFTI_GZ'

        #Apply distortion correction to other scans
        apply_topup = Node(interface=ApplyTOPUP(), name='apply_topup')
        apply_topup.inputs.output_type = 'NIFTI_GZ'
        apply_topup.inputs.method = 'jac'
        apply_topup.inputs.interp = 'spline'

    ###################################
    ### REALIGN ###
    ###################################
    realign_fsl = Node(MCFLIRT(), name="realign")
    realign_fsl.inputs.cost = 'mutualinfo'
    realign_fsl.inputs.mean_vol = True
    realign_fsl.inputs.output_type = 'NIFTI_GZ'
    realign_fsl.inputs.save_mats = True
    realign_fsl.inputs.save_rms = True
    realign_fsl.inputs.save_plots = True

    ###################################
    ### MEAN EPIs ###
    ###################################
    #For coregistration after realignment
    mean_epi = Node(MeanImage(), name='mean_epi')
    mean_epi.inputs.dimension = 'T'

    #For after normalization is done to plot checks
    mean_norm_epi = Node(MeanImage(), name='mean_norm_epi')
    mean_norm_epi.inputs.dimension = 'T'

    ###################################
    ### MASK, ART, COV CREATION ###
    ###################################
    compute_mask = Node(ComputeMask(), name='compute_mask')
    compute_mask.inputs.m = .05

    art = Node(ArtifactDetect(), name='art')
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'FSL'

    make_cov = Node(Create_Covariates(), name='make_cov')

    ################################
    ### N4 BIAS FIELD CORRECTION ###
    ################################
    if apply_n4:
        n4_correction = Node(N4BiasFieldCorrection(), name='n4_correction')
        n4_correction.inputs.copy_header = True
        n4_correction.inputs.save_bias = False
        n4_correction.inputs.num_threads = ants_threads
        n4_correction.inputs.input_image = anat

    ###################################
    ### BRAIN EXTRACTION ###
    ###################################
    brain_extraction_ants = Node(BrainExtraction(), name='brain_extraction')
    brain_extraction_ants.inputs.dimension = 3
    brain_extraction_ants.inputs.use_floatingpoint_precision = 1
    brain_extraction_ants.inputs.num_threads = ants_threads
    brain_extraction_ants.inputs.brain_probability_mask = bet_ants_prob_mask
    brain_extraction_ants.inputs.keep_temporary_files = 1
    brain_extraction_ants.inputs.brain_template = bet_ants_template
    brain_extraction_ants.inputs.extraction_registration_mask = bet_ants_registration_mask
    brain_extraction_ants.inputs.out_prefix = 'bet'

    ###################################
    ### COREGISTRATION ###
    ###################################
    coregistration = Node(Registration(), name='coregistration')
    coregistration.inputs.float = False
    coregistration.inputs.output_transform_prefix = "meanEpi2highres"
    coregistration.inputs.transforms = ['Rigid']
    coregistration.inputs.transform_parameters = [(0.1, )]  # one entry per transform stage
    coregistration.inputs.number_of_iterations = [[1000, 500, 250, 100]]
    coregistration.inputs.dimension = 3
    coregistration.inputs.num_threads = ants_threads
    coregistration.inputs.write_composite_transform = True
    coregistration.inputs.collapse_output_transforms = True
    coregistration.inputs.metric = ['MI']
    coregistration.inputs.metric_weight = [1]
    coregistration.inputs.radius_or_number_of_bins = [32]
    coregistration.inputs.sampling_strategy = ['Regular']
    coregistration.inputs.sampling_percentage = [0.25]
    coregistration.inputs.convergence_threshold = [1e-08]
    coregistration.inputs.convergence_window_size = [10]
    coregistration.inputs.smoothing_sigmas = [[3, 2, 1, 0]]
    coregistration.inputs.sigma_units = ['mm']
    coregistration.inputs.shrink_factors = [[4, 3, 2, 1]]
    coregistration.inputs.use_estimate_learning_rate_once = [True]
    coregistration.inputs.use_histogram_matching = [False]
    coregistration.inputs.initial_moving_transform_com = True
    coregistration.inputs.output_warped_image = True
    coregistration.inputs.winsorize_lower_quantile = 0.01
    coregistration.inputs.winsorize_upper_quantile = 0.99

    ###################################
    ### NORMALIZATION ###
    ###################################
    # Settings Explanations
    # Only a few key settings are worth adjusting and most others relate to how ANTs optimizer starts or iterates and won't make a ton of difference
    # Brian Avants referred to these settings as the last "best tested" when he was aligning fMRI data: https://github.com/ANTsX/ANTsRCore/blob/master/R/antsRegistration.R#L275
    # Things that matter the most:
    # smoothing_sigmas:
    # how much gaussian smoothing to apply when performing registration, probably want the upper limit of this to match the resolution that the data is collected at e.g. 3mm
    # Old settings [[3,2,1,0]]*3
    # shrink_factors
    # The coarseness with which to do registration
    # Old settings [[8,4,2,1]] * 3
    # >= 8 may result in some problems, causing big chunks of cortex with little fine-grained spatial structure to be moved to other parts of cortex
    # Other settings
    # transform_parameters:
    # how much regularization to do for fitting that transformation
    # for syn this pertains to both the gradient regularization term, and the flow, and elastic terms. Leave the syn settings alone as they seem to be the most well tested across published data sets
    # radius_or_number_of_bins
    # This is the bin size for MI metrics and 32 is probably adequate for most use cases. Increasing this might increase precision (e.g. to 64) but takes exponentially longer
    # use_histogram_matching
    # Use image intensity distribution to guide registration
    # Leave it on for within modality registration (e.g. T1 -> MNI), but off for between modality registration (e.g. EPI -> T1)
    # convergence_threshold
    # threshold for optimizer
    # convergence_window_size
    # how many samples should optimizer average to compute threshold?
    # sampling_strategy
    # how ANTs samples voxels when computing the similarity metric; 'Regular' samples on a regularly spaced grid (here 25% of voxels, per sampling_percentage)
    normalization = Node(Registration(), name='normalization')
    normalization.inputs.float = False
    normalization.inputs.collapse_output_transforms = True
    normalization.inputs.convergence_threshold = [1e-06, 1e-06, 1e-07]
    normalization.inputs.convergence_window_size = [10]
    normalization.inputs.dimension = 3
    normalization.inputs.fixed_image = MNItemplate
    normalization.inputs.initial_moving_transform_com = True
    normalization.inputs.metric = ['MI', 'MI', 'CC']
    normalization.inputs.metric_weight = [1.0] * 3
    normalization.inputs.number_of_iterations = [[1000, 500, 250, 100],
                                                 [1000, 500, 250, 100],
                                                 [100, 70, 50, 20]]
    normalization.inputs.num_threads = ants_threads
    normalization.inputs.output_transform_prefix = 'anat2template'
    normalization.inputs.output_inverse_warped_image = True
    normalization.inputs.output_warped_image = True
    normalization.inputs.radius_or_number_of_bins = [32, 32, 4]
    normalization.inputs.sampling_percentage = [0.25, 0.25, 1]
    normalization.inputs.sampling_strategy = ['Regular', 'Regular', 'None']
    normalization.inputs.shrink_factors = [[4, 3, 2, 1]] * 3
    normalization.inputs.sigma_units = ['vox'] * 3
    normalization.inputs.smoothing_sigmas = [[2, 1], [2, 1], [3, 2, 1, 0]]
    normalization.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    normalization.inputs.transform_parameters = [(0.1, ), (0.1, ),
                                                 (0.1, 3.0, 0.0)]
    normalization.inputs.use_histogram_matching = True
    normalization.inputs.winsorize_lower_quantile = 0.005
    normalization.inputs.winsorize_upper_quantile = 0.995
    normalization.inputs.write_composite_transform = True

    ###################################
    ### APPLY TRANSFORMS AND SMOOTH ###
    ###################################
    merge_transforms = Node(Merge(2),
                            iterfield=['in2'],
                            name='merge_transforms')

    # Used for epi -> mni, via (coreg + norm)
    apply_transforms = Node(ApplyTransforms(),
                            iterfield=['input_image'],
                            name='apply_transforms')
    apply_transforms.inputs.input_image_type = 3
    apply_transforms.inputs.float = False
    apply_transforms.inputs.num_threads = 12
    apply_transforms.inputs.environ = {}
    apply_transforms.inputs.interpolation = 'BSpline'
    apply_transforms.inputs.invert_transform_flags = [False, False]
    apply_transforms.inputs.reference_image = MNItemplate

    # Used for t1 segmented -> mni, via (norm)
    apply_transform_seg = Node(ApplyTransforms(), name='apply_transform_seg')
    apply_transform_seg.inputs.input_image_type = 3
    apply_transform_seg.inputs.float = False
    apply_transform_seg.inputs.num_threads = 12
    apply_transform_seg.inputs.environ = {}
    apply_transform_seg.inputs.interpolation = 'MultiLabel'
    apply_transform_seg.inputs.invert_transform_flags = [False]
    apply_transform_seg.inputs.reference_image = MNItemplate

    ###################################
    ### PLOTS ###
    ###################################
    plot_realign = Node(Plot_Realignment_Parameters(), name="plot_realign")
    plot_qa = Node(Plot_Quality_Control(), name="plot_qa")
    plot_normalization_check = Node(Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = MNItemplatehasskull

    ############################################
    ### FILTER, SMOOTH, DOWNSAMPLE PRECISION ###
    ############################################
    #Use cosanlab_preproc for down sampling
    down_samp = Node(Down_Sample_Precision(), name="down_samp")

    #Use FSL for smoothing
    if apply_smooth:
        smooth = Node(Smooth(), name='smooth')
        if isinstance(apply_smooth, list):
            smooth.iterables = ("fwhm", apply_smooth)
        elif isinstance(apply_smooth, (int, float)):
            smooth.inputs.fwhm = apply_smooth
        else:
            raise ValueError("apply_smooth must be a list or int/float")

    #Use cosanlab_preproc for low-pass filtering
    if apply_filter:
        lp_filter = Node(Filter_In_Mask(), name='lp_filter')
        lp_filter.inputs.mask = MNImask
        lp_filter.inputs.sampling_rate = tr_length
        lp_filter.inputs.high_pass_cutoff = 0
        if isinstance(apply_filter, list):
            lp_filter.iterables = ("low_pass_cutoff", apply_filter)
        elif isinstance(apply_filter, (int, float)):
            lp_filter.inputs.low_pass_cutoff = apply_filter
        else:
            raise ValueError("apply_filter must be a list or int/float")

    ###################
    ### OUTPUT NODE ###
    ###################
    #Collect all final outputs in the output dir and get rid of file name additions
    datasink = Node(DataSink(), name='datasink')
    datasink.inputs.base_directory = output_final_dir
    datasink.inputs.container = subject_id

    # Build DataSink substitutions to strip the long nipype-generated prefixes from output file names
    data_dir_parts = data_dir.split('/')[1:]
    prefix = ['_scan_'] + data_dir_parts + [subject_id] + ['func']
    func_scan_names = [os.path.split(elem)[-1] for elem in funcs]
    to_replace = []
    for elem in func_scan_names:
        bold_name = elem.split(subject_id + '_')[-1]
        bold_name = bold_name.split('.nii.gz')[0]
        to_replace.append(('..'.join(prefix + [elem]), bold_name))
    datasink.inputs.substitutions = to_replace
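    # Illustrative (hypothetical paths): with data_dir '/data/project/raw', a file
    # sinked under '_scan_..data..project..raw..sub-01..func..sub-01_task-rest_bold.nii.gz'
    # would be renamed to just 'task-rest_bold' in the final output folder.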

    #####################
    ### INIT WORKFLOW ###
    #####################
    workflow = Workflow(name=subId)
    workflow.base_dir = output_interm_dir

    ############################
    ######### PART (1a) #########
    # func -> discorr -> trim -> realign
    # OR
    # func -> trim -> realign
    # OR
    # func -> discorr -> realign
    # OR
    # func -> realign
    ############################
    if apply_dist_corr:
        workflow.connect([(encoding_file_writer, topup, [('encoding_file',
                                                          'encoding_file')]),
                          (encoding_file_writer, apply_topup,
                           [('encoding_file', 'encoding_file')]),
                          (merger, topup, [('merged_file', 'in_file')]),
                          (func_scans, apply_topup, [('scan', 'in_files')]),
                          (topup, apply_topup,
                           [('out_fieldcoef', 'in_topup_fieldcoef'),
                            ('out_movpar', 'in_topup_movpar')])])
        if apply_trim:
            # Dist Corr + Trim
            workflow.connect([(apply_topup, trim, [('out_corrected', 'in_file')
                                                   ]),
                              (trim, realign_fsl, [('out_file', 'in_file')])])
        else:
            # Dist Corr + No Trim
            workflow.connect([(apply_topup, realign_fsl, [('out_corrected',
                                                           'in_file')])])
    else:
        if apply_trim:
            # No Dist Corr + Trim
            workflow.connect([(func_scans, trim, [('scan', 'in_file')]),
                              (trim, realign_fsl, [('out_file', 'in_file')])])
        else:
            # No Dist Corr + No Trim
            workflow.connect([
                (func_scans, realign_fsl, [('scan', 'in_file')]),
            ])

    ############################
    ######### PART (1n) #########
    # anat -> N4 -> bet
    # OR
    # anat -> bet
    ############################
    if apply_n4:
        workflow.connect([(n4_correction, brain_extraction_ants,
                           [('output_image', 'anatomical_image')])])
    else:
        brain_extraction_ants.inputs.anatomical_image = anat

    ##########################################
    ############### PART (2) #################
    # realign -> coreg -> mni (via t1)
    # t1 -> mni
    # covariate creation
    # plot creation
    ###########################################

    workflow.connect([
        (realign_fsl, plot_realign, [('par_file', 'realignment_parameters')]),
        (realign_fsl, plot_qa, [('out_file', 'dat_img')]),
        (realign_fsl, art, [('out_file', 'realigned_files'),
                            ('par_file', 'realignment_parameters')]),
        (realign_fsl, mean_epi, [('out_file', 'in_file')]),
        (realign_fsl, make_cov, [('par_file', 'realignment_parameters')]),
        (mean_epi, compute_mask, [('out_file', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (art, plot_realign, [('outlier_files', 'outliers')]),
        (plot_qa, make_cov, [('fd_outliers', 'fd_outliers')]),
        (brain_extraction_ants, coregistration, [('BrainExtractionBrain',
                                                  'fixed_image')]),
        (mean_epi, coregistration, [('out_file', 'moving_image')]),
        (brain_extraction_ants, normalization, [('BrainExtractionBrain',
                                                 'moving_image')]),
        (coregistration, merge_transforms, [('composite_transform', 'in2')]),
        (normalization, merge_transforms, [('composite_transform', 'in1')]),
        (merge_transforms, apply_transforms, [('out', 'transforms')]),
        (realign_fsl, apply_transforms, [('out_file', 'input_image')]),
        (apply_transforms, mean_norm_epi, [('output_image', 'in_file')]),
        (normalization, apply_transform_seg, [('composite_transform',
                                               'transforms')]),
        (brain_extraction_ants, apply_transform_seg,
         [('BrainExtractionSegmentation', 'input_image')]),
        (mean_norm_epi, plot_normalization_check, [('out_file', 'wra_img')])
    ])

    ##################################################
    ################### PART (3) #####################
    # epi (in mni) -> filter -> smooth -> down sample
    # OR
    # epi (in mni) -> filter -> down sample
    # OR
    # epi (in mni) -> smooth -> down sample
    # OR
    # epi (in mni) -> down sample
    ###################################################

    if apply_filter:
        workflow.connect([(apply_transforms, lp_filter, [('output_image',
                                                          'in_file')])])

        if apply_smooth:
            # Filtering + Smoothing
            workflow.connect([(lp_filter, smooth, [('out_file', 'in_file')]),
                              (smooth, down_samp, [('smoothed_file', 'in_file')
                                                   ])])
        else:
            # Filtering + No Smoothing
            workflow.connect([(lp_filter, down_samp, [('out_file', 'in_file')])
                              ])
    else:
        if apply_smooth:
            # No Filtering + Smoothing
            workflow.connect([
                (apply_transforms, smooth, [('output_image', 'in_file')]),
                (smooth, down_samp, [('smoothed_file', 'in_file')])
            ])
        else:
            # No Filtering + No Smoothing
            workflow.connect([(apply_transforms, down_samp, [('output_image',
                                                              'in_file')])])

    ##########################################
    ############### PART (4) #################
    # down sample -> save
    # plots -> save
    # covs -> save
    # t1 (in mni) -> save
    # t1 segmented masks (in mni) -> save
    ##########################################

    workflow.connect([
        (down_samp, datasink, [('out_file', 'functional.@down_samp')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_qa, datasink, [('plot', 'functional.@plot_qa')]),
        (plot_normalization_check, datasink,
         [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')]),
        (normalization, datasink, [('warped_image', 'structural.@normanat')]),
        (apply_transform_seg, datasink, [('output_image',
                                          'structural.@normanatseg')])
    ])

    if not os.path.exists(os.path.join(output_dir, 'pipeline.png')):
        workflow.write_graph(dotfilename=os.path.join(output_dir, 'pipeline'),
                             format='png')

    print(f"Creating workflow for subject: {subject_id}")
    if ants_threads == 8:
        print(
            f"ANTs will utilize the default of {ants_threads} threads for parallel processing."
        )
    else:
        print(
            f"ANTs will utilize the user-requested {ants_threads} threads for parallel processing."
        )
    return workflow
Beispiel #18
0
# NOTE: this snippet begins mid-definition; the opening below (variable name,
# pe.MapNode, util.Function) is reconstructed and should be treated as an assumption.
tsv2subjinfo = pe.MapNode(util.Function(
    function=utils.tsv2subjectinfo,
    input_names=['events_file', 'exclude', 'confounds_file', 'trim_indices'],
    output_names=['subject_info']),
                          name="tsv2subjinfo",
                          iterfield=['events_file', 'confounds_file'])
modelspec = pe.MapNode(interface=model.SpecifyModel(),
                       name="modelspec",
                       iterfield=['subject_info'])
level1design = pe.MapNode(interface=fsl.Level1Design(),
                          name="level1design",
                          iterfield=['session_info'])
modelgen = pe.MapNode(interface=fsl.FEATModel(),
                      name='modelgen',
                      iterfield=["fsf_file", "ev_files"])

trim = pe.MapNode(interface=Trim(), name="trim", iterfield=['in_file'])
applymask = pe.MapNode(interface=fsl.ApplyMask(),
                       name="applymask",
                       iterfield=["in_file", "mask_file"])

modelestimate = pe.MapNode(interface=fsl.FILMGLS(),
                           name='modelestimate',
                           iterfield=['design_file', 'in_file', 'tcon_file'])

# combine copes, varcopes, and masks across multiple sessions
copemerge = pe.MapNode(interface=fsl.Merge(dimension='t'),
                       iterfield=['in_files'],
                       name="copemerge")
varcopemerge = pe.MapNode(interface=fsl.Merge(dimension='t'),
                          iterfield=['in_files'],
                          name="varcopemerge")
Beispiel #19
0
def build_pipeline(model_def):

    # create pointers to needed values from
    # the model dictionary
    # TODO - this could be refactored
    TR = model_def['TR']
    subject_list = model_def['subject_list']
    JSON_MODEL_FILE = model_def['model_path']

    working_dir = model_def['working_dir']
    output_dir = model_def['output_dir']

    SUBJ_DIR = model_def['SUBJ_DIR']
    PROJECT_DIR = model_def['PROJECT_DIR']
    TASK_NAME = model_def['TaskName']
    RUNS = model_def['Runs']
    MODEL_NAME = model_def['ModelName']
    PROJECT_NAME = model_def['ProjectID']
    BASE_DIR = model_def['BaseDirectory']

    SERIAL_CORRELATIONS = model_def.get('SerialCorrelations') or "AR(1)"
    RESIDUALS = model_def.get('GenerateResiduals')

    # SpecifyModel - Generates SPM-specific Model

    modelspec = pe.Node(model.SpecifySPMModel(concatenate_runs=False,
                                              input_units='secs',
                                              output_units='secs',
                                              time_repetition=TR,
                                              high_pass_filter_cutoff=128),
                        name="modelspec")

    # #### Level 1 Design node
    #
    # ** TODO -- get the right matching template file for fmriprep **
    #
    # * ??do we need a different mask than:
    #
    #     `'/data00/tools/spm8/apriori/brainmask_th25.nii'`

    # Level1Design - Generates an SPM design matrix
    level1design = pe.Node(
        spm.Level1Design(
            bases={'hrf': {
                'derivs': [0, 0]
            }},
            timing_units='secs',
            interscan_interval=TR,
            # model_serial_correlations='AR(1)', # [none|AR(1)|FAST]',
            # 8/21/20 mbod - allow for value to be set in JSON model spec
            model_serial_correlations=SERIAL_CORRELATIONS,

            # TODO - allow for specified masks
            mask_image=BRAIN_MASK_PATH,
            global_intensity_normalization='none'),
        name="level1design")

    # #### Estimate Model node
    # EstimateModel - estimate the parameters of the model
    level1estimate = pe.Node(
        spm.EstimateModel(
            estimation_method={'Classical': 1},
            # 8/21/20 mbod - allow for value to be set in JSON model spec
            write_residuals=RESIDUALS),
        name="level1estimate")

    # #### Estimate Contrasts node
    # EstimateContrast - estimates contrasts
    conestimate = pe.Node(spm.EstimateContrast(), name="conestimate")

    # ## Setup pipeline workflow for level 1 model
    # Initiation of the 1st-level analysis workflow
    l1analysis = pe.Workflow(name='l1analysis')

    # Connect up the 1st-level analysis components
    l1analysis.connect([
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
        (level1estimate, conestimate, [('spm_mat_file', 'spm_mat_file'),
                                       ('beta_images', 'beta_images'),
                                       ('residual_image', 'residual_image')])
    ])

    # ## Set up nodes for file handling and subject selection
    # ### `getsubjectinfo` node
    #
    # * Use `get_subject_info()` function to generate spec data structure for first level model design matrix

    # Get Subject Info - get subject specific condition information
    getsubjectinfo = pe.Node(util.Function(
        input_names=['subject_id', 'model_path'],
        output_names=['subject_info', 'realign_params', 'condition_names'],
        function=get_subject_info),
                             name='getsubjectinfo')

    makecontrasts = pe.Node(util.Function(
        input_names=['subject_id', 'condition_names', 'model_path'],
        output_names=['contrasts'],
        function=make_contrast_list),
                            name='makecontrasts')

    if model_def.get('ExcludeDummyScans'):
        ExcludeDummyScans = model_def['ExcludeDummyScans']
    else:
        ExcludeDummyScans = 0

    #if DEBUG:
    #    print(f'Excluding {ExcludeDummyScans} dummy scans.')

    trimdummyscans = pe.MapNode(Trim(begin_index=ExcludeDummyScans),
                                name='trimdummyscans',
                                iterfield=['in_file'])
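    # nipy's Trim with begin_index=N drops the first N volumes of each run, so
    # ExcludeDummyScans == 0 leaves the time series unchanged.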

    # ### `infosource` node
    #
    # * iterate over list of subject ids and generate subject ids and produce list of contrasts for subsequent nodes

    # Infosource - a function free node to iterate over the list of subject names
    infosource = pe.Node(util.IdentityInterface(
        fields=['subject_id', 'model_path', 'resolution', 'smoothing']),
                         name="infosource")

    fwhm_list = model_def.get('smoothing_list', [4, 6, 8])

    resolution_list = model_def.get('resolutions', ['low', 'medium', 'high'])

    infosource.iterables = [
        ('subject_id', subject_list),
        ('model_path', [JSON_MODEL_FILE] * len(subject_list)),
        ('resolution', resolution_list),
        ('smoothing', ['fwhm_{}'.format(s) for s in fwhm_list])
    ]

    # SelectFiles - to grab the data (alternativ to DataGrabber)

    ## TODO: here need to figure out how to incorporate the run number and task name in call
    templates = {
        'func':
        '{subject_id}/{resolution}/{smoothing}/sr{subject_id}_task-' +
        TASK_NAME + '_run-0*_*MNI*preproc*.nii'
    }

    selectfiles = pe.Node(nio.SelectFiles(
        templates,
        base_directory='{}/{}/derivatives/nipype/resampled_and_smoothed'.
        format(BASE_DIR, PROJECT_NAME)),
                          working_dir=working_dir,
                          name="selectfiles")

    # ### Specify datasink node
    #
    # * copy files to keep from various working folders to output folder for model for subject

    # Datasink - creates output folder for important outputs
    datasink = pe.Node(
        nio.DataSink(
            base_directory=SUBJ_DIR,
            parameterization=True,
            #container=output_dir
        ),
        name="datasink")

    datasink.inputs.base_directory = output_dir

    # Use the following DataSink output substitutions
    substitutions = []
    subjFolders = [(
        '_model_path.*resolution_(low|medium|high)_smoothing_(fwhm_\\d{1,2})_subject_id_sub-.*/(.*)$',
        '\\1/\\2/\\3')]
    substitutions.extend(subjFolders)
    datasink.inputs.regexp_substitutions = substitutions
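    # Hypothetical example of the regexp above: an output parameterized as
    # '_model_path...resolution_low_smoothing_fwhm_6_subject_id_sub-01/spmT_0001.nii'
    # is re-rooted to 'low/fwhm_6/spmT_0001.nii' within each subject's container.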

    # datasink connections

    datasink_in_outs = [('conestimate.spm_mat_file', '@spm'),
                        ('level1estimate.beta_images', '@betas'),
                        ('level1estimate.mask_image', '@mask'),
                        ('conestimate.spmT_images', '@spmT'),
                        ('conestimate.con_images', '@con'),
                        ('conestimate.spmF_images', '@spmF')]

    if model_def.get('GenerateResiduals'):
        datasink_in_outs.append(
            ('level1estimate.residual_images', '@residuals'))

    # ---------

    # ## Set up workflow for whole process

    pipeline = pe.Workflow(
        name='first_level_model_{}_{}'.format(TASK_NAME.upper(), MODEL_NAME))
    pipeline.base_dir = os.path.join(SUBJ_DIR, working_dir)

    pipeline.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id'),
                                   ('resolution', 'resolution'),
                                   ('smoothing', 'smoothing')]),
        (infosource, getsubjectinfo, [('subject_id', 'subject_id'),
                                      ('model_path', 'model_path')]),
        (infosource, makecontrasts, [('subject_id', 'subject_id'),
                                     ('model_path', 'model_path')]),
        (getsubjectinfo, makecontrasts, [('condition_names', 'condition_names')
                                         ]),
        (getsubjectinfo, l1analysis,
         [('subject_info', 'modelspec.subject_info'),
          ('realign_params', 'modelspec.realignment_parameters')]),
        (makecontrasts, l1analysis, [('contrasts', 'conestimate.contrasts')]),

        #                  (selectfiles, l1analysis, [('func',
        #                                          'modelspec.functional_runs')]),
        (selectfiles, trimdummyscans, [('func', 'in_file')]),
        (trimdummyscans, l1analysis, [('out_file', 'modelspec.functional_runs')
                                      ]),
        (infosource, datasink, [('subject_id', 'container')]),
        (l1analysis, datasink, datasink_in_outs)
    ])

    return pipeline