Example n. 1
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.fsl as fsl
import nipype.interfaces.spm as spm


def epi_normalize(name='epi_normalize'):

    inputnode = pe.Node(
        utility.IdentityInterface(
            fields=['fmri_mean','t1','t1_to_mni','t1_mask']),
        name='inputspec')
    outputnode = pe.Node(
        utility.IdentityInterface(
            fields=['epi2t1_warp','coregistered_fmri_mean','epi2mni_warp',
                    't1_to_epi_warp','epi_mask']),
        name='outputspec')

    n_spm_coregister = pe.Node(
        spm.Coregister(jobtype='estimate'),
        name='spm_coregister')

#    n_epi2mni = pe.Node(
#        spm.preprocess.ApplyDeformations(
#            reference_volume='/coconut/applis/src/spm8_x64/toolbox/Seg/TPM.nii'),
#        name='epi2mni')


    n_flirt_epi2t1 = pe.Node(
        fsl.FLIRT(out_matrix_file='flirt_epi2t1.mat',
                  out_file='%s_flirt',
                  cost='normmi', # as in fcon1000 preproc, why??
                  searchr_x=[-10,10],searchr_y=[-10,10],searchr_z=[-10,10],
                  dof=6),
        name='flirt_epi2t1')
    n_t1_to_epi = pe.Node(
        fsl.ConvertXFM(invert_xfm=True),
        name='t1_to_epi')

    n_mask_to_epi = pe.Node(
        fsl.FLIRT(interp='nearestneighbour',
                  out_file='%s_epi',
                  apply_xfm=True,),
        name='mask_to_epi')
    
    w=pe.Workflow(name=name)

    w.connect([
#        (inputnode,n_spm_coregister,[('fmri_mean','source'),
#                                     ('t1','target')]),
        (inputnode,n_flirt_epi2t1,[('t1','reference')]),
#        (n_spm_coregister,n_epi2mni,[('coregistered_source','in_files')]),
#        (inputnode,n_epi2mni,[('t1_to_mni','deformation_field')]),
        (inputnode,n_flirt_epi2t1,[('fmri_mean','in_file')]),
        (n_flirt_epi2t1,outputnode,[('out_matrix_file','epi2t1_warp')]),
        (n_flirt_epi2t1,n_t1_to_epi,[('out_matrix_file','in_file')]),
        (n_t1_to_epi,outputnode,[('out_file','t1_to_epi_warp')]),

        (inputnode,n_mask_to_epi,[('fmri_mean','reference'),
                                  ('t1_mask','in_file')]),
        (n_t1_to_epi, n_mask_to_epi,[('out_file','in_matrix_file')]),
        (n_mask_to_epi, outputnode, [('out_file','epi_mask')])
#        (n_spm_coregister,outputnode,[('coregistered_source',
#                                       'coregistered_fmri_mean')]),
        ])
    return w
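
A minimal usage sketch for the workflow above (file names are hypothetical placeholders; FSL must be installed, and SPM only if the unconnected coregister node is wired in):

w = epi_normalize(name='epi_normalize')
w.base_dir = '/tmp/epi_normalize_work'                 # hypothetical working directory
w.inputs.inputspec.fmri_mean = 'mean_func.nii.gz'      # hypothetical mean functional image
w.inputs.inputspec.t1 = 't1.nii.gz'                    # hypothetical T1 image
w.inputs.inputspec.t1_mask = 't1_brain_mask.nii.gz'    # hypothetical T1 brain mask
# w.run()  # uncomment to execute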
Example n. 2
def create_workflow():
    workflow = Workflow(
        name='transform_manual_mask')

    inputs = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'refsubject_id',
        'ref_funcmask',
        'ref_func',
        'funcs',
    ]), name='in')

    # Find the transformation matrix func_ref -> func
    # First find transform from func to manualmask's ref func

    # first take the median (flirt functionality has changed and no longer automatically takes the first volume when given 4D files)
    median_func = MapNode(
                    interface=fsl.maths.MedianImage(dimension="T"),
                    name='median_func',
                    iterfield=['in_file'],
                    )
    findtrans = MapNode(fsl.FLIRT(),
                        iterfield=['in_file'],
                        name='findtrans'
                        )

    # Invert the matrix transform
    invert = MapNode(fsl.ConvertXFM(invert_xfm=True),
                     name='invert',
                     iterfield=['in_file'],
                     )
    workflow.connect(findtrans, 'out_matrix_file',
                     invert, 'in_file')

    # Transform the manualmask to be aligned with func
    funcreg = MapNode(ApplyXFMRefName(),
                      name='funcreg',
                      iterfield=['in_matrix_file', 'reference'],
                      )


    workflow.connect(inputs, 'funcs',
                     median_func, 'in_file')

    workflow.connect(median_func, 'out_file',
                     findtrans, 'in_file')
    workflow.connect(inputs, 'ref_func',
                     findtrans, 'reference')

    workflow.connect(invert, 'out_file',
                     funcreg, 'in_matrix_file')

    workflow.connect(inputs, 'ref_func',
                     funcreg, 'in_file')
    workflow.connect(inputs, 'funcs',
                     funcreg, 'reference')

    
    return workflow
Example n. 3
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.fsl as fsl


def reg2anat(name='registration', onlyResample=True):
    """
    Find the transformation which fits an arbitrary input image to an anatomical image.
    If the images differ only in voxel size, onlyResample=True should be used.
    If onlyResample=False, rigid-body registration is performed.
    Input: upscaled image, upscaled anatomical image.
    Output: transformation matrix.
    """

    reg = pe.Workflow(name=name)
    reg.base_dir = '.'

    # Set up a node to define all inputs required for the preprocessing workflow
    inputnode = pe.Node(interface=util.IdentityInterface(fields=['in_file', 'in_anat'],
                                                         mandatory_inputs=True),
                        name='inputspec')

    # Set up a node to define outputs for the preprocessing workflow
    outputnode = pe.Node(interface=util.IdentityInterface(fields=['out_mat', 'out_inv_mat']),
                         name='outputspec')

    if not onlyResample:
        flirt = pe.MapNode(interface=fsl.FLIRT(dof=6, bins=256, interp='trilinear'),
                           name='linear_registration',
                           iterfield=['in_file', 'reference'])
        invertMat = pe.MapNode(interface=fsl.ConvertXFM(invert_xfm=True),
                               name='invert_matrix',
                               iterfield=['in_file'])

        reg.connect(inputnode, 'in_file', flirt, 'in_file')
        reg.connect(inputnode, 'in_anat', flirt, 'reference')
        reg.connect(flirt, 'out_matrix_file', outputnode, 'out_mat')
        reg.connect(flirt, 'out_matrix_file', invertMat, 'in_file')
        reg.connect(invertMat, 'out_file', outputnode, 'out_inv_mat')
    else:
        print('Not implemented!')  # TODO

    return reg
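
A minimal usage sketch for reg2anat (file names are hypothetical placeholders and FSL must be installed; inputs are lists because the registration nodes are MapNodes):

reg_wf = reg2anat(name='registration', onlyResample=False)
reg_wf.base_dir = '/tmp/reg_work'                              # hypothetical working directory
reg_wf.inputs.inputspec.in_file = ['epi_upscaled.nii.gz']      # hypothetical input image
reg_wf.inputs.inputspec.in_anat = ['anat_upscaled.nii.gz']     # hypothetical anatomical image
# reg_wf.run()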
Example n. 4
import os

import nipype.interfaces.fsl as fsl


def run_mni_alignment(in_file, out_dir):
    print('Aligning anatomical with MNI template')
    flirt = fsl.FLIRT()
    flirt.inputs.in_file = in_file
    flirt.inputs.reference = os.path.join(os.environ['FSLDIR'], 'data',
                                          'standard', 'MNI152_T1_2mm.nii.gz')
    flirt.inputs.out_file = os.path.join(out_dir, 'T1w_mni.nii.gz')
    flirt.inputs.out_matrix_file = os.path.join(out_dir, 'anat_to_mni.mat')
    flirt.run()
    # Invert transform
    invt = fsl.ConvertXFM()
    invt.inputs.in_file = os.path.join(out_dir, 'anat_to_mni.mat')
    invt.inputs.invert_xfm = True
    invt.inputs.out_file = os.path.join(out_dir, 'mni_to_anat.mat')
    invt.run()
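
A minimal usage sketch (assumes the FSLDIR environment variable is set; the paths are hypothetical placeholders):

os.makedirs('/tmp/mni_out', exist_ok=True)              # hypothetical output directory
run_mni_alignment('sub-01_T1w.nii.gz', '/tmp/mni_out')  # hypothetical T1w input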
Example n. 5
from nipype.pipeline.engine import Workflow, Node, MapNode
from nipype.interfaces import fsl, utility


def create_segments_2func_workflow(threshold=0.5,
                                   name='segments_2func_workflow'):
    segments_2func_workflow = Workflow(name=name)

    # Input Node
    inputspec = Node(
        utility.IdentityInterface(fields=['segments', 'premat', 'func_file']),
        name='inputspec')

    # Calculate inverse matrix of EPI to T1
    anat_2func_matrix = Node(fsl.ConvertXFM(invert_xfm=True),
                             name='anat_2func_matrix')

    # Transform segments to EPI space
    segments_2func_apply = MapNode(fsl.ApplyXFM(),
                                   iterfield=['in_file'],
                                   name='segments_2func_apply')

    # Threshold segments
    segments_threshold = MapNode(
        fsl.ImageMaths(op_string='-thr {0} -bin'.format(threshold)),
        iterfield=['in_file'],
        name='segments_threshold')

    # Output Node
    outputspec = Node(utility.IdentityInterface(
        fields=['segments_2func_files', 'anat_2func_matrix_file']),
                      name='outputspec')

    segments_2func_workflow.connect(inputspec, 'premat', anat_2func_matrix,
                                    'in_file')
    segments_2func_workflow.connect(inputspec, 'segments',
                                    segments_2func_apply, 'in_file')
    segments_2func_workflow.connect(inputspec, 'func_file',
                                    segments_2func_apply, 'reference')
    segments_2func_workflow.connect(anat_2func_matrix, 'out_file',
                                    segments_2func_apply, 'in_matrix_file')
    segments_2func_workflow.connect(segments_2func_apply, 'out_file',
                                    segments_threshold, 'in_file')
    segments_2func_workflow.connect(anat_2func_matrix, 'out_file', outputspec,
                                    'anat_2func_matrix_file')
    segments_2func_workflow.connect(segments_threshold, 'out_file', outputspec,
                                    'segments_2func_files')

    return segments_2func_workflow
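
A minimal usage sketch (file names are hypothetical placeholders; 'premat' is the func-to-anat FLIRT matrix that the workflow inverts):

wf = create_segments_2func_workflow(threshold=0.5)
wf.base_dir = '/tmp/segments_work'                                       # hypothetical
wf.inputs.inputspec.segments = ['csf.nii.gz', 'gm.nii.gz', 'wm.nii.gz']  # hypothetical segment maps
wf.inputs.inputspec.premat = 'func2anat.mat'                             # hypothetical func-to-anat matrix
wf.inputs.inputspec.func_file = 'mean_func.nii.gz'                       # hypothetical functional reference
# wf.run()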
Example n. 6
import nipype.interfaces.fsl as fsl


def doInverseXFM(inmat, outmat):  # Compute the inverse of a transformation matrix
    '''
    Parameters
    ----------
    inmat : str
        path to the input transformation matrix.
    outmat : str
        path at which to store the output transformation matrix.

    Returns
    -------
    None. The inverse of the input matrix is written to `outmat`.
    '''
    print('doing inverse of', inmat)
    invt = fsl.ConvertXFM()
    invt.inputs.in_file = inmat
    invt.inputs.invert_xfm = True
    invt.inputs.out_file = outmat
    invt.run()
    print('inverse finished', outmat, '\n')
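
A minimal usage sketch (hypothetical paths; requires FSL's convert_xfm on the PATH):

doInverseXFM('func2anat.mat', 'anat2func.mat')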
Example n. 7
def create_workflow():
    workflow = Workflow(name='transform_manual_mask')

    inputs = Node(IdentityInterface(fields=[
        'subject_id',
        'session_id',
        'manualmask',
        'manualmask_func_ref',
        'funcs',
    ]),
                  name='in')

    # Find the transformation matrix func_ref -> func
    # First find transform from func to manualmask's ref func
    findtrans = MapNode(fsl.FLIRT(), iterfield=['in_file'], name='findtrans')

    # Invert the matrix transform
    invert = MapNode(
        fsl.ConvertXFM(invert_xfm=True),
        name='invert',
        iterfield=['in_file'],
    )
    workflow.connect(findtrans, 'out_matrix_file', invert, 'in_file')

    # Transform the manualmask to be aligned with func
    funcreg = MapNode(
        ApplyXFMRefName(),
        name='funcreg',
        iterfield=['in_matrix_file', 'reference'],
    )

    workflow.connect(inputs, 'funcs', findtrans, 'in_file')
    workflow.connect(inputs, 'manualmask_func_ref', findtrans, 'reference')

    workflow.connect(invert, 'out_file', funcreg, 'in_matrix_file')

    workflow.connect(inputs, 'manualmask', funcreg, 'in_file')
    workflow.connect(inputs, 'funcs', funcreg, 'reference')

    return workflow
Example n. 8
import nipype.interfaces.fsl as fsl


def doConcatXFM(inmat1, inmat2,
                outmat):  # Concatenate two transformation matrices
    '''
    Parameters
    ----------
    inmat1 : str
        path to the first input transformation matrix.
    inmat2 : str
        path to the second input transformation matrix.
    outmat : str
        path at which to store the output transformation matrix.

    Returns
    -------
    None. The combined transformation matrix is written to `outmat`.
    '''
    print('doing concat of', inmat1, inmat2)
    cont = fsl.ConvertXFM()
    cont.inputs.in_file = inmat1
    cont.inputs.in_file2 = inmat2
    cont.inputs.concat_xfm = True
    cont.inputs.out_file = outmat
    cont.run()
    print('Concatenation finished', outmat, '\n')
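
A minimal usage sketch (hypothetical paths; with convert_xfm -concat the resulting matrix applies inmat1 first and then inmat2):

doConcatXFM('func2anat.mat', 'anat2mni.mat', 'func2mni.mat')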
def create_reg_and_label_wf(name="reg_wf", manual_seg_rois=False):
    inputfields = [
        "subject_id", "aparc_aseg", "fa", "wm_mask", "termination_mask"
    ]

    if manual_seg_rois:
        inputfields.append("manual_seg_rois")

    inputnode = pe.Node(interface=util.IdentityInterface(fields=inputfields),
                        name="inputnode")

    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        "dwi_to_t1_matrix", "t1_to_dwi_matrix", "rois_to_dwi", "rois",
        "wmmask_to_dwi", "termmask_to_dwi", "highres_t1_to_dwi_matrix"
    ]),
                         name="outputnode")

    dmn_labels_if = util.Function(input_names=["in_file", "out_filename"],
                                  output_names=["out_file"],
                                  function=dmn_labels_combined)
    dmn_labelling = pe.Node(interface=dmn_labels_if, name='dmn_labelling')

    align_wmmask_to_dwi = coreg_without_resample("align_wmmask_to_fa")
    align_wmmask_to_dwi.inputs.inputnode.interp = "nearestneighbour"

    rois_to_dwi = pe.Node(interface=fsl.ApplyXfm(), name='rois_to_dwi')
    rois_to_dwi.inputs.interp = "nearestneighbour"

    threshold_fa = pe.Node(interface=fsl.ImageMaths(), name='threshold_fa')
    threshold_fa.inputs.op_string = "-thr 0.2 -bin"

    multiply_rois_by_termmask = pe.Node(interface=fsl.MultiImageMaths(),
                                        name='multiply_rois_by_termmask')
    multiply_rois_by_termmask.inputs.op_string = "-mul %s"

    termmask_to_dwi = rois_to_dwi.clone("termmask_to_dwi")

    invertxfm = pe.Node(interface=fsl.ConvertXFM(), name='invertxfm')
    invertxfm.inputs.invert_xfm = True
    '''
    Define renaming nodes
    '''
    rename_t1_to_dwi_mat = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_t1_to_dwi_matrix"),
        name='rename_t1_to_dwi_mat')
    rename_t1_to_dwi_mat.inputs.keep_ext = True

    rename_dwi_to_t1_mat = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_dwi_to_t1_matrix"),
        name='rename_dwi_to_t1_mat')
    rename_dwi_to_t1_mat.inputs.keep_ext = True

    rename_rois_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_rois_dwi"),
        name='rename_rois_dwi')
    rename_rois_dwi.inputs.keep_ext = True

    rename_rois = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_rois"),
        name='rename_rois')
    rename_rois.inputs.keep_ext = True

    rename_termmask_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_term_mask_dwi"),
        name='rename_termmask_dwi')
    rename_termmask_dwi.inputs.keep_ext = True

    rename_wmmask_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_wm_mask_dwi"),
        name='rename_wmmask_dwi')
    rename_wmmask_dwi.inputs.keep_ext = True

    rename_highres_matrix_file = pe.Node(interface=util.Rename(
        format_string="%(subject_id)s_t1_to_dwi_NoResample"),
                                         name='rename_highres_matrix_file')
    rename_highres_matrix_file.inputs.keep_ext = True

    workflow = pe.Workflow(name=name)

    workflow.connect([(inputnode, align_wmmask_to_dwi,
                       [("wm_mask", "inputnode.moving_image")])])
    workflow.connect([(inputnode, threshold_fa, [("fa", "in_file")])])
    workflow.connect([(threshold_fa, align_wmmask_to_dwi,
                       [("out_file", "inputnode.fixed_image")])])

    if manual_seg_rois:
        workflow.connect([(inputnode, rois_to_dwi, [("manual_seg_rois",
                                                     "in_file")])])
        workflow.connect([(inputnode, rois_to_dwi, [("manual_seg_rois",
                                                     "reference")])])
        workflow.connect([(inputnode, outputnode, [("manual_seg_rois", "rois")
                                                   ])])

    else:
        workflow.connect([(inputnode, dmn_labelling, [
            (('subject_id', add_subj_name_to_rois), 'out_filename')
        ])])
        workflow.connect([(inputnode, dmn_labelling, [("aparc_aseg", "in_file")
                                                      ])])

        workflow.connect([(dmn_labelling, multiply_rois_by_termmask,
                           [("out_file", "in_file")])])
        workflow.connect([(inputnode, multiply_rois_by_termmask,
                           [("termination_mask", "operand_files")])])
        workflow.connect([(multiply_rois_by_termmask, rename_rois,
                           [("out_file", "in_file")])])
        workflow.connect([(inputnode, rename_rois, [("subject_id",
                                                     "subject_id")])])
        workflow.connect([(rename_rois, rois_to_dwi, [("out_file", "in_file")])
                          ])
        workflow.connect([(rename_rois, rois_to_dwi, [("out_file", "reference")
                                                      ])])
        workflow.connect([(rename_rois, outputnode, [("out_file", "rois")])])

    workflow.connect([(align_wmmask_to_dwi, rois_to_dwi, [
        ("outputnode.highres_matrix_file", "in_matrix_file")
    ])])

    workflow.connect([(inputnode, termmask_to_dwi, [("termination_mask",
                                                     "in_file")])])
    workflow.connect([(inputnode, termmask_to_dwi, [("termination_mask",
                                                     "reference")])])
    workflow.connect([(align_wmmask_to_dwi, termmask_to_dwi, [
        ("outputnode.highres_matrix_file", "in_matrix_file")
    ])])

    workflow.connect([(align_wmmask_to_dwi, invertxfm,
                       [("outputnode.lowres_matrix_file", "in_file")])])

    workflow.connect([(inputnode, rename_t1_to_dwi_mat, [("subject_id",
                                                          "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_t1_to_dwi_mat,
                       [("outputnode.lowres_matrix_file", "in_file")])])
    workflow.connect([(rename_t1_to_dwi_mat, outputnode,
                       [("out_file", "t1_to_dwi_matrix")])])

    workflow.connect([(inputnode, rename_dwi_to_t1_mat, [("subject_id",
                                                          "subject_id")])])
    workflow.connect([(invertxfm, rename_dwi_to_t1_mat, [("out_file",
                                                          "in_file")])])
    workflow.connect([(rename_dwi_to_t1_mat, outputnode,
                       [("out_file", "dwi_to_t1_matrix")])])

    workflow.connect([(inputnode, rename_rois_dwi, [("subject_id",
                                                     "subject_id")])])
    workflow.connect([(rois_to_dwi, rename_rois_dwi, [("out_file", "in_file")])
                      ])
    workflow.connect([(rename_rois_dwi, outputnode, [("out_file",
                                                      "rois_to_dwi")])])

    workflow.connect([(inputnode, rename_termmask_dwi, [("subject_id",
                                                         "subject_id")])])
    workflow.connect([(termmask_to_dwi, rename_termmask_dwi, [("out_file",
                                                               "in_file")])])
    workflow.connect([(rename_termmask_dwi, outputnode,
                       [("out_file", "termmask_to_dwi")])])

    workflow.connect([(inputnode, rename_wmmask_dwi, [("subject_id",
                                                       "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_wmmask_dwi,
                       [("outputnode.out_file", "in_file")])])
    workflow.connect([(rename_wmmask_dwi, outputnode, [("out_file",
                                                        "wmmask_to_dwi")])])

    workflow.connect([(inputnode, rename_highres_matrix_file,
                       [("subject_id", "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_highres_matrix_file,
                       [("outputnode.highres_matrix_file", "in_file")])])
    workflow.connect([(rename_highres_matrix_file, outputnode,
                       [("out_file", "highres_t1_to_dwi_matrix")])])
    return workflow
Example n. 10
def _t1reg(
    datapath,
    t1prefix,
    regdir,
    outfolder=Path.cwd(),
    inrefname=None,
    outrefname="Ref_CESTres.nii.gz",
    phantom=False,
):
    """ _t1reg - Co-registers T1 data, and then registers that data
    to the high resolution reference measurement set up in _reg_ref_7T/_reg_ref
    """
    t1dir = sorted(datapath.glob(f"*{t1prefix}*.nii.gz"))
    if not t1dir:  # sorted() returns an empty list, never None, when nothing matches
        print("No T1 Files Detected!")
        return None

    if inrefname is None:
        inrefdir = datapath / outrefname
    else:
        inrefdir = sorted(datapath.glob(f"*{inrefname}*.nii.gz"))[0]

    t1outname = t1prefix

    # Merge all T1 vols together
    if len(t1dir) > 1:
        concatname = [str(i) for i in t1dir]

        # Merge all files
        fslmerge = fsl.Merge()
        fslmerge.inputs.in_files = concatname
        fslmerge.inputs.dimension = "t"
        fslmerge.inputs.merged_file = str(regdir / f"{t1outname}_merged.nii.gz")
        fslmerge.run()
    else:
        shutil.copyfile(str(t1dir[0]), str(regdir / f"{t1outname}_merged.nii.gz"))

    #  MC Flirt Data
    mcflirt = fsl.MCFLIRT()
    mcflirt.inputs.in_file = str(regdir / f"{t1outname}_merged.nii.gz")
    mcflirt.inputs.ref_vol = 0
    mcflirt.inputs.out_file = str(regdir / f"{t1outname}_merged_mcf.nii.gz")
    mcflirt.run()

    # Extract an ROI to use for registration to Ref
    fsl.ExtractROI(
        in_file=str(regdir / f"{t1outname}_merged_mcf.nii.gz"),
        roi_file=str(regdir / f"{t1outname}_vol1.nii.gz"),
        t_min=0,
        t_size=1,
    ).run()

    # Run FAST on Ref T1 image
    t1fast = fsl.FAST()
    t1fast.inputs.in_files = str(regdir / f"{t1outname}_vol1.nii.gz")
    t1fast.inputs.out_basename = str(regdir / f"{t1outname}_bc")
    t1fast.inputs.output_biascorrected = True
    t1fast.inputs.output_biasfield = True
    t1fast.inputs.no_pve = True
    t1fast.run(ignore_exception=True)

    t1vol = nib.load(str(t1dir[0]))

    # BET T1 Ref image
    if phantom:
        fmaths = fsl.ImageMaths()
        fmaths.inputs.in_file = str(regdir / f"{t1outname}_vol1.nii.gz")
        fmaths.inputs.out_file = str(regdir / f"{t1outname}_brain.nii.gz")
        fmaths.inputs.op_string = "-thrp 10"
        fmaths.run()

        fmaths.inputs.in_file = str(regdir / f"{t1outname}_brain.nii.gz")
        fmaths.inputs.out_file = str(regdir / f"{t1outname}_brain_mask.nii.gz")
        fmaths.inputs.op_string = "-bin"
        fmaths.run()

        if t1vol.ndim < 3 or t1vol.shape[2] == 1:
            # Flirt first T1 Image to Original Reference volume
            flt = fsl.FLIRT()
            flt.inputs.in_file = str(regdir / f"{t1outname}_vol1.nii.gz")
            flt.inputs.reference = str(regdir / "Ref_sROI.nii.gz")
            flt.inputs.out_file = str(regdir / "T1_to_ref.nii.gz")
            flt.inputs.rigid2D = True
            flt.inputs.output_type = "NIFTI_GZ"
            flt.inputs.out_matrix_file = str(regdir / "T1resred.txt")
            flt.run()
        else:
            # Flirt first T1 Image to Original Reference volume
            flt = fsl.FLIRT()
            flt.inputs.in_file = str(regdir / f"{t1outname}_vol1.nii.gz")
            flt.inputs.reference = str(inrefdir)
            flt.inputs.out_file = str(regdir / "T1_to_ref.nii.gz")
            flt.inputs.rigid2D = True
            flt.inputs.output_type = "NIFTI_GZ"
            flt.inputs.out_matrix_file = str(regdir / "T1resred.txt")
            flt.run()
    else:
        bett1 = fsl.BET()
        bett1.inputs.in_file = str(regdir / f"{t1outname}_vol1.nii.gz")
        bett1.inputs.out_file = str(regdir / f"{t1outname}_brain.nii.gz")
        bett1.inputs.mask = True
        bett1.inputs.padding = True
        bett1.run()

        # Flirt first T1 Image to Original Reference volume
        flt = fsl.FLIRT()
        flt.inputs.in_file = str(regdir / f"{t1outname}_vol1.nii.gz")
        flt.inputs.reference = str(inrefdir)
        flt.inputs.out_file = str(regdir / "T1_to_ref.nii.gz")
        flt.inputs.output_type = "NIFTI_GZ"
        flt.inputs.out_matrix_file = str(regdir / "T1resred.txt")
        flt.run()

    # BET rest of data
    fmaths = fsl.ImageMaths()
    fmaths.inputs.in_file = str(regdir / f"{t1outname}_merged_mcf.nii.gz")
    fmaths.inputs.op_string = "-mul"
    fmaths.inputs.in_file2 = str(regdir / f"{t1outname}_brain_mask.nii.gz")
    fmaths.inputs.out_file = str(regdir / f"{t1outname}_merged_brain.nii.gz")
    fmaths.run()

    if len(_slicenumber2d) > 0:
        # Flirt T1 Map Data to Original Reference volume
        if t1vol.ndim < 3 or t1vol.shape[2] == 1:
            flt = fsl.FLIRT()
            flt.inputs.in_file = str(regdir / f"{t1outname}_merged_brain.nii.gz")
            flt.inputs.reference = str(regdir / "Ref_sROI.nii.gz")
            flt.inputs.out_file = str(regdir / "T1Data_sROI.nii.gz")
            if phantom:
                flt.inputs.rigid2D = True
            flt.inputs.output_type = "NIFTI_GZ"
            flt.inputs.in_matrix_file = str(regdir / "T1resred.txt")
            flt.inputs.apply_xfm = True
            flt.inputs.out_matrix_file = str(regdir / "T1resred.txt")
            flt.run()
        else:
            flt = fsl.FLIRT()
            flt.inputs.in_file = str(regdir / f"{t1outname}_merged_brain.nii.gz")
            flt.inputs.reference = str(inrefdir)
            flt.inputs.out_file = str(regdir / "T1Data_to_ref.nii.gz")
            flt.inputs.output_type = "NIFTI_GZ"
            flt.inputs.in_matrix_file = str(regdir / "T1resred.txt")
            flt.inputs.apply_xfm = True
            flt.inputs.out_matrix_file = str(regdir / "T1resred.txt")
            flt.run()

            if _slicenumber2d[1] > 1:
                # Run for T1 Data
                fsl.ExtractROI(
                    in_file=str(regdir / "T1Data_to_ref.nii.gz"),
                    roi_file=str(regdir / "T1Data_sROI.nii.gz"),
                    x_min=0,
                    x_size=-1,
                    y_min=0,
                    y_size=-1,
                    z_min=_slicenumber2d[0] - 1,
                    z_size=_slicenumber2d[1],
                ).run()
            else:
                # Run for T1 Data
                fsl.ExtractROI(
                    in_file=str(regdir / "T1Data_to_ref.nii.gz"),
                    roi_file=str(regdir / "T1Data_sROI.nii.gz"),
                    x_min=0,
                    x_size=-1,
                    y_min=0,
                    y_size=-1,
                    z_min=_slicenumber2d[0],
                    z_size=_slicenumber2d[1],
                ).run()

        # Warp 2D T1 Data to CEST Space
        flt.inputs.in_file = str(regdir / "T1Data_sROI.nii.gz")
        flt.inputs.out_file = str(regdir / "T1Data_resred.nii.gz")
        flt.inputs.reference = str(regdir / outrefname)
        flt.inputs.in_matrix_file = str(regdir / "Ref_CESTres.txt")
        flt.inputs.apply_xfm = True
        flt.inputs.rigid2D = True
        flt.inputs.out_matrix_file = str(regdir / "T1toCEST.txt")
        flt.run()

    else:
        # Combine T1->Ref matrix with Ref->CEST matrix
        xfmcomb = fsl.ConvertXFM()
        xfmcomb.inputs.in_file = str(regdir / "T1resred.txt")
        xfmcomb.inputs.in_file2 = str(regdir / "Ref_CESTres.txt")
        xfmcomb.inputs.concat_xfm = True
        xfmcomb.inputs.out_file = str(regdir / "T1toCEST.txt")
        xfmcomb.run()

        # Use combine Ref->CEST matrix to register T1 data to CEST data
        flt.inputs.in_file = str(regdir / f"{t1outname}_merged_brain.nii.gz")
        flt.inputs.out_file = str(regdir / "T1Data_resred.nii.gz")
        flt.inputs.reference = str(regdir / outrefname)
        flt.inputs.in_matrix_file = str(regdir / "T1toCEST.txt")
        flt.inputs.apply_xfm = True
        flt.inputs.out_matrix_file = str(regdir / "T1toCEST.txt")
        flt.run()

    shutil.copyfile(
        str(regdir / "T1Data_resred.nii.gz"), str(outfolder / f"{t1outname}_reg.nii.gz")
    )

    return None
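
A minimal usage sketch (directories and the file prefix are hypothetical placeholders; assumes the module-level _slicenumber2d list is defined and the reference files from the earlier _reg_ref step already exist in regdir):

from pathlib import Path
_t1reg(Path('/data/sub-01'), 'T1map', Path('/data/sub-01/reg'),
       outfolder=Path('/data/sub-01/out'))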
Example n. 11
def _b1resize(
    datapath,
    b1name,
    regdir,
    b1FAmap=None,
    outfolder=Path.cwd(),
    inrefname=None,
    outrefname="Ref_CESTres.nii.gz",
    phantom=False,
):
    """B1_RESIZE - Preprocesses B1 Data for Use with FABBER
    Takes the raw B1 data and processes it into a B1 map.  Also flirts it
    into the reference volume space.  Does not apply any actual registration,
    just resamples it so it is in the same resolution as the Reference data.

    Parameters:
    -----------
    datapath : pathlib Path object
        The path to the datafolder
    b1name : str
        The name of the B1+ scan to register.
    regdir : pathlib Path object
        The registration directory being used for all 
        of the registration computations.
    b1FAmap : str
        The name of the B1+ FA map to register. 
        If None, will assume this is in the b1name file.
    outfolder : pathlib Path object
        The output directory of the data being analyzed. 
        If blank, the datapath directory will be used.
    inrefname : str
        The name of the reference volume to register b1 to.
        If None, the file given by outrefname will be used.
    outrefname : str
        The name of the reference volume at CEST resolution
        (default: Ref_CESTres.nii.gz).

    Returns:
    --------
        None

    Author:  asmith
    Version: 1.0
    Changelog:

    20181217 - initial creation
    """
    b1dir = sorted(datapath.glob("*{0}*.nii.gz".format(b1name)))

    if len(b1dir) > 1 and b1FAmap is None:
        fslmerge = fsl.Merge()
        fslmerge.inputs.in_files = [str(i) for i in b1dir]
        fslmerge.inputs.dimension = "t"
        fslmerge.inputs.merged_file = str(regdir / f"{b1name}_merged.nii.gz")
        fslmerge.run()

        b1dir = regdir / f"{b1name}_merged.nii.gz"

    try:
        b1vol = nib.load(str(b1dir[1]))
    except TypeError:
        b1vol = nib.load(str(b1dir))
    except IndexError:
        b1vol = nib.load(str(b1dir[0]))
    except Exception:
        print(f"Unexpected Error: {sys.exc_info()[0]}")
        raise  # b1vol would be undefined below, so re-raise instead of continuing

    if b1FAmap is None and b1vol.ndim < 4:
        if len(b1dir) > 1:
            b1FAmap = Path(b1dir[-1].stem).stem
        else:
            raise NoFAMapError(
                "No FA Map specified for B1 Data!\nProvide a FA map to proceed!"
            )
    if b1FAmap is None and b1vol.shape[3] > 2:
        # Split Data
        fsplt = fsl.Split()
        fsplt.inputs.in_file = str(b1dir)
        fsplt.inputs.out_base_name = str(regdir / "DREAM_s")
        fsplt.inputs.dimension = "t"
        fsplt.run()

        # set variables so anatomical and FA map are defined
        b1dirs = sorted(regdir.glob("*DREAM_s*.nii.gz"))
        b1dir = str(b1dirs[1])
        b1FAmapdir = str(b1dirs[-1])

    elif b1FAmap is None and b1vol.shape[3] == 2:
        # Split Data
        fsplt = fsl.Split()
        fsplt.inputs.in_file = str(b1dir)
        fsplt.inputs.out_base_name = str(regdir / "DREAM_s")
        fsplt.inputs.dimension = "t"
        fsplt.run()

        # Split b1vol into component anatomicals for registration
        b1dirs = sorted(regdir.glob("*DREAM_s*.nii.gz"))
        b1dir = str(b1dirs[1])
        # Define b1FAmap for use in downstream registration
        b1FAmapdir = regdir / "B1map.nii.gz"

        # Build FA map from anatomical maps
        fmaths = fsl.ImageMaths()
        fmaths.inputs.in_file = str(b1dirs[0])
        fmaths.inputs.op_string = "-mul 2 -div"
        fmaths.inputs.in_file2 = str(b1dirs[1])
        fmaths.inputs.out_file = str(regdir / "tmp1.nii.gz")
        fmaths.run()
        fmaths = fsl.ImageMaths()
        fmaths.inputs.in_file = str(regdir / "tmp1.nii.gz")
        fmaths.inputs.op_string = f"-sqrt -atan -div {radians(60)} -mul 600"
        fmaths.inputs.out_file = str(b1FAmapdir)
        fmaths.run()
    else:
        b1FAmapdir = sorted(datapath.glob(f"*{b1FAmap}*.nii.gz"))[0]
        try:
            b1dir = str(b1dir[1])
        except IndexError:
            b1dir = str(b1dir[0])

    b1splt = fsl.ExtractROI()
    b1splt.inputs.in_file = b1dir
    b1splt.inputs.roi_file = str(regdir / "B1_1.nii.gz")
    b1splt.inputs.t_size = 1
    b1splt.inputs.t_min = 0
    b1splt.run()

    # Run FAST on B1 input data to get better registration
    b1FAST = fsl.FAST()
    b1FAST.inputs.in_files = str(regdir / "B1_1.nii.gz")
    b1FAST.inputs.out_basename = str(regdir / "B1_bc")
    b1FAST.inputs.output_biascorrected = True
    b1FAST.inputs.no_pve = True
    b1FAST.inputs.output_type = "NIFTI_GZ"
    b1FAST.run(ignore_exception=True)

    if phantom:
        # _FOVDiff(str(regdir / "B1_bc_restore.nii.gz"), str(inrefdir), "B1resred.txt", regdir=regdir)

        # Flirt B1 Image Data to Original Reference volume
        flt = fsl.FLIRT()
        flt.inputs.in_file = str(regdir / "B1_bc_restore.nii.gz")
        flt.inputs.reference = str(regdir / "Ref_bc_restore.nii.gz")
        flt.inputs.out_file = str(regdir / "B1_to_ref.nii.gz")
        flt.inputs.output_type = "NIFTI_GZ"
        flt.inputs.rigid2D = True
        # flt.inputs.in_matrix_file = str(regdir / "B1resred.txt")

        # flt.inputs.apply_xfm=True
        flt.inputs.out_matrix_file = str(regdir / "B1resred.txt")
        flt.run()
    else:
        # Flirt B1 Image Data to Original Reference volume
        flt = fsl.FLIRT()
        flt.inputs.in_file = str(regdir / "B1_bc_restore.nii.gz")
        flt.inputs.reference = str(regdir / "Ref_bc_restore.nii.gz")
        flt.inputs.out_file = str(regdir / "B1_to_ref.nii.gz")
        flt.inputs.output_type = "NIFTI_GZ"
        flt.inputs.out_matrix_file = str(regdir / "B1resred.txt")
        flt.run()

    if len(_slicenumber2d) > 0:
        # Flirt B1 Map Data to Original Reference volume
        flt = fsl.FLIRT()
        flt.inputs.in_file = str(b1FAmapdir)
        flt.inputs.reference = str(regdir / "Ref_bc_restore.nii.gz")
        flt.inputs.out_file = str(regdir / "B1map_to_ref.nii.gz")
        flt.inputs.output_type = "NIFTI_GZ"
        flt.inputs.in_matrix_file = str(regdir / "B1resred.txt")
        flt.inputs.apply_xfm = True
        flt.inputs.out_matrix_file = str(regdir / "B1resred.txt")
        flt.run()

        if _slicenumber2d[1] > 1:
            # Run for B1 Map
            fsl.ExtractROI(
                in_file=str(regdir / "B1map_to_ref.nii.gz"),
                roi_file=str(regdir / "B1map_sROI.nii.gz"),
                x_min=0,
                x_size=-1,
                y_min=0,
                y_size=-1,
                z_min=_slicenumber2d[0] - 1,
                z_size=_slicenumber2d[1],
            ).run()
        else:
            # Run for B1 Map
            fsl.ExtractROI(
                in_file=str(regdir / "B1map_to_ref.nii.gz"),
                roi_file=str(regdir / "B1map_sROI.nii.gz"),
                x_min=0,
                x_size=-1,
                y_min=0,
                y_size=-1,
                z_min=_slicenumber2d[0],
                z_size=_slicenumber2d[1],
            ).run()

        # Warp 2D B1map to CEST Space
        flt.inputs.in_file = str(regdir / "B1map_sROI.nii.gz")
        flt.inputs.out_file = str(regdir / "B1map_resred.nii.gz")
        flt.inputs.reference = str(regdir / outrefname)
        flt.inputs.in_matrix_file = str(regdir / "Ref_CESTres.txt")
        flt.inputs.apply_xfm = True
        flt.inputs.rigid2D = True
        flt.inputs.out_matrix_file = str(regdir / "B1toCEST.txt")
        flt.run()

    else:
        # Combine B1->Ref matrix with Ref->CEST matrix
        xfmcomb = fsl.ConvertXFM()
        xfmcomb.inputs.in_file = str(regdir / "B1resred.txt")
        xfmcomb.inputs.in_file2 = str(regdir / "Ref_CESTres.txt")
        xfmcomb.inputs.concat_xfm = True
        xfmcomb.inputs.out_file = str(regdir / "B1toCEST.txt")
        xfmcomb.run()

        # Use combine Ref->CEST matrix to register B1 FA map to CEST data
        flt.inputs.in_file = str(b1FAmapdir)
        flt.inputs.out_file = str(regdir / "B1map_resred.nii.gz")
        flt.inputs.reference = str(regdir / outrefname)
        flt.inputs.in_matrix_file = str(regdir / "B1toCEST.txt")
        flt.inputs.apply_xfm = True
        flt.inputs.out_matrix_file = str(regdir / "B1toCEST.txt")
        flt.run()

    # Convert Registered FA Map to Fraction of nominal angle and move to analysis folder
    fmaths = fsl.ImageMaths()
    fmaths.inputs.in_file = str(regdir / "B1map_resred.nii.gz")
    fmaths.inputs.op_string = "-div 600 -mul"
    fmaths.inputs.in_file2 = str(regdir / "Ref_Mask.nii.gz")
    fmaths.inputs.out_file = str(outfolder / "B1map_resize.nii.gz")
    fmaths.run()

    return None
epiMean = pe.Node(fsl.maths.MeanImage(dimension='T', output_type='NIFTI_GZ'),
                  name='epiMean')

epiBiasCorrect = pe.Node(ants.N4BiasFieldCorrection(
    dimension=3,
    n_iterations=[50, 50, 30, 20],
    convergence_threshold=0.0,
    shrink_factor=3,
    bspline_fitting_distance=300),
                         name='epiBiasCorrect')

## Register epi image to magnitude image
epi2mag = pe.Node(fsl.FLIRT(dof=6, cost='normcorr'), name='epi2mag')

convertXFM = pe.Node(fsl.ConvertXFM(invert_xfm=True), name='convertXFM')

fmap2epi = pe.Node(fsl.FLIRT(apply_xfm=True), name='fmap2epi')

## Fieldmap unwarping
epiUnwarp = pe.Node(fsl.preprocess.FUGUE(dwell_time=effectEcho,
                                         unwarp_direction=unwarpDir,
                                         forward_warping=False,
                                         nokspace=True),
                    name='epiUnwarp')

# Estimate signal loss map
sigloss = pe.Node(fsl.SigLoss(echo_time=TE), name='sigloss')

# Registration/Normalisation workflow
# Register functional slab to anatomical
Example n. 13
def bbr_workflow(SinkTag="func_preproc", wf_name="func2anat"):
    """
        Modified version of CPAC.registration.registration:

        `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/registration/registration.html`


        BBR registration of functional image to anat.

        Workflow inputs:
            :param func: One volume of the 4D fMRI (choose the volume closest in time to the fieldmap recording, e.g. if the fieldmap was recorded after the fMRI, choose its last volume).
            :param skull: The oriented high-resolution T1w image.
            :param anat_wm_segmentation: WM probability mask in anatomical space.
            :param anat_gm_segmentation: GM probability mask in anatomical space.
            :param anat_csf_segmentation: CSF probability mask in anatomical space.
            :param anat_ventricle_segmentation: ventricle probability mask in anatomical space.
            :param bbr_schedule: Parameters which specify the BBR options.
            :param SinkDir:
            :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found.

        Workflow outputs:
            :return: bbreg_workflow - workflow

        Example inputs:
            func="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/s002/func_data.nii.gz",
            skull="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/MS001/highres.nii.gz",
            anat_wm_segmentation="/home/balint/Dokumentumok/phd/essen/PAINTER/probe/anat_preproc/fast/fast__prob_2.nii.gz",

        Balint Kincses
        [email protected]
        2018


        """
    import os
    import nipype.pipeline as pe
    from nipype.interfaces.utility import Function
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.func_preproc.Onevol as onevol
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Define inputs of the workflow
    inputspec = pe.Node(utility.IdentityInterface(fields=[
        'func', 'skull', 'anat_wm_segmentation', 'anat_gm_segmentation',
        'anat_csf_segmentation', 'anat_ventricle_segmentation'
    ]),
                        name='inputspec')

    myonevol = onevol.onevol_workflow()

    # trilinear interpolation is used by default in linear registration for func to anat
    linear_reg = pe.MapNode(interface=fsl.FLIRT(),
                            iterfield=['in_file', 'reference'],
                            name='linear_func_to_anat')
    linear_reg.inputs.cost = 'corratio'
    linear_reg.inputs.dof = 6
    linear_reg.inputs.out_matrix_file = "lin_mat"

    # WM probability map is thresholded and masked
    wm_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='wm_bb_mask')
    wm_bb_mask.inputs.op_string = '-thr 0.5 -bin'
    # CSF probability map is thresholded and masked
    csf_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                             iterfield=['in_file'],
                             name='csf_bb_mask')
    csf_bb_mask.inputs.op_string = '-thr 0.5 -bin'

    # GM probability map is thresholded and masked
    gm_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='gm_bb_mask')
    gm_bb_mask.inputs.op_string = '-thr 0.1 -bin'  # liberal mask to capture all gm signal

    # ventricle probability map is thresholded and masked
    vent_bb_mask = pe.MapNode(interface=fsl.ImageMaths(),
                              iterfield=['in_file'],
                              name='vent_bb_mask')
    vent_bb_mask.inputs.op_string = '-thr 0.8 -bin -ero -dilM'  # stricter threshold and some morphology for compcor

    # add the CSF and WM masks
    #add_masks=pe.MapNode(interface=fsl.ImageMaths(),
    #                     iterfield=['in_file','in_file2'],
    #                     name='add_masks')
    #add_masks.inputs.op_string = ' -add'

    # A helper function builds the FLIRT argument string that tells FLIRT to perform
    # BBR registration, one string per element of the list (due to MapNode)
    def bbreg_args(bbreg_target):
        return '-cost bbr -wmseg ' + bbreg_target

    bbreg_arg_convert = pe.MapNode(interface=Function(
        input_names=["bbreg_target"],
        output_names=["arg"],
        function=bbreg_args),
                                   iterfield=['bbreg_target'],
                                   name="bbr_arg_converter")

    # BBR registration within the FLIRT node
    bbreg_func_to_anat = pe.MapNode(
        interface=fsl.FLIRT(),
        iterfield=['in_file', 'reference', 'in_matrix_file', 'args'],
        name='bbreg_func_to_anat')
    bbreg_func_to_anat.inputs.dof = 6

    # calculate the inverse of the transformation matrix (of func to anat)
    convertmatrix = pe.MapNode(interface=fsl.ConvertXFM(),
                               iterfield=['in_file'],
                               name="convertmatrix")
    convertmatrix.inputs.invert_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical CSF mask
    reg_anatmask_to_func1 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func1')
    #reg_anatmask_to_func1.inputs.apply_xfm = True
    # use the inverse registration (anat to func) to transform the anatomical WM mask
    reg_anatmask_to_func2 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func2')
    #reg_anatmask_to_func2.inputs.apply_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical GM mask
    reg_anatmask_to_func3 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func3')
    # reg_anatmask_to_func2.inputs.apply_xfm = True

    # use the inverse registration (anat to func) to transform the anatomical ventricle mask
    reg_anatmask_to_func4 = pe.MapNode(
        interface=fsl.FLIRT(apply_xfm=True, interp='nearestneighbour'),
        iterfield=['in_file', 'reference', 'in_matrix_file'],
        name='anatmasks_to_func4')
    # reg_anatmask_to_func2.inputs.apply_xfm = True

    # Create png images for quality check
    myqc = qc.vol2png("func2anat")

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    # Define outputs of the workflow
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'func_sample2anat', 'example_func', 'func_to_anat_linear_xfm',
        'anat_to_func_linear_xfm', 'csf_mask_in_funcspace',
        'wm_mask_in_funcspace', 'gm_mask_in_funcspace',
        'ventricle_mask_in_funcspace'
    ]),
                         name='outputspec')

    analysisflow = pe.Workflow(name=wf_name)
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'func', myonevol, 'inputspec.func')
    analysisflow.connect(myonevol, 'outputspec.func1vol', linear_reg,
                         'in_file')
    analysisflow.connect(inputspec, 'skull', linear_reg, 'reference')
    analysisflow.connect(linear_reg, 'out_matrix_file', bbreg_func_to_anat,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol', bbreg_func_to_anat,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_wm_segmentation', bbreg_arg_convert,
                         'bbreg_target')
    analysisflow.connect(bbreg_arg_convert, 'arg', bbreg_func_to_anat, 'args')
    analysisflow.connect(inputspec, 'skull', bbreg_func_to_anat, 'reference')
    analysisflow.connect(bbreg_func_to_anat, 'out_matrix_file', convertmatrix,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func1,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func1, 'reference')
    analysisflow.connect(csf_bb_mask, 'out_file', reg_anatmask_to_func1,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func2,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func2, 'reference')
    analysisflow.connect(wm_bb_mask, 'out_file', reg_anatmask_to_func2,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func3,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func3, 'reference')
    analysisflow.connect(gm_bb_mask, 'out_file', reg_anatmask_to_func3,
                         'in_file')
    analysisflow.connect(convertmatrix, 'out_file', reg_anatmask_to_func4,
                         'in_matrix_file')
    analysisflow.connect(myonevol, 'outputspec.func1vol',
                         reg_anatmask_to_func4, 'reference')
    analysisflow.connect(vent_bb_mask, 'out_file', reg_anatmask_to_func4,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_wm_segmentation', wm_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_csf_segmentation', csf_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_gm_segmentation', gm_bb_mask,
                         'in_file')
    analysisflow.connect(inputspec, 'anat_ventricle_segmentation',
                         vent_bb_mask, 'in_file')
    analysisflow.connect(bbreg_func_to_anat, 'out_file', outputspec,
                         'func_sample2anat')
    analysisflow.connect(bbreg_func_to_anat, 'out_matrix_file', outputspec,
                         'func_to_anat_linear_xfm')
    analysisflow.connect(reg_anatmask_to_func1, 'out_file', outputspec,
                         'csf_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func2, 'out_file', outputspec,
                         'wm_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func3, 'out_file', outputspec,
                         'gm_mask_in_funcspace')
    analysisflow.connect(reg_anatmask_to_func4, 'out_file', outputspec,
                         'ventricle_mask_in_funcspace')
    analysisflow.connect(myonevol, 'outputspec.func1vol', outputspec,
                         'example_func')
    analysisflow.connect(convertmatrix, 'out_file', outputspec,
                         'anat_to_func_linear_xfm')
    analysisflow.connect(bbreg_func_to_anat, 'out_file', ds, "func2anat")
    analysisflow.connect(bbreg_func_to_anat, 'out_file', myqc,
                         'inputspec.bg_image')
    analysisflow.connect(wm_bb_mask, 'out_file', myqc,
                         'inputspec.overlay_image')

    return analysisflow
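
A minimal usage sketch (assumes the PUMI package is importable and PUMI.utils.globals._SinkDir_ has been configured; file names are hypothetical placeholders):

bbr = bbr_workflow(SinkTag="func_preproc", wf_name="func2anat")
bbr.inputs.inputspec.func = 'func_data.nii.gz'                              # hypothetical
bbr.inputs.inputspec.skull = 'highres.nii.gz'                               # hypothetical
bbr.inputs.inputspec.anat_wm_segmentation = 'fast_prob_2.nii.gz'            # hypothetical
bbr.inputs.inputspec.anat_gm_segmentation = 'fast_prob_1.nii.gz'            # hypothetical
bbr.inputs.inputspec.anat_csf_segmentation = 'fast_prob_0.nii.gz'           # hypothetical
bbr.inputs.inputspec.anat_ventricle_segmentation = 'ventricle_mask.nii.gz'  # hypothetical
# bbr.run()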
def apply_all_corrections_using_ants(name='UnwarpArtifacts'):
    """
    Combines two lists of linear transforms with the deformation field
    map obtained epi_correction by Ants.
    Additionally, computes the corresponding bspline coefficients and
    the map of determinants of the jacobian.
    """
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_sdc_syb', 'in_hmc', 'in_ecc', 'in_dwi', 'in_t1']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_warp', 'out_coeff', 'out_jacobian']),
        name='outputnode')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')

    concat_hmc_ecc = pe.MapNode(fsl.ConvertXFM(), name="concat_hmc_ecc",
                                iterfield=['in_file', 'in_file2'])
    concat_hmc_ecc.inputs.concat_xfm = True

    warps = pe.MapNode(fsl.ConvertWarp(), iterfield=['premat'],
                       name='ConvertWarp')

    unwarp = pe.MapNode(interface=fsl.ApplyWarp(),
                        iterfield=['in_file', 'field_file'],
                        name='unwarp_warp')
    unwarp.inputs.interp = 'spline'

    coeffs = pe.MapNode(fsl.WarpUtils(out_format='spline'),
                        iterfield=['in_file'], name='CoeffComp')
    jacobian = pe.MapNode(fsl.WarpUtils(write_jacobian=True),
                          iterfield=['in_file'], name='JacobianComp')
    jacmult = pe.MapNode(fsl.MultiImageMaths(op_string='-mul %s'),
                         iterfield=['in_file', 'operand_files'],
                         name='ModulateDWIs')

    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, concat_hmc_ecc, [('in_ecc', 'in_file2')]),  # noqa
        (inputnode, concat_hmc_ecc, [('in_hmc', 'in_file')]),  # noqa

        (concat_hmc_ecc, warps, [('out_file',            'premat')]),  # noqa
        (inputnode,      warps, [('in_sdc_syb',           'warp1')]),  # noqa
        (inputnode,      warps, [('in_t1',            'reference')]),  # noqa
        (inputnode,      split, [('in_dwi',             'in_file')]),  # noqa
        (warps,         unwarp, [('out_file',        'field_file')]),  # noqa
        (split,         unwarp, [('out_files',          'in_file')]),  # noqa
        (inputnode,     unwarp, [('in_t1',             'ref_file')]),  # noqa
        (inputnode,     coeffs, [('in_t1',            'reference')]),  # noqa
        (warps,         coeffs, [('out_file',           'in_file')]),  # noqa
        (inputnode,   jacobian, [('in_t1',            'reference')]),  # noqa
        (coeffs,      jacobian, [('out_file',           'in_file')]),  # noqa
        (unwarp,       jacmult, [('out_file',           'in_file')]),  # noqa
        (jacobian,     jacmult, [('out_jacobian', 'operand_files')]),  # noqa
        (jacmult,        thres, [('out_file',           'in_file')]),  # noqa
        (thres,          merge, [('out_file',          'in_files')]),  # noqa
        (warps,     outputnode, [('out_file',          'out_warp')]),  # noqa
        (coeffs,    outputnode, [('out_file',         'out_coeff')]),  # noqa
        (jacobian,  outputnode, [('out_jacobian',  'out_jacobian')]),  # noqa
        (merge,     outputnode, [('merged_file',        'out_file')])  # noqa
    ])

    return wf
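
A minimal usage sketch (file names are hypothetical placeholders; in_hmc and in_ecc are per-volume .mat lists matching the number of DWI volumes):

unwarp_wf = apply_all_corrections_using_ants(name='UnwarpArtifacts')
unwarp_wf.base_dir = '/tmp/unwarp_work'                                     # hypothetical
unwarp_wf.inputs.inputnode.in_dwi = 'dwi.nii.gz'                            # hypothetical
unwarp_wf.inputs.inputnode.in_t1 = 't1.nii.gz'                              # hypothetical
unwarp_wf.inputs.inputnode.in_sdc_syb = 'sdc_warp.nii.gz'                   # hypothetical
unwarp_wf.inputs.inputnode.in_hmc = ['hmc_vol0000.mat', 'hmc_vol0001.mat']  # hypothetical
unwarp_wf.inputs.inputnode.in_ecc = ['ecc_vol0000.mat', 'ecc_vol0001.mat']  # hypothetical
# unwarp_wf.run()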
Example n. 15
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.fsl as fsl


def create_register_func_to_mni(name='register_func_to_mni'):
    """
    Registers a functional scan in native space to MNI standard space. This is meant to be used
    after create_nonlinear_register() has been run and relies on some of its outputs.

    Parameters
    ----------
    name : string, optional
        Name of the workflow.

    Returns
    -------
    register_func_to_mni : nipype.pipeline.engine.Workflow

    Notes
    -----
    
    Workflow Inputs::

        inputspec.func : string (nifti file)
            Input functional scan to be registered to MNI space
        inputspec.mni : string (nifti file)
            Reference MNI file
        inputspec.anat : string (nifti file)
            Corresponding anatomical scan of subject
        inputspec.interp : string
            Type of interpolation to use ('trilinear' or 'nearestneighbour' or 'sinc')
        inputspec.anat_to_mni_nonlinear_xfm : string (warp file)
            Corresponding anatomical native space to MNI warp file
        inputspec.anat_to_mni_linear_xfm : string (mat file)
            Corresponding anatomical native space to MNI mat file
            
    Workflow Outputs::
    
        outputspec.func_to_anat_linear_xfm : string (mat file)
            Affine transformation from functional to anatomical native space
        outputspec.func_to_mni_linear_xfm : string (mat file)
            Affine transformation from functional to MNI space
        outputspec.mni_to_func_linear_xfm : string (mat file)
            Affine transformation from MNI to functional space
        outputspec.mni_func : string (nifti file)
            Functional scan registered to MNI standard space
            
    Workflow Graph:
    
    .. image:: ../images/register_func_to_mni.dot.png
        :width: 500
        
    Detailed Workflow Graph:
    
    .. image:: ../images/register_func_to_mni_detailed.dot.png
        :width: 500
    """
    register_func_to_mni = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'func', 'mni', 'anat', 'interp', 'anat_to_mni_nonlinear_xfm',
        'anat_to_mni_linear_xfm'
    ]),
                        name='inputspec')
    outputspec = pe.Node(util.IdentityInterface(fields=[
        'func_to_anat_linear_xfm', 'func_to_mni_linear_xfm',
        'mni_to_func_linear_xfm', 'mni_func'
    ]),
                         name='outputspec')

    linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_func_to_anat')
    linear_reg.inputs.cost = 'corratio'
    linear_reg.inputs.dof = 6

    mni_warp = pe.Node(interface=fsl.ApplyWarp(), name='mni_warp')

    mni_affine = pe.Node(interface=fsl.ConvertXFM(), name='mni_affine')
    mni_affine.inputs.concat_xfm = True
    register_func_to_mni.connect(linear_reg, 'out_matrix_file', mni_affine,
                                 'in_file2')
    register_func_to_mni.connect(inputspec, 'anat_to_mni_linear_xfm',
                                 mni_affine, 'in_file')
    register_func_to_mni.connect(mni_affine, 'out_file', outputspec,
                                 'func_to_mni_linear_xfm')

    inv_mni_affine = pe.Node(interface=fsl.ConvertXFM(), name='inv_mni_affine')
    inv_mni_affine.inputs.invert_xfm = True
    register_func_to_mni.connect(mni_affine, 'out_file', inv_mni_affine,
                                 'in_file')
    register_func_to_mni.connect(inv_mni_affine, 'out_file', outputspec,
                                 'mni_to_func_linear_xfm')

    register_func_to_mni.connect(inputspec, 'func', linear_reg, 'in_file')
    register_func_to_mni.connect(inputspec, 'anat', linear_reg, 'reference')
    register_func_to_mni.connect(inputspec, 'interp', linear_reg, 'interp')

    register_func_to_mni.connect(inputspec, 'func', mni_warp, 'in_file')
    register_func_to_mni.connect(inputspec, 'mni', mni_warp, 'ref_file')
    register_func_to_mni.connect(inputspec, 'anat_to_mni_nonlinear_xfm',
                                 mni_warp, 'field_file')

    register_func_to_mni.connect(linear_reg, 'out_matrix_file', mni_warp,
                                 'premat')

    register_func_to_mni.connect(linear_reg, 'out_matrix_file', outputspec,
                                 'func_to_anat_linear_xfm')
    register_func_to_mni.connect(mni_warp, 'out_file', outputspec, 'mni_func')

    return register_func_to_mni
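
A minimal usage sketch (file names are hypothetical placeholders; the nonlinear warp and linear .mat come from a prior create_nonlinear_register() run, as noted in the docstring):

wf = create_register_func_to_mni()
wf.base_dir = '/tmp/func2mni_work'                                      # hypothetical
wf.inputs.inputspec.func = 'mean_func.nii.gz'                           # hypothetical
wf.inputs.inputspec.anat = 'anat_brain.nii.gz'                          # hypothetical
wf.inputs.inputspec.mni = 'MNI152_T1_2mm_brain.nii.gz'                  # hypothetical
wf.inputs.inputspec.interp = 'trilinear'
wf.inputs.inputspec.anat_to_mni_nonlinear_xfm = 'anat2mni_warp.nii.gz'  # hypothetical
wf.inputs.inputspec.anat_to_mni_linear_xfm = 'anat2mni.mat'             # hypothetical
# wf.run()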
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.fsl as fsl
import nipype.interfaces.afni as afni
import nipype.interfaces.dcm2nii as dcm2nii


def create_preprocessed_nki(wf_name='preproc_nki', TR=1400):
    # TR is in milliseconds and selects the temporal-filter width below
    preproc = pe.Workflow(name=wf_name)
    inputNode = pe.Node(util.IdentityInterface(fields=[
        'dcm_dir', 'anat_file', 'slice_timings_file', 'ref_file', 'TR'
    ]),
                        name='inputspec')

    outputNode = pe.Node(util.IdentityInterface(fields=[
        'nifti_file', 'anat_skullstripped', 'slice_time_corrected_file',
        'temporal_filtering_file', 'motion_corrected_file',
        'linear_registered_file', 'mat_file_reg', 'log_file_reg',
        'output_trans_matrix', 'mean_func'
    ]),
                         name='outputspec')
    ############ converting the dicom files into nii files################

    dcm_to_nii = pe.Node(interface=dcm2nii.Dcm2niix(), name='dcm_to_nii')
    dcm_to_nii.inputs.bids_format = True
    preproc.connect(inputNode, 'dcm_dir', dcm_to_nii, 'source_dir')
    preproc.connect(dcm_to_nii, 'converted_files', outputNode, 'nifti_file')

    #Let's start with skull stripping the anat files
    anat_skullstrip = pe.Node(interface=fsl.BET(), name='anat_skullstrip')
    anat_skullstrip.inputs.output_type = 'NIFTI_GZ'
    preproc.connect(inputNode, 'anat_file', anat_skullstrip, 'in_file')
    preproc.connect(anat_skullstrip, 'out_file', outputNode,
                    'anat_skullstripped')

    #Now we can get to preprocessing the func files
    # nipype's afni module has no SliceTimer; fsl.SliceTimer provides the
    # 'interleaved' flag and a custom slice-timings file ('custom_timings')
    slice_time_corr = pe.Node(interface=fsl.SliceTimer(),
                              name='slice_time_corr')
    slice_time_corr.inputs.interleaved = True
    preproc.connect(dcm_to_nii, 'converted_files', slice_time_corr, 'in_file')
    preproc.connect(inputNode, 'slice_timings_file', slice_time_corr,
                    'custom_timings')
    preproc.connect(slice_time_corr, 'out_file', outputNode,
                    'slice_time_corrected_file')

    #motion correction
    motion_corr = pe.Node(interface=afni.Volreg(), name='motion_corr')
    preproc.connect(slice_time_corr, 'out_file', motion_corr, 'in_file')
    preproc.connect(motion_corr, 'out_file', outputNode,
                    'motion_correction_file')

    #temporal filtering
    temp_corr = pe.Node(interface=fsl.maths.TemporalFilter(), name='temp_corr')
    #temp_corr.inputs.lowpass = 0.01
    if TR == 1400:
        temp_corr.inputs.highpass_sigma = 35
    elif TR == 645:
        temp_corr.inputs.highpass_sigma = 78
    else:
        temp_corr.inputs.highpass_sigma = 20
    preproc.connect(motion_corr, 'out_file', temp_corr, 'in_file')
    preproc.connect(temp_corr, 'out_file', outputNode,
                    'temporal_filtering_file')

    #co-registration using FLIRT (FSL's Linear Registration tool)
    flirt_reg = pe.Node(interface=fsl.FLIRT(), name='flirt_reg')
    flirt_reg.inputs.interp = 'nearestneighbour'
    flirt_reg.inputs.save_log = True

    preproc.connect(inputNode, 'ref_file', flirt_reg, 'reference')
    preproc.connect(temp_corr, 'out_file', flirt_reg, 'in_file')
    preproc.connect(flirt_reg, 'out_file', outputNode,
                    'registered_file')
    preproc.connect(flirt_reg, 'out_matrix_file', outputNode, 'mat_file_reg')
    preproc.connect(flirt_reg, 'out_log', outputNode, 'log_file_reg')

    #invert xfm if non linear registration is required
    inv_flirt = pe.Node(interface=fsl.ConvertXFM(), name='inv_flirt')
    preproc.connect(flirt_reg, 'out_matrix_file', inv_flirt, 'in_file')
    inv_flirt.inputs.invert_xfm = True
    preproc.connect(inv_flirt, 'out_file', outputNode, 'out_trans_matrix')

    #construct mean functional
    mean_func = pe.Node(interface=fsl.maths.MeanImage(), name='mean_func')
    mean_func.inputs.dimension = 'T'
    preproc.connect(flirt_reg, 'out_file', mean_func, 'in_file')
    preproc.connect(mean_func, 'out_file', outputNode, 'mean_func')

    return preproc
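
# A minimal usage sketch for the factory above; all paths below are
# hypothetical placeholders and the working directory is assumed writable.
nki_wf = create_preprocessed_nki(wf_name='preproc_nki', TR=645)
nki_wf.base_dir = '/tmp/nki_example'
nki_wf.inputs.inputspec.dcm_dir = '/data/sub-01/dicom'
nki_wf.inputs.inputspec.anat_file = '/data/sub-01/anat/T1w.nii.gz'
nki_wf.inputs.inputspec.slice_timings_file = '/data/sub-01/func/slice_timings.txt'
nki_wf.inputs.inputspec.ref_file = '/data/templates/MNI152_T1_2mm_brain.nii.gz'
nki_wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})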
Example no. 17
def create_coreg_pipeline(name='coreg'):

    # fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # initiate workflow
    coreg = Workflow(name=name)

    #inputnode
    inputnode = Node(util.IdentityInterface(fields=[
        'epi_median',
        'fs_subjects_dir',
        'fs_subject_id',
        'uni_highres',
    ]),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'uni_lowres', 'epi2lowres', 'epi2lowres_mat', 'epi2lowres_dat',
        'highres2lowres', 'highres2lowres_mat', 'highres2lowres_dat',
        'epi2highres_lin', 'epi2highres_lin_mat', 'epi2highres_lin_itk'
    ]),
                      name='outputnode')

    # convert mgz head file for reference
    fs_import = Node(interface=nio.FreeSurferSource(), name='fs_import')

    brain_convert = Node(fs.MRIConvert(out_type='niigz',
                                       out_file='uni_lowres.nii.gz'),
                         name='brain_convert')

    coreg.connect([(inputnode, fs_import, [('fs_subjects_dir', 'subjects_dir'),
                                           ('fs_subject_id', 'subject_id')]),
                   (fs_import, brain_convert, [('brain', 'in_file')]),
                   (brain_convert, outputnode, [('out_file', 'uni_lowres')])])

    # linear registration epi median to lowres mp2rage with bbregister
    bbregister_epi = Node(fs.BBRegister(contrast_type='t2',
                                        out_fsl_file='epi2lowres.mat',
                                        out_reg_file='epi2lowres.dat',
                                        registered_file='epi2lowres.nii.gz',
                                        init='fsl',
                                        epi_mask=True),
                          name='bbregister_epi')

    coreg.connect([
        (inputnode, bbregister_epi, [('fs_subjects_dir', 'subjects_dir'),
                                     ('fs_subject_id', 'subject_id'),
                                     ('epi_median', 'source_file')]),
        (bbregister_epi, outputnode, [('out_fsl_file', 'epi2lowres_mat'),
                                      ('out_reg_file', 'epi2lowres_dat'),
                                      ('registered_file', 'epi2lowres')])
    ])

    # linear register highres mp2rage to lowres mp2rage
    bbregister_anat = Node(fs.BBRegister(
        contrast_type='t1',
        out_fsl_file='highres2lowres.mat',
        out_reg_file='highres2lowres.dat',
        registered_file='highres2lowres.nii.gz',
        init='fsl'),
                           name='bbregister_anat')

    coreg.connect([
        (inputnode, bbregister_anat, [('fs_subjects_dir', 'subjects_dir'),
                                      ('fs_subject_id', 'subject_id'),
                                      ('uni_highres', 'source_file')]),
        (bbregister_anat, outputnode, [('out_fsl_file', 'highres2lowres_mat'),
                                       ('out_reg_file', 'highres2lowres_dat'),
                                       ('registered_file', 'highres2lowres')])
    ])

    # invert highres2lowres transform
    invert = Node(fsl.ConvertXFM(invert_xfm=True), name='invert')
    coreg.connect([(bbregister_anat, invert, [('out_fsl_file', 'in_file')])])

    # concatenate epi2highres transforms
    concat = Node(fsl.ConvertXFM(concat_xfm=True,
                                 out_file='epi2highres_lin.mat'),
                  name='concat')
    coreg.connect([(bbregister_epi, concat, [('out_fsl_file', 'in_file')]),
                   (invert, concat, [('out_file', 'in_file2')]),
                   (concat, outputnode, [('out_file', 'epi2highres_lin_mat')])])

    # convert epi2highres transform into itk format
    itk = Node(interface=c3.C3dAffineTool(fsl2ras=True,
                                          itk_transform='epi2highres_lin.txt'),
               name='itk')

    coreg.connect([(inputnode, itk, [('epi_median', 'source_file'),
                                     ('uni_highres', 'reference_file')]),
                   (concat, itk, [('out_file', 'transform_file')]),
                   (itk, outputnode, [('itk_transform', 'epi2highres_lin_itk')
                                      ])])

    # transform epi to highres
    epi2highres = Node(ants.ApplyTransforms(
        dimension=3,
        output_image='epi2highres_lin.nii.gz',
        interpolation='BSpline',
    ),
                       name='epi2highres')

    coreg.connect([
        (inputnode, epi2highres, [('uni_highres', 'reference_image'),
                                  ('epi_median', 'input_image')]),
        (itk, epi2highres, [('itk_transform', 'transforms')]),
        (epi2highres, outputnode, [('output_image', 'epi2highres_lin')])
    ])

    return coreg
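
# A minimal sketch of plugging the coregistration factory above into a parent
# workflow; the subject ID, all paths and the DataSink layout are hypothetical.
import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio

main_wf = pe.Workflow(name='main_wf', base_dir='/tmp/coreg_example')
coreg_wf = create_coreg_pipeline(name='coreg')
coreg_wf.inputs.inputnode.fs_subjects_dir = '/data/freesurfer'
coreg_wf.inputs.inputnode.fs_subject_id = 'sub-01'
coreg_wf.inputs.inputnode.epi_median = '/data/sub-01/func/epi_median.nii.gz'
coreg_wf.inputs.inputnode.uni_highres = '/data/sub-01/anat/uni_highres.nii.gz'

sink = pe.Node(nio.DataSink(base_directory='/data/derivatives'), name='sink')
main_wf.connect(coreg_wf, 'outputnode.epi2highres_lin',
                sink, 'coreg.@epi2highres_lin')
main_wf.run()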
Example no. 18
fsl2scheme.inputs.usegradmod = True
"""
FSL's Brain Extraction tool is used to create a mask from the b0 image
"""

b0Strip = pe.Node(interface=fsl.BET(mask=True), name='bet_b0')
"""
FSL's FLIRT function is used to coregister the b0 mask and the structural image.
A convert_xfm node is then used to obtain the inverse of the transformation matrix.
FLIRT is used once again to apply the inverse transformation to the parcellated brain image.
"""

coregister = pe.Node(interface=fsl.FLIRT(dof=6), name='coregister')
coregister.inputs.cost = ('corratio')

convertxfm = pe.Node(interface=fsl.ConvertXFM(), name='convertxfm')
convertxfm.inputs.invert_xfm = True

inverse = pe.Node(interface=fsl.FLIRT(), name='inverse')
inverse.inputs.interp = ('nearestneighbour')

inverse_AparcAseg = pe.Node(interface=fsl.FLIRT(), name='inverse_AparcAseg')
inverse_AparcAseg.inputs.interp = ('nearestneighbour')
"""
A number of conversion operations are required to obtain NIFTI files from the FreesurferSource for each subject.
Nodes are used to convert the following:

    * Original structural image to NIFTI
    * Parcellated white matter image to NIFTI
    * Parcellated whole-brain image to NIFTI
    * Pial, white, inflated, and spherical surfaces for both the left and right hemispheres
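"""

# A minimal sketch of the conversions listed above, assuming nipype's
# FreeSurfer interfaces (fs.MRIConvert for volumes, fs.MRIsConvert for
# surfaces); the node names and the surface handling are illustrative only.
import nipype.interfaces.freesurfer as fs

mri_convert_T1 = pe.Node(interface=fs.MRIConvert(out_type='nii'),
                         name='mri_convert_T1')
mri_convert_WMParc = pe.Node(interface=fs.MRIConvert(out_type='nii'),
                             name='mri_convert_WMParc')
mri_convert_AparcAseg = pe.MapNode(interface=fs.MRIConvert(out_type='nii'),
                                   iterfield=['in_file'],
                                   name='mri_convert_AparcAseg')
mris_convert_surfaces = pe.MapNode(interface=fs.MRIsConvert(out_datatype='gii'),
                                   iterfield=['in_file'],
                                   name='mris_convert_surfaces')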
Example no. 19
def create_workflow():
    featpreproc = pe.Workflow(name="featpreproc")

    featpreproc.base_dir = os.path.join(ds_root, 'workingdirs')

    # ===================================================================
    #                  _____                   _
    #                 |_   _|                 | |
    #                   | |  _ __  _ __  _   _| |_
    #                   | | | '_ \| '_ \| | | | __|
    #                  _| |_| | | | |_) | |_| | |_
    #                 |_____|_| |_| .__/ \__,_|\__|
    #                             | |
    #                             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            'funcs',
            'subject_id',
            'session_id',
            'fwhm',  # smoothing
            'highpass'
        ]),
        name="inputspec")

    # SelectFiles
    templates = {
        'ref_manual_fmapmask':  # was: manual_fmapmask
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_manualmask.nii.gz',

        'ref_fmap_magnitude':
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_reference.nii.gz',

        'ref_fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        # 'manualweights':
        # 'manual-masks/sub-eddy/ses-20170511/func/'
        #     'sub-eddy_ses-20170511_task-curvetracing_run-01_frame-50_bold'
        #     '_res-1x1x1_manualweights.nii.gz',

        'ref_func':  # was: manualmask_func_ref
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_reference.nii.gz',

        'ref_funcmask':  # was: manualmask
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_mask.nii.gz',

        'ref_t1':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_reference.nii.gz',

        'ref_t1mask':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_manualmask.nii.gz',

        # 'funcs':
        # 'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/func/'
        #     # 'sub-{subject_id}_ses-{session_id}*_bold_res-1x1x1_preproc'
        #     'sub-{subject_id}_ses-{session_id}*run-01_bold_res-1x1x1_preproc'
        #     # '.nii.gz',
        #     '_nvol10.nii.gz',

        'fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        'fmap_magnitude':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_magnitude1_res-1x1x1_preproc'
            '.nii.gz',

        # 'fmap_mask':
        # 'transformed-manual-fmap-mask/sub-{subject_id}/ses-{session_id}/fmap/'
        #     'sub-{subject_id}_ses-{session_id}_'
        #     'magnitude1_res-1x1x1_preproc.nii.gz',
    }

    inputfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_dir),
                         name="input_files")

    featpreproc.connect([(inputnode, inputfiles, [
        ('subject_id', 'subject_id'),
        ('session_id', 'session_id'),
    ])])

    # ===================================================================
    #                   ____        _               _
    #                  / __ \      | |             | |
    #                 | |  | |_   _| |_ _ __  _   _| |_
    #                 | |  | | | | | __| '_ \| | | | __|
    #                 | |__| | |_| | |_| |_) | |_| | |_
    #                  \____/ \__,_|\__| .__/ \__,_|\__|
    #                                  | |
    #                                  |_|
    # ===================================================================

    # ------------------ Output Files
    # Datasink
    outputfiles = pe.Node(nio.DataSink(base_directory=ds_root,
                                       container='derivatives/featpreproc',
                                       parameterization=True),
                          name="output_files")

    # Use the following DataSink output substitutions
    # each tuple is only matched once per file
    outputfiles.inputs.substitutions = [
        ('/_mc_method_afni3dAllinSlices/', '/'),
        ('/_mc_method_afni3dAllinSlices/', '/'),  # needs to appear twice
        ('/oned_file/', '/'),
        ('/out_file/', '/'),
        ('/oned_matrix_save/', '/'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'/_addmean[0-9]+/', r'/func/'),
        (r'/_funcbrains[0-9]+/', r'/func/'),
        (r'/_maskfunc[0-9]+/', r'/func/'),
        (r'/_mc[0-9]+/', r'/func/'),
        (r'/_meanfunc[0-9]+/', r'/func/'),
        (r'/_outliers[0-9]+/', r'/func/'),
        (r'_run_id_[0-9][0-9]', r''),
    ]
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'motion_parameters',
        'motion_corrected',
        'motion_plots',
        'motion_outlier_files',
        'mask',
        'smoothed_files',
        'highpassed_files',
        'mean',
        'func_unwarp',
        'ref_func',
        'ref_funcmask',
        'ref_t1',
        'ref_t1mask',
    ]),
                         name='outputspec')

    # ===================================================================
    #                  _____ _            _ _
    #                 |  __ (_)          | (_)
    #                 | |__) | _ __   ___| |_ _ __   ___
    #                 |  ___/ | '_ \ / _ \ | | '_ \ / _ \
    #                 | |   | | |_) |  __/ | | | | |  __/
    #                 |_|   |_| .__/ \___|_|_|_| |_|\___|
    #                         | |
    #                         |_|
    # ===================================================================

    #  ~|~ _ _  _  _ |` _  _ _ _    _ _  _  _|  _
    #   | | (_|| |_\~|~(_)| | | |  | | |(_|_\|<_\
    #
    # Transform manual skull-stripped masks to multiple images
    # --------------------------------------------------------
    # should just be used as input to motion correction,
    # after mc, all functionals should be aligned to reference
    transmanmask_mc = transform_manualmask.create_workflow()

    # - - - - - - Connections - - - - - - -
    featpreproc.connect([(inputfiles, transmanmask_mc, [
        ('subject_id', 'in.subject_id'),
        ('session_id', 'in.session_id'),
    ])])

    featpreproc.connect(inputfiles, 'ref_funcmask', transmanmask_mc,
                        'in.manualmask')
    featpreproc.connect(inputnode, 'funcs', transmanmask_mc, 'in.funcs')
    featpreproc.connect(inputfiles, 'ref_func', transmanmask_mc,
                        'in.manualmask_func_ref')

    # fieldmaps not being used
    if False:
        trans_fmapmask = transmanmask_mc.clone('trans_fmapmask')
        featpreproc.connect(inputfiles, 'ref_manual_fmapmask', trans_fmapmask,
                            'in.manualmask')
        featpreproc.connect(inputfiles, 'fmap_magnitude', trans_fmapmask,
                            'in.funcs')
        featpreproc.connect(inputfiles, 'ref_func', trans_fmapmask,
                            'in.manualmask_func_ref')

    #  |\/| _ _|_. _  _    _ _  _ _ _  __|_. _  _
    #  |  |(_) | |(_)| |  (_(_)| | (/_(_ | |(_)| |
    #
    # Perform motion correction, using some pipeline
    # --------------------------------------------------------
    # mc = motioncorrection_workflow.create_workflow_afni()

    # Register an image from the functionals to the reference image
    median_func = pe.MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=('in_file'),
    )
    pre_mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='premotioncorrection')

    featpreproc.connect([
        (inputnode, median_func, [
            ('funcs', 'in_file'),
        ]),
        (median_func, pre_mc, [
            ('out_file', 'in.funcs'),
        ]),
        (
            inputfiles,
            pre_mc,
            [
                # median func image will be used as a reference / base
                ('ref_func', 'in.ref_func'),
                ('ref_funcmask', 'in.ref_func_weights'),
            ]),
        (
            transmanmask_mc,
            pre_mc,
            [
                ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
            ]),
        (pre_mc, outputnode, [
            ('mc.out_file', 'pre_motion_corrected'),
            ('mc.oned_file', 'pre_motion_parameters.oned_file'),
            ('mc.oned_matrix_save', 'pre_motion_parameters.oned_matrix_save'),
        ]),
        (
            outputnode,
            outputfiles,
            [
                ('pre_motion_corrected', 'pre_motion_corrected.out_file'),
                ('pre_motion_parameters.oned_file',
                 'pre_motion_corrected.oned_file'
                 ),  # warp parameters in ASCII (.1D)
                ('pre_motion_parameters.oned_matrix_save',
                 'pre_motion_corrected.oned_matrix_save'
                 ),  # transformation matrices for each sub-brick
            ]),
    ])

    mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='motioncorrection',
        iterfield=('in_file', 'ref_file', 'in_weight_file'))
    # - - - - - - Connections - - - - - - -
    featpreproc.connect([
        (inputnode, mc, [
            ('funcs', 'in.funcs'),
        ]),
        (
            pre_mc,
            mc,
            [
                # the median image realigned to the reference functional will serve as reference
                #  this way motion correction is done to an image more similar to the functionals
                ('mc.out_file', 'in.ref_func'),
            ]),
        (
            inputfiles,
            mc,
            [
                # Check and make sure the ref func mask is close enough to the registered median
                # image.
                ('ref_funcmask', 'in.ref_func_weights'),
            ]),
        (
            transmanmask_mc,
            mc,
            [
                ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
            ]),
        (mc, outputnode, [
            ('mc.out_file', 'motion_corrected'),
            ('mc.oned_file', 'motion_parameters.oned_file'),
            ('mc.oned_matrix_save', 'motion_parameters.oned_matrix_save'),
        ]),
        (
            outputnode,
            outputfiles,
            [
                ('motion_corrected', 'motion_corrected.out_file'),
                ('motion_parameters.oned_file', 'motion_corrected.oned_file'
                 ),  # warp parameters in ASCII (.1D)
                ('motion_parameters.oned_matrix_save',
                 'motion_corrected.oned_matrix_save'
                 ),  # transformation matrices for each sub-brick
            ]),
    ])

    #  |~. _ | _| _ _  _  _    _ _  _ _ _  __|_. _  _
    #  |~|(/_|(_|| | |(_||_)  (_(_)| | (/_(_ | |(_)| |
    #                    |
    # Unwarp EPI distortions
    # --------------------------------------------------------

    # Performing motion correction to a reference that is undistorted,
    # so b0_unwarp is currently not needed
    if False:
        b0_unwarp = undistort_workflow.create_workflow()

        featpreproc.connect([
            (
                inputfiles,
                b0_unwarp,
                [  # ('subject_id', 'in.subject_id'),
                    # ('session_id', 'in.session_id'),
                    ('fmap_phasediff', 'in.fmap_phasediff'),
                    ('fmap_magnitude', 'in.fmap_magnitude'),
                ]),
            (mc, b0_unwarp, [
                ('mc.out_file', 'in.funcs'),
            ]),
            (transmanmask_mc, b0_unwarp, [
                ('funcreg.out_file', 'in.funcmasks'),
            ]),
            (trans_fmapmask, b0_unwarp, [('funcreg.out_file', 'in.fmap_mask')
                                         ]),
            (b0_unwarp, outputfiles, [
                ('out.funcs', 'func_unwarp.funcs'),
                ('out.funcmasks', 'func_unwarp.funcmasks'),
            ]),
            (b0_unwarp, outputnode, [
                ('out.funcs', 'func_unwarp.funcs'),
                ('out.funcmasks', 'mask'),
            ]),
        ])

    # undistort the reference images
    if False:
        b0_unwarp_ref = b0_unwarp.clone('b0_unwarp_ref')
        featpreproc.connect([
            (
                inputfiles,
                b0_unwarp_ref,
                [  # ('subject_id', 'in.subject_id'),
                    # ('session_id', 'in.session_id'),
                    ('ref_fmap_phasediff', 'in.fmap_phasediff'),
                    ('ref_fmap_magnitude', 'in.fmap_magnitude'),
                    ('ref_manual_fmapmask', 'in.fmap_mask'),
                    ('ref_func', 'in.funcs'),
                    ('ref_funcmask', 'in.funcmasks'),
                ]),
            (b0_unwarp_ref, outputfiles, [
                ('out.funcs', 'func_unwarp_ref.func'),
                ('out.funcmasks', 'func_unwarp_ref.funcmask'),
            ]),
            (b0_unwarp_ref, outputnode, [
                ('out.funcs', 'ref_func'),
                ('out.funcmasks', 'ref_mask'),
            ]),
        ])
    else:
        featpreproc.connect([
            (inputfiles, outputfiles, [
                ('ref_func', 'reference/func'),
                ('ref_funcmask', 'reference/func_mask'),
            ]),
            (inputfiles, outputnode, [
                ('ref_func', 'ref_func'),
                ('ref_funcmask', 'ref_funcmask'),
            ]),
        ])

    # |~) _  _ . __|_ _  _  _|_ _   |~) _  |` _  _ _  _  _ _
    # |~\(/_(_||_\ | (/_|    | (_)  |~\(/_~|~(/_| (/_| |(_(/_
    #        _|
    # Register all functionals to common reference
    # --------------------------------------------------------
    if False:  # this is now done during motion correction
        # FLIRT cost: intermodal: corratio, intramodal: least squares and normcorr
        reg_to_ref = pe.MapNode(  # intra-modal
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='normcorr'),
            name='reg_to_ref',
            iterfield=('in_file', 'in_weight'),
        )
        refEPI_to_refT1 = pe.Node(
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='corratio'),
            name='refEPI_to_refT1',
        )
        # combine func -> ref_func and ref_func -> ref_T1
        reg_to_refT1 = pe.MapNode(
            interface=fsl.ConvertXFM(concat_xfm=True),
            name='reg_to_refT1',
            iterfield=('in_file'),
        )

        reg_funcs = pe.MapNode(
            interface=fsl.preprocess.ApplyXFM(),
            name='reg_funcs',
            iterfield=('in_file', 'in_matrix_file'),
        )
        reg_funcmasks = pe.MapNode(interface=fsl.preprocess.ApplyXFM(),
                                   name='reg_funcmasks',
                                   iterfield=('in_file', 'in_matrix_file'))

        def deref_list(x):
            assert len(x) == 1
            return x[0]

        featpreproc.connect([
            (
                b0_unwarp,
                reg_to_ref,  # --> reg_to_ref, (A)
                [
                    ('out.funcs', 'in_file'),
                    ('out.funcmasks', 'in_weight'),
                ]),
            (b0_unwarp_ref, reg_to_ref, [
                (('out.funcs', deref_list), 'reference'),
                (('out.funcmasks', deref_list), 'ref_weight'),
            ]),
            (
                b0_unwarp_ref,
                refEPI_to_refT1,  # --> refEPI_to_refT1 (B)
                [
                    (('out.funcs', deref_list), 'in_file'),
                    (('out.funcmasks', deref_list), 'in_weight'),
                ]),
            (inputfiles, refEPI_to_refT1, [
                ('ref_t1', 'reference'),
                ('ref_t1mask', 'ref_weight'),
            ]),
            (
                reg_to_ref,
                reg_to_refT1,  # --> reg_to_refT1 (A*B)
                [
                    ('out_matrix_file', 'in_file'),
                ]),
            (refEPI_to_refT1, reg_to_refT1, [
                ('out_matrix_file', 'in_file2'),
            ]),
            (
                reg_to_refT1,
                reg_funcs,  # --> reg_funcs
                [
                    # ('out_matrix_file', 'in_matrix_file'),
                    ('out_file', 'in_matrix_file'),
                ]),
            (b0_unwarp, reg_funcs, [
                ('out.funcs', 'in_file'),
            ]),
            (b0_unwarp_ref, reg_funcs, [
                (('out.funcs', deref_list), 'reference'),
            ]),
            (
                reg_to_refT1,
                reg_funcmasks,  # --> reg_funcmasks
                [
                    # ('out_matrix_file', 'in_matrix_file'),
                    ('out_file', 'in_matrix_file'),
                ]),
            (b0_unwarp, reg_funcmasks, [
                ('out.funcmasks', 'in_file'),
            ]),
            (b0_unwarp_ref, reg_funcmasks, [
                (('out.funcs', deref_list), 'reference'),
            ]),
            (reg_funcs, outputfiles, [
                ('out_file', 'common_ref.func'),
            ]),
            (reg_funcmasks, outputfiles, [
                ('out_file', 'common_ref.funcmask'),
            ]),
        ])

    #  |\/| _ _|_. _  _    _   _|_|. _  _ _
    #  |  |(_) | |(_)| |  (_)|_|| ||(/_| _\
    #
    # --------------------------------------------------------

    # Apply brain masks to functionals
    # --------------------------------------------------------

    # Dilate mask
    """
    Dilate the mask
    """
    if False:
        dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                         op_string='-dilF'),
                                iterfield=['in_file'],
                                name='dilatemask')
        featpreproc.connect(reg_funcmasks, 'out_file', dilatemask, 'in_file')
    else:
        dilatemask = pe.Node(interface=fsl.ImageMaths(suffix='_dil',
                                                      op_string='-dilF'),
                             name='dilatemask')
        featpreproc.connect(inputfiles, 'ref_funcmask', dilatemask, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', outputfiles, 'dilate_mask')

    funcbrains = pe.MapNode(fsl.BinaryMaths(operation='mul'),
                            iterfield=('in_file', 'operand_file'),
                            name='funcbrains')

    featpreproc.connect([
        (mc, funcbrains, [
            ('mc.out_file', 'in_file'),
        ]),
        (dilatemask, funcbrains, [
            ('out_file', 'operand_file'),
        ]),
        (funcbrains, outputfiles, [
            ('out_file', 'funcbrains'),
        ]),
    ])
    # Detect motion outliers
    # --------------------------------------------------------

    import nipype.algorithms.rapidart as ra
    outliers = pe.MapNode(
        ra.ArtifactDetect(
            mask_type='file',
            # trying to "disable" `norm_threshold`:
            use_norm=True,
            norm_threshold=10.0,  # combines translations in mm and rotations
            # use_norm=Undefined,
            # translation_threshold=1.0,  # translation in mm
            # rotation_threshold=0.02,  # rotation in radians
            zintensity_threshold=3.0,  # z-score
            parameter_source='AFNI',
            save_plot=True),
        iterfield=('realigned_files', 'realignment_parameters', 'mask_file'),
        name='outliers')

    featpreproc.connect([
        (
            mc,
            outliers,
            [  # ('mc.par_file', 'realignment_parameters'),
                ('mc.oned_file', 'realignment_parameters'),
            ]),
        (funcbrains, outliers, [
            ('out_file', 'realigned_files'),
        ]),
        (dilatemask, outliers, [
            ('out_file', 'mask_file'),
        ]),
        (
            outliers,
            outputfiles,
            [
                ('outlier_files', 'motion_outliers.@outlier_files'),
                ('plot_files', 'motion_outliers.@plot_files'),
                ('displacement_files', 'motion_outliers.@displacement_files'),
                ('intensity_files', 'motion_outliers.@intensity_files'),
                ('mask_files', 'motion_outliers.@mask_files'),
                ('statistic_files', 'motion_outliers.@statistic_files'),
                # ('norm_files', 'outliers.@norm_files'),
            ]),
        (mc, outputnode, [
            ('mc.oned_file', 'motion_parameters'),
        ]),
        (
            outliers,
            outputnode,
            [
                ('outlier_files', 'motion_outlier_files'),
                ('plot_files', 'motion_plots.@plot_files'),
                ('displacement_files', 'motion_outliers.@displacement_files'),
                ('intensity_files', 'motion_outliers.@intensity_files'),
                ('mask_files', 'motion_outliers.@mask_files'),
                ('statistic_files', 'motion_outliers.@statistic_files'),
                # ('norm_files', 'outliers.@norm_files'),
            ])
    ])
    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', getthresh, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', getthresh, 'in_file')
    """
    Threshold each functional run at 10% of its 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', threshold, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', threshold, 'in_file')
    """
    Define a function to get 10% of the intensity
    """
    def getthreshop(thresh):
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]

    featpreproc.connect(getthresh, ('out_stat', getthreshop), threshold,
                        'op_string')
    """
    Determine the median value of the functional runs using the mask
    """
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', medianval, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', medianval, 'in_file')

    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')

    # (~ _  _ _|_. _ |  (~ _ _  _  _ _|_|_ . _  _
    # _)|_)(_| | |(_||  _)| | |(_)(_) | | ||| |(_|
    #   |                                       _|
    # Spatial smoothing (SUSAN)
    # --------------------------------------------------------

    # create_susan_smooth takes care of calculating the mean and median
    #   functional, applying mask to functional, and running the smoothing
    smooth = create_susan_smooth(separate_masks=False)
    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')

    # featpreproc.connect(b0_unwarp, 'out.funcs', smooth, 'inputnode.in_files')
    if False:
        featpreproc.connect(reg_funcs, 'out_file', smooth,
                            'inputnode.in_files')
    else:
        featpreproc.connect(mc, 'mc.out_file', smooth, 'inputnode.in_files')

    featpreproc.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file')

    # -------------------------------------------------------
    # The below is from workflows/fmri/fsl/preprocess.py
    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3,
                        'in_file')

    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2), name='concat')

    tolist = lambda x: [x]

    def chooseindex(fwhm):
        if fwhm < 1:
            return [0]
        else:
            return [1]

    # maskfunc2 is the functional data before SUSAN
    if False:
        featpreproc.connect(b0_unwarp, ('out.funcs', tolist), concatnode,
                            'in1')
    else:
        featpreproc.connect(mc, ('mc.out_file', tolist), concatnode, 'in1')
    # maskfunc3 is the functional data after SUSAN
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')
    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputfiles, 'smoothed_files')
    """
    Scale each functional run so that its median value is set to 10000.
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')
    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale,
                        'op_string')

    # |_|. _ |_  _  _  _ _
    # | ||(_|| ||_)(_|_\_\
    #      _|   |
    # Temporal filtering
    # --------------------------------------------------------

    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                          iterfield=['in_file'],
                          name='highpass')
    highpass_operand = lambda x: '-bptf %.10f -1' % x
    featpreproc.connect(inputnode, ('highpass', highpass_operand), highpass,
                        'op_string')
    featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')

    version = 0
    if fsl.Info.version() and \
            LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    if version < 507:
        featpreproc.connect(highpass, 'out_file', outputnode,
                            'highpassed_files')
    else:
        """
        Add back the mean removed by the highpass filter operation as
            of FSL 5.0.7
        """
        meanfunc4 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                        suffix='_mean'),
                               iterfield=['in_file'],
                               name='meanfunc4')

        featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file')
        addmean = pe.MapNode(interface=fsl.BinaryMaths(operation='add'),
                             iterfield=['in_file', 'operand_file'],
                             name='addmean')
        featpreproc.connect(highpass, 'out_file', addmean, 'in_file')
        featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file')
        featpreproc.connect(addmean, 'out_file', outputnode,
                            'highpassed_files')
    """
    Generate a mean functional image from each run
    """
    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc3')

    featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')
    featpreproc.connect(meanfunc3, 'out_file', outputfiles, 'mean')

    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean_highpassed')
    featpreproc.connect(outputnode, 'highpassed_files', outputfiles,
                        'highpassed_files')

    return (featpreproc)
Example no. 20
def make_fmap_wkfl(name='make_fieldmap', mapnode=False):

    klass = pe.Node
    if mapnode:
        klass = pe.MapNode

    inputnode = pe.Node(utility.IdentityInterface(
        fields=['fieldmap', 'magnitude', 't1_mask', 't1_mag', 'delta_TE']),
                        name='inputspec')
    outputnode = pe.Node(utility.IdentityInterface(fields=[
        'fieldmap', 'fieldmap_reg', 'fieldmap_magnitude', 'fieldmap_mask'
    ]),
                         name='outputspec')

    n_fieldmap2t1_warp = klass(
        fsl.FLIRT(
            out_matrix_file='fieldmap2t1.mat',
            cost='normmi',
            dof=6,
            searchr_x=[-5, 5],  # restrict search as they are acquired 
            searchr_y=[-5, 5],  # in the same sequence
            searchr_z=[-5, 5],
            cost_func='normmi'),
        iterfield=['in_file'],
        name='fieldmap2t1_warp')
    n_invert_fieldmap2t1_warp = klass(fsl.ConvertXFM(invert_xfm=True),
                                      iterfield=['in_file'],
                                      name='invert_fieldmap2t1_warp')
    n_warp_t1_mask = klass(fsl.ApplyXfm(apply_xfm=True,
                                        interp='nearestneighbour',
                                        datatype='char'),
                           iterfield=['in_matrix_file', 'reference'],
                           name='warp_t1_mask')

    n_mask_mag = klass(fsl.ImageMaths(op_string='-mul',
                                      suffix='_brain',
                                      output_type='NIFTI'),
                       iterfield=['in_file', 'in_file2'],
                       name='mask_mag')

    n_make_fieldmap = klass(
        fsl.FUGUE(fmap_out_file='fieldmap.nii.gz', smooth3d=2),
        iterfield=['fmap_out_file', 'mask_file', 'fmap_in_file'],
        name='make_fieldmap')

    w = pe.Workflow(name=name)
    w.connect([
        (inputnode, n_fieldmap2t1_warp, [('t1_mag', 'reference'),
                                         ('magnitude', 'in_file')]),
        (n_fieldmap2t1_warp, n_invert_fieldmap2t1_warp, [('out_matrix_file',
                                                          'in_file')]),
        (n_invert_fieldmap2t1_warp, n_warp_t1_mask, [('out_file',
                                                      'in_matrix_file')]),
        (inputnode, n_warp_t1_mask, [('t1_mask', 'in_file')]),
        (inputnode, n_warp_t1_mask, [('magnitude', 'reference')]),
        (inputnode, n_mask_mag, [('magnitude', 'in_file')]),
        (n_warp_t1_mask, n_mask_mag, [('out_file', 'in_file2')]),
        (inputnode, n_make_fieldmap, [(('fieldmap', fname_presuffix_basename,
                                        '', '_reg', './'), 'fmap_out_file')]),
        (n_warp_t1_mask, n_make_fieldmap, [('out_file', 'mask_file')]),
        (inputnode, n_make_fieldmap, [('fieldmap', 'fmap_in_file')]),
        (inputnode, n_make_fieldmap, [('delta_TE', 'asym_se_time')]),
        (n_warp_t1_mask, outputnode, [('out_file', 'fieldmap_mask')]),
        (n_mask_mag, outputnode, [('out_file', 'fieldmap_magnitude')]),
        (n_make_fieldmap, outputnode, [('fmap_out_file', 'fieldmap')]),
        (n_make_fieldmap, outputnode, [('fmap_out_file', 'fieldmap_reg')]),
    ])

    return w
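
# A minimal usage sketch, assuming a single fieldmap per subject (set
# mapnode=True to process several at once, in which case the iterfield inputs
# take lists). All paths, the echo-time difference and the availability of the
# fname_presuffix_basename helper used above are assumptions.
fmap_wf = make_fmap_wkfl(name='make_fieldmap', mapnode=False)
fmap_wf.base_dir = '/tmp/fieldmap_example'
fmap_wf.inputs.inputspec.fieldmap = '/data/sub-01/fmap/phasediff.nii.gz'
fmap_wf.inputs.inputspec.magnitude = '/data/sub-01/fmap/magnitude1.nii.gz'
fmap_wf.inputs.inputspec.t1_mag = '/data/sub-01/anat/T1w.nii.gz'
fmap_wf.inputs.inputspec.t1_mask = '/data/sub-01/anat/T1w_mask.nii.gz'
fmap_wf.inputs.inputspec.delta_TE = 2.46e-3  # echo time difference in seconds (assumed)
fmap_wf.run()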
Example no. 21
def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
    """
    UNet
    options (following numbers are default):
    input_slice: 3
    conv_block: 5
    kernel_root: 16
    rescale_dim: 256
    """

    unet_mask = pe.Node(util.Function(input_names=['model_path', 'cimg_in'],
                                      output_names=['out_path'],
                                      function=predict_volumes),
                        name=f'unet_mask_{pipe_num}')

    node, out = strat_pool.get_data('unet_model')
    wf.connect(node, out, unet_mask, 'model_path')

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, unet_mask, 'cimg_in')
    """
    Revised mask with ANTs
    """
    # fslmaths <whole head> -mul <mask> brain.nii.gz
    unet_masked_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                name=f'unet_masked_brain_{pipe_num}')
    unet_masked_brain.inputs.op_string = "-mul %s"

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, unet_masked_brain, 'in_file')
    wf.connect(unet_mask, 'out_path', unet_masked_brain, 'operand_files')

    # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
    # TODO: antsRegistration -z 0 -d 3 -r [NMT_SS_0.5mm.nii.gz,brain.nii.gz,0] -o [transform,brain_rot2atl.nii.gz,brain_inv_rot2atl.nii.gz] -t Rigid[0.1] -m MI[NMT_SS_0.5mm.nii.gz,brain.nii.gz,1,32,Regular,0.25] -c [1000x500x250x100,1e-08,10] -s 3.0x2.0x1.0x0.0 -f 8x4x2x1 -u 1 -t Affine[0.1] -m MI[NMT_SS_0.5mm.nii.gz,brain.nii.gz,1,32,Regular,0.25] -c [1000x500x250x100,1e-08,10] -s 3.0x2.0x1.0x0.0 -f 8x4x2x1 -u 1
    native_brain_to_template_brain = pe.Node(interface=fsl.FLIRT(),
                                             name=f'native_brain_to_template_'
                                             f'brain_{pipe_num}')
    native_brain_to_template_brain.inputs.dof = 6
    native_brain_to_template_brain.inputs.interp = 'sinc'
    wf.connect(unet_masked_brain, 'out_file', native_brain_to_template_brain,
               'in_file')

    node, out = strat_pool.get_data('T1w_brain_template')
    wf.connect(node, out, native_brain_to_template_brain, 'reference')

    # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
    # TODO: antsApplyTransforms -d 3 -i head.nii.gz -r NMT_0.5mm.nii.gz -n Linear -o head_rot2atl.nii.gz -v -t transform1Rigid.mat -t transform2Affine.mat -t transform0DerivedInitialMovingTranslation.mat
    native_head_to_template_head = pe.Node(interface=fsl.FLIRT(),
                                           name=f'native_head_to_template_'
                                           f'head_{pipe_num}')
    native_head_to_template_head.inputs.apply_xfm = True

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, native_head_to_template_head, 'in_file')

    wf.connect(native_brain_to_template_brain, 'out_matrix_file',
               native_head_to_template_head, 'in_matrix_file')

    node, out = strat_pool.get_data('T1w_template')
    wf.connect(node, out, native_head_to_template_head, 'reference')

    # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
    template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(),
                                  name=f'template_brain_mask_{pipe_num}')
    template_brain_mask.inputs.args = '-bin'

    node, out = strat_pool.get_data('T1w_brain_template')
    wf.connect(node, out, template_brain_mask, 'in_file')

    # ANTS 3 -m  CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching  --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
    ants_template_head_to_template = pe.Node(interface=ants.Registration(),
                                             name=f'template_head_to_'
                                             f'template_{pipe_num}')
    ants_template_head_to_template.inputs.metric = ['CC']
    ants_template_head_to_template.inputs.metric_weight = [1]
    # in the shell command above, CC[fixed,moving,1,5] uses weight 1 and
    # radius 5, which maps to radius_or_number_of_bins in the nipype interface
    ants_template_head_to_template.inputs.radius_or_number_of_bins = [5]
    ants_template_head_to_template.inputs.transforms = ['SyN']
    ants_template_head_to_template.inputs.transform_parameters = [(0.25, )]
    ants_template_head_to_template.inputs.interpolation = 'NearestNeighbor'
    ants_template_head_to_template.inputs.number_of_iterations = [[60, 50, 20]]
    ants_template_head_to_template.inputs.smoothing_sigmas = [[0.6, 0.2, 0.0]]
    ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]]
    ants_template_head_to_template.inputs.convergence_threshold = [1.e-8]
    wf.connect(native_head_to_template_head, 'out_file',
               ants_template_head_to_template, 'fixed_image')

    node, out = strat_pool.get_data('T1w_brain_template')
    wf.connect(node, out, ants_template_head_to_template, 'moving_image')

    # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz

    template_head_transform_to_template = pe.Node(
        interface=ants.ApplyTransforms(),
        name=f'template_head_transform_to_template_{pipe_num}')
    template_head_transform_to_template.inputs.dimension = 3

    wf.connect(template_brain_mask, 'out_file',
               template_head_transform_to_template, 'input_image')
    wf.connect(native_brain_to_template_brain, 'out_file',
               template_head_transform_to_template, 'reference_image')
    wf.connect(ants_template_head_to_template, 'forward_transforms',
               template_head_transform_to_template, 'transforms')

    # TODO: replace convert_xfm and flirt with:
    # antsApplyTransforms -d 3 -i brain_rot2atl_mask.nii.gz -r brain.nii.gz -n linear -o brain_mask.nii.gz -t [transform0DerivedInitialMovingTranslation.mat,1] -t [transform2Affine.mat,1] -t [transform1Rigid.mat,1]
    # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat
    invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm')
    invt.inputs.invert_xfm = True
    wf.connect(native_brain_to_template_brain, 'out_matrix_file', invt,
               'in_file')

    # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat
    template_brain_to_native_brain = pe.Node(interface=fsl.FLIRT(),
                                             name=f'template_brain_to_native_'
                                             f'brain_{pipe_num}')
    template_brain_to_native_brain.inputs.apply_xfm = True
    wf.connect(template_head_transform_to_template, 'output_image',
               template_brain_to_native_brain, 'in_file')
    wf.connect(unet_masked_brain, 'out_file', template_brain_to_native_brain,
               'reference')
    wf.connect(invt, 'out_file', template_brain_to_native_brain,
               'in_matrix_file')

    # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz
    refined_mask = pe.Node(interface=fsl.Threshold(),
                           name=f'refined_mask'
                           f'_{pipe_num}')
    refined_mask.inputs.thresh = 0.5
    refined_mask.inputs.args = '-bin'
    wf.connect(template_brain_to_native_brain, 'out_file', refined_mask,
               'in_file')

    outputs = {'space-T1w_desc-brain_mask': (refined_mask, 'out_file')}

    return (wf, outputs)
def coreg_without_resample(name="highres_coreg"):
    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=["fixed_image", "moving_image", "interp"]),
                        name="inputnode")

    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        "out_file", "lowres_matrix_file", "highres_matrix_file",
        "resampled_fixed_image"
    ]),
                         name="outputnode")
    coregister_moving_to_fixed = pe.Node(interface=fsl.FLIRT(dof=12),
                                         name='coregister_moving_to_fixed')
    resample_fixed_to_moving = pe.Node(interface=fs.MRIConvert(),
                                       name='resample_fixed_to_moving')

    rewrite_mat_interface = util.Function(
        input_names=[
            "in_matrix", "orig_img", "target_img", "shape", "vox_size"
        ],
        output_names=["out_image", "out_matrix_file"],
        function=rewrite_mat_for_applyxfm)
    fix_FOV_in_matrix = pe.Node(interface=rewrite_mat_interface,
                                name='fix_FOV_in_matrix')

    apply_fixed_matrix = pe.Node(interface=fsl.ApplyXfm(),
                                 name='apply_fixed_matrix')

    final_rigid_reg_to_fixed = pe.Node(interface=fsl.FLIRT(dof=6),
                                       name='final_rigid_reg_to_fixed')

    create_highres_xfm = pe.Node(interface=fsl.ConvertXFM(),
                                 name='create_highres_xfm')
    create_highres_xfm.inputs.concat_xfm = True

    workflow = pe.Workflow(name=name)

    workflow.connect([(inputnode, coregister_moving_to_fixed, [("moving_image",
                                                                "in_file")])])
    workflow.connect([(inputnode, coregister_moving_to_fixed,
                       [("fixed_image", "reference")])])
    workflow.connect([(coregister_moving_to_fixed, fix_FOV_in_matrix,
                       [("out_matrix_file", "in_matrix")])])
    workflow.connect([(inputnode, fix_FOV_in_matrix, [("moving_image",
                                                       "orig_img")])])
    workflow.connect([(inputnode, fix_FOV_in_matrix, [("fixed_image",
                                                       "target_img")])])

    workflow.connect([(inputnode, apply_fixed_matrix, [("moving_image",
                                                        "in_file")])])
    workflow.connect([(inputnode, apply_fixed_matrix, [("interp", "interp")])])
    workflow.connect([(fix_FOV_in_matrix, apply_fixed_matrix,
                       [("out_matrix_file", "in_matrix_file")])])
    workflow.connect([(fix_FOV_in_matrix, apply_fixed_matrix,
                       [("out_image", "reference")])])

    workflow.connect([(inputnode, resample_fixed_to_moving, [('fixed_image',
                                                              'in_file')])])
    workflow.connect([(inputnode, resample_fixed_to_moving,
                       [('moving_image', 'reslice_like')])])
    workflow.connect([(resample_fixed_to_moving, final_rigid_reg_to_fixed,
                       [('out_file', 'reference')])])
    workflow.connect([(apply_fixed_matrix, final_rigid_reg_to_fixed,
                       [('out_file', 'in_file')])])
    workflow.connect([(inputnode, final_rigid_reg_to_fixed, [('interp',
                                                              'interp')])])

    workflow.connect([(final_rigid_reg_to_fixed, create_highres_xfm,
                       [('out_matrix_file', 'in_file2')])])
    workflow.connect([(fix_FOV_in_matrix, create_highres_xfm,
                       [('out_matrix_file', 'in_file')])])

    workflow.connect([(coregister_moving_to_fixed, outputnode,
                       [('out_matrix_file', 'lowres_matrix_file')])])
    workflow.connect([(create_highres_xfm, outputnode,
                       [('out_file', 'highres_matrix_file')])])

    workflow.connect([(resample_fixed_to_moving, outputnode,
                       [('out_file', 'resampled_fixed_image')])])
    workflow.connect([(final_rigid_reg_to_fixed, outputnode, [('out_file',
                                                               'out_file')])])
    return workflow
Example no. 23
def define_preproc_workflow(info, subjects, sessions, qc=True):

    # --- Workflow parameterization and data input

    scan_info = info.scan_info
    experiment = info.experiment_name

    iterables = generate_iterables(scan_info, experiment, subjects, sessions)
    subject_iterables, session_iterables, run_iterables = iterables

    subject_iterables = subjects

    subject_source = Node(IdentityInterface(["subject"]),
                          name="subject_source",
                          iterables=("subject", subject_iterables))

    session_source = Node(IdentityInterface(["subject", "session"]),
                          name="session_source",
                          itersource=("subject_source", "subject"),
                          iterables=("session", session_iterables))

    run_source = Node(IdentityInterface(["subject", "session", "run"]),
                      name="run_source",
                      itersource=("session_source", "session"),
                      iterables=("run", run_iterables))

    session_input = Node(SessionInput(data_dir=info.data_dir,
                                      proc_dir=info.proc_dir,
                                      fm_template=info.fm_template,
                                      phase_encoding=info.phase_encoding),
                         "session_input")

    run_input = Node(RunInput(experiment=experiment,
                              data_dir=info.data_dir,
                              proc_dir=info.proc_dir,
                              sb_template=info.sb_template,
                              ts_template=info.ts_template,
                              crop_frames=info.crop_frames),
                     name="run_input")

    # --- Warpfield estimation using topup

    # Distortion warpfield estimation
    #  TODO figure out how to parameterize for testing
    # topup_config = op.realpath(op.join(__file__, "../../../topup_fast.cnf"))
    topup_config = "b02b0.cnf"
    estimate_distortions = Node(fsl.TOPUP(config=topup_config),
                                "estimate_distortions")

    # Post-process the TOPUP outputs
    finalize_unwarping = Node(FinalizeUnwarping(), "finalize_unwarping")

    # --- Registration of SE-EPI (without distortions) to Freesurfer anatomy

    fm2anat = Node(fs.BBRegister(init="fsl",
                                 contrast_type="t2",
                                 registered_file=True,
                                 out_fsl_file="sess2anat.mat",
                                 out_reg_file="sess2anat.dat"),
                   "fm2anat")

    fm2anat_qc = Node(AnatRegReport(data_dir=info.data_dir), "fm2anat_qc")

    # --- Registration of SBRef to SE-EPI (with distortions)

    sb2fm = Node(fsl.FLIRT(dof=6, interp="spline"), "sb2fm")

    sb2fm_qc = Node(CoregGIF(out_file="coreg.gif"), "sb2fm_qc")

    # --- Motion correction of time series to SBRef (with distortions)

    ts2sb = Node(fsl.MCFLIRT(save_mats=True, save_plots=True),
                 "ts2sb")

    ts2sb_qc = Node(RealignmentReport(), "ts2sb_qc")

    # --- Combined motion correction, unwarping, and template registration

    # Combine pre-and post-warp linear transforms
    combine_premats = MapNode(fsl.ConvertXFM(concat_xfm=True),
                              "in_file", "combine_premats")

    combine_postmats = Node(fsl.ConvertXFM(concat_xfm=True),
                            "combine_postmats")

    # Transform Jacobian images into the template space
    transform_jacobian = Node(fsl.ApplyWarp(relwarp=True),
                              "transform_jacobian")

    # Apply rigid transforms and nonlinear warpfield to time series frames
    restore_timeseries = MapNode(fsl.ApplyWarp(interp="spline", relwarp=True),
                                 ["in_file", "premat"],
                                 "restore_timeseries")

    # Apply rigid transforms and nonlinear warpfield to template frames
    restore_template = MapNode(fsl.ApplyWarp(interp="spline", relwarp=True),
                               ["in_file", "premat", "field_file"],
                               "restore_template")

    # Perform final preprocessing operations on timeseries
    finalize_timeseries = Node(FinalizeTimeseries(experiment=experiment),
                               "finalize_timeseries")

    # Perform final preprocessing operations on template
    finalize_template = JoinNode(FinalizeTemplate(experiment=experiment),
                                 name="finalize_template",
                                 joinsource="run_source",
                                 joinfield=["mean_files", "tsnr_files",
                                            "mask_files", "noise_files"])

    # --- Workflow output

    save_info = Node(SaveInfo(info_dict=info.trait_get()), "save_info")

    template_output = Node(DataSink(base_directory=info.proc_dir,
                                    parameterization=False),
                           "template_output")

    timeseries_output = Node(DataSink(base_directory=info.proc_dir,
                                      parameterization=False),
                             "timeseries_output")

    # === Assemble pipeline

    cache_base = op.join(info.cache_dir, info.experiment_name)
    workflow = Workflow(name="preproc", base_dir=cache_base)

    # Connect processing nodes

    processing_edges = [

        (subject_source, session_source,
            [("subject", "subject")]),
        (subject_source, run_source,
            [("subject", "subject")]),
        (session_source, run_source,
            [("session", "session")]),
        (session_source, session_input,
            [("session", "session")]),
        (run_source, run_input,
            [("run", "run")]),

        # Phase-encode distortion estimation

        (session_input, estimate_distortions,
            [("fm_file", "in_file"),
             ("phase_encoding", "encoding_direction"),
             ("readout_times", "readout_times")]),
        (session_input, finalize_unwarping,
            [("fm_file", "raw_file"),
             ("phase_encoding", "phase_encoding")]),
        (estimate_distortions, finalize_unwarping,
            [("out_corrected", "corrected_file"),
             ("out_warps", "warp_files"),
             ("out_jacs", "jacobian_files")]),

        # Registration of corrected SE-EPI to anatomy

        (session_input, fm2anat,
            [("subject", "subject_id")]),
        (finalize_unwarping, fm2anat,
            [("corrected_file", "source_file")]),

        # Registration of each frame to SBRef image

        (run_input, ts2sb,
            [("ts_file", "in_file"),
             ("sb_file", "ref_file")]),
        (ts2sb, finalize_timeseries,
            [("par_file", "mc_file")]),

        # Registration of SBRef volume to SE-EPI fieldmap

        (run_input, sb2fm,
            [("sb_file", "in_file")]),
        (finalize_unwarping, sb2fm,
            [("raw_file", "reference"),
             ("mask_file", "ref_weight")]),

        # Single-interpolation spatial realignment and unwarping

        (ts2sb, combine_premats,
            [("mat_file", "in_file")]),
        (sb2fm, combine_premats,
            [("out_matrix_file", "in_file2")]),
        (fm2anat, combine_postmats,
            [("out_fsl_file", "in_file")]),
        (session_input, combine_postmats,
            [("reg_file", "in_file2")]),

        (run_input, transform_jacobian,
            [("anat_file", "ref_file")]),
        (finalize_unwarping, transform_jacobian,
            [("jacobian_file", "in_file")]),
        (combine_postmats, transform_jacobian,
            [("out_file", "premat")]),

        (run_input, restore_timeseries,
            [("ts_frames", "in_file")]),
        (run_input, restore_timeseries,
            [("anat_file", "ref_file")]),
        (combine_premats, restore_timeseries,
            [("out_file", "premat")]),
        (finalize_unwarping, restore_timeseries,
            [("warp_file", "field_file")]),
        (combine_postmats, restore_timeseries,
            [("out_file", "postmat")]),
        (run_input, finalize_timeseries,
            [("run_tuple", "run_tuple"),
             ("anat_file", "anat_file"),
             ("seg_file", "seg_file"),
             ("mask_file", "mask_file")]),
        (transform_jacobian, finalize_timeseries,
            [("out_file", "jacobian_file")]),
        (restore_timeseries, finalize_timeseries,
            [("out_file", "in_files")]),

        (session_input, restore_template,
            [("fm_frames", "in_file"),
             ("anat_file", "ref_file")]),
        (estimate_distortions, restore_template,
            [("out_mats", "premat"),
             ("out_warps", "field_file")]),
        (combine_postmats, restore_template,
            [("out_file", "postmat")]),
        (session_input, finalize_template,
            [("session_tuple", "session_tuple"),
             ("seg_file", "seg_file"),
             ("anat_file", "anat_file")]),
        (transform_jacobian, finalize_template,
            [("out_file", "jacobian_file")]),
        (restore_template, finalize_template,
            [("out_file", "in_files")]),

        (finalize_timeseries, finalize_template,
            [("mean_file", "mean_files"),
             ("tsnr_file", "tsnr_files"),
             ("mask_file", "mask_files"),
             ("noise_file", "noise_files")]),

        # --- Persistent data storage

        # Outputs associated with each scanner run

        (finalize_timeseries, timeseries_output,
            [("output_path", "container"),
             ("out_file", "@func"),
             ("mean_file", "@mean"),
             ("mask_file", "@mask"),
             ("tsnr_file", "@tsnr"),
             ("noise_file", "@noise"),
             ("mc_file", "@mc")]),

        # Outputs associated with the session template

        (finalize_template, template_output,
            [("output_path", "container"),
             ("out_file", "@func"),
             ("mean_file", "@mean"),
             ("tsnr_file", "@tsnr"),
             ("mask_file", "@mask"),
             ("noise_file", "@noise")]),

    ]
    workflow.connect(processing_edges)

    # Optionally connect QC nodes

    qc_edges = [

        # Registration of each frame to SBRef image

        (run_input, ts2sb_qc,
            [("sb_file", "target_file")]),
        (ts2sb, ts2sb_qc,
            [("par_file", "realign_params")]),

        # Registration of corrected SE-EPI to anatomy

        (session_input, fm2anat_qc,
            [("subject", "subject_id")]),
        (fm2anat, fm2anat_qc,
            [("registered_file", "in_file"),
             ("min_cost_file", "cost_file")]),

        # Registration of SBRef volume to SE-EPI fieldmap

        (sb2fm, sb2fm_qc,
            [("out_file", "in_file")]),
        (finalize_unwarping, sb2fm_qc,
            [("raw_file", "ref_file")]),

        # Outputs associated with each scanner run

        (run_source, save_info,
            [("run", "parameterization")]),
        (save_info, timeseries_output,
            [("info_file", "qc.@info_json")]),

        (run_input, timeseries_output,
            [("ts_plot", "qc.@raw_gif")]),
        (sb2fm_qc, timeseries_output,
            [("out_file", "qc.@sb2fm_gif")]),
        (ts2sb_qc, timeseries_output,
            [("params_plot", "qc.@params_plot"),
             ("target_plot", "qc.@target_plot")]),
        (finalize_timeseries, timeseries_output,
            [("out_gif", "qc.@ts_gif"),
             ("out_png", "qc.@ts_png"),
             ("mask_plot", "qc.@mask_plot"),
             ("mean_plot", "qc.@ts_mean_plot"),
             ("tsnr_plot", "qc.@ts_tsnr_plot"),
             ("noise_plot", "qc.@noise_plot")]),

        # Outputs associated with the session template

        (finalize_unwarping, template_output,
            [("warp_plot", "qc.@warp_png"),
             ("unwarp_gif", "qc.@unwarp_gif")]),
        (fm2anat_qc, template_output,
            [("out_file", "qc.@reg_png")]),
        (finalize_template, template_output,
            [("out_plot", "qc.@func_png"),
             ("mean_plot", "qc.@mean"),
             ("tsnr_plot", "qc.@tsnr"),
             ("mask_plot", "qc.@mask"),
             ("noise_plot", "qc.@noise")]),

    ]

    if qc:
        workflow.connect(qc_edges)

    return workflow
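
A minimal usage sketch (not part of the original example): assuming an `info` object that exposes the traits referenced above (data_dir, proc_dir, cache_dir, scan_info, experiment_name and the various file templates), the returned workflow can be rendered and executed with one of nipype's execution plugins.

# Hypothetical driver for define_preproc_workflow; `info` is assumed to have
# been constructed elsewhere from the project configuration.
wf = define_preproc_workflow(info, subjects=["subj01"], sessions=None, qc=True)
wf.write_graph(graph2use="colored")  # optional: render the node graph
wf.run(plugin="MultiProc", plugin_args={"n_procs": 4})
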
Esempio n. 24
0
def get_fmri2standard_wf(
        tvols,
        subject_id,
        ACQ_PARAMS="/home/didac/LabScripts/fMRI_preprocess/acparams_hcp.txt"):
    """Estimates transformation from Gradiend Field Distortion-warped BOLD to T1
    
    In general:
        BOLD is field-inhomogeneity corrected and coregistered into standard space (T1).
        
    To do so, the following steps are carried out:
        1)  Coregister SBref to SEgfm-AP (fsl.FLIRT)
        2)  Realign BOLD to corrected SBref (fsl.MCFLIRT)
        3)  Field inhomogeneity correction estimation of SBref from SEfm_AP and SEfm_PA (fsl.TOPUP)
        4)  Apply field inhomogeneity correction to SBref (fsl.ApplyTOPUP)
        5)  Apply field inhomogeneity correction to BOLD (fsl.ApplyTOPUP)
        6)  Transform the FreeSurfer brain mask (brain.mgz) to T1 space (freesurfer.ApplyVolTransform; mri_vol2vol),
            then binarize it (fsl.UnaryMaths) and use it to extract the brain from the T1 (fsl.BinaryMaths)
        7)  Coregister the field-inhomogeneity-corrected BOLD to standard T1 space (fsl.EpiReg)

    Parameters
    ----------
    tvols: [t_initial, t_final] volumes included in the preprocess
    subject_id: Subject identifier; also used as the name of the returned workflow
    ACQ_PARAMS: Path to a txt file containing the MRI acquisition parameters; required for TOPUP correction
    
    
    Returns
    -------
    Workflow with the transformation
 
    """
    from nipype import Workflow, Node, interfaces
    from nipype.interfaces import fsl, utility, freesurfer

    print("defining workflow...")
    wf = Workflow(name=subject_id, base_dir='')

    #Setting INPUT node...
    print("defines input node...")
    node_input = Node(utility.IdentityInterface(fields=[
        'func_sbref_img', 'func_segfm_ap_img', 'func_segfm_pa_img',
        'func_bold_ap_img', 'T1_img', 'T1_brain_freesurfer_mask'
    ]),
                      name='input_node')

    print(
        "Averages the three repeated Spin-Echo images with same Phase Encoding (AP or PA) for Susceptibility Correction (unwarping)..."
    )
    node_average_SEgfm = Node(fsl.maths.MeanImage(), name='Mean_SEgfm_AP')

    print("Corregister SB-ref to average SEgfm-AP")
    node_coregister_SBref2SEgfm = Node(
        fsl.FLIRT(dof=6  #translation and rotation only
                  ),
        name='Corregister_SBref2SEgfm')

    print("Eliminates first volumes.")
    node_eliminate_first_scans = Node(
        fsl.ExtractROI(
            t_min=tvols[0],  # first included volume
            t_size=tvols[1] -
            tvols[0],  # number of volumes from the first to the last one
        ),
        name="eliminate_first_scans")

    print("Realigns fMRI BOLD volumes to SBref in SEgfm-AP space")
    node_realign_bold = Node(
        fsl.MCFLIRT(
            save_plots=True,  # save transformation matrices
        ),
        name="realign_fmri2SBref")

    print("Concatenates AP and PA SEgfm volumes...")
    # AP AP AP PA PA PA
    node_merge_ap_pa_inputs = Node(
        utility.base.Merge(2  # number of inputs; it concatenates lists
                           ),
        name='Merge_ap_pa_inputs')
    node_merge_SEgfm = Node(
        fsl.Merge(dimension='t'  # merge along the time axis
                  ),
        name='Merge_SEgfm_AP_PA')

    print(
        "Estimates TopUp inhomogeneity correction from SEfm_AP and SEfm_PA...")
    node_topup_SEgfm = Node(fsl.TOPUP(encoding_file=ACQ_PARAMS, ),
                            name='Topup_SEgfm_estimation')

    print("Applies warp from TOPUP to correct SBref...")
    node_apply_topup_to_SBref = Node(
        fsl.ApplyTOPUP(
            encoding_file=ACQ_PARAMS,
            method='jac',  # jacobian modulation
            interp='spline',  # interpolation method
        ),
        name="apply_topup_to_SBref")

    print("Applies warp from TOPUP to correct realigned BOLD...")
    node_apply_topup = Node(
        fsl.ApplyTOPUP(
            encoding_file=ACQ_PARAMS,
            method='jac',  # jacobian modulation
            interp='spline',  # interpolation method
        ),
        name="apply_topup")

    ## BRAIN MASK

    #Registration to T1. Epireg without fieldmaps combined (see https://www.fmrib.ox.ac.uk/primers/intro_primer/ExBox20/IntroBox20.html)
    #    print ("Eliminates scalp from brain using T1 high res image");
    #    node_mask_T1=Node(fsl.BET(
    #            frac=0.7  # fractional intensity threshold
    #            ),
    #    name="mask_T1");

    print('Transform brain mask T1 from freesurfer space to T1 space')
    node_vol2vol_brain = Node(
        freesurfer.ApplyVolTransform(
            reg_header=True,  # (--regheader)
            transformed_file='brainmask_warped.nii.gz'
            #source_file --mov (INPUT; freesurfer brain.mgz)
            #transformed_file --o (OUTPUT; ...brain.nii.gz)
            #target_file --targ (REFERENCE; ...T1w.nii.gz)
        ),
        name="vol2vol")

    print('Transform brain mask T1 to binary mask')
    node_bin_mask_brain = Node(
        fsl.UnaryMaths(  # fslmaths T1_brain -bin T1_binarized mask
            operation='bin',  # (-bin)
            #in_file (T1_brain)
            #out_file (T1_binarized_mask)
        ),
        name="binarize_mask")

    print('Extract brain from T1 using the binary mask')
    node_extract_mask = Node(
        fsl.
        BinaryMaths(  # fslmaths T1 -mul T1_binarized_mask T1_extracted_mask
            operation='mul'  # (-mul)
            #in_file (T1)
            #out_file (T1_extracted_mask)
            #operand_file (T1_binarized_mask)
        ),
        name="extract_mask")

    ##
    print("Estimate and appply transformation from SBref to T1")
    node_epireg = Node(
        fsl.EpiReg(
            #t1_head=SUBJECT_FSTRUCT_DIC['anat_T1'],
            out_base='SEgfm2T1'),
        name="epi2reg")
    '''  
    EPI2REG ALREADY APPLIED         
    print ("Apply epi2reg to SBRef..");
    node_apply_epi2reg_SBref= Node(fsl.ApplyXFM(
            ),
    name="node_apply_epi2reg_SBref");
    '''

    print("Estimates inverse transform from epi2reg...")
    # quality control
    node_invert_epi2reg = Node(fsl.ConvertXFM(invert_xfm=True),
                               name="invert_epi2reg")

    print("...")
    node_mask_fMRI = Node(fsl.BET(mask=True, ), name='mask_fMRI')
    #node_fmriMask.overwrite=True
    print("Setting OUTPUT node...")
    node_output = Node(interfaces.utility.IdentityInterface(fields=[
        'SBref2SEgfm_mat',
        'realign_movpar_txt',
        'realign_fmri_img',
        'topup_movpar_txt',
        'topup_field_coef_img',
        'epi2str_mat',
        'epi2str_img',
        'fmri_mask_img',
        'rfmri_unwarped_imgs',
        'sb_ref_unwarped_img',
    ]),
                       name='output_node')

    print("All nodes created; Starts creating connections")

    #Connects nodes
    wf.connect([
        #inputs
        (node_input, node_average_SEgfm, [("func_segfm_ap_img", "in_file")]),
        (node_input, node_coregister_SBref2SEgfm, [("func_sbref_img",
                                                    "in_file")]),
        (node_input, node_eliminate_first_scans, [("func_bold_ap_img",
                                                   "in_file")]),
        (node_input, node_merge_ap_pa_inputs, [("func_segfm_ap_img", "in1"),
                                               ("func_segfm_pa_img", "in2")]),
        (node_merge_ap_pa_inputs, node_merge_SEgfm, [("out", "in_files")]),
        (node_input, node_epireg, [("T1_img", "t1_head")]),
        (node_input, node_vol2vol_brain, [("T1_brain_freesurfer_mask",
                                           "source_file")]),
        (node_input, node_vol2vol_brain, [("T1_img", "target_file")]),
        (node_input, node_extract_mask, [("T1_img", "in_file")]),

        #connections
        (node_eliminate_first_scans, node_realign_bold, [("roi_file",
                                                          "in_file")]),
        (node_average_SEgfm, node_coregister_SBref2SEgfm, [("out_file",
                                                            "reference")]),
        (node_coregister_SBref2SEgfm, node_realign_bold, [("out_file",
                                                           "ref_file")]),

        #T1 brain mask transformations (change space / vol2vol, binarize and extract)
        (node_vol2vol_brain, node_bin_mask_brain, [("transformed_file",
                                                    "in_file")]),
        (node_bin_mask_brain, node_extract_mask, [("out_file", "operand_file")
                                                  ]),

        #(node_realign_bold, node_tsnr, [("out_file", "in_file")]),
        (node_merge_SEgfm, node_topup_SEgfm, [("merged_file", "in_file")]),
        (node_realign_bold, node_apply_topup, [("out_file", "in_files")]),
        (node_topup_SEgfm, node_apply_topup,
         [("out_fieldcoef", "in_topup_fieldcoef"),
          ("out_movpar", "in_topup_movpar")]),
        (node_topup_SEgfm, node_apply_topup_to_SBref,
         [("out_fieldcoef", "in_topup_fieldcoef"),
          ("out_movpar", "in_topup_movpar")]),
        (node_coregister_SBref2SEgfm, node_apply_topup_to_SBref,
         [("out_file", "in_files")]),

        # coregister to T1
        (node_extract_mask, node_epireg, [("out_file", "t1_brain")]),
        (node_apply_topup_to_SBref, node_epireg, [("out_corrected", "epi")]),
        (node_epireg, node_invert_epi2reg, [("epi2str_mat", "in_file")]),
        (node_coregister_SBref2SEgfm, node_mask_fMRI, [("out_file", "in_file")
                                                       ]),

        # yield relevant data to the output node
        (node_coregister_SBref2SEgfm, node_output, [("out_matrix_file",
                                                     "SBref2SEgfm_mat")]),
        (node_realign_bold, node_output, [("par_file", "realign_movpar_txt"),
                                          ("out_file", "realign_fmri_img")]),
        (node_mask_fMRI, node_output, [("mask_file", "fmri_mask_img")]),
        (node_epireg, node_output, [("epi2str_mat", "epi2str_mat")]),
        (node_epireg, node_output, [("out_file", "epi2str_img")]),
        (node_topup_SEgfm, node_output,
         [("out_fieldcoef", "topup_field_coef_img"),
          ("out_corrected", "sb_ref_unwarped_img")]),
        (node_apply_topup, node_output, [("out_corrected",
                                          "rfmri_unwarped_imgs")])
    ])
    print("All connections created")
    return (wf)
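
A short usage sketch (file names are placeholders, not from the original source): the workflow above is driven entirely through its input_node, so one way to run it is to set those inputs directly and execute locally. The file passed as ACQ_PARAMS is the standard FSL acquisition-parameters text file expected by TOPUP, with one "x y z readout_time" row per volume of the merged AP/PA series.

# Hypothetical invocation of get_fmri2standard_wf with placeholder paths.
wf = get_fmri2standard_wf(tvols=[5, 405], subject_id="sub-01",
                          ACQ_PARAMS="acparams_hcp.txt")
inputs = wf.get_node("input_node")
inputs.inputs.func_sbref_img = "sub-01_sbref.nii.gz"            # single-band reference
inputs.inputs.func_segfm_ap_img = "sub-01_dir-AP_epi.nii.gz"    # spin-echo fieldmap, AP
inputs.inputs.func_segfm_pa_img = "sub-01_dir-PA_epi.nii.gz"    # spin-echo fieldmap, PA
inputs.inputs.func_bold_ap_img = "sub-01_task-rest_bold.nii.gz"
inputs.inputs.T1_img = "sub-01_T1w.nii.gz"
inputs.inputs.T1_brain_freesurfer_mask = "sub-01_brain.mgz"     # FreeSurfer brain.mgz
wf.base_dir = "/tmp/fmri2standard"  # the function above leaves base_dir empty
wf.run()
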
Esempio n. 25
0
def create_anat_preproc(method='afni',
                        already_skullstripped=False,
                        c=None,
                        wf_name='anat_preproc'):
    """The main purpose of this workflow is to process T1 scans. Raw mprage file is deobliqued, reoriented
    into RPI and skullstripped. Also, a whole brain only mask is generated from the skull stripped image
    for later use in registration.

    Returns
    -------
    anat_preproc : workflow
        Anatomical Preprocessing Workflow

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/anat_preproc/anat_preproc.py>`_

    Workflow Inputs::
        inputspec.anat : string
            User input anatomical (T1) Image, in any of the 8 orientations

    Workflow Outputs::

        outputspec.refit : string
            Path to deobliqued anatomical image

        outputspec.reorient : string
            Path to RPI oriented anatomical image

        outputspec.skullstrip : string
            Path to skull stripped RPI oriented mprage file with normalized intensities.

        outputspec.brain : string
            Path to skull stripped RPI brain image with original intensity values and not normalized or scaled.

    Order of commands:
    - Deobliqing the scans. ::
        3drefit -deoblique mprage.nii.gz

    - Re-orienting the Image into Right-to-Left Posterior-to-Anterior Inferior-to-Superior  (RPI) orientation ::
        3dresample -orient RPI
                   -prefix mprage_RPI.nii.gz
                   -inset mprage.nii.gz

    - Skull-Stripping the image ::
        Using AFNI ::
            3dSkullStrip -input mprage_RPI.nii.gz
                         -o_ply mprage_RPI_3dT.nii.gz
        or using BET ::
            bet mprage_RPI.nii.gz

    - The skull-stripping step modifies the intensity values. To get back the original intensity values, we do an element wise product of RPI data with step function of skull-stripped data ::
        3dcalc -a mprage_RPI.nii.gz
               -b mprage_RPI_3dT.nii.gz
               -expr 'a*step(b)'
               -prefix mprage_RPI_3dc.nii.gz

    High Level Workflow Graph:
    .. image:: ../images/anatpreproc_graph.dot.png
       :width: 500

    Detailed Workflow Graph:
    .. image:: ../images/anatpreproc_graph_detailed.dot.png
       :width: 500

    Examples
    --------
    >>> from CPAC.anat_preproc import create_anat_preproc
    >>> preproc = create_anat_preproc()
    >>> preproc.inputs.inputspec.anat = 'sub1/anat/mprage.nii.gz'
    >>> preproc.run() #doctest: +SKIP
    """

    preproc = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['anat', 'brain_mask']),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(
        fields=['refit', 'reorient', 'skullstrip', 'brain', 'brain_mask']),
                         name='outputspec')

    anat_deoblique = pe.Node(interface=afni.Refit(), name='anat_deoblique')
    anat_deoblique.inputs.deoblique = True
    preproc.connect(inputnode, 'anat', anat_deoblique, 'in_file')

    preproc.connect(anat_deoblique, 'out_file', outputnode, 'refit')
    # Disable non_local_means_filtering and n4_bias_field_correction when run niworkflows-ants
    if method == 'niworkflows-ants':
        c.non_local_means_filtering = False
        c.n4_bias_field_correction = False

    if c.non_local_means_filtering and c.n4_bias_field_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(denoise, 'output_image', n4, 'input_image')
    elif c.non_local_means_filtering and not c.n4_bias_field_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
    elif not c.non_local_means_filtering and c.n4_bias_field_correction:
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(anat_deoblique, 'out_file', n4, 'input_image')

    # Anatomical reorientation
    anat_reorient = pe.Node(interface=afni.Resample(), name='anat_reorient')
    anat_reorient.inputs.orientation = 'RPI'
    anat_reorient.inputs.outputtype = 'NIFTI_GZ'

    if c.n4_bias_field_correction:
        preproc.connect(n4, 'output_image', anat_reorient, 'in_file')
    elif c.non_local_means_filtering and not c.n4_bias_field_correction:
        preproc.connect(denoise, 'output_image', anat_reorient, 'in_file')
    else:
        preproc.connect(anat_deoblique, 'out_file', anat_reorient, 'in_file')

    preproc.connect(anat_reorient, 'out_file', outputnode, 'reorient')

    if already_skullstripped:

        anat_skullstrip = pe.Node(
            interface=util.IdentityInterface(fields=['out_file']),
            name='anat_skullstrip')

        preproc.connect(anat_reorient, 'out_file', anat_skullstrip, 'out_file')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'skullstrip')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'brain')

    else:

        if method == 'afni':
            # Skull-stripping using AFNI 3dSkullStrip
            inputnode_afni = pe.Node(util.IdentityInterface(fields=[
                'mask_vol', 'shrink_factor', 'var_shrink_fac',
                'shrink_fac_bot_lim', 'avoid_vent', 'niter', 'pushout',
                'touchup', 'fill_hole', 'avoid_eyes', 'use_edge', 'exp_frac',
                'smooth_final', 'push_to_edge', 'use_skull', 'perc_int',
                'max_inter_iter', 'blur_fwhm', 'fac', 'monkey'
            ]),
                                     name='AFNI_options')

            skullstrip_args = pe.Node(util.Function(
                input_names=[
                    'spat_norm', 'spat_norm_dxyz', 'mask_vol', 'shrink_fac',
                    'var_shrink_fac', 'shrink_fac_bot_lim', 'avoid_vent',
                    'niter', 'pushout', 'touchup', 'fill_hole', 'avoid_eyes',
                    'use_edge', 'exp_frac', 'smooth_final', 'push_to_edge',
                    'use_skull', 'perc_int', 'max_inter_iter', 'blur_fwhm',
                    'fac', 'monkey'
                ],
                output_names=['expr'],
                function=create_3dskullstrip_arg_string),
                                      name='anat_skullstrip_args')

            preproc.connect([(inputnode_afni, skullstrip_args,
                              [('mask_vol', 'mask_vol'),
                               ('shrink_factor', 'shrink_fac'),
                               ('var_shrink_fac', 'var_shrink_fac'),
                               ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'),
                               ('avoid_vent', 'avoid_vent'),
                               ('niter', 'niter'), ('pushout', 'pushout'),
                               ('touchup', 'touchup'),
                               ('fill_hole', 'fill_hole'),
                               ('avoid_eyes', 'avoid_eyes'),
                               ('use_edge', 'use_edge'),
                               ('exp_frac', 'exp_frac'),
                               ('smooth_final', 'smooth_final'),
                               ('push_to_edge', 'push_to_edge'),
                               ('use_skull', 'use_skull'),
                               ('perc_int', 'perc_int'),
                               ('max_inter_iter', 'max_inter_iter'),
                               ('blur_fwhm', 'blur_fwhm'), ('fac', 'fac'),
                               ('monkey', 'monkey')])])

            anat_skullstrip = pe.Node(interface=afni.SkullStrip(),
                                      name='anat_skullstrip')

            anat_skullstrip.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')
            preproc.connect(skullstrip_args, 'expr', anat_skullstrip, 'args')

            # Generate anatomical brain mask

            anat_brain_mask = pe.Node(interface=afni.Calc(),
                                      name='anat_brain_mask')

            anat_brain_mask.inputs.expr = 'step(a)'
            anat_brain_mask.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_skullstrip, 'out_file', anat_brain_mask,
                            'in_file_a')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_brain_mask, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_brain_mask, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'fsl':
            # Skull-stripping using FSL BET
            inputnode_bet = pe.Node(util.IdentityInterface(fields=[
                'frac', 'mask_boolean', 'mesh_boolean', 'outline', 'padding',
                'radius', 'reduce_bias', 'remove_eyes', 'robust', 'skull',
                'surfaces', 'threshold', 'vertical_gradient'
            ]),
                                    name='BET_options')

            anat_skullstrip = pe.Node(interface=fsl.BET(),
                                      name='anat_skullstrip')
            anat_skullstrip.inputs.output_type = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')

            preproc.connect([(inputnode_bet, anat_skullstrip, [
                ('frac', 'frac'),
                ('mask_boolean', 'mask'),
                ('mesh_boolean', 'mesh'),
                ('outline', 'outline'),
                ('padding', 'padding'),
                ('radius', 'radius'),
                ('reduce_bias', 'reduce_bias'),
                ('remove_eyes', 'remove_eyes'),
                ('robust', 'robust'),
                ('skull', 'skull'),
                ('surfaces', 'surfaces'),
                ('threshold', 'threshold'),
                ('vertical_gradient', 'vertical_gradient'),
            ])])

            preproc.connect(anat_skullstrip, 'out_file', outputnode,
                            'skullstrip')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_skullstrip, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_skullstrip, 'mask_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'niworkflows-ants':
            # Skull-stripping using niworkflows-ants
            anat_skullstrip_ants = init_brain_extraction_wf(
                tpl_target_path=c.niworkflows_ants_template_path,
                tpl_mask_path=c.niworkflows_ants_mask_path,
                tpl_regmask_path=c.niworkflows_ants_regmask_path,
                name='anat_skullstrip_ants')

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip_ants,
                            'inputnode.in_files')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'skullstrip')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'brain')

            preproc.connect(anat_skullstrip_ants,
                            'atropos_wf.copy_xform.out_mask', outputnode,
                            'brain_mask')

        elif method == 'mask':

            brain_mask_deoblique = pe.Node(interface=afni.Refit(),
                                           name='brain_mask_deoblique')
            brain_mask_deoblique.inputs.deoblique = True
            preproc.connect(inputnode, 'brain_mask', brain_mask_deoblique,
                            'in_file')

            brain_mask_reorient = pe.Node(interface=afni.Resample(),
                                          name='brain_mask_reorient')
            brain_mask_reorient.inputs.orientation = 'RPI'
            brain_mask_reorient.inputs.outputtype = 'NIFTI_GZ'
            preproc.connect(brain_mask_deoblique, 'out_file',
                            brain_mask_reorient, 'in_file')

            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')
            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(brain_mask_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(brain_mask_reorient, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'unet':
            """
            UNet
            options (following numbers are default):
            input_slice: 3
            conv_block: 5
            kernel_root: 16
            rescale_dim: 256
            """
            # TODO: add options to pipeline_config
            train_model = UNet2d(dim_in=3, num_conv_block=5, kernel_root=16)
            unet_path = check_for_s3(c.unet_model)
            checkpoint = torch.load(unet_path, map_location={'cuda:0': 'cpu'})
            train_model.load_state_dict(checkpoint['state_dict'])
            model = nn.Sequential(train_model, nn.Softmax2d())

            # create a node called unet_mask
            unet_mask = pe.Node(util.Function(input_names=['model', 'cimg_in'],
                                              output_names=['out_path'],
                                              function=predict_volumes),
                                name='unet_mask')

            unet_mask.inputs.model = model
            preproc.connect(anat_reorient, 'out_file', unet_mask, 'cimg_in')
            """
            Revised mask with ANTs
            """
            # fslmaths <whole head> -mul <mask> brain.nii.gz
            unet_masked_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                        name='unet_masked_brain')
            unet_masked_brain.inputs.op_string = "-mul %s"
            preproc.connect(anat_reorient, 'out_file', unet_masked_brain,
                            'in_file')
            preproc.connect(unet_mask, 'out_path', unet_masked_brain,
                            'operand_files')

            # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
            # TODO change it to ANTs linear transform
            native_brain_to_template_brain = pe.Node(
                interface=fsl.FLIRT(), name='native_brain_to_template_brain')
            native_brain_to_template_brain.inputs.reference = c.template_brain_only_for_anat
            native_brain_to_template_brain.inputs.dof = 6
            native_brain_to_template_brain.inputs.interp = 'sinc'
            preproc.connect(unet_masked_brain, 'out_file',
                            native_brain_to_template_brain, 'in_file')

            # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
            # TODO change it to ANTs linear transform
            native_head_to_template_head = pe.Node(
                interface=fsl.FLIRT(), name='native_head_to_template_head')
            native_head_to_template_head.inputs.reference = c.template_skull_for_anat
            native_head_to_template_head.inputs.apply_xfm = True
            preproc.connect(anat_reorient, 'out_file',
                            native_head_to_template_head, 'in_file')
            preproc.connect(native_brain_to_template_brain, 'out_matrix_file',
                            native_head_to_template_head, 'in_matrix_file')

            # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
            template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(),
                                          name='template_brain_mask')
            template_brain_mask.inputs.in_file = c.template_brain_only_for_anat
            template_brain_mask.inputs.args = '-bin'

            # ANTS 3 -m  CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching  --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
            ants_template_head_to_template = pe.Node(
                interface=ants.Registration(),
                name='template_head_to_template')
            ants_template_head_to_template.inputs.metric = ['CC']
            ants_template_head_to_template.inputs.metric_weight = [1, 5]
            ants_template_head_to_template.inputs.moving_image = c.template_skull_for_anat
            ants_template_head_to_template.inputs.transforms = ['SyN']
            ants_template_head_to_template.inputs.transform_parameters = [
                (0.25, )
            ]
            ants_template_head_to_template.inputs.interpolation = 'NearestNeighbor'
            ants_template_head_to_template.inputs.number_of_iterations = [[
                60, 50, 20
            ]]
            ants_template_head_to_template.inputs.smoothing_sigmas = [[
                0.6, 0.2, 0.0
            ]]
            ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]]
            ants_template_head_to_template.inputs.convergence_threshold = [
                1.e-8
            ]
            preproc.connect(native_head_to_template_head, 'out_file',
                            ants_template_head_to_template, 'fixed_image')

            # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz
            template_head_transform_to_template = pe.Node(
                interface=ants.ApplyTransforms(),
                name='template_head_transform_to_template')
            template_head_transform_to_template.inputs.dimension = 3
            preproc.connect(template_brain_mask, 'out_file',
                            template_head_transform_to_template, 'input_image')
            preproc.connect(native_brain_to_template_brain, 'out_file',
                            template_head_transform_to_template,
                            'reference_image')
            preproc.connect(ants_template_head_to_template,
                            'forward_transforms',
                            template_head_transform_to_template, 'transforms')

            # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat
            invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm')
            invt.inputs.invert_xfm = True
            preproc.connect(native_brain_to_template_brain, 'out_matrix_file',
                            invt, 'in_file')

            # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat
            template_brain_to_native_brain = pe.Node(
                interface=fsl.FLIRT(), name='template_brain_to_native_brain')
            template_brain_to_native_brain.inputs.apply_xfm = True
            preproc.connect(template_head_transform_to_template,
                            'output_image', template_brain_to_native_brain,
                            'in_file')
            preproc.connect(unet_masked_brain, 'out_file',
                            template_brain_to_native_brain, 'reference')
            preproc.connect(invt, 'out_file', template_brain_to_native_brain,
                            'in_matrix_file')

            # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz
            refined_mask = pe.Node(interface=fsl.Threshold(),
                                   name='refined_mask')
            refined_mask.inputs.thresh = 0.5
            preproc.connect(template_brain_to_native_brain, 'out_file',
                            refined_mask, 'in_file')

            # get a new brain with mask
            refined_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                    name='refined_brain')
            refined_brain.inputs.op_string = "-mul %s"
            preproc.connect(anat_reorient, 'out_file', refined_brain,
                            'in_file')
            preproc.connect(refined_mask, 'out_file', refined_brain,
                            'operand_files')

            preproc.connect(refined_mask, 'out_file', outputnode, 'brain_mask')
            preproc.connect(refined_brain, 'out_file', outputnode, 'brain')

    return preproc
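
A short usage sketch (paths are placeholders): running the 'mask' variant of the workflow above, in which a pre-computed brain mask supplied on inputspec.brain_mask is applied to the reoriented T1 instead of running a skull-stripping algorithm. The configuration object c is assumed to carry the attributes the function reads (non_local_means_filtering, n4_bias_field_correction, template paths, and so on).

# Hypothetical invocation of create_anat_preproc with the 'mask' method.
preproc = create_anat_preproc(method='mask', c=c, wf_name='anat_preproc_mask')
preproc.inputs.inputspec.anat = 'sub1/anat/mprage.nii.gz'
preproc.inputs.inputspec.brain_mask = 'sub1/anat/brain_mask.nii.gz'
preproc.run()
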
Esempio n. 26
0

get_T1_template= pe.Node(interface=fsl.ExtractROI(), name = 'get_T1_template')
#extract_b0.inputs.t_min = 0
get_T1_template.inputs.t_size = 1
get_T1_template.inputs.in_file = T1_Template


T1linTemplate = pe.Node(interface=fsl.FLIRT(), name='T1linTemplate')
#T1linTemplate.inputs.reference=Template
T1linTemplate.inputs.dof = 12
T1linTemplate.inputs.searchr_x = [-180, 180]
T1linTemplate.inputs.searchr_y = [-180, 180]
T1linTemplate.inputs.searchr_z = [-180, 180]

inverse_T1_matrix = pe.Node(interface=fsl.ConvertXFM(), name='inverse_T1_matrix')
inverse_T1_matrix.inputs.invert_xfm = True

T1warpTemplate=pe.Node(interface=fsl.FNIRT(), name='T1warpTemplate')
T1warpTemplate.inputs.field_file=True
T1warpTemplate.inputs.config_file=configfile

inverse_T1_warp=pe.Node(interface=tools.InvWarp(), name='inverse_T1_warp')


apply_T1_warp=pe.MapNode(interface=fsl.ApplyWarp(), name='apply_T1_warp',
                         iterfield=['in_file'])
apply_T1_warp.inputs.interp='nn'


get_masks = pe.MapNode(interface=fsl.ExtractROI(), name = 'get_masks',
                       iterfield=['in_file'])
Esempio n. 27
0
def init_fsl_bbr_wf(use_bbr,
                    bold2t1w_dof,
                    bold2t1w_init,
                    sloppy=False,
                    name='fsl_bbr_wf'):
    """
    Build a workflow to run FSL's ``flirt``.

    This workflow uses FSL FLIRT to register a BOLD image to a T1-weighted
    structural image, using a boundary-based registration (BBR) cost function.
    It is a counterpart to :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`,
    which performs the same task using FreeSurfer's ``bbregister``.

    The ``use_bbr`` option permits a high degree of control over registration.
    If ``False``, standard, rigid coregistration will be performed by FLIRT.
    If ``True``, FLIRT-BBR will be seeded with the initial transform found by
    the rigid coregistration.
    If ``None``, after FLIRT-BBR is run, the resulting affine transform
    will be compared to the initial transform found by FLIRT.
    Excessive deviation will result in rejecting the BBR refinement and
    accepting the original, affine registration.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.bold.registration import init_fsl_bbr_wf
            wf = init_fsl_bbr_wf(use_bbr=True, bold2t1w_dof=9, bold2t1w_init='register')


    Parameters
    ----------
    use_bbr : :obj:`bool` or None
        Enable/disable boundary-based registration refinement.
        If ``None``, test BBR result for distortion before accepting.
    bold2t1w_dof : 6, 9 or 12
        Degrees-of-freedom for BOLD-T1w registration
    bold2t1w_init : str, 'header' or 'register'
        If ``'header'``, use header information for initialization of BOLD and T1 images.
        If ``'register'``, align volumes by their centers.
    name : :obj:`str`, optional
        Workflow name (default: fsl_bbr_wf)

    Inputs
    ------
    in_file
        Reference BOLD image to be registered
    t1w_brain
        Skull-stripped T1-weighted structural image
    t1w_dseg
        FAST segmentation of ``t1w_brain``
    fsnative2t1w_xfm
        Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`)
    subjects_dir
        Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`)
    subject_id
        Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`)

    Outputs
    -------
    itk_bold_to_t1
        Affine transform from ``ref_bold_brain`` to T1w space (ITK format)
    itk_t1_to_bold
        Affine transform from T1 space to BOLD space (ITK format)
    out_report
        Reportlet for assessing registration quality
    fallback
        Boolean indicating whether BBR was rejected (rigid FLIRT registration returned)

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.utils.images import dseg_label as _dseg_label
    from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert
    from niworkflows.interfaces.registration import FLIRTRPT
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The BOLD reference was then co-registered to the T1w reference using
`flirt` [FSL {fsl_ver}, @flirt] with the boundary-based registration [@bbr]
cost-function.
Co-registration was configured with nine degrees of freedom to account
for distortions remaining in the BOLD reference.
""".format(fsl_ver=FLIRTRPT().version or '<ver>')

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            'fsnative2t1w_xfm',
            'subjects_dir',
            'subject_id',  # BBRegister
            't1w_dseg',
            't1w_brain'
        ]),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['itk_bold_to_t1', 'itk_t1_to_bold', 'out_report', 'fallback']),
                         name='outputnode')

    wm_mask = pe.Node(niu.Function(function=_dseg_label), name='wm_mask')
    wm_mask.inputs.label = 2  # BIDS default is WM=2
    flt_bbr_init = pe.Node(FLIRTRPT(dof=6,
                                    generate_report=not use_bbr,
                                    uses_qform=True),
                           name='flt_bbr_init')

    if bold2t1w_init not in ("register", "header"):
        raise ValueError(
            f"Unknown BOLD-T1w initialization option: {bold2t1w_init}")

    if bold2t1w_init == "header":
        raise NotImplementedError(
            "Header-based registration initialization not supported for FSL")

    invt_bbr = pe.Node(fsl.ConvertXFM(invert_xfm=True),
                       name='invt_bbr',
                       mem_gb=DEFAULT_MEMORY_MIN_GB)

    # BOLD to T1 transform matrix is from fsl, using c3 tools to convert to
    # something ANTs will like.
    fsl2itk_fwd = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_fwd',
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    fsl2itk_inv = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_inv',
                          mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, flt_bbr_init, [('in_file', 'in_file'),
                                   ('t1w_brain', 'reference')]),
        (inputnode, fsl2itk_fwd, [('t1w_brain', 'reference_file'),
                                  ('in_file', 'source_file')]),
        (inputnode, fsl2itk_inv, [('in_file', 'reference_file'),
                                  ('t1w_brain', 'source_file')]),
        (invt_bbr, fsl2itk_inv, [('out_file', 'transform_file')]),
        (fsl2itk_fwd, outputnode, [('itk_transform', 'itk_bold_to_t1')]),
        (fsl2itk_inv, outputnode, [('itk_transform', 'itk_t1_to_bold')]),
    ])

    # Short-circuit workflow building, use rigid registration
    if use_bbr is False:
        workflow.connect([
            (flt_bbr_init, invt_bbr, [('out_matrix_file', 'in_file')]),
            (flt_bbr_init, fsl2itk_fwd, [('out_matrix_file', 'transform_file')
                                         ]),
            (flt_bbr_init, outputnode, [('out_report', 'out_report')]),
        ])
        outputnode.inputs.fallback = True

        return workflow

    flt_bbr = pe.Node(FLIRTRPT(cost_func='bbr',
                               dof=bold2t1w_dof,
                               generate_report=True),
                      name='flt_bbr')

    FSLDIR = os.getenv('FSLDIR')
    if FSLDIR:
        flt_bbr.inputs.schedule = op.join(FSLDIR, 'etc/flirtsch/bbr.sch')
    else:
        # Should mostly be hit while building docs
        LOGGER.warning("FSLDIR unset - using packaged BBR schedule")
        flt_bbr.inputs.schedule = pkgr.resource_filename(
            'fmriprep', 'data/flirtsch/bbr.sch')

    workflow.connect([
        (inputnode, wm_mask, [('t1w_dseg', 'in_seg')]),
        (inputnode, flt_bbr, [('in_file', 'in_file')]),
        (flt_bbr_init, flt_bbr, [('out_matrix_file', 'in_matrix_file')]),
    ])

    if sloppy is True:
        downsample = pe.Node(niu.Function(
            function=_conditional_downsampling,
            output_names=["out_file", "out_mask"]),
                             name='downsample')
        workflow.connect([
            (inputnode, downsample, [("t1w_brain", "in_file")]),
            (wm_mask, downsample, [("out", "in_mask")]),
            (downsample, flt_bbr, [('out_file', 'reference'),
                                   ('out_mask', 'wm_seg')]),
        ])
    else:
        workflow.connect([
            (inputnode, flt_bbr, [('t1w_brain', 'reference')]),
            (wm_mask, flt_bbr, [('out', 'wm_seg')]),
        ])

    # Short-circuit workflow building, use boundary-based registration
    if use_bbr is True:
        workflow.connect([
            (flt_bbr, invt_bbr, [('out_matrix_file', 'in_file')]),
            (flt_bbr, fsl2itk_fwd, [('out_matrix_file', 'transform_file')]),
            (flt_bbr, outputnode, [('out_report', 'out_report')]),
        ])
        outputnode.inputs.fallback = False

        return workflow

    transforms = pe.Node(niu.Merge(2),
                         run_without_submitting=True,
                         name='transforms')
    reports = pe.Node(niu.Merge(2),
                      run_without_submitting=True,
                      name='reports')

    compare_transforms = pe.Node(niu.Function(function=compare_xforms),
                                 name='compare_transforms')

    select_transform = pe.Node(niu.Select(),
                               run_without_submitting=True,
                               name='select_transform')
    select_report = pe.Node(niu.Select(),
                            run_without_submitting=True,
                            name='select_report')

    fsl_to_lta = pe.MapNode(LTAConvert(out_lta=True),
                            iterfield=['in_fsl'],
                            name='fsl_to_lta')

    workflow.connect([
        (flt_bbr, transforms, [('out_matrix_file', 'in1')]),
        (flt_bbr_init, transforms, [('out_matrix_file', 'in2')]),
        # Convert FSL transforms to LTA (RAS2RAS) transforms and compare
        (inputnode, fsl_to_lta, [('in_file', 'source_file'),
                                 ('t1w_brain', 'target_file')]),
        (transforms, fsl_to_lta, [('out', 'in_fsl')]),
        (fsl_to_lta, compare_transforms, [('out_lta', 'lta_list')]),
        (compare_transforms, outputnode, [('out', 'fallback')]),
        # Select output transform
        (transforms, select_transform, [('out', 'inlist')]),
        (compare_transforms, select_transform, [('out', 'index')]),
        (select_transform, invt_bbr, [('out', 'in_file')]),
        (select_transform, fsl2itk_fwd, [('out', 'transform_file')]),
        (flt_bbr, reports, [('out_report', 'in1')]),
        (flt_bbr_init, reports, [('out_report', 'in2')]),
        (reports, select_report, [('out', 'inlist')]),
        (compare_transforms, select_report, [('out', 'index')]),
        (select_report, outputnode, [('out', 'out_report')]),
    ])

    return workflow
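
As a standalone illustration of the fsl.ConvertXFM patterns used throughout these examples (file names are placeholders): invert_xfm turns a FLIRT BOLD-to-T1w matrix into its inverse, as invt_bbr does above, and concat_xfm chains two affines so that a single resampling step can apply both, as combine_premats does in Esempio n. 23.

from nipype.interfaces import fsl

# Invert a FLIRT affine: bold -> t1w becomes t1w -> bold
invert = fsl.ConvertXFM(in_file="bold2t1w.mat", invert_xfm=True,
                        out_file="t1w2bold.mat")

# Concatenate two affines: (vol -> sbref) then (sbref -> fm) yields vol -> fm
concat = fsl.ConvertXFM(in_file="vol2sbref.mat", in_file2="sbref2fm.mat",
                        concat_xfm=True, out_file="vol2fm.mat")

# invert.run(); concat.run()  # requires FSL and the input matrices on disk
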
Esempio n. 28
0
def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, name='fsl_bbr_wf'):
    """
    This workflow uses FSL FLIRT to register a BOLD image to a T1-weighted
    structural image, using a boundary-based registration (BBR) cost function.

    It is a counterpart to :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`,
    which performs the same task using FreeSurfer's ``bbregister``.

    The ``use_bbr`` option permits a high degree of control over registration.
    If ``False``, standard, rigid coregistration will be performed by FLIRT.
    If ``True``, FLIRT-BBR will be seeded with the initial transform found by
    the rigid coregistration.
    If ``None``, after FLIRT-BBR is run, the resulting affine transform
    will be compared to the initial transform found by FLIRT.
    Excessive deviation will result in rejecting the BBR refinement and
    accepting the original, affine registration.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.registration import init_fsl_bbr_wf
        wf = init_fsl_bbr_wf(use_bbr=True, bold2t1w_dof=9)


    Parameters

        use_bbr : bool or None
            Enable/disable boundary-based registration refinement.
            If ``None``, test BBR result for distortion before accepting.
        bold2t1w_dof : 6, 9 or 12
            Degrees-of-freedom for BOLD-T1w registration
        name : str, optional
            Workflow name (default: fsl_bbr_wf)


    Inputs

        in_file
            Reference BOLD image to be registered
        t1_brain
            Skull-stripped T1-weighted structural image
        t1_seg
            FAST segmentation of ``t1_brain``
        t1_2_fsnative_reverse_transform
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)
        subjects_dir
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)
        subject_id
            Unused (see :py:func:`~fmriprep.workflows.util.init_bbreg_wf`)


    Outputs

        itk_bold_to_t1
            Affine transform from ``ref_bold_brain`` to T1 space (ITK format)
        itk_t1_to_bold
            Affine transform from T1 space to BOLD space (ITK format)
        out_report
            Reportlet for assessing registration quality
        fallback
            Boolean indicating whether BBR was rejected (rigid FLIRT registration returned)

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The BOLD reference was then co-registered to the T1w reference using
`flirt` [FSL {fsl_ver}, @flirt] with the boundary-based registration [@bbr]
cost-function.
Co-registration was configured with nine degrees of freedom to account
for distortions remaining in the BOLD reference.
""".format(fsl_ver=FLIRTRPT().version or '<ver>')

    inputnode = pe.Node(
        niu.IdentityInterface([
            'in_file',
            't1_2_fsnative_reverse_transform',
            'subjects_dir',
            'subject_id',  # BBRegister
            't1_seg',
            't1_brain'
        ]),  # FLIRT BBR
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        ['itk_bold_to_t1', 'itk_t1_to_bold', 'out_report', 'fallback']),
                         name='outputnode')

    wm_mask = pe.Node(niu.Function(function=extract_wm), name='wm_mask')
    flt_bbr_init = pe.Node(FLIRTRPT(dof=6,
                                    generate_report=not use_bbr,
                                    uses_qform=True),
                           name='flt_bbr_init')

    invt_bbr = pe.Node(fsl.ConvertXFM(invert_xfm=True),
                       name='invt_bbr',
                       mem_gb=DEFAULT_MEMORY_MIN_GB)

    #  BOLD to T1 transform matrix is from fsl, using c3 tools to convert to
    #  something ANTs will like.
    fsl2itk_fwd = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_fwd',
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    fsl2itk_inv = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True),
                          name='fsl2itk_inv',
                          mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, flt_bbr_init, [('in_file', 'in_file'),
                                   ('t1_brain', 'reference')]),
        (inputnode, fsl2itk_fwd, [('t1_brain', 'reference_file'),
                                  ('in_file', 'source_file')]),
        (inputnode, fsl2itk_inv, [('in_file', 'reference_file'),
                                  ('t1_brain', 'source_file')]),
        (invt_bbr, fsl2itk_inv, [('out_file', 'transform_file')]),
        (fsl2itk_fwd, outputnode, [('itk_transform', 'itk_bold_to_t1')]),
        (fsl2itk_inv, outputnode, [('itk_transform', 'itk_t1_to_bold')]),
    ])

    # Short-circuit workflow building, use rigid registration
    if use_bbr is False:
        workflow.connect([
            (flt_bbr_init, invt_bbr, [('out_matrix_file', 'in_file')]),
            (flt_bbr_init, fsl2itk_fwd, [('out_matrix_file', 'transform_file')
                                         ]),
            (flt_bbr_init, outputnode, [('out_report', 'out_report')]),
        ])
        outputnode.inputs.fallback = True

        return workflow

    flt_bbr = pe.Node(FLIRTRPT(cost_func='bbr',
                               dof=bold2t1w_dof,
                               generate_report=True,
                               schedule=op.join(os.getenv('FSLDIR'),
                                                'etc/flirtsch/bbr.sch')),
                      name='flt_bbr')

    workflow.connect([
        (inputnode, wm_mask, [('t1_seg', 'in_seg')]),
        (inputnode, flt_bbr, [('in_file', 'in_file'),
                              ('t1_brain', 'reference')]),
        (flt_bbr_init, flt_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (wm_mask, flt_bbr, [('out', 'wm_seg')]),
    ])

    # Short-circuit workflow building, use boundary-based registration
    if use_bbr is True:
        workflow.connect([
            (flt_bbr, invt_bbr, [('out_matrix_file', 'in_file')]),
            (flt_bbr, fsl2itk_fwd, [('out_matrix_file', 'transform_file')]),
            (flt_bbr, outputnode, [('out_report', 'out_report')]),
        ])
        outputnode.inputs.fallback = False

        return workflow

    transforms = pe.Node(niu.Merge(2),
                         run_without_submitting=True,
                         name='transforms')
    reports = pe.Node(niu.Merge(2),
                      run_without_submitting=True,
                      name='reports')

    compare_transforms = pe.Node(niu.Function(function=compare_xforms),
                                 name='compare_transforms')
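    # compare_xforms returns a boolean: True when the BBR solution deviates from
    # the initial rigid transform beyond tolerance (i.e. BBR is rejected).
    # Because `transforms` holds the BBR matrix in slot 1 and the rigid matrix in
    # slot 2, that boolean doubles as the zero-based Select index
    # (False -> 0 -> BBR, True -> 1 -> rigid fallback).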

    select_transform = pe.Node(niu.Select(),
                               run_without_submitting=True,
                               name='select_transform')
    select_report = pe.Node(niu.Select(),
                            run_without_submitting=True,
                            name='select_report')

    fsl_to_lta = pe.MapNode(fs.utils.LTAConvert(out_lta=True),
                            iterfield=['in_fsl'],
                            name='fsl_to_lta')

    workflow.connect([
        (flt_bbr, transforms, [('out_matrix_file', 'in1')]),
        (flt_bbr_init, transforms, [('out_matrix_file', 'in2')]),
        # Convert FSL transforms to LTA (RAS2RAS) transforms and compare
        (inputnode, fsl_to_lta, [('in_file', 'source_file'),
                                 ('t1_brain', 'target_file')]),
        (transforms, fsl_to_lta, [('out', 'in_fsl')]),
        (fsl_to_lta, compare_transforms, [('out_lta', 'lta_list')]),
        (compare_transforms, outputnode, [('out', 'fallback')]),
        # Select output transform
        (transforms, select_transform, [('out', 'inlist')]),
        (compare_transforms, select_transform, [('out', 'index')]),
        (select_transform, invt_bbr, [('out', 'in_file')]),
        (select_transform, fsl2itk_fwd, [('out', 'transform_file')]),
        (flt_bbr, reports, [('out_report', 'in1')]),
        (flt_bbr_init, reports, [('out_report', 'in2')]),
        (reports, select_report, [('out', 'inlist')]),
        (compare_transforms, select_report, [('out', 'index')]),
        (select_report, outputnode, [('out', 'out_report')]),
    ])

    return workflow
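
# A minimal usage sketch for init_fsl_bbr_wf as defined above. The file names are
# hypothetical placeholders; a configured FSL installation (FSLDIR set) and, for
# use_bbr=None, FreeSurfer's lta_convert are assumed to be available.
bbr_wf = init_fsl_bbr_wf(use_bbr=None, bold2t1w_dof=6)
bbr_wf.inputs.inputnode.in_file = 'sub-01_boldref.nii.gz'          # hypothetical BOLD reference
bbr_wf.inputs.inputnode.t1_brain = 'sub-01_desc-brain_T1w.nii.gz'  # hypothetical skull-stripped T1w
bbr_wf.inputs.inputnode.t1_seg = 'sub-01_dseg.nii.gz'              # hypothetical FAST segmentation
# bbr_wf.run()  # requires the input files above to exist on disk
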
featNode.inputs.feat_gen = gen_default_feat_config

# ## Generate parcellated ROI-Timeseries

# Register example-func to freesurfer brainmask
exfunc2anat = Node(fsl.FLIRT(bins=256,
                             searchr_x=[90, 90],
                             searchr_y=[90, 90],
                             searchr_z=[90, 90],
                             cost='corratio',
                             interp='trilinear',
                             dof=6),
                   name='Func_2_Anat')

# invert transformation
invt = Node(fsl.ConvertXFM(invert_xfm=True), name='invert_transf')

# Transform the ROI mask to functional space with FLIRT (nearest-neighbour interpolation preserves the integer ROI labels)
roimask2func = Node(fsl.FLIRT(padding_size=0,
                              interp='nearestneighbour',
                              apply_xfm=True),
                    name='roimask_2_func')

# Export average region time-series
ss = Node(freesurfer.SegStats(), name='SegStats')
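
# A minimal wiring sketch for the nodes above. Hedged assumptions: exfunc2anat
# still needs its in_file (example functional) and reference (FreeSurfer brainmask)
# set upstream, as do roimask2func's in_file (the anatomical ROI mask) and
# ss's in_file (the functional series used to export average waveforms).
from nipype.pipeline.engine import Workflow  # assumed import (Node is imported the same way above)

roits_wf = Workflow(name='roi_timeseries')
roits_wf.connect([
    # invert the func -> anat matrix to obtain anat -> func
    (exfunc2anat, invt, [('out_matrix_file', 'in_file')]),
    # apply the inverted matrix to resample the ROI mask into functional space
    (invt, roimask2func, [('out_file', 'in_matrix_file')]),
    # use the resampled mask as the segmentation from which SegStats extracts per-ROI statistics
    (roimask2func, ss, [('out_file', 'segmentation_file')]),
])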

# ### Preprocess the data before computing the FC


def segstat_shaping(aparc_stats):
    import os, re
Example n. 30
0
def create_connectivity_pipeline(name="connectivity"):
    """Creates a pipeline that does the same connectivity processing as in the
    :ref:`example_dmri_connectivity` example script. Given a subject id (and completed Freesurfer reconstruction)
    diffusion-weighted image, b-values, and b-vectors, the workflow will return the subject's connectome
    as a Connectome File Format (CFF) file for use in Connectome Viewer (http://www.cmtk.org).

    Example
    -------

    >>> from nipype.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline
    >>> conmapper = create_connectivity_pipeline("nipype_conmap")
    >>> conmapper.inputs.inputnode.subjects_dir = '.'
    >>> conmapper.inputs.inputnode.subject_id = 'subj1'
    >>> conmapper.inputs.inputnode.dwi = 'data.nii.gz'
    >>> conmapper.inputs.inputnode.bvecs = 'bvecs'
    >>> conmapper.inputs.inputnode.bvals = 'bvals'
    >>> conmapper.run()                 # doctest: +SKIP

    Inputs::

        inputnode.subject_id
        inputnode.subjects_dir
        inputnode.dwi
        inputnode.bvecs
        inputnode.bvals
        inputnode.resolution_network_file

    Outputs::

        outputnode.connectome
        outputnode.cmatrix
        outputnode.gpickled_network
        outputnode.fa
        outputnode.struct
        outputnode.trace
        outputnode.tracts
        outputnode.tensors

    """

    inputnode_within = pe.Node(interface=util.IdentityInterface(fields=["subject_id",
                                                                  "dwi",
                                                                  "bvecs",
                                                                  "bvals",
                                                                  "subjects_dir",
                                                                  "resolution_network_file",
                                                                  ]),
                         name="inputnode_within")

    FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(), name='fssource')

    FreeSurferSourceLH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceLH')
    FreeSurferSourceLH.inputs.hemi = 'lh'

    FreeSurferSourceRH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceRH')
    FreeSurferSourceRH.inputs.hemi = 'rh'

    """
    Since the b values and b vectors come from the FSL course, we must convert them to a scheme file
    for use in Camino.
    """

    fsl2scheme = pe.Node(interface=camino.FSL2Scheme(), name="fsl2scheme")
    fsl2scheme.inputs.usegradmod = True

    """
    FSL's Brain Extraction tool is used to create a mask from the b0 image
    """

    b0Strip = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0')

    """
    FSL's FLIRT function is used to coregister the b0 mask and the structural image.
    A convert_xfm node is then used to obtain the inverse of the transformation matrix.
    FLIRT is used once again to apply the inverse transformation to the parcellated brain image.
    """

    coregister = pe.Node(interface=fsl.FLIRT(dof=6), name = 'coregister')
    coregister.inputs.cost = ('normmi')

    convertxfm = pe.Node(interface=fsl.ConvertXFM(), name = 'convertxfm')
    convertxfm.inputs.invert_xfm = True

    inverse = pe.Node(interface=fsl.FLIRT(), name = 'inverse')
    inverse.inputs.interp = ('nearestneighbour')

    inverse_AparcAseg = pe.Node(interface=fsl.FLIRT(), name = 'inverse_AparcAseg')
    inverse_AparcAseg.inputs.interp = ('nearestneighbour')

    """
    A number of conversion operations are required to obtain NIFTI files from the FreesurferSource for each subject.
    Nodes are used to convert the following:
        * Original structural image to NIFTI
        * Parcellated white matter image to NIFTI
        * Parcellated whole-brain image to NIFTI
        * Pial, white, inflated, and spherical surfaces for both the left and right hemispheres
            are converted to GIFTI for visualization in ConnectomeViewer
        * Parcellated annotation files for the left and right hemispheres are also converted to GIFTI
    """

    mri_convert_Brain = pe.Node(interface=fs.MRIConvert(), name='mri_convert_Brain')
    mri_convert_Brain.inputs.out_type = 'nii'

    mri_convert_AparcAseg = mri_convert_Brain.clone('mri_convert_AparcAseg')

    mris_convertLH = pe.Node(interface=fs.MRIsConvert(), name='mris_convertLH')
    mris_convertLH.inputs.out_datatype = 'gii'
    mris_convertRH = mris_convertLH.clone('mris_convertRH')
    mris_convertRHwhite = mris_convertLH.clone('mris_convertRHwhite')
    mris_convertLHwhite = mris_convertLH.clone('mris_convertLHwhite')
    mris_convertRHinflated = mris_convertLH.clone('mris_convertRHinflated')
    mris_convertLHinflated = mris_convertLH.clone('mris_convertLHinflated')
    mris_convertRHsphere = mris_convertLH.clone('mris_convertRHsphere')
    mris_convertLHsphere = mris_convertLH.clone('mris_convertLHsphere')
    mris_convertLHlabels = mris_convertLH.clone('mris_convertLHlabels')
    mris_convertRHlabels = mris_convertLH.clone('mris_convertRHlabels')

    """
    In this section we create the nodes necessary for diffusion analysis.
    First, the diffusion image is converted to voxel order, since this is the format in which Camino does
    its processing.
    """

    image2voxel = pe.Node(interface=camino.Image2Voxel(), name="image2voxel")

    """
    Second, diffusion tensors are fit to the voxel-order data.
    If desired, these tensors can be converted to a Nifti tensor image using the DT2NIfTI interface.
    """

    dtifit = pe.Node(interface=camino.DTIFit(),name='dtifit')

    """
    Next, a lookup table is generated from the schemefile and the
    signal-to-noise ratio (SNR) of the unweighted (q=0) data.
    """

    dtlutgen = pe.Node(interface=camino.DTLUTGen(), name="dtlutgen")
    dtlutgen.inputs.snr = 16.0
    dtlutgen.inputs.inversion = 1

    """
    In this tutorial we implement probabilistic tractography using the PICo algorithm.
    PICo tractography requires an estimate of the fibre direction and a model of its uncertainty in each voxel;
    this probability distribution map is produced using the following node.
    """

    picopdfs = pe.Node(interface=camino.PicoPDFs(), name="picopdfs")
    picopdfs.inputs.inputmodel = 'dt'

    """
    Finally, tractography is performed. In this tutorial, we will use only one iteration for time-saving purposes.
    It is important to note that we use the TrackPICo interface here. This interface now expects the files required
    for PICo tracking (i.e. the output from picopdfs). Similar interfaces exist for alternative types of tracking,
    such as Bayesian tracking with Dirac priors (TrackBayesDirac).
    """

    track = pe.Node(interface=camino.TrackPICo(), name="track")
    track.inputs.iterations = 1

    """
    Currently, the best program for visualizing tracts is TrackVis. For this reason, a node is included to
    convert the raw tract data to .trk format. Solely for testing purposes, another node is added to perform the reverse.
    """

    camino2trackvis = pe.Node(interface=cam2trk.Camino2Trackvis(), name="camino2trackvis")
    camino2trackvis.inputs.min_length = 30
    camino2trackvis.inputs.voxel_order = 'LAS'
    trk2camino = pe.Node(interface=cam2trk.Trackvis2Camino(), name="trk2camino")

    """
    Tracts can also be converted to VTK and OOGL formats, for use in programs such as GeomView and Paraview,
    using the following two nodes.
    """

    vtkstreamlines = pe.Node(interface=camino.VtkStreamlines(), name="vtkstreamlines")
    procstreamlines = pe.Node(interface=camino.ProcStreamlines(), name="procstreamlines")

    """
    We can easily produce a variety of scalar values from our fitted tensors. The following nodes generate the
    fractional anisotropy and diffusivity trace maps and their associated headers, and then merge them back
    into a single .nii file.
    """

    fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(),name='fa')
    trace = pe.Node(interface=camino.ComputeTensorTrace(),name='trace')
    dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig')

    analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_fa')
    analyzeheader_fa.inputs.datatype = 'double'
    analyzeheader_trace = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_trace')
    analyzeheader_trace.inputs.datatype = 'double'

    fa2nii = pe.Node(interface=misc.CreateNifti(),name='fa2nii')
    trace2nii = fa2nii.clone("trace2nii")

    """
    This section adds the Connectome Mapping Toolkit (CMTK) nodes.
    These interfaces are fairly experimental and may not function properly.
    In order to perform connectivity mapping using CMTK, the parcellated structural data is rewritten
    using the indices and parcellation scheme from the connectome mapper (CMP). This process has been
    written into the ROIGen interface, which will output a remapped aparc+aseg image as well as a
    dictionary of label information (i.e. name, display colours) pertaining to the original and remapped regions.
    These label values are taken from a user-supplied lookup table, if specified, and otherwise from the default
    FreeSurfer LUT (/freesurfer/FreeSurferColorLUT.txt).
    """

    roigen = pe.Node(interface=cmtk.ROIGen(), name="ROIGen")
    roigen_structspace = roigen.clone("ROIGen_structspace")

    """
    The CreateMatrix interface takes in the remapped aparc+aseg image as well as the label dictionary and fiber tracts,
    and outputs a number of different files. The most important of these is the connectivity network itself, which is
    stored as a 'gpickle' and can be loaded using Python's NetworkX package (see the CreateMatrix docstring). It also
    outputs various NumPy arrays containing detailed tract information, such as the start and endpoint regions, and
    statistics on the mean and standard deviation of the fiber length for each connection. These matrices can be used
    in the ConnectomeViewer to plot the specific tracts that connect user-selected regions.
    """

    createnodes = pe.Node(interface=cmtk.CreateNodes(), name="CreateNodes")
    creatematrix = pe.Node(interface=cmtk.CreateMatrix(), name="CreateMatrix")
    creatematrix.inputs.count_region_intersections = True

    """
    Here we define the endpoint of this tutorial, which is the CFFConverter node, as well as a few nodes which use
    the Nipype Merge utility. These are useful for passing lists of the files we want packaged in our CFF file.
    """

    CFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="CFFConverter")

    giftiSurfaces = pe.Node(interface=util.Merge(8), name="GiftiSurfaces")
    giftiLabels = pe.Node(interface=util.Merge(2), name="GiftiLabels")
    niftiVolumes = pe.Node(interface=util.Merge(3), name="NiftiVolumes")
    fiberDataArrays = pe.Node(interface=util.Merge(4), name="FiberDataArrays")
    gpickledNetworks = pe.Node(interface=util.Merge(1), name="NetworkFiles")

    """
    Since we have now created all our nodes, we can define our workflow and start making connections.
    """

    mapping = pe.Workflow(name='mapping')

    """
    First, we connect the input node to the early conversion functions.
    FreeSurfer input nodes:
    """


    mapping.connect([(inputnode_within, FreeSurferSource,[("subjects_dir","subjects_dir")])])
    mapping.connect([(inputnode_within, FreeSurferSource,[("subject_id","subject_id")])])

    mapping.connect([(inputnode_within, FreeSurferSourceLH,[("subjects_dir","subjects_dir")])])
    mapping.connect([(inputnode_within, FreeSurferSourceLH,[("subject_id","subject_id")])])

    mapping.connect([(inputnode_within, FreeSurferSourceRH,[("subjects_dir","subjects_dir")])])
    mapping.connect([(inputnode_within, FreeSurferSourceRH,[("subject_id","subject_id")])])

    """
    Required conversions for processing in Camino:
    """

    mapping.connect([(inputnode_within, image2voxel, [("dwi", "in_file")]),
                           (inputnode_within, fsl2scheme, [("bvecs", "bvec_file"),
                                                    ("bvals", "bval_file")]),
                           (image2voxel, dtifit,[['voxel_order','in_file']]),
                           (fsl2scheme, dtifit,[['scheme','scheme_file']])
                          ])

    """
    Nifti conversions for the subject's stripped brain image from Freesurfer:
    """


    mapping.connect([(FreeSurferSource, mri_convert_Brain,[('brain','in_file')])])

    """
    Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres)
    """

    mapping.connect([(FreeSurferSourceLH, mris_convertLH,[('pial','in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRH,[('pial','in_file')])])
    mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite,[('white','in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite,[('white','in_file')])])
    mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated,[('inflated','in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated,[('inflated','in_file')])])
    mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere,[('sphere','in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere,[('sphere','in_file')])])

    """
    The annotation files are converted using the pial surface as a map via the MRIsConvert interface.
    One of the functions defined earlier is used to select the lh.aparc.annot and rh.aparc.annot files
    specifically (rather than, e.g., rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource.
    """

    mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels,[('pial','in_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels,[('pial','in_file')])])
    mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [(('annot', select_aparc_annot), 'annot_file')])])
    mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [(('annot', select_aparc_annot), 'annot_file')])])

    """
    This section coregisters the diffusion-weighted and parcellated white-matter / whole brain images.
    At present the conmap node connection is left commented, as there have been recent changes in Camino
    code that have presented some users with errors.
    """

    mapping.connect([(inputnode_within, b0Strip,[('dwi','in_file')])])
    mapping.connect([(inputnode_within, b0Strip,[('dwi','t2_guided')])]) # Added to improve damaged brain extraction
    mapping.connect([(b0Strip, coregister,[('out_file','in_file')])])
    mapping.connect([(mri_convert_Brain, coregister,[('out_file','reference')])])
    mapping.connect([(coregister, convertxfm,[('out_matrix_file','in_file')])])
    mapping.connect([(b0Strip, inverse,[('out_file','reference')])])
    mapping.connect([(convertxfm, inverse,[('out_file','in_matrix_file')])])
    mapping.connect([(mri_convert_Brain, inverse,[('out_file','in_file')])])

    """
    The tractography pipeline consists of the following nodes. Further information about the tractography
    can be found in nipype/examples/dmri_camino_dti.py.
    """

    mapping.connect([(b0Strip, track,[("mask_file","seed_file")])])
    mapping.connect([(fsl2scheme, dtlutgen,[("scheme","scheme_file")])])
    mapping.connect([(dtlutgen, picopdfs,[("dtLUT","luts")])])
    mapping.connect([(dtifit, picopdfs,[("tensor_fitted","in_file")])])
    mapping.connect([(picopdfs, track,[("pdfs","in_file")])])

    """
    Connecting the Fractional Anisotropy and Trace nodes is simple, as they obtain their input from the
    tensor fitting. This is also where our voxel- and data-grabbing functions come in. We pass these functions,
    along with the original DWI image from the input node, to the header-generating nodes. This ensures that the
    files will be correct and readable.
    """

    mapping.connect([(dtifit, fa,[("tensor_fitted","in_file")])])
    mapping.connect([(fa, analyzeheader_fa,[("fa","in_file")])])
    mapping.connect([(inputnode_within, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'),
        (('dwi', get_data_dims), 'data_dims')])])
    mapping.connect([(fa, fa2nii,[('fa','data_file')])])
    mapping.connect([(inputnode_within, fa2nii,[(('dwi', get_affine), 'affine')])])
    mapping.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])])


    mapping.connect([(dtifit, trace,[("tensor_fitted","in_file")])])
    mapping.connect([(trace, analyzeheader_trace,[("trace","in_file")])])
    mapping.connect([(inputnode_within, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'),
        (('dwi', get_data_dims), 'data_dims')])])
    mapping.connect([(trace, trace2nii,[('trace','data_file')])])
    mapping.connect([(inputnode_within, trace2nii,[(('dwi', get_affine), 'affine')])])
    mapping.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])])

    mapping.connect([(dtifit, dteig,[("tensor_fitted","in_file")])])

    """
    The output tracts are converted to Trackvis format (and back). Here we also use the voxel- and data-grabbing
    functions defined at the beginning of the pipeline.
    """

    mapping.connect([(track, camino2trackvis, [('tracked','in_file')]),
                           (track, vtkstreamlines,[['tracked','in_file']]),
                           (camino2trackvis, trk2camino,[['trackvis','in_file']])
                          ])
    mapping.connect([(inputnode_within, camino2trackvis,[(('dwi', get_vox_dims), 'voxel_dims'),
        (('dwi', get_data_dims), 'data_dims')])])

    """
    Here the CMTK connectivity mapping nodes are connected.
    The original aparc+aseg image is converted to NIFTI, then registered to
    the diffusion image and delivered to the ROIGen node. The remapped parcellation,
    original tracts, and label file are then given to CreateMatrix.
    """

    mapping.connect(inputnode_within, 'resolution_network_file',
                    createnodes, 'resolution_network_file')
    mapping.connect(createnodes, 'node_network',
                    creatematrix, 'resolution_network_file')
    mapping.connect([(FreeSurferSource, mri_convert_AparcAseg, [(('aparc_aseg', select_aparc), 'in_file')])])

    mapping.connect([(b0Strip, inverse_AparcAseg,[('out_file','reference')])])
    mapping.connect([(convertxfm, inverse_AparcAseg,[('out_file','in_matrix_file')])])
    mapping.connect([(mri_convert_AparcAseg, inverse_AparcAseg,[('out_file','in_file')])])
    mapping.connect([(mri_convert_AparcAseg, roigen_structspace,[('out_file','aparc_aseg_file')])])
    mapping.connect([(roigen_structspace, createnodes,[("roi_file","roi_file")])])

    mapping.connect([(inverse_AparcAseg, roigen,[("out_file","aparc_aseg_file")])])
    mapping.connect([(roigen, creatematrix,[("roi_file","roi_file")])])
    mapping.connect([(camino2trackvis, creatematrix,[("trackvis","tract_file")])])
    mapping.connect([(inputnode_within, creatematrix,[("subject_id","out_matrix_file")])])
    mapping.connect([(inputnode_within, creatematrix,[("subject_id","out_matrix_mat_file")])])

    """
    The merge nodes defined earlier are used here to create lists of the files which are
    destined for the CFFConverter.
    """

    mapping.connect([(mris_convertLH, giftiSurfaces,[("converted","in1")])])
    mapping.connect([(mris_convertRH, giftiSurfaces,[("converted","in2")])])
    mapping.connect([(mris_convertLHwhite, giftiSurfaces,[("converted","in3")])])
    mapping.connect([(mris_convertRHwhite, giftiSurfaces,[("converted","in4")])])
    mapping.connect([(mris_convertLHinflated, giftiSurfaces,[("converted","in5")])])
    mapping.connect([(mris_convertRHinflated, giftiSurfaces,[("converted","in6")])])
    mapping.connect([(mris_convertLHsphere, giftiSurfaces,[("converted","in7")])])
    mapping.connect([(mris_convertRHsphere, giftiSurfaces,[("converted","in8")])])

    mapping.connect([(mris_convertLHlabels, giftiLabels,[("converted","in1")])])
    mapping.connect([(mris_convertRHlabels, giftiLabels,[("converted","in2")])])

    mapping.connect([(roigen, niftiVolumes,[("roi_file","in1")])])
    mapping.connect([(inputnode_within, niftiVolumes,[("dwi","in2")])])
    mapping.connect([(mri_convert_Brain, niftiVolumes,[("out_file","in3")])])

    mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file","in1")])])
    mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file_mm","in2")])])
    mapping.connect([(creatematrix, fiberDataArrays,[("fiber_length_file","in3")])])
    mapping.connect([(creatematrix, fiberDataArrays,[("fiber_label_file","in4")])])

    """
    This block actually connects the merged lists to the CFF converter. We pass the surfaces
    and volumes that are to be included, as well as the tracts and the network itself. The currently
    running pipeline (dmri_connectivity.py) is also scraped and included in the CFF file. This
    makes it easy for the user to examine the entire processing pathway used to generate the end
    product.
    """

    CFFConverter.inputs.script_files = op.abspath(inspect.getfile(inspect.currentframe()))
    mapping.connect([(giftiSurfaces, CFFConverter,[("out","gifti_surfaces")])])
    mapping.connect([(giftiLabels, CFFConverter,[("out","gifti_labels")])])
    mapping.connect([(creatematrix, CFFConverter,[("matrix_files","gpickled_networks")])])

    mapping.connect([(niftiVolumes, CFFConverter,[("out","nifti_volumes")])])
    mapping.connect([(fiberDataArrays, CFFConverter,[("out","data_files")])])
    mapping.connect([(camino2trackvis, CFFConverter,[("trackvis","tract_files")])])
    mapping.connect([(inputnode_within, CFFConverter,[("subject_id","title")])])

    """
    Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes
    declared at the beginning. Our tutorial is now extensible to an arbitrary number of subjects: simply add
    their names to the subject list and their data to the proper folders (a hedged wiring sketch is given after this function).
    """

    inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir", "resolution_network_file"]), name="inputnode")

    outputnode = pe.Node(interface = util.IdentityInterface(fields=["fa",
                                                                "struct",
                                                                "trace",
                                                                "tracts",
                                                                "connectome",
                                                                "cmatrix",
                                                                "networks",
                                                                "rois",
                                                                "mean_fiber_length",
                                                                "fiber_length_std",
                                                                "tensors"]),
                                        name="outputnode")

    connectivity = pe.Workflow(name="connectivity")
    connectivity.base_output_dir=name

    connectivity.connect([(inputnode, mapping, [("dwi", "inputnode_within.dwi"),
                                              ("bvals", "inputnode_within.bvals"),
                                              ("bvecs", "inputnode_within.bvecs"),
                                              ("subject_id", "inputnode_within.subject_id"),
                                              ("subjects_dir", "inputnode_within.subjects_dir"),
                                              ("resolution_network_file", "inputnode_within.resolution_network_file")])
                                              ])

    connectivity.connect([(mapping, outputnode, [("camino2trackvis.trackvis", "tracts"),
        ("CFFConverter.connectome_file", "connectome"),
        ("CreateMatrix.matrix_mat_file", "cmatrix"),
        ("CreateMatrix.mean_fiber_length_matrix_mat_file", "mean_fiber_length"),
        ("CreateMatrix.fiber_length_std_matrix_mat_file", "fiber_length_std"),
        ("fa2nii.nifti_file", "fa"),
        ("CreateMatrix.matrix_files", "networks"),
        ("ROIGen.roi_file", "rois"),
        ("mri_convert_Brain.out_file", "struct"),
        ("trace2nii.nifti_file", "trace"),
        ("dtifit.tensor_fitted", "tensors")])
        ])

    return connectivity
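
# A minimal sketch of the higher-level wrapping described in the closing docstring
# of create_connectivity_pipeline. Hedged assumptions: the subject list, base
# directory, and DataGrabber template layout below are hypothetical and must be
# adapted to the actual data organisation; resolution_network_file, if required,
# would be set on conmapper.inputs.inputnode in the same way as subjects_dir.
def _example_study_workflow():
    conmapper = create_connectivity_pipeline("conmap")
    conmapper.inputs.inputnode.subjects_dir = op.abspath('subjects')  # hypothetical SUBJECTS_DIR

    infosource = pe.Node(util.IdentityInterface(fields=['subject_id']), name='infosource')
    infosource.iterables = ('subject_id', ['subj1'])                  # hypothetical subject list

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id'],
                                         outfields=['dwi', 'bvecs', 'bvals'],
                                         sort_filelist=True),
                         name='datasource')
    datasource.inputs.base_directory = op.abspath('data')             # hypothetical layout
    datasource.inputs.template = '%s/%s'
    datasource.inputs.template_args = dict(dwi=[['subject_id', 'data.nii.gz']],
                                           bvecs=[['subject_id', 'bvecs']],
                                           bvals=[['subject_id', 'bvals']])

    study = pe.Workflow(name='dmri_study')
    study.connect([
        (infosource, datasource, [('subject_id', 'subject_id')]),
        (infosource, conmapper, [('subject_id', 'inputnode.subject_id')]),
        (datasource, conmapper, [('dwi', 'inputnode.dwi'),
                                 ('bvecs', 'inputnode.bvecs'),
                                 ('bvals', 'inputnode.bvals')]),
    ])
    return study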