Example #1
def output_smooth(workflow,
                  output_name,
                  mask_name,
                  fwhm,
                  strat,
                  num_strat,
                  map_node=False):

    if map_node:
        output_smooth = pe.MapNode(interface=fsl.MultiImageMaths(),
                                   name='{0}_smooth_{1}'.format(
                                       output_name, num_strat),
                                   iterfield=['in_file'])
    else:
        output_smooth = pe.Node(interface=fsl.MultiImageMaths(),
                                name='{0}_smooth_{1}'.format(
                                    output_name, num_strat))

    # TODO review connection to config, is the node really necessary?
    inputnode_fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']),
                             name='fwhm_input_{0}_{1}'.format(
                                 output_name, num_strat))
    inputnode_fwhm.iterables = ("fwhm", fwhm)

    # get the resource to be smoothed
    node, out_file = strat[output_name]

    workflow.connect(node, out_file, output_smooth, 'in_file')

    # get the parameters for fwhm
    workflow.connect(inputnode_fwhm, ('fwhm', set_gauss), output_smooth,
                     'op_string')

    # get the mask
    if isinstance(mask_name, str):
        node, out_file = strat[mask_name]
        workflow.connect(node, out_file, output_smooth, 'operand_files')
    else:
        # mask_name is a direct file path and not the name of a
        # resource pool key
        workflow.connect(mask_name, 'local_path', output_smooth,
                         'operand_files')

    strat.append_name(output_smooth.name)
    strat.update_resource_pool(
        {'{0}_smooth'.format(output_name): (output_smooth, 'out_file')})

    return strat
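The op_string fed into MultiImageMaths above is produced by a `set_gauss` helper that is not shown in this snippet. A minimal sketch of what such a helper could look like, assuming the usual FWHM-to-sigma conversion used with fslmaths Gaussian smoothing (the trailing '%s' is filled in later with the mask via `operand_files`):

def set_gauss(fwhm):
    # Sketch only (not the original helper): convert FWHM in mm to the
    # Gaussian sigma expected by fslmaths, then build the smoothing/masking
    # op string whose '%s' placeholder MultiImageMaths fills with the mask.
    sigma = float(fwhm) / 2.3548
    return "-kernel gauss %f -fmean -mas " % sigma + "%s"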
Example #2
def remove_bias(name='bias_correct'):
    """
    This workflow estimates a single multiplicative bias field from the
    averaged *b0* image, as suggested in [Jeurissen2014]_.

    .. admonition:: References

      .. [Jeurissen2014] Jeurissen B. et al., `Multi-tissue constrained
        spherical deconvolution for improved analysis of multi-shell diffusion
        MRI data <https://doi.org/10.1016/j.neuroimage.2014.07.061>`_.
        NeuroImage (2014). doi: 10.1016/j.neuroimage.2014.07.061


    Example
    -------

    >>> from nipype.workflows.dmri.fsl.artifacts import remove_bias
    >>> bias = remove_bias()
    >>> bias.inputs.inputnode.in_file = 'epi.nii'
    >>> bias.inputs.inputnode.in_bval = 'diffusion.bval'
    >>> bias.inputs.inputnode.in_mask = 'mask.nii'
    >>> bias.run() # doctest: +SKIP

    """
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_file', 'in_bval', 'in_mask']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                         name='outputnode')

    avg_b0 = pe.Node(niu.Function(input_names=['in_dwi', 'in_bval'],
                                  output_names=['out_file'],
                                  function=b0_average),
                     name='b0_avg')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                            save_bias=True,
                                            bspline_fitting_distance=600),
                 name='Bias_b0')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    mult = pe.MapNode(fsl.MultiImageMaths(op_string='-div %s'),
                      iterfield=['in_file'],
                      name='RemoveBiasOfDWIs')
    thres = pe.MapNode(fsl.Threshold(thresh=0.0),
                       iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.utils.Merge(dimension='t'), name='MergeDWIs')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, avg_b0, [('in_file', 'in_dwi'),
                                     ('in_bval', 'in_bval')]),
                (avg_b0, n4, [('out_file', 'input_image')]),
                (inputnode, n4, [('in_mask', 'mask_image')]),
                (inputnode, split, [('in_file', 'in_file')]),
                (n4, mult, [('bias_image', 'operand_files')]),
                (split, mult, [('out_files', 'in_file')]),
                (mult, thres, [('out_file', 'in_file')]),
                (thres, merge, [('out_file', 'in_files')]),
                (merge, outputnode, [('merged_file', 'out_file')])])
    return wf
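The workflow above relies on a `b0_average` function that is not shown. A rough, self-contained sketch under the assumption that b0 volumes are those whose b-value falls below a small threshold:

def b0_average(in_dwi, in_bval, max_b=50.0, out_file=None):
    # Sketch only: average all volumes whose b-value is <= max_b.
    # Imports live inside the function so it can run in a niu.Function node.
    import os
    import numpy as np
    import nibabel as nb

    if out_file is None:
        out_file = os.path.abspath('b0_avg.nii.gz')

    img = nb.load(in_dwi)
    bvals = np.loadtxt(in_bval)
    b0_idx = np.atleast_1d(np.squeeze(np.argwhere(bvals <= max_b)))

    b0_mean = img.get_fdata()[..., b0_idx].mean(axis=-1)
    nb.Nifti1Image(b0_mean, img.affine).to_filename(out_file)
    return out_file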
Example #3
def kellyk(c):
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs

    wf = pe.Workflow(name="KellyKapowski")

    infosource = pe.Node(niu.IdentityInterface(fields=["subject_id"]),
                         name="subjects")
    infosource.iterables = ("subject_id", c.subjects)

    seg = fs_segment()
    wf.connect(infosource, "subject_id", seg, "inputspec.subject_id")
    seg.inputs.inputspec.subjects_dir = c.surf_dir

    combine = pe.Node(fsl.MultiImageMaths(op_string='-mul 1.5 -add %s -mul 2'),
                      name="add23")
    wf.connect(seg, 'outputspec.wm', combine, 'in_file')
    wf.connect(seg, ('outputspec.gm', tolist), combine, 'operand_files')

    sink = pe.Node(nio.DataSink(), name="sinker")
    wf.connect(infosource, "subject_id", sink, "container")
    sink.inputs.base_directory = c.sink_dir
    wf.connect(combine, "out_file", sink, "kellykapowski.segment")

    return wf
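`tolist` is referenced above but not defined in this snippet. It most likely just wraps the single grey-matter file in a list so it satisfies MultiImageMaths' `operand_files` (which expects a list of files); a plausible one-liner:

def tolist(x):
    # Assumed helper: wrap a single path so it can be used as operand_files.
    return [x]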
Example #4
def apply_all_corrections(name='UnwarpArtifacts'):
    """
    Combines two lists of linear transforms with the deformation field
    map obtained typically after the SDC process.
    Additionally, computes the corresponding bspline coefficients and
    the map of determinants of the jacobian.
    """

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_sdc',
                        'in_hmc', 'in_ecc', 'in_dwi']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_warp',
                         'out_coeff', 'out_jacobian']), name='outputnode')
    warps = pe.MapNode(fsl.ConvertWarp(relwarp=True),
                       iterfield=['premat', 'postmat'],
                       name='ConvertWarp')

    selref = pe.Node(niu.Select(index=[0]), name='Reference')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    unwarp = pe.MapNode(fsl.ApplyWarp(), iterfield=['in_file', 'field_file'],
                        name='UnwarpDWIs')

    coeffs = pe.MapNode(fsl.WarpUtils(out_format='spline'),
                        iterfield=['in_file'], name='CoeffComp')
    jacobian = pe.MapNode(fsl.WarpUtils(write_jacobian=True),
                          iterfield=['in_file'], name='JacobianComp')
    jacmult = pe.MapNode(fsl.MultiImageMaths(op_string='-mul %s'),
                         iterfield=['in_file', 'operand_files'],
                         name='ModulateDWIs')

    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode,   warps,      [('in_sdc', 'warp1'),
                                   ('in_hmc', 'premat'),
                                   ('in_ecc', 'postmat'),
                                   ('in_dwi', 'reference')]),
        (inputnode,   split,      [('in_dwi', 'in_file')]),
        (split,       selref,     [('out_files', 'inlist')]),
        (warps,       unwarp,     [('out_file', 'field_file')]),
        (split,       unwarp,     [('out_files', 'in_file')]),
        (selref,      unwarp,     [('out', 'ref_file')]),
        (selref,      coeffs,     [('out', 'reference')]),
        (warps,       coeffs,     [('out_file', 'in_file')]),
        (selref,      jacobian,   [('out', 'reference')]),
        (coeffs,      jacobian,   [('out_file', 'in_file')]),
        (unwarp,      jacmult,    [('out_file', 'in_file')]),
        (jacobian,    jacmult,    [('out_jacobian', 'operand_files')]),
        (jacmult,     thres,      [('out_file', 'in_file')]),
        (thres,       merge,      [('out_file', 'in_files')]),
        (warps,       outputnode, [('out_file', 'out_warp')]),
        (coeffs,      outputnode, [('out_file', 'out_coeff')]),
        (jacobian,    outputnode, [('out_jacobian', 'out_jacobian')]),
        (merge,       outputnode, [('merged_file', 'out_file')])
    ])
    return wf
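A hypothetical usage sketch for the workflow above (all file names are placeholders, not from the original source); `in_hmc` and `in_ecc` are expected to hold one affine matrix per DWI volume, matching the MapNode iterfields:

unwarp = apply_all_corrections()
unwarp.inputs.inputnode.in_dwi = 'dwi.nii.gz'
unwarp.inputs.inputnode.in_sdc = 'sdc_warp.nii.gz'
unwarp.inputs.inputnode.in_hmc = ['hmc_%02d.mat' % i for i in range(32)]
unwarp.inputs.inputnode.in_ecc = ['ecc_%02d.mat' % i for i in range(32)]
# unwarp.run()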
Example #5
def thresh_albert_gm(outputs_dir):
    in_file = os.path.join(outputs_dir,
                           get_file_name(outputs_dir, "*Albert_WTA*"))
    mask_file = os.path.join(
        outputs_dir, "Fast_PVE",
        get_file_name(os.path.join(outputs_dir, "Fast_PVE"), "*pve_2*"))
    maths = fsl.MultiImageMaths()
    maths.inputs.in_file = in_file
    maths.inputs.op_string = "-mas %s "
    maths.inputs.operand_files = [mask_file]
    maths.inputs.out_file = os.path.join(outputs_dir, "Albert_GM.nii.gz")
    print(maths.cmdline)  # show the fslmaths command that will be run
    maths.run()
Example #6
def cleanup_edge_pipeline(name='Cleanup'):
    """
    Perform some de-spiking filtering to clean up the edge of the fieldmap
    (copied from fsl_prepare_fieldmap)
    """
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'in_mask']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                         name='outputnode')

    fugue = pe.Node(fsl.FUGUE(save_fmap=True,
                              despike_2dfilter=True,
                              despike_threshold=2.1),
                    name='Despike')
    erode = pe.Node(fsl.maths.MathsCommand(nan2zeros=True,
                                           args='-kernel 2D -ero'),
                    name='MskErode')
    newmsk = pe.Node(fsl.MultiImageMaths(op_string='-sub %s -thr 0.5 -bin'),
                     name='NewMask')
    applymsk = pe.Node(fsl.ApplyMask(nan2zeros=True), name='ApplyMask')
    join = pe.Node(niu.Merge(2), name='Merge')
    addedge = pe.Node(fsl.MultiImageMaths(op_string='-mas %s -add %s'),
                      name='AddEdge')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, fugue, [('in_file', 'fmap_in_file'),
                                    ('in_mask', 'mask_file')]),
                (inputnode, erode, [('in_mask', 'in_file')]),
                (inputnode, newmsk, [('in_mask', 'in_file')]),
                (erode, newmsk, [('out_file', 'operand_files')]),
                (fugue, applymsk, [('fmap_out_file', 'in_file')]),
                (newmsk, applymsk, [('out_file', 'mask_file')]),
                (erode, join, [('out_file', 'in1')]),
                (applymsk, join, [('out_file', 'in2')]),
                (inputnode, addedge, [('in_file', 'in_file')]),
                (join, addedge, [('out', 'operand_files')]),
                (addedge, outputnode, [('out_file', 'out_file')])])
    return wf
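A hypothetical usage sketch (paths are placeholders): pass the raw fieldmap and its brain mask, then pick up the de-spiked fieldmap from the output node:

cleanup = cleanup_edge_pipeline()
cleanup.inputs.inputnode.in_file = 'fieldmap.nii.gz'
cleanup.inputs.inputnode.in_mask = 'fieldmap_brain_mask.nii.gz'
# cleanup.run()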
Example #7
def perform_b0_registration(b0_files, out_dir, force_run = True):
    """ 
        Apply FLIRT to b0 files, and then average the result
        @params - force_run: overwrites existing registered files
        @return - super b0 path
    """
    registered_dir = os.path.join(out_dir, 'registered_b0')
    logger.info("Creating registered b0 directory {}".format(registered_dir))

    make_dir_safe(registered_dir)

    base_b0 = b0_files[0]

    logger.debug("Using '{}' as baseline b0 file".format(base_b0))

    flirt = fsl.FLIRT()
    flirt.inputs.reference = base_b0
    flirt.inputs.cost = 'leastsq'
    flirt.inputs.dof = 12
    flirt.inputs.verbose = 1
    flirt.inputs.output_type = 'NIFTI'

    flirt_out_files  = []

    for i, b0_file in enumerate(b0_files[1:]):
        flirt.inputs.in_file = b0_file
        flirt.inputs.out_file = os.path.join(registered_dir, "flirt-{num:02d}.nii".format(num = i + 1))
        
        flirt_out_files.append(flirt.inputs.out_file)

        flirt.inputs.out_matrix_file = os.path.join(registered_dir, "flirt-{num:02d}.txt".format(num = i + 1))

        if not os.path.isfile(flirt.inputs.out_matrix_file) or force_run:
            flirt.run()

    maths = fsl.MultiImageMaths()
    maths.inputs.in_file = base_b0
    maths.inputs.op_string = "-add %s -add %s -add %s -add %s -div 5"
    maths.inputs.operand_files = flirt_out_files
    maths.inputs.output_type = 'NIFTI'
    b0_super = os.path.join(registered_dir, 'bse.nii')
    maths.inputs.out_file = b0_super

    if not os.path.isfile(maths.inputs.out_file) or force_run:
        logger.info("Performing averaging of b0 files")
        maths.run()

    return b0_super
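A hypothetical call (paths are placeholders) showing how the function above might be driven; the first file in the list is used as the registration reference and the averaged "super" b0 path is returned:

b0_files = ['b0_00.nii', 'b0_01.nii', 'b0_02.nii', 'b0_03.nii', 'b0_04.nii']
super_b0 = perform_b0_registration(b0_files, '/tmp/dwi_out', force_run=False)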
Example #8
def extractWhitematter(input_image, wmoutline_image, output_image):
    from nipype.interfaces import fsl

    maths = fsl.MultiImageMaths()
    maths.inputs.in_file = input_image
    maths.inputs.out_file = output_image
    maths.inputs.op_string = ' -add %s -uthr 41 -thr 41'
    maths.inputs.operand_files = wmoutline_image
    maths.run()

    #maths.inputs.in_file = output_image
    maths.inputs.op_string = '-uthr 2 -thr 2 -add %s'
    maths.inputs.operand_files = output_image
    maths.run()

    maths.inputs.op_string = '-uthr 255 -thr 251 -add %s -add %s -bin'
    maths.inputs.operand_files = [output_image, wmoutline_image]
    maths.run()

    return output_image
Example #9
def extractWhitematter(input_image, wmoutline_image, output_image):
    # The thresholding steps are unary, so plain ImageMaths (fslmaths) is used
    # here; MultiImageMaths would additionally require `operand_files` to fill
    # a '%s' placeholder in the op string.
    thresHolder = fsl.ImageMaths(in_file=input_image,
                                 out_file=output_image)
    thresHolder.inputs.op_string = '-uthr 41 -thr 41'
    thresHolder.run()

    thresHolder.inputs.in_file = output_image
    thresHolder.inputs.op_string = '-uthr 2 -thr 2'
    thresHolder.run()

    thresHolder.inputs.op_string = '-uthr 255 -thr 251'
    thresHolder.run()

    # Combine with the white-matter outline and binarize
    combinizer = fsl.BinaryMaths(operation='add',
                                 in_file=output_image,
                                 operand_file=wmoutline_image,
                                 out_file=output_image)
    combinizer.run()
    binarizer = fsl.UnaryMaths(operation='bin',
                               in_file=output_image,
                               out_file=output_image)
    binarizer.run()

    return output_image
Example #10
def kellyk(c):
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe
    import nipype.interfaces.fsl as fsl

    wf = pe.Workflow(name="KellyKapowski")

    infosource = pe.Node(niu.IdentityInterface(fields=["subject_id"]),
                         name="subjects")
    infosource.iterables = ("subject_id", c.subjects)

    seg = fs_segment()
    wf.connect(infosource, "subject_id", seg, "inputspec.subject_id")
    seg.inputs.inputspec.subjects_dir = c.surf_dir

    combine = pe.Node(fsl.MultiImageMaths(op_string='-mul 1.5 -add %s -mul 2'),
                      name="add23")
    wf.connect(seg, 'outputspec.wm', combine, 'in_file')
    wf.connect(seg, ('outputspec.gm', tolist), combine, 'operand_files')

    #kelly = pe.Node(niu.Function(input_names=['in_file'],output_names=['outfile'],function=run_kelly),name='kellyk')

    sink = pe.Node(nio.DataSink(), name="sinker")

    def get_subs(subject_id):
        subs = []
        subs.append(('_subject_id_%s/' % subject_id, '%s_' % subject_id))
        return subs

    wf.connect(infosource, "subject_id", sink, "container")
    sink.inputs.base_directory = c.sink_dir
    wf.connect(infosource, ("subject_id", get_subs), sink, 'substitutions')
    wf.connect(combine, "out_file", sink, "kellykapowski.segment")
    #wf.connect(kelly,'outfile',sink,'kellykapowski')

    return wf
Example #11
def doAverage(infile1, infile2, outfile):  # Doing average
    '''
    Parameters
    ----------
    infile1 : str
        path to the first input image.
    infile2 : str
        path to the second input image.
    outfile : str
        path where the average of the two input images is saved.

    Returns
    -------
    None; the voxel-wise average of the two input images is written to `outfile`.
    '''
    print('doing average of', infile1, infile2)
    avg = fsl.MultiImageMaths()
    avg.inputs.in_file = infile1
    avg.inputs.operand_files = infile2
    avg.inputs.op_string = "-add %s -div 2"
    avg.inputs.out_file = outfile
    avg.inputs.output_type = 'NIFTI_GZ'
    avg.run()
    print('averaging done', outfile, '\n')
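A hypothetical call (file names are placeholders); it ends up running roughly `fslmaths img1.nii.gz -add img2.nii.gz -div 2 mean.nii.gz`:

doAverage('img1.nii.gz', 'img2.nii.gz', 'mean.nii.gz')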
Example #12
def epi_pipeline(name="susceptibility_distortion_correction_using_t1"):
    """
    This workflow corrects for echo-planar imaging (EPI)-induced susceptibility artifacts without a
    fieldmap (e.g. ADNI database) by elastically registering DWIs to their respective baseline
    T1-weighted structural scans, using an inverse-consistent registration algorithm with a mutual
    information cost function (SyN algorithm). It also coregisters the DWIs with their baseline
    T1-weighted structural scans so that tracks and cortical parcellation can later be combined.

    .. warning:: This workflow rotates the `b`-vectors.

    .. References
      .. Nir et al. (Neurobiology of Aging 2015) - Connectivity network measures predict volumetric atrophy in mild cognitive impairment

         Leow et al. (IEEE Trans Med Imaging 2007) - Statistical Properties of Jacobian Maps and the Realization of Unbiased Large Deformation Nonlinear Image Registration

    Example
    -------
    >>> epi = epi_pipeline()
    >>> epi.inputs.inputnode.DWI = 'DWI.nii'
    >>> epi.inputs.inputnode.bvec = 'bvec.txt'
    >>> epi.inputs.inputnode.T1 = 'T1.nii'
    >>> epi.run() # doctest: +SKIP
    """

    import nipype.interfaces.c3 as c3
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe

    from clinica.pipelines.dwi_preprocessing_using_t1.dwi_preprocessing_using_t1_utils import (
        ants_combin_transform,
        ants_registration_syn_quick,
        ants_warp_image_multi_transform,
        change_itk_transform_type,
        create_jacobian_determinant_image,
        expend_matrix_list,
        rotate_bvecs,
    )

    inputnode = pe.Node(niu.IdentityInterface(fields=["T1", "DWI", "bvec"]),
                        name="inputnode")

    split = pe.Node(fsl.Split(dimension="t"), name="SplitDWIs")
    pick_ref = pe.Node(niu.Select(), name="Pick_b0")
    pick_ref.inputs.index = [0]

    flirt_b0_2_T1 = pe.Node(interface=fsl.FLIRT(dof=6), name="flirt_B0_2_T1")
    flirt_b0_2_T1.inputs.interp = "spline"
    flirt_b0_2_T1.inputs.cost = "normmi"
    flirt_b0_2_T1.inputs.cost_func = "normmi"

    apply_xfm = pe.Node(interface=fsl.preprocess.ApplyXFM(), name="apply_xfm")
    apply_xfm.inputs.apply_xfm = True

    expend_matrix = pe.Node(
        interface=niu.Function(
            input_names=["in_matrix", "in_bvec"],
            output_names=["out_matrix_list"],
            function=expend_matrix_list,
        ),
        name="expend_matrix",
    )

    rot_bvec = pe.Node(
        niu.Function(
            input_names=["in_matrix", "in_bvec"],
            output_names=["out_file"],
            function=rotate_bvecs,
        ),
        name="Rotate_Bvec",
    )

    antsRegistrationSyNQuick = pe.Node(
        interface=niu.Function(
            input_names=["fix_image", "moving_image"],
            output_names=[
                "image_warped",
                "affine_matrix",
                "warp",
                "inverse_warped",
                "inverse_warp",
            ],
            function=ants_registration_syn_quick,
        ),
        name="antsRegistrationSyNQuick",
    )

    c3d_flirt2ants = pe.Node(c3.C3dAffineTool(), name="fsl_reg_2_itk")
    c3d_flirt2ants.inputs.itk_transform = True
    c3d_flirt2ants.inputs.fsl2ras = True

    change_transform = pe.Node(
        niu.Function(
            input_names=["input_affine_file"],
            output_names=["updated_affine_file"],
            function=change_itk_transform_type,
        ),
        name="change_transform_type",
    )

    merge_transform = pe.Node(niu.Merge(3), name="MergeTransforms")

    apply_transform = pe.MapNode(
        interface=niu.Function(
            input_names=["fix_image", "moving_image", "ants_warp_affine"],
            output_names=["out_warp_field", "out_warped"],
            function=ants_combin_transform,
        ),
        iterfield=["moving_image"],
        name="warp_filed",
    )

    jacobian = pe.MapNode(
        interface=niu.Function(
            input_names=["imageDimension", "deformationField", "outputImage"],
            output_names=["outputImage"],
            function=create_jacobian_determinant_image,
        ),
        iterfield=["deformationField"],
        name="jacobian",
    )

    jacobian.inputs.imageDimension = 3
    jacobian.inputs.outputImage = "Jacobian_image.nii.gz"

    jacmult = pe.MapNode(
        fsl.MultiImageMaths(op_string="-mul %s"),
        iterfield=["in_file", "operand_files"],
        name="ModulateDWIs",
    )

    thres = pe.MapNode(fsl.Threshold(thresh=0.0),
                       iterfield=["in_file"],
                       name="RemoveNegative")

    merge = pe.Node(fsl.Merge(dimension="t"), name="MergeDWIs")

    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "DWI_2_T1_Coregistration_matrix",
            "epi_correction_deformation_field",
            "epi_correction_affine_transform",
            "epi_correction_image_warped",
            "DWIs_epicorrected",
            "warp_epi",
            "out_bvec",
        ]),
        name="outputnode",
    )

    wf = pe.Workflow(name="epi_pipeline")

    wf.connect([(inputnode, split, [("DWI", "in_file")])])
    wf.connect([(split, pick_ref, [("out_files", "inlist")])])
    wf.connect([(pick_ref, flirt_b0_2_T1, [("out", "in_file")])])
    wf.connect([(inputnode, flirt_b0_2_T1, [("T1", "reference")])])
    wf.connect([(inputnode, rot_bvec, [("bvec", "in_bvec")])])
    wf.connect([(flirt_b0_2_T1, expend_matrix, [("out_matrix_file",
                                                 "in_matrix")])])
    wf.connect([(inputnode, expend_matrix, [("bvec", "in_bvec")])])
    wf.connect([(expend_matrix, rot_bvec, [("out_matrix_list", "in_matrix")])])
    wf.connect([(inputnode, antsRegistrationSyNQuick, [("T1", "fix_image")])])
    wf.connect([(flirt_b0_2_T1, antsRegistrationSyNQuick, [("out_file",
                                                            "moving_image")])])

    wf.connect([(inputnode, c3d_flirt2ants, [("T1", "reference_file")])])
    wf.connect([(pick_ref, c3d_flirt2ants, [("out", "source_file")])])
    wf.connect([(flirt_b0_2_T1, c3d_flirt2ants, [("out_matrix_file",
                                                  "transform_file")])])
    wf.connect([(c3d_flirt2ants, change_transform, [("itk_transform",
                                                     "input_affine_file")])])

    wf.connect([(antsRegistrationSyNQuick, merge_transform, [("warp", "in1")])
                ])
    wf.connect([(antsRegistrationSyNQuick, merge_transform, [("affine_matrix",
                                                              "in2")])])
    wf.connect([(change_transform, merge_transform, [("updated_affine_file",
                                                      "in3")])])
    wf.connect([(inputnode, apply_transform, [("T1", "fix_image")])])
    wf.connect([(split, apply_transform, [("out_files", "moving_image")])])

    wf.connect([(merge_transform, apply_transform, [("out", "ants_warp_affine")
                                                    ])])
    wf.connect([(apply_transform, jacobian, [("out_warp_field",
                                              "deformationField")])])
    wf.connect([(apply_transform, jacmult, [("out_warped", "operand_files")])])
    wf.connect([(jacobian, jacmult, [("outputImage", "in_file")])])
    wf.connect([(jacmult, thres, [("out_file", "in_file")])])
    wf.connect([(thres, merge, [("out_file", "in_files")])])

    wf.connect([(merge, outputnode, [("merged_file", "DWIs_epicorrected")])])
    wf.connect([(
        flirt_b0_2_T1,
        outputnode,
        [("out_matrix_file", "DWI_2_T1_Coregistration_matrix")],
    )])
    wf.connect([(
        antsRegistrationSyNQuick,
        outputnode,
        [
            ("warp", "epi_correction_deformation_field"),
            ("affine_matrix", "epi_correction_affine_transform"),
            ("image_warped", "epi_correction_image_warped"),
        ],
    )])
    wf.connect([(merge_transform, outputnode, [("out", "warp_epi")])])
    wf.connect([(rot_bvec, outputnode, [("out_file", "out_bvec")])])

    return wf
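The pipeline above imports `rotate_bvecs` from Clinica; the implementation is not shown here. As a generic illustration of the technique (not the Clinica code), assuming an FSL-style 3 x N bvec file and one 4 x 4 FLIRT affine per volume:

def rotate_bvecs_sketch(in_bvec, in_matrix_list, out_file='bvecs_rotated.bvec'):
    # Sketch only: apply each volume's affine to its b-vector and re-normalise.
    # A rigorous implementation would first extract the purely rotational
    # component of the affine (e.g. via polar decomposition).
    import numpy as np

    bvecs = np.loadtxt(in_bvec)               # assumed shape: 3 x N
    rotated = np.zeros_like(bvecs)
    for i, mat_file in enumerate(in_matrix_list):
        aff = np.loadtxt(mat_file)             # 4 x 4 FLIRT matrix for volume i
        vec = aff[:3, :3].dot(bvecs[:, i])
        norm = np.linalg.norm(vec)
        rotated[:, i] = vec / norm if norm > 0 else vec
    np.savetxt(out_file, rotated, fmt='%0.8f')
    return out_file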
Example #13
def easy_thresh(wf_name):
    """
    Workflow for carrying out cluster-based thresholding 
    and colour activation overlaying
    
    Parameters
    ----------
    wf_name : string 
        Workflow name
        
    Returns
    -------
    easy_thresh : object 
        Easy thresh workflow object
    
    Notes
    -----
    
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/easy_thresh/easy_thresh.py>`_
        
    Workflow Inputs::
    
        inputspec.z_stats : string (nifti file)
            z_score stats output for t or f contrast from flameo
        
        inputspec.merge_mask : string (nifti file)
            mask generated from 4D Merged derivative file
        
        inputspec.z_threshold : float
            Z Statistic threshold value for cluster thresholding. It is used to 
            determine what level of activation would be statistically significant. 
            Increasing this will result in higher estimates of required effect.
        
        inputspec.p_threshold : float
            Probability threshold for cluster thresholding.
        
        inputspec.parameters : string (tuple)
            tuple containing the FSLDIR path and which MNI template to use
            
    Workflow Outputs::
    
        outputspec.cluster_threshold : string (nifti files)
           the thresholded Z statistic image for each t contrast
        
        outputspec.cluster_index : string (nifti files)
            image of clusters for each t contrast; the values 
            in the clusters are the index numbers as used 
            in the cluster list.
        
        outputspec.overlay_threshold : string (nifti files)
            3D color rendered stats overlay image for t contrast
            After reloading this image, use the Statistics Color 
            Rendering GUI to reload the color look-up-table
        
        outputspec.overlay_rendered_image : string (nifti files)
           2D color rendered stats overlay picture for each t contrast
        
        outputspec.cluster_localmax_txt : string (text files)
            local maxima text file, defines the coordinates of maximum value
            in the cluster
    
    
    Order of commands:
    
    - Estimate smoothness of the image::
        
        smoothest --mask= merge_mask.nii.gz --zstat=.../flameo/stats/zstat1.nii.gz
        
        arguments
        --mask  :  brain mask volume
        --zstat :  filename of zstat/zfstat image
    
    - Create mask. For details see `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm#fslutils>`_::
        
        fslmaths ../flameo/stats/zstat1.nii.gz 
                 -mas merge_mask.nii.gz 
                 zstat1_mask.nii.gz
        
        arguments
        -mas   : use (following image>0) to mask current image

    - Copy geometry (image dimensions, voxel dimensions, voxel dimension units string, image orientation/origin or qform/sform info) from one image to another::
    
        fslcpgeom MNI152_T1_2mm_brain.nii.gz zstat1_mask.nii.gz
    
    - Cluster based thresholding. For details see `FEAT <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#poststats>`_::
        
        cluster --dlh = 0.0023683100 
                --in = zstat1_mask.nii.gz 
                --oindex = zstat1_cluster_index.nii.gz 
                --olmax = zstat1_cluster_localmax.txt
                --othresh = zstat1_cluster_threshold.nii.gz 
                --pthresh = 0.0500000000 
                --thresh = 2.3000000000 
                --volume = 197071
                
        arguments 
        --in    :    filename of input volume
        --dlh   :    smoothness estimate = sqrt(det(Lambda))
        --oindex  :  filename for output of cluster index
        --othresh :  filename for output of thresholded image
        --olmax   :  filename for output of local maxima text file
        --volume  :  number of voxels in the mask
        --pthresh :  p-threshold for clusters
        --thresh  :  threshold for input volume
        
     Z statistic image is thresholded to show which voxels or clusters of voxels are activated at a particular significance level.
     A Z statistic threshold is used to define contiguous clusters. Then each cluster's estimated significance level (from GRF-theory) 
     is compared with the cluster probability threshold. Significant clusters are then used to mask the original Z statistic image.
    
    - Get the maximum intensity value of the output thresholded image. This is used while rendering the Z statistic image::
        
        fslstats zstat1_cluster_threshold.nii.gz -R
        
        arguments
        -R  : output <min intensity> <max intensity>

    - Rendering. For details see `FEAT <http://www.fmrib.ox.ac.uk/fsl/feat5/detail.html#poststats>`_::
         
        overlay 1 0 MNI152_T1_2mm_brain.nii.gz 
               -a zstat1_cluster_threshold.nii.gz 
               2.30 15.67 
               zstat1_cluster_threshold_overlay.nii.gz
               
        slicer zstat1_cluster_threshold_overlay.nii.gz 
               -L  -A 750 
               zstat1_cluster_threshold_overlay.png
    
      The Z statistic range selected for rendering is automatically calculated by default, 
      to run from red (minimum Z statistic after thresholding) to yellow (maximum Z statistic, here 
      maximum intensity).
      
    High Level Workflow Graph:
    
    .. image:: ../images/easy_thresh.dot.png
       :width: 800
    
    
    Detailed Workflow Graph:
    
    .. image:: ../images/easy_thresh_detailed.dot.png
       :width: 800
               
    Examples
    --------
    
    >>> import easy_thresh
    >>> preproc = easy_thresh.easy_thresh("new_workflow")
    >>> preproc.inputs.inputspec.z_stats= 'flameo/stats/zstat1.nii.gz'
    >>> preproc.inputs.inputspec.merge_mask = 'merge_mask/alff_Z_fn2standard_merged_mask.nii.gz'
    >>> preproc.inputs.inputspec.z_threshold = 2.3
    >>> preproc.inputs.inputspec.p_threshold = 0.05
    >>> preproc.inputs.inputspec.parameters = ('/usr/local/fsl/', 'MNI152')
    >>> preproc.run()  # doctest: +SKIP
    
    """

    easy_thresh = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=[
        'z_stats', 'merge_mask', 'z_threshold', 'p_threshold', 'parameters'
    ]),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(fields=[
        'cluster_threshold', 'cluster_index', 'cluster_localmax_txt',
        'overlay_threshold', 'rendered_image'
    ]),
                         name='outputspec')

    ### fsl easythresh
    # estimate image smoothness
    smooth_estimate = pe.MapNode(interface=fsl.SmoothEstimate(),
                                 name='smooth_estimate',
                                 iterfield=['zstat_file'])

    # run clustering after fixing stats header for talspace
    zstat_mask = pe.MapNode(interface=fsl.MultiImageMaths(),
                            name='zstat_mask',
                            iterfield=['in_file'])
    #operations to perform
    #-mas use (following image>0) to mask current image
    zstat_mask.inputs.op_string = '-mas %s'

    #fslcpgeom
    #copy certain parts of the header information (image dimensions,
    #voxel dimensions, voxel dimensions units string, image orientation/origin
    #or qform/sform info) from one image to another
    copy_geometry = pe.MapNode(util.Function(
        input_names=['infile_a', 'infile_b'],
        output_names=['out_file'],
        function=copy_geom),
                               name='copy_geometry',
                               iterfield=['infile_a', 'infile_b'])

    ##cluster-based thresholding
    #After carrying out the initial statistical test, the resulting
    #Z statistic image is then normally thresholded to show which voxels or
    #clusters of voxels are activated at a particular significance level.
    #A Z statistic threshold is used to define contiguous clusters.
    #Then each cluster's estimated significance level (from GRF-theory) is
    #compared with the cluster probability threshold. Significant clusters
    #are then used to mask the original Z statistic image for later production
    #of colour blobs. This method of thresholding is an alternative to
    #Voxel-based correction, and is normally more sensitive to activation.
    #    cluster = pe.MapNode(interface=fsl.Cluster(),
    #                            name='cluster',
    #                            iterfield=['in_file', 'volume', 'dlh'])
    #    #output of cluster index (in size order)
    #    cluster.inputs.out_index_file = True
    #    #thresholded image
    #    cluster.inputs.out_threshold_file = True
    #    #local maxima text file
    #    #defines the cluster coordinates
    #    cluster.inputs.out_localmax_txt_file = True

    cluster = pe.MapNode(util.Function(
        input_names=[
            'in_file', 'volume', 'dlh', 'threshold', 'pthreshold', 'parameters'
        ],
        output_names=['index_file', 'threshold_file', 'localmax_txt_file'],
        function=call_cluster),
                         name='cluster',
                         iterfield=['in_file', 'volume', 'dlh'])

    #max and minimum intensity values
    image_stats = pe.MapNode(interface=fsl.ImageStats(),
                             name='image_stats',
                             iterfield=['in_file'])
    image_stats.inputs.op_string = '-R'

    #create tuple of z_threshold and max intensity value of threshold file
    create_tuple = pe.MapNode(util.Function(
        input_names=['infile_a', 'infile_b'],
        output_names=['out_file'],
        function=get_tuple),
                              name='create_tuple',
                              iterfield=['infile_b'])

    #colour activation overlaying
    overlay = pe.MapNode(interface=fsl.Overlay(),
                         name='overlay',
                         iterfield=['stat_image', 'stat_thresh'])
    overlay.inputs.transparency = True
    overlay.inputs.auto_thresh_bg = True
    overlay.inputs.out_type = 'float'

    #colour rendering
    slicer = pe.MapNode(interface=fsl.Slicer(),
                        name='slicer',
                        iterfield=['in_file'])
    #set max picture width
    slicer.inputs.image_width = 750
    # set output all axial slices into one picture
    slicer.inputs.all_axial = True

    #function mapnode to get the standard fsl brain image
    #based on parameters as FSLDIR,MNI and voxel size
    get_backgroundimage = pe.MapNode(util.Function(
        input_names=['in_file', 'file_parameters'],
        output_names=['out_file'],
        function=get_standard_background_img),
                                     name='get_bckgrndimg1',
                                     iterfield=['in_file'])

    #function node to get the standard fsl brain image
    #outputs single file
    get_backgroundimage2 = pe.Node(util.Function(
        input_names=['in_file', 'file_parameters'],
        output_names=['out_file'],
        function=get_standard_background_img),
                                   name='get_backgrndimg2')

    #connections
    easy_thresh.connect(inputnode, 'z_stats', smooth_estimate, 'zstat_file')
    easy_thresh.connect(inputnode, 'merge_mask', smooth_estimate, 'mask_file')

    easy_thresh.connect(inputnode, 'z_stats', zstat_mask, 'in_file')
    easy_thresh.connect(inputnode, 'merge_mask', zstat_mask, 'operand_files')

    easy_thresh.connect(zstat_mask, 'out_file', get_backgroundimage, 'in_file')
    easy_thresh.connect(inputnode, 'parameters', get_backgroundimage,
                        'file_parameters')

    easy_thresh.connect(get_backgroundimage, 'out_file', copy_geometry,
                        'infile_a')
    easy_thresh.connect(zstat_mask, 'out_file', copy_geometry, 'infile_b')

    easy_thresh.connect(copy_geometry, 'out_file', cluster, 'in_file')
    easy_thresh.connect(inputnode, 'z_threshold', cluster, 'threshold')
    easy_thresh.connect(inputnode, 'p_threshold', cluster, 'pthreshold')
    easy_thresh.connect(smooth_estimate, 'volume', cluster, 'volume')
    easy_thresh.connect(smooth_estimate, 'dlh', cluster, 'dlh')
    easy_thresh.connect(inputnode, 'parameters', cluster, 'parameters')

    easy_thresh.connect(cluster, 'threshold_file', image_stats, 'in_file')

    easy_thresh.connect(image_stats, 'out_stat', create_tuple, 'infile_b')
    easy_thresh.connect(inputnode, 'z_threshold', create_tuple, 'infile_a')

    easy_thresh.connect(cluster, 'threshold_file', overlay, 'stat_image')
    easy_thresh.connect(create_tuple, 'out_file', overlay, 'stat_thresh')

    easy_thresh.connect(inputnode, 'merge_mask', get_backgroundimage2,
                        'in_file')
    easy_thresh.connect(inputnode, 'parameters', get_backgroundimage2,
                        'file_parameters')

    easy_thresh.connect(get_backgroundimage2, 'out_file', overlay,
                        'background_image')

    easy_thresh.connect(overlay, 'out_file', slicer, 'in_file')

    easy_thresh.connect(cluster, 'threshold_file', outputnode,
                        'cluster_threshold')
    easy_thresh.connect(cluster, 'index_file', outputnode, 'cluster_index')
    easy_thresh.connect(cluster, 'localmax_txt_file', outputnode,
                        'cluster_localmax_txt')
    easy_thresh.connect(overlay, 'out_file', outputnode, 'overlay_threshold')
    easy_thresh.connect(slicer, 'out_file', outputnode, 'rendered_image')

    return easy_thresh
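The easy_thresh workflow above wraps a `copy_geom` function that is not shown. A plausible minimal sketch, assuming it simply shells out to FSL's fslcpgeom (as described in the docstring) and returns the modified file:

def copy_geom(infile_a, infile_b):
    # Sketch only: copy header geometry from infile_a onto infile_b in place.
    import subprocess
    subprocess.check_call(['fslcpgeom', infile_a, infile_b])
    return infile_b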
Example #14
def get_zscore(wf_name='z_score'):
    """
    Workflow to calculate z-scores
    
    Parameters
    ----------
    wf_name : string
        name of the workflow
        
    Returns
    -------
    wf : workflow object
    
    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/network_centrality/z_score.py>`_
    
    
    Workflow Inputs::
        
        inputspec.input_file : string
            path to input functional derivative file for which z score has to be calculated
        inputspec.mask_file : string
            path to whole brain functional mask file required to calculate zscore
    
    Workflow Outputs::
        
        outputspec.z_score_img : string
             path to image containing Normalized Input Image Z scores across full brain.
    
    High Level Workflow Graph:
    
    .. image:: ../images/zscore.dot.png
       :width: 500
    
    
    Detailed Workflow Graph:
    
    .. image:: ../images/zscore_detailed.dot.png
       :width: 500
    
    Example
    -------
    >>> import get_zscore as z
    >>> wf = z.get_zscore()
    >>> wf.inputs.inputspec.input_file = '/home/data/graph_working_dir/calculate_centrality/degree_centrality_binarize.nii.gz'
    >>> wf.inputs.inputspec.mask_file = '/home/data/graphs/GraphGeneration/new_mask_3m.nii.gz'
    >>> wf.run()
    
    """

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl

    wflow = pe.Workflow(name=wf_name)

    inputNode = pe.Node(
        util.IdentityInterface(fields=['input_file', 'mask_file']),
        name='inputspec')

    outputNode = pe.Node(util.IdentityInterface(fields=['z_score_img']),
                         name='outputspec')

    mean = pe.Node(interface=fsl.ImageStats(), name='mean')
    mean.inputs.op_string = '-k %s -m'
    wflow.connect(inputNode, 'input_file', mean, 'in_file')
    wflow.connect(inputNode, 'mask_file', mean, 'mask_file')

    standard_deviation = pe.Node(interface=fsl.ImageStats(),
                                 name='standard_deviation')
    standard_deviation.inputs.op_string = '-k %s -s'
    wflow.connect(inputNode, 'input_file', standard_deviation, 'in_file')
    wflow.connect(inputNode, 'mask_file', standard_deviation, 'mask_file')

    op_string = pe.Node(util.Function(input_names=['mean', 'std_dev'],
                                      output_names=['op_string'],
                                      function=get_operand_string),
                        name='op_string')
    wflow.connect(mean, 'out_stat', op_string, 'mean')
    wflow.connect(standard_deviation, 'out_stat', op_string, 'std_dev')

    z_score = pe.Node(interface=fsl.MultiImageMaths(), name='z_score')
    wflow.connect(op_string, 'op_string', z_score, 'op_string')
    wflow.connect(inputNode, 'input_file', z_score, 'in_file')
    wflow.connect(inputNode, 'mask_file', z_score, 'operand_files')

    wflow.connect(z_score, 'out_file', outputNode, 'z_score_img')

    return wflow
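`get_operand_string` is not shown above. Based on how it is wired (mean and standard deviation in, an fslmaths op string out, with the mask supplied through `operand_files`), a sketch of what it presumably builds, i.e. z = (x - mean) / std within the mask:

def get_operand_string(mean, std_dev):
    # Sketch only: '-sub <mean> -div <std> -mas %s'; the trailing '%s'
    # is later replaced by MultiImageMaths with the brain mask file.
    return "-sub %f -div %f -mas " % (float(mean), float(std_dev)) + "%s"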
Example #15
def remove_bias(name="bias_correct"):
    """
    This workflow estimates a single multiplicative bias field from the
    averaged *b0* image, as suggested in [Jeurissen2014]_.
    .. admonition:: References
      .. [Jeurissen2014] Jeurissen B. et al., `Multi-tissue constrained
        spherical deconvolution for improved analysis of multi-shell diffusion
        MRI data <http://dx.doi.org/10.1016/j.neuroimage.2014.07.061>`_.
        NeuroImage (2014). doi: 10.1016/j.neuroimage.2014.07.061
    Example
    -------
    >>> from nipype.workflows.dmri.fsl.artifacts import remove_bias
    >>> bias = remove_bias()
    >>> bias.inputs.inputnode.in_file = 'epi.nii'
    >>> bias.inputs.inputnode.in_bval = 'diffusion.bval'
    >>> bias.inputs.inputnode.in_mask = 'mask.nii'
    >>> bias.run() # doctest: +SKIP
    """
    import nipype.interfaces.ants as ants
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe

    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]),
                        name="inputnode")

    outputnode = pe.Node(niu.IdentityInterface(fields=["out_file", "b0_mask"]),
                         name="outputnode")

    get_b0 = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name="get_b0")

    mask_b0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
                      name="mask_b0")

    n4 = pe.Node(
        ants.N4BiasFieldCorrection(dimension=3,
                                   save_bias=True,
                                   bspline_fitting_distance=600),
        name="Bias_b0",
    )
    split = pe.Node(fsl.Split(dimension="t"), name="SplitDWIs")
    mult = pe.MapNode(
        fsl.MultiImageMaths(op_string="-div %s"),
        iterfield=["in_file"],
        name="RemoveBiasOfDWIs",
    )
    thres = pe.MapNode(fsl.Threshold(thresh=0.0),
                       iterfield=["in_file"],
                       name="RemoveNegative")
    merge = pe.Node(fsl.utils.Merge(dimension="t"), name="MergeDWIs")

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, get_b0, [("in_file", "in_file")]),
        (get_b0, n4, [("roi_file", "input_image")]),
        (get_b0, mask_b0, [("roi_file", "in_file")]),
        (mask_b0, n4, [("mask_file", "mask_image")]),
        (inputnode, split, [("in_file", "in_file")]),
        (n4, mult, [("bias_image", "operand_files")]),
        (split, mult, [("out_files", "in_file")]),
        (mult, thres, [("out_file", "in_file")]),
        (thres, merge, [("out_file", "in_files")]),
        (merge, outputnode, [("merged_file", "out_file")]),
        (mask_b0, outputnode, [("mask_file", "b0_mask")]),
    ])
    return wf
def create_reg_and_label_wf(name="reg_wf", manual_seg_rois=False):
    inputfields = [
        "subject_id", "aparc_aseg", "fa", "wm_mask", "termination_mask"
    ]

    if manual_seg_rois:
        inputfields.append("manual_seg_rois")

    inputnode = pe.Node(interface=util.IdentityInterface(fields=inputfields),
                        name="inputnode")

    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        "dwi_to_t1_matrix", "t1_to_dwi_matrix", "rois_to_dwi", "rois",
        "wmmask_to_dwi", "termmask_to_dwi", "highres_t1_to_dwi_matrix"
    ]),
                         name="outputnode")

    dmn_labels_if = util.Function(input_names=["in_file", "out_filename"],
                                  output_names=["out_file"],
                                  function=dmn_labels_combined)
    dmn_labelling = pe.Node(interface=dmn_labels_if, name='dmn_labelling')

    align_wmmask_to_dwi = coreg_without_resample("align_wmmask_to_fa")
    align_wmmask_to_dwi.inputs.inputnode.interp = "nearestneighbour"

    rois_to_dwi = pe.Node(interface=fsl.ApplyXfm(), name='rois_to_dwi')
    rois_to_dwi.inputs.interp = "nearestneighbour"

    threshold_fa = pe.Node(interface=fsl.ImageMaths(), name='threshold_fa')
    threshold_fa.inputs.op_string = "-thr 0.2 -bin"

    multiply_rois_by_termmask = pe.Node(interface=fsl.MultiImageMaths(),
                                        name='multiply_rois_by_termmask')
    multiply_rois_by_termmask.inputs.op_string = "-mul %s"

    termmask_to_dwi = rois_to_dwi.clone("termmask_to_dwi")

    invertxfm = pe.Node(interface=fsl.ConvertXFM(), name='invertxfm')
    invertxfm.inputs.invert_xfm = True
    '''
    Define renaming nodes
    '''
    rename_t1_to_dwi_mat = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_t1_to_dwi_matrix"),
        name='rename_t1_to_dwi_mat')
    rename_t1_to_dwi_mat.inputs.keep_ext = True

    rename_dwi_to_t1_mat = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_dwi_to_t1_matrix"),
        name='rename_dwi_to_t1_mat')
    rename_dwi_to_t1_mat.inputs.keep_ext = True

    rename_rois_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_rois_dwi"),
        name='rename_rois_dwi')
    rename_rois_dwi.inputs.keep_ext = True

    rename_rois = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_rois"),
        name='rename_rois')
    rename_rois.inputs.keep_ext = True

    rename_termmask_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_term_mask_dwi"),
        name='rename_termmask_dwi')
    rename_termmask_dwi.inputs.keep_ext = True

    rename_wmmask_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_wm_mask_dwi"),
        name='rename_wmmask_dwi')
    rename_wmmask_dwi.inputs.keep_ext = True

    rename_highres_matrix_file = pe.Node(interface=util.Rename(
        format_string="%(subject_id)s_t1_to_dwi_NoResample"),
                                         name='rename_highres_matrix_file')
    rename_highres_matrix_file.inputs.keep_ext = True

    workflow = pe.Workflow(name=name)

    workflow.connect([(inputnode, align_wmmask_to_dwi,
                       [("wm_mask", "inputnode.moving_image")])])
    workflow.connect([(inputnode, threshold_fa, [("fa", "in_file")])])
    workflow.connect([(threshold_fa, align_wmmask_to_dwi,
                       [("out_file", "inputnode.fixed_image")])])

    if manual_seg_rois:
        workflow.connect([(inputnode, rois_to_dwi, [("manual_seg_rois",
                                                     "in_file")])])
        workflow.connect([(inputnode, rois_to_dwi, [("manual_seg_rois",
                                                     "reference")])])
        workflow.connect([(inputnode, outputnode, [("manual_seg_rois", "rois")
                                                   ])])

    else:
        workflow.connect([(inputnode, dmn_labelling, [
            (('subject_id', add_subj_name_to_rois), 'out_filename')
        ])])
        workflow.connect([(inputnode, dmn_labelling, [("aparc_aseg", "in_file")
                                                      ])])

        workflow.connect([(dmn_labelling, multiply_rois_by_termmask,
                           [("out_file", "in_file")])])
        workflow.connect([(inputnode, multiply_rois_by_termmask,
                           [("termination_mask", "operand_files")])])
        workflow.connect([(multiply_rois_by_termmask, rename_rois,
                           [("out_file", "in_file")])])
        workflow.connect([(inputnode, rename_rois, [("subject_id",
                                                     "subject_id")])])
        workflow.connect([(rename_rois, rois_to_dwi, [("out_file", "in_file")])
                          ])
        workflow.connect([(rename_rois, rois_to_dwi, [("out_file", "reference")
                                                      ])])
        workflow.connect([(rename_rois, outputnode, [("out_file", "rois")])])

    workflow.connect([(align_wmmask_to_dwi, rois_to_dwi, [
        ("outputnode.highres_matrix_file", "in_matrix_file")
    ])])

    workflow.connect([(inputnode, termmask_to_dwi, [("termination_mask",
                                                     "in_file")])])
    workflow.connect([(inputnode, termmask_to_dwi, [("termination_mask",
                                                     "reference")])])
    workflow.connect([(align_wmmask_to_dwi, termmask_to_dwi, [
        ("outputnode.highres_matrix_file", "in_matrix_file")
    ])])

    workflow.connect([(align_wmmask_to_dwi, invertxfm,
                       [("outputnode.lowres_matrix_file", "in_file")])])

    workflow.connect([(inputnode, rename_t1_to_dwi_mat, [("subject_id",
                                                          "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_t1_to_dwi_mat,
                       [("outputnode.lowres_matrix_file", "in_file")])])
    workflow.connect([(rename_t1_to_dwi_mat, outputnode,
                       [("out_file", "t1_to_dwi_matrix")])])

    workflow.connect([(inputnode, rename_dwi_to_t1_mat, [("subject_id",
                                                          "subject_id")])])
    workflow.connect([(invertxfm, rename_dwi_to_t1_mat, [("out_file",
                                                          "in_file")])])
    workflow.connect([(rename_dwi_to_t1_mat, outputnode,
                       [("out_file", "dwi_to_t1_matrix")])])

    workflow.connect([(inputnode, rename_rois_dwi, [("subject_id",
                                                     "subject_id")])])
    workflow.connect([(rois_to_dwi, rename_rois_dwi, [("out_file", "in_file")])
                      ])
    workflow.connect([(rename_rois_dwi, outputnode, [("out_file",
                                                      "rois_to_dwi")])])

    workflow.connect([(inputnode, rename_termmask_dwi, [("subject_id",
                                                         "subject_id")])])
    workflow.connect([(termmask_to_dwi, rename_termmask_dwi, [("out_file",
                                                               "in_file")])])
    workflow.connect([(rename_termmask_dwi, outputnode,
                       [("out_file", "termmask_to_dwi")])])

    workflow.connect([(inputnode, rename_wmmask_dwi, [("subject_id",
                                                       "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_wmmask_dwi,
                       [("outputnode.out_file", "in_file")])])
    workflow.connect([(rename_wmmask_dwi, outputnode, [("out_file",
                                                        "wmmask_to_dwi")])])

    workflow.connect([(inputnode, rename_highres_matrix_file,
                       [("subject_id", "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_highres_matrix_file,
                       [("outputnode.highres_matrix_file", "in_file")])])
    workflow.connect([(rename_highres_matrix_file, outputnode,
                       [("out_file", "highres_t1_to_dwi_matrix")])])
    return workflow
def create_dmn_pipeline_step1(name="dmn_step1",
                              scale_by_glycemia=True,
                              manual_seg_rois=False):
    inputfields = [
        "subjects_dir", "subject_id", "dwi", "bvecs", "bvals", "fdgpet",
        "dose", "weight", "delay", "glycemie", "scan_time"
    ]

    if manual_seg_rois:
        inputfields.append("manual_seg_rois")

    inputnode = pe.Node(interface=util.IdentityInterface(fields=inputfields),
                        name="inputnode")

    outputnode = pe.Node(
        interface=util.IdentityInterface(
            fields=[  # Outputs from the DWI workflow
                "single_fiber_mask",
                "fa",
                "rgb_fa",
                "md",
                "mode",
                "t1",
                "t1_brain",
                "wm_mask",
                "term_mask",
                "aparc_aseg",
                "tissue_class_files",
                "gm_prob",
                "wm_prob",
                "csf_prob",

                # Outputs from registration and labelling
                "rois",
                "rois_to_dwi",
                "wmmask_to_dwi",
                "termmask_to_dwi",
                "dwi_to_t1_matrix",
                "highres_t1_to_dwi_matrix",

                # Outputs from the PET workflow after SUV calculation
                "SUV_corrected_pet_to_t1",
                "AIF_corrected_pet_to_t1",
                "pet_results_npz",
                "pet_results_mat",
                "orig_pet_to_t1",

                # T1 in DWI space for reference
                "t1_to_dwi",
                "single_fiber_mask_cortex_only",
            ]),
        name="outputnode")

    t1_to_dwi = pe.Node(interface=fsl.ApplyXfm(), name='t1_to_dwi')

    termmask_to_dwi = t1_to_dwi.clone("termmask_to_dwi")

    compute_cmr_glc_interface = util.Function(
        input_names=[
            "subject_id", "in_file", "dose", "weight", "delay", "glycemie",
            "scan_time"
        ],
        output_names=["out_file", "cax2", "mecalc", "denom"],
        function=CMR_glucose)
    compute_AIF_PET = pe.Node(interface=compute_cmr_glc_interface,
                              name='compute_AIF_PET')

    compute_SUV_interface = util.Function(input_names=[
        "subject_id", "in_file", "dose", "weight", "delay", "scan_time",
        "isotope", 'height', "glycemie"
    ],
                                          output_names=["out_file"],
                                          function=calculate_SUV)
    compute_SUV_norm_glycemia = pe.Node(interface=compute_SUV_interface,
                                        name='compute_SUV_norm_glycemia')

    scale_PVC_matrix_interface = util.Function(
        input_names=[
            "subject_id", "in_file", "dose", "weight", "delay", "scan_time",
            "isotope", 'height', "glycemie", "scale_SUV_by_glycemia"
        ],
        output_names=["out_npz", "out_matlab_mat"],
        function=scale_PVC_matrix_fn)
    scale_PVC_matrix = pe.Node(interface=scale_PVC_matrix_interface,
                               name='scale_PVC_matrix')
    scale_PVC_matrix.inputs.scale_SUV_by_glycemia = scale_by_glycemia

    single_fiber_mask_cortex_only = pe.Node(
        interface=fsl.MultiImageMaths(), name='single_fiber_mask_cortex_only')
    single_fiber_mask_cortex_only.inputs.op_string = "-mul %s"

    dtiproc = damaged_brain_dti_processing("dtiproc")
    reg_label = create_reg_and_label_wf("reg_label", manual_seg_rois)
    petquant = create_pet_quantification_wf("petquant", segment_t1=False)

    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    workflow.connect([(inputnode, dtiproc,
                       [("subjects_dir", "inputnode.subjects_dir"),
                        ("subject_id", "inputnode.subject_id"),
                        ("dwi", "inputnode.dwi"), ("bvecs", "inputnode.bvecs"),
                        ("bvals", "inputnode.bvals")])])

    workflow.connect([(inputnode, reg_label, [("subject_id",
                                               "inputnode.subject_id")])])

    if manual_seg_rois:
        workflow.connect([(inputnode, reg_label, [
            ("manual_seg_rois", "inputnode.manual_seg_rois")
        ])])

    workflow.connect([(dtiproc, reg_label, [
        ("outputnode.wm_mask", "inputnode.wm_mask"),
        ("outputnode.term_mask", "inputnode.termination_mask"),
        ("outputnode.fa", "inputnode.fa"),
        ("outputnode.aparc_aseg", "inputnode.aparc_aseg"),
    ])])

    workflow.connect([(reg_label, t1_to_dwi, [("outputnode.t1_to_dwi_matrix",
                                               "in_matrix_file")])])
    workflow.connect([(dtiproc, t1_to_dwi, [("outputnode.t1", "in_file")])])
    workflow.connect([(dtiproc, t1_to_dwi, [("outputnode.fa", "reference")])])
    workflow.connect([(inputnode, t1_to_dwi, [
        (('subject_id', add_subj_name_to_T1_dwi), 'out_file')
    ])])

    workflow.connect([(reg_label, termmask_to_dwi,
                       [("outputnode.t1_to_dwi_matrix", "in_matrix_file")])])
    workflow.connect([(dtiproc, termmask_to_dwi, [("outputnode.term_mask",
                                                   "in_file")])])
    workflow.connect([(dtiproc, termmask_to_dwi, [("outputnode.fa",
                                                   "reference")])])

    workflow.connect([(inputnode, single_fiber_mask_cortex_only, [
        (('subject_id', add_subj_name_to_cortex_sfmask), 'out_file')
    ])])
    workflow.connect([(termmask_to_dwi, single_fiber_mask_cortex_only,
                       [("out_file", "operand_files")])])
    workflow.connect([(dtiproc, single_fiber_mask_cortex_only,
                       [("outputnode.single_fiber_mask", "in_file")])])

    workflow.connect([(inputnode, compute_SUV_norm_glycemia, [
        ("subject_id", "subject_id"),
        ("dose", "dose"),
        ("weight", "weight"),
        ("delay", "delay"),
        ("scan_time", "scan_time"),
    ])])

    if scale_by_glycemia:
        workflow.connect([(inputnode, compute_SUV_norm_glycemia,
                           [("glycemie", "glycemie")])])

    # This is for the arterial input function approximation for the FDG uptake
    workflow.connect([(inputnode, compute_AIF_PET, [
        ("subject_id", "subject_id"),
        ("dose", "dose"),
        ("weight", "weight"),
        ("delay", "delay"),
        ("glycemie", "glycemie"),
        ("scan_time", "scan_time"),
    ])])

    workflow.connect([(inputnode, scale_PVC_matrix, [
        ("subject_id", "subject_id"),
        ("dose", "dose"),
        ("weight", "weight"),
        ("delay", "delay"),
        ("glycemie", "glycemie"),
        ("scan_time", "scan_time"),
    ])])

    workflow.connect([(dtiproc, petquant, [
        (('outputnode.tissue_class_files', select_GM), 'inputnode.gm_mask')
    ])])

    workflow.connect([(dtiproc, petquant, [
        ("outputnode.t1", "inputnode.t1"),
        ("outputnode.wm_prob", "inputnode.wm_prob"),
        ("outputnode.gm_prob", "inputnode.gm_prob"),
        ("outputnode.csf_prob", "inputnode.csf_prob"),
    ])])

    workflow.connect([(inputnode, petquant, [("fdgpet", "inputnode.pet")])])
    workflow.connect([(inputnode, petquant, [("subject_id",
                                              "inputnode.subject_id")])])

    if manual_seg_rois:
        workflow.connect([(inputnode, petquant, [("manual_seg_rois",
                                                  "inputnode.rois")])])
    else:
        workflow.connect([(reg_label, petquant, [("outputnode.rois",
                                                  "inputnode.rois")])])

    workflow.connect([(petquant, compute_AIF_PET,
                       [("outputnode.corrected_pet_to_t1", "in_file")])])
    workflow.connect([(petquant, compute_SUV_norm_glycemia,
                       [("outputnode.corrected_pet_to_t1", "in_file")])])
    workflow.connect([(petquant, scale_PVC_matrix,
                       [("outputnode.pet_results_npz", "in_file")])])
    '''
    Connect outputnode
    '''

    workflow.connect([(t1_to_dwi, outputnode, [("out_file", "t1_to_dwi")])])

    workflow.connect([(dtiproc, outputnode, [
        ("outputnode.t1", "t1"),
        ("outputnode.wm_prob", "wm_prob"),
        ("outputnode.gm_prob", "gm_prob"),
        ("outputnode.csf_prob", "csf_prob"),
        ("outputnode.single_fiber_mask", "single_fiber_mask"),
        ("outputnode.fa", "fa"),
        ("outputnode.rgb_fa", "rgb_fa"),
        ("outputnode.md", "md"),
        ("outputnode.mode", "mode"),
        ("outputnode.t1_brain", "t1_brain"),
        ("outputnode.wm_mask", "wm_mask"),
        ("outputnode.term_mask", "term_mask"),
        ("outputnode.aparc_aseg", "aparc_aseg"),
        ("outputnode.tissue_class_files", "tissue_class_files"),
    ])])

    workflow.connect([(reg_label, outputnode, [
        ("outputnode.rois_to_dwi", "rois_to_dwi"),
        ("outputnode.wmmask_to_dwi", "wmmask_to_dwi"),
        ("outputnode.termmask_to_dwi", "termmask_to_dwi"),
        ("outputnode.dwi_to_t1_matrix", "dwi_to_t1_matrix"),
        ("outputnode.highres_t1_to_dwi_matrix", "highres_t1_to_dwi_matrix"),
    ])])
    if manual_seg_rois:
        workflow.connect([(inputnode, outputnode, [("manual_seg_rois", "rois")
                                                   ])])
    else:
        workflow.connect([(reg_label, outputnode, [("outputnode.rois", "rois")
                                                   ])])

    workflow.connect([(compute_SUV_norm_glycemia, outputnode,
                       [("out_file", "SUV_corrected_pet_to_t1")])])
    workflow.connect([(compute_AIF_PET, outputnode,
                       [("out_file", "AIF_corrected_pet_to_t1")])])
    workflow.connect([(petquant, outputnode, [("outputnode.orig_pet_to_t1",
                                               "orig_pet_to_t1")])])
    workflow.connect([(scale_PVC_matrix, outputnode, [("out_npz",
                                                       "pet_results_npz")])])
    workflow.connect([(scale_PVC_matrix, outputnode, [("out_matlab_mat",
                                                       "pet_results_mat")])])
    workflow.connect([(single_fiber_mask_cortex_only, outputnode,
                       [("out_file", "single_fiber_mask_cortex_only")])])
    return workflow
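
# A minimal usage sketch for the combined DWI/PET workflow returned above.
# The factory name below is hypothetical -- the real function is defined
# earlier in this file and its name is not shown in this excerpt -- and the
# input values are placeholders in whatever units the CMR_glucose and
# calculate_SUV helpers expect.
pet_dwi_wf = create_dwi_pet_quantification_wf(name="pet_dwi_quant")  # hypothetical name
pet_dwi_wf.inputs.inputnode.subjects_dir = "/data/freesurfer_subjects"
pet_dwi_wf.inputs.inputnode.subject_id = "sub-01"
pet_dwi_wf.inputs.inputnode.dwi = "dwi.nii.gz"
pet_dwi_wf.inputs.inputnode.bvecs = "dwi.bvec"
pet_dwi_wf.inputs.inputnode.bvals = "dwi.bval"
pet_dwi_wf.inputs.inputnode.fdgpet = "fdg_pet.nii.gz"
pet_dwi_wf.inputs.inputnode.dose = 185.0
pet_dwi_wf.inputs.inputnode.weight = 70.0
pet_dwi_wf.inputs.inputnode.delay = 30.0
pet_dwi_wf.inputs.inputnode.glycemie = 5.5
pet_dwi_wf.inputs.inputnode.scan_time = 15.0
# pet_dwi_wf.run()  # requires FreeSurfer/FSL and real data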
Example #18
0
#node_fsl_sqr2.inputs.in_file = root_dir + nipype_dir + nlin_displacement_field_4d_only_split2
node_fsl_sqr2.inputs.out_file = root_dir + nipype_dir + nlin_displacement_field_4d_only_split2

# Node 16
node_fsl_sqr3 = pe.Node(interface=fsl.UnaryMaths(), name='node_fsl_sqr3')
node_fsl_sqr3.inputs.operation = 'sqr'
#node_fsl_sqr3.inputs.in_file = root_dir + nipype_dir + nlin_displacement_field_4d_only_split3
node_fsl_sqr3.inputs.out_file = root_dir + nipype_dir + nlin_displacement_field_4d_only_split3

# Node 17
node_merge = pe.Node(interface=util.Merge(2), name='node_merge')
node_merge.inputs.in1 = root_dir + nipype_dir + nlin_displacement_field_4d_only_split2
node_merge.inputs.in2 = root_dir + nipype_dir + nlin_displacement_field_4d_only_split3

# Node 18
node_fsl_add_all = pe.Node(interface=fsl.MultiImageMaths(),
                           name='node_fsl_add_all')
#node_fsl_add_all.inputs.in_file = root_dir + nipype_dir + nlin_displacement_field_4d_only_split1
node_fsl_add_all.inputs.op_string = '-add %s -add %s'
#node_fsl_add_all.inputs.operand_files = [root_dir + nipype_dir + nlin_displacement_field_4d_only_split2, root_dir + nipype_dir + nlin_displacement_field_4d_only_split3]
node_fsl_add_all.inputs.out_file = root_dir + nipype_dir + nlin_displacement_sqr

# Node 19
node_fsl_sqrt = pe.Node(interface=fsl.UnaryMaths(), name='node_fsl_sqrt')
#node_fsl_sqrt.inputs.in_file = root_dir + nipype_dir + nlin_displacement_sqr
node_fsl_sqrt.inputs.operation = 'sqrt'
node_fsl_sqrt.inputs.out_file = root_dir + nipype_dir + nlin_displacement

# Node 20
node_3T_MNI_reg_aladin1 = pe.Node(interface=w_reg_aladin.reg_aladin(),
                                  name='node_3T_MNI_reg_aladin1')
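
# Note on the fragment above: node_fsl_sqr2/node_fsl_sqr3 square the split
# components of the 4D nonlinear displacement field, node_fsl_add_all sums
# them, and node_fsl_sqrt takes the square root, i.e. the chain computes the
# per-voxel displacement magnitude sqrt(dx^2 + dy^2 + dz^2).  A rough
# fslmaths equivalent (file names are placeholders, not taken from this
# script):
#
#   fslmaths dx.nii.gz -sqr dx_sqr.nii.gz
#   fslmaths dy.nii.gz -sqr dy_sqr.nii.gz
#   fslmaths dz.nii.gz -sqr dz_sqr.nii.gz
#   fslmaths dx_sqr.nii.gz -add dy_sqr.nii.gz -add dz_sqr.nii.gz disp_sqr.nii.gz
#   fslmaths disp_sqr.nii.gz -sqrt displacement_magnitude.nii.gz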
Example #19
0
def create_tbss_1_preproc(name='tbss_1_preproc'):
    """Preprocess FA data for TBSS: erodes a little and zero end slicers and
    creates masks(for use in FLIRT & FNIRT from FSL).
    A pipeline that does the same as tbss_1_preproc script in FSL

    Example
    -------

    >>> from nipype.workflows.dmri.fsl import tbss
    >>> tbss1 = tbss.create_tbss_1_preproc()
    >>> tbss1.inputs.inputnode.fa_list = ['s1_FA.nii', 's2_FA.nii', 's3_FA.nii']

    Inputs::

        inputnode.fa_list

    Outputs::

        outputnode.fa_list
        outputnode.mask_list
        outputnode.slices

    """

    # Define the inputnode
    inputnode = pe.Node(interface=util.IdentityInterface(fields=["fa_list"]),
                        name="inputnode")

    # Prep the FA images
    prepfa = pe.MapNode(fsl.ImageMaths(suffix="_prep"),
                        name="prepfa",
                        iterfield=['in_file', 'op_string'])

    # Slicer
    slicer = pe.MapNode(fsl.Slicer(all_axial=True, image_width=1280),
                        name='slicer',
                        iterfield=['in_file'])

    # Create a mask
    getmask1 = pe.MapNode(fsl.ImageMaths(op_string="-bin", suffix="_mask"),
                          name="getmask1",
                          iterfield=['in_file'])
    getmask2 = pe.MapNode(
        fsl.MultiImageMaths(op_string="-dilD -dilD -sub 1 -abs -add %s"),
        name="getmask2",
        iterfield=['in_file', 'operand_files'])

    #    $FSLDIR/bin/fslmaths FA/${f}_FA_mask -dilD -dilD -sub 1 -abs -add FA/${f}_FA_mask FA/${f}_FA_mask -odt char
    # Define the tbss1 workflow
    tbss1 = pe.Workflow(name=name)
    tbss1.connect([
        (inputnode, prepfa, [("fa_list", "in_file")]),
        (inputnode, prepfa, [(("fa_list", tbss1_op_string), "op_string")]),
        (prepfa, getmask1, [("out_file", "in_file")]),
        (getmask1, getmask2, [("out_file", "in_file"),
                              ("out_file", "operand_files")]),
        (prepfa, slicer, [('out_file', 'in_file')]),
    ])

    # Define the outputnode
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=["fa_list", "mask_list", "slices"]),
                         name="outputnode")
    tbss1.connect([(prepfa, outputnode, [("out_file", "fa_list")]),
                   (getmask2, outputnode, [("out_file", "mask_list")]),
                   (slicer, outputnode, [('out_file', 'slices')])])
    return tbss1
Example #20
0
def spatial_smoothing(wf_name,
                      fwhm,
                      input_image_type='func_derivative',
                      opt=None):

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['in_file', 'mask']),
                        name='inputspec')

    inputnode_fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']),
                             name='fwhm_input')
    inputnode_fwhm.iterables = ("fwhm", fwhm)

    image_types = [
        'func_derivative', 'func_derivative_multi', 'func_4d', 'func_mask'
    ]

    if input_image_type not in image_types:
        raise ValueError('Input image type {0} should be one of '
                         '{1}'.format(input_image_type,
                                      ', '.join(image_types)))

    if opt == 'FSL':
        output_smooth_mem_gb = 4.0
        if input_image_type == 'func_derivative_multi':
            output_smooth = pe.MapNode(interface=fsl.MultiImageMaths(),
                                       name='smooth_multi',
                                       iterfield=['in_file'],
                                       mem_gb=output_smooth_mem_gb)
        else:
            output_smooth = pe.Node(interface=fsl.MultiImageMaths(),
                                    name='smooth',
                                    mem_gb=output_smooth_mem_gb)

    elif opt == 'AFNI':
        if input_image_type == 'func_derivative_multi':
            output_smooth = pe.MapNode(interface=afni.BlurToFWHM(),
                                       name='smooth_multi',
                                       iterfield=['in_file'])
        else:
            output_smooth = pe.Node(interface=afni.BlurToFWHM(),
                                    name='smooth')
        output_smooth.inputs.outputtype = 'NIFTI_GZ'

    if opt == 'FSL':
        # wire in the resource to be smoothed
        wf.connect(inputnode, 'in_file', output_smooth, 'in_file')
        # get the parameters for fwhm
        wf.connect(inputnode_fwhm, ('fwhm', set_gauss), output_smooth,
                   'op_string')
        wf.connect(inputnode, 'mask', output_smooth, 'operand_files')
    elif opt == 'AFNI':
        wf.connect(inputnode, 'in_file', output_smooth, 'in_file')
        wf.connect(inputnode_fwhm, 'fwhm', output_smooth, 'fwhm')
        wf.connect(inputnode, 'mask', output_smooth, 'mask')

    outputnode = pe.Node(util.IdentityInterface(fields=['out_file', 'fwhm']),
                         name='outputspec')

    wf.connect(output_smooth, 'out_file', outputnode, 'out_file')
    wf.connect(inputnode_fwhm, 'fwhm', outputnode, 'fwhm')

    return wf
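
# spatial_smoothing() above depends on a helper called set_gauss to convert an
# FWHM value (mm) into an fslmaths op_string for MultiImageMaths.  A minimal
# sketch of such a helper is shown below; it follows the fslmaths recipe
# documented elsewhere in this file (-kernel gauss FWHM/sqrt(8*ln(2)) -fmean
# -mas <mask>) and is an approximation, not necessarily the exact helper used
# by the workflows above.
def set_gauss(fwhm):
    # import inside the function so nipype can ship it to a Function connection
    import math
    sigma = float(fwhm) / math.sqrt(8.0 * math.log(2.0))  # FWHM -> Gaussian sigma
    # '%s' is left in place for MultiImageMaths to substitute the mask
    # connected to 'operand_files'
    return "-kernel gauss {0:f} -fmean -mas %s".format(sigma)

# Example use of the factory defined above (FSL branch):
# smooth_wf = spatial_smoothing('smooth_wf', fwhm=[4, 6], opt='FSL')
# smooth_wf.inputs.inputspec.in_file = 'func_derivative.nii.gz'
# smooth_wf.inputs.inputspec.mask = 'brain_mask.nii.gz'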
Example #21
0
def apply_all_corrections_using_ants(name='UnwarpArtifacts'):
    """
    Combines two lists of linear transforms with the deformation field
    map obtained epi_correction by Ants.
    Additionally, computes the corresponding bspline coefficients and
    the map of determinants of the jacobian.
    """
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_sdc_syb', 'in_hmc', 'in_ecc', 'in_dwi', 'in_t1']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_warp', 'out_coeff', 'out_jacobian']),
        name='outputnode')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')

    concat_hmc_ecc = pe.MapNode(fsl.ConvertXFM(), name="concat_hmc_ecc",
                                iterfield=['in_file', 'in_file2'])
    concat_hmc_ecc.inputs.concat_xfm = True

    warps = pe.MapNode(fsl.ConvertWarp(), iterfield=['premat'],
                       name='ConvertWarp')

    unwarp = pe.MapNode(interface=fsl.ApplyWarp(),
                        iterfield=['in_file', 'field_file'],
                        name='unwarp_warp')
    unwarp.inputs.interp = 'spline'

    coeffs = pe.MapNode(fsl.WarpUtils(out_format='spline'),
                        iterfield=['in_file'], name='CoeffComp')
    jacobian = pe.MapNode(fsl.WarpUtils(write_jacobian=True),
                          iterfield=['in_file'], name='JacobianComp')
    jacmult = pe.MapNode(fsl.MultiImageMaths(op_string='-mul %s'),
                         iterfield=['in_file', 'operand_files'],
                         name='ModulateDWIs')

    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, concat_hmc_ecc, [('in_ecc', 'in_file2')]),  # noqa
        (inputnode, concat_hmc_ecc, [('in_hmc', 'in_file')]),  # noqa

        (concat_hmc_ecc, warps, [('out_file',            'premat')]),  # noqa
        (inputnode,      warps, [('in_sdc_syb',           'warp1')]),  # noqa
        (inputnode,      warps, [('in_t1',            'reference')]),  # noqa
        (inputnode,      split, [('in_dwi',             'in_file')]),  # noqa
        (warps,         unwarp, [('out_file',        'field_file')]),  # noqa
        (split,         unwarp, [('out_files',          'in_file')]),  # noqa
        (inputnode,     unwarp, [('in_t1',             'ref_file')]),  # noqa
        (inputnode,     coeffs, [('in_t1',            'reference')]),  # noqa
        (warps,         coeffs, [('out_file',           'in_file')]),  # noqa
        (inputnode,   jacobian, [('in_t1',            'reference')]),  # noqa
        (coeffs,      jacobian, [('out_file',           'in_file')]),  # noqa
        (unwarp,       jacmult, [('out_file',           'in_file')]),  # noqa
        (jacobian,     jacmult, [('out_jacobian', 'operand_files')]),  # noqa
        (jacmult,        thres, [('out_file',           'in_file')]),  # noqa
        (thres,          merge, [('out_file',          'in_files')]),  # noqa
        (warps,     outputnode, [('out_file',          'out_warp')]),  # noqa
        (coeffs,    outputnode, [('out_file',         'out_coeff')]),  # noqa
        (jacobian,  outputnode, [('out_jacobian',  'out_jacobian')]),  # noqa
        (merge,     outputnode, [('merged_file',        'out_file')])  # noqa
    ])

    return wf
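
# A minimal usage sketch for the workflow above: populate the inputnode fields
# and run.  File names are placeholders; in_hmc/in_ecc take one affine per DWI
# volume because they feed MapNode iterfields.
unwarp = apply_all_corrections_using_ants(name='UnwarpArtifacts')
unwarp.inputs.inputnode.in_dwi = 'dwi.nii.gz'
unwarp.inputs.inputnode.in_t1 = 't1.nii.gz'
unwarp.inputs.inputnode.in_sdc_syb = 'epi_correction_deformation_field.nii.gz'
unwarp.inputs.inputnode.in_hmc = ['hmc_vol0000.mat', 'hmc_vol0001.mat']
unwarp.inputs.inputnode.in_ecc = ['ecc_vol0000.mat', 'ecc_vol0001.mat']
# unwarp.run()  # requires FSL and real data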
Example #22
0
def spatial_smooth(workflow,
                   func_key,
                   mask_key,
                   output_name,
                   strat,
                   num_strat,
                   pipeline_config_object,
                   input_image_type='func_derivative'):

    image_types = [
        'func_derivative', 'func_derivative_multi', 'func_4d', 'func_mask'
    ]

    if input_image_type not in image_types:
        raise ValueError('Input image type {0} should be one of {1}'.format(
            input_image_type, ', '.join(image_types)))

    if input_image_type == 'func_derivative_multi':
        output_smooth = pe.MapNode(interface=fsl.MultiImageMaths(),
                                   name='{0}_multi_{1}'.format(
                                       output_name, num_strat),
                                   iterfield=['in_file'])
    else:
        output_smooth = pe.Node(interface=fsl.MultiImageMaths(),
                                name='{0}_{1}'.format(output_name, num_strat))

    if isinstance(func_key, str):

        if func_key == 'leaf':
            func_node, func_file = strat.get_leaf_properties()
        else:
            try:
                func_node, func_file = strat[func_key]
            except KeyError:
                print('Could not find func_key {0} in resource pool'.format(
                    func_key))
                raise

    elif isinstance(func_key, tuple):
        func_node, func_file = func_key

    if isinstance(mask_key, str):
        mask_node, mask_file = strat[mask_key]
    elif isinstance(mask_key, tuple):
        mask_node, mask_file = mask_key
    else:
        raise ValueError('mask {0} ({1}) could not be deciphered'.format(
            mask_key, type(mask_key)))

    # TODO review connection to config, is the node really necessary?
    inputnode_fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']),
                             name='fwhm_input_{0}_{1}'.format(
                                 output_name, num_strat))

    inputnode_fwhm.iterables = ("fwhm", pipeline_config_object.fwhm)

    # wire in the resource to be smoothed
    workflow.connect(func_node, func_file, output_smooth, 'in_file')

    # get the parameters for fwhm
    workflow.connect(inputnode_fwhm, ('fwhm', set_gauss), output_smooth,
                     'op_string')

    workflow.connect(mask_node, mask_file, output_smooth, 'operand_files')

    # output the resource
    strat.append_name(output_smooth.name)
    strat.update_resource_pool({output_name: (output_smooth, 'out_file')})

    return workflow, strat
Example #23
0
def create_reho():
    """
    Regional Homogeneity (ReHo) approach to fMRI data analysis

    This workflow computes the ReHo map and its z-score map

    Parameters
    ----------

    None

    Returns
    -------
    reHo : workflow
        Regional Homogeneity Workflow

    Notes
    -----

    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/reho/reho.py>`_

    Workflow Inputs: ::

        inputspec.rest_res_filt : string (existing nifti file)
            Input EPI 4D Volume

        inputspec.rest_mask : string (existing nifti file)
            Input Whole Brain Mask of EPI 4D Volume

        inputspec.cluster_size : integer
            For a brain voxel, the number of neighbouring brain voxels to use for KCC.
            Possible values are 27, 19, or 7. The recommended value is 27.


    Workflow Outputs: ::

        outputspec.raw_reho_map : string (nifti file)

        outputspec.z_score : string (nifti file)


    ReHo Workflow Procedure:

    1. Generate ReHo map from the input EPI 4D volume, EPI mask and cluster_size
    2. Compute Z score of the ReHo map by subtracting mean and dividing by standard deviation


    Workflow Graph:

    .. image:: ../images/reho.dot.png
        :width: 500

    Detailed Workflow Graph:

    .. image:: ../images/reho_detailed.dot.png
        :width: 500
        
    References
    ---------- 
    .. [1] Zang, Y., Jiang, T., Lu, Y., He, Y.,  Tian, L. (2004). Regional homogeneity approach to fMRI data analysis. NeuroImage, 22(1), 394, 400. doi:10.1016/j.neuroimage.2003.12.030

    Examples
    --------
    >>> from CPAC import reho
    >>> wf = reho.create_reho()
    >>> wf.inputs.inputspec.rest_res_filt = '/home/data/Project/subject/func/rest_res_filt.nii.gz'
    >>> wf.inputs.inputspec.rest_mask = '/home/data/Project/subject/func/rest_mask.nii.gz'
    >>> wf.inputs.inputspec.cluster_size = 27
    >>> wf.run()
    """

    reHo = pe.Workflow(name='reHo')
    inputNode = pe.Node(util.IdentityInterface(
        fields=['cluster_size', 'rest_res_filt', 'rest_mask']),
                        name='inputspec')

    outputNode = pe.Node(
        util.IdentityInterface(fields=['raw_reho_map', 'z_score']),
        name='outputspec')

    op_string = pe.Node(util.Function(input_names=['mean', 'std_dev'],
                                      output_names=['op_string'],
                                      function=getOpString),
                        name='op_string')

    raw_reho_map = pe.Node(util.Function(
        input_names=['in_file', 'mask_file', 'cluster_size'],
        output_names=['out_file'],
        function=compute_reho),
                           name='reho_map')

    mean = pe.Node(interface=fsl.ImageStats(), name='mean')
    mean.inputs.op_string = '-k %s -m'

    standard_deviation = pe.Node(interface=fsl.ImageStats(),
                                 name='standard_deviation')
    standard_deviation.inputs.op_string = '-k %s -s'

    z_score = pe.Node(interface=fsl.MultiImageMaths(), name='z_score')

    reHo.connect(inputNode, 'rest_res_filt', raw_reho_map, 'in_file')
    reHo.connect(inputNode, 'rest_mask', raw_reho_map, 'mask_file')
    reHo.connect(inputNode, 'cluster_size', raw_reho_map, 'cluster_size')

    reHo.connect(raw_reho_map, 'out_file', mean, 'in_file')
    reHo.connect(inputNode, 'rest_mask', mean, 'mask_file')
    reHo.connect(raw_reho_map, 'out_file', standard_deviation, 'in_file')
    reHo.connect(inputNode, 'rest_mask', standard_deviation, 'mask_file')
    reHo.connect(mean, 'out_stat', op_string, 'mean')
    reHo.connect(standard_deviation, 'out_stat', op_string, 'std_dev')
    reHo.connect(raw_reho_map, 'out_file', z_score, 'in_file')
    reHo.connect(op_string, 'op_string', z_score, 'op_string')
    reHo.connect(inputNode, 'rest_mask', z_score, 'operand_files')

    reHo.connect(z_score, 'out_file', outputNode, 'z_score')

    reHo.connect(raw_reho_map, 'out_file', outputNode, 'raw_reho_map')

    return reHo
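
# create_reho() z-scores the ReHo map with fslmaths: subtract the mean, divide
# by the standard deviation, and re-apply the brain mask.  A sketch of a
# getOpString-style helper that builds that MultiImageMaths op_string is shown
# below; it is an approximation of the helper referenced above, not guaranteed
# to match the C-PAC implementation.
def getOpString(mean, std_dev):
    # '%s' stays in the string so MultiImageMaths can substitute the mask
    # connected to 'operand_files' (inputspec.rest_mask above)
    return "-sub {0:f} -div {1:f} -mas %s".format(float(mean), float(std_dev))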
Example #24
0
def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
    """
    UNet
    options (following numbers are default):
    input_slice: 3
    conv_block: 5
    kernel_root: 16
    rescale_dim: 256
    """

    unet_mask = pe.Node(util.Function(input_names=['model_path', 'cimg_in'],
                                      output_names=['out_path'],
                                      function=predict_volumes),
                        name=f'unet_mask_{pipe_num}')

    node, out = strat_pool.get_data('unet_model')
    wf.connect(node, out, unet_mask, 'model_path')

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, unet_mask, 'cimg_in')
    """
    Revised mask with ANTs
    """
    # fslmaths <whole head> -mul <mask> brain.nii.gz
    unet_masked_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                name=f'unet_masked_brain_{pipe_num}')
    unet_masked_brain.inputs.op_string = "-mul %s"

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, unet_masked_brain, 'in_file')
    wf.connect(unet_mask, 'out_path', unet_masked_brain, 'operand_files')

    # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
    # TODO: antsRegistration -z 0 -d 3 -r [NMT_SS_0.5mm.nii.gz,brain.nii.gz,0] -o [transform,brain_rot2atl.nii.gz,brain_inv_rot2atl.nii.gz] -t Rigid[0.1] -m MI[NMT_SS_0.5mm.nii.gz,brain.nii.gz,1,32,Regular,0.25] -c [1000x500x250x100,1e-08,10] -s 3.0x2.0x1.0x0.0 -f 8x4x2x1 -u 1 -t Affine[0.1] -m MI[NMT_SS_0.5mm.nii.gz,brain.nii.gz,1,32,Regular,0.25] -c [1000x500x250x100,1e-08,10] -s 3.0x2.0x1.0x0.0 -f 8x4x2x1 -u 1
    native_brain_to_template_brain = pe.Node(interface=fsl.FLIRT(),
                                             name=f'native_brain_to_template_'
                                             f'brain_{pipe_num}')
    native_brain_to_template_brain.inputs.dof = 6
    native_brain_to_template_brain.inputs.interp = 'sinc'
    wf.connect(unet_masked_brain, 'out_file', native_brain_to_template_brain,
               'in_file')

    node, out = strat_pool.get_data('T1w_brain_template')
    wf.connect(node, out, native_brain_to_template_brain, 'reference')

    # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
    # TODO: antsApplyTransforms -d 3 -i head.nii.gz -r NMT_0.5mm.nii.gz -n Linear -o head_rot2atl.nii.gz -v -t transform1Rigid.mat -t transform2Affine.mat -t transform0DerivedInitialMovingTranslation.mat
    native_head_to_template_head = pe.Node(interface=fsl.FLIRT(),
                                           name=f'native_head_to_template_'
                                           f'head_{pipe_num}')
    native_head_to_template_head.inputs.apply_xfm = True

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, native_head_to_template_head, 'in_file')

    wf.connect(native_brain_to_template_brain, 'out_matrix_file',
               native_head_to_template_head, 'in_matrix_file')

    node, out = strat_pool.get_data('T1w_template')
    wf.connect(node, out, native_head_to_template_head, 'reference')

    # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
    template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(),
                                  name=f'template_brain_mask_{pipe_num}')
    template_brain_mask.inputs.args = '-bin'

    node, out = strat_pool.get_data('T1w_brain_template')
    wf.connect(node, out, template_brain_mask, 'in_file')

    # ANTS 3 -m  CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching  --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
    ants_template_head_to_template = pe.Node(interface=ants.Registration(),
                                             name=f'template_head_to_'
                                             f'template_{pipe_num}')
    ants_template_head_to_template.inputs.metric = ['CC']
    ants_template_head_to_template.inputs.metric_weight = [1, 5]
    ants_template_head_to_template.inputs.transforms = ['SyN']
    ants_template_head_to_template.inputs.transform_parameters = [(0.25, )]
    ants_template_head_to_template.inputs.interpolation = 'NearestNeighbor'
    ants_template_head_to_template.inputs.number_of_iterations = [[60, 50, 20]]
    ants_template_head_to_template.inputs.smoothing_sigmas = [[0.6, 0.2, 0.0]]
    ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]]
    ants_template_head_to_template.inputs.convergence_threshold = [1.e-8]
    wf.connect(native_head_to_template_head, 'out_file',
               ants_template_head_to_template, 'fixed_image')

    node, out = strat_pool.get_data('T1w_brain_template')
    wf.connect(node, out, ants_template_head_to_template, 'moving_image')

    # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz

    template_head_transform_to_template = pe.Node(
        interface=ants.ApplyTransforms(),
        name=f'template_head_transform_to_template_{pipe_num}')
    template_head_transform_to_template.inputs.dimension = 3

    wf.connect(template_brain_mask, 'out_file',
               template_head_transform_to_template, 'input_image')
    wf.connect(native_brain_to_template_brain, 'out_file',
               template_head_transform_to_template, 'reference_image')
    wf.connect(ants_template_head_to_template, 'forward_transforms',
               template_head_transform_to_template, 'transforms')

    # TODO: replace convert_xfm and flirt with:
    # antsApplyTransforms -d 3 -i brain_rot2atl_mask.nii.gz -r brain.nii.gz -n linear -o brain_mask.nii.gz -t [transform0DerivedInitialMovingTranslation.mat,1] -t [transform2Affine.mat,1] -t [transform1Rigid.mat,1]
    # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat
    invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm')
    invt.inputs.invert_xfm = True
    wf.connect(native_brain_to_template_brain, 'out_matrix_file', invt,
               'in_file')

    # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat
    template_brain_to_native_brain = pe.Node(interface=fsl.FLIRT(),
                                             name=f'template_brain_to_native_'
                                             f'brain_{pipe_num}')
    template_brain_to_native_brain.inputs.apply_xfm = True
    wf.connect(template_head_transform_to_template, 'output_image',
               template_brain_to_native_brain, 'in_file')
    wf.connect(unet_masked_brain, 'out_file', template_brain_to_native_brain,
               'reference')
    wf.connect(invt, 'out_file', template_brain_to_native_brain,
               'in_matrix_file')

    # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz
    refined_mask = pe.Node(interface=fsl.Threshold(),
                           name=f'refined_mask'
                           f'_{pipe_num}')
    refined_mask.inputs.thresh = 0.5
    refined_mask.inputs.args = '-bin'
    wf.connect(template_brain_to_native_brain, 'out_file', refined_mask,
               'in_file')

    outputs = {'space-T1w_desc-brain_mask': (refined_mask, 'out_file')}

    return (wf, outputs)
Example #25
0
def create_vmhc(use_ants, name='vmhc_workflow', ants_threads=1):
    """
    Compute the map of brain functional homotopy, the high degree of synchrony in spontaneous activity between geometrically corresponding interhemispheric (i.e., homotopic) regions.



    Parameters
    ----------

    None

    Returns
    -------

    vmhc_workflow : workflow

        Voxel Mirrored Homotopic Connectivity Analysis Workflow



    Notes
    -----

    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/vmhc/vmhc.py>`_ 

    Workflow Inputs::

        inputspec.brain : string (existing nifti file)
            Anatomical image (without skull)

        inputspec.symmetric_brain : string (existing nifti file)
            MNI152_T1_2mm_symmetric_brain.nii.gz
 
        inputspec.rest_res_filt : string (existing nifti file)
            Band-passed image with nuisance signal regressed out (and optionally scrubbed). Recommended bandpass filter: (0.001, 0.1)

        inputspec.reorient : string (existing nifti file)
            RPI oriented anatomical data

        inputspec.example_func2highres_mat : string (existing affine transformation .mat file)
            Specifies an affine transform that should be applied to the example_func before nonlinear warping

        inputspec.standard_for_func: string (existing nifti file)
            MNI152_T1_standard_resolution_brain.nii.gz

        inputspec.symmetric_skull : string (existing nifti file)
            MNI152_T1_2mm_symmetric.nii.gz

        inputspec.twomm_brain_mask_dil : string (existing nifti file)
            MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz

        inputspec.config_file_twomm_symmetric : string (existing .cnf file)
            T1_2_MNI152_2mm_symmetric.cnf

        inputspec.rest_mask : string (existing nifti file)
            A functional volume mask (derived by dilation from the motion-corrected functional volume)

        fwhm_input.fwhm : list (float)
            FWHM values for spatially smoothing the Z-transformed correlations in MNI space.
            Generally the value of this parameter is 1.5 or 2 times the voxel size of the input image.

        inputspec.mean_functional : string (existing nifti file)
            The mean functional image for use in the func-to-anat registration matrix conversion
            to ITK (ANTS) format, if the user selects to use ANTS.

        
    Workflow Outputs::

        outputspec.highres2symmstandard : string (nifti file)
            Linear registration of T1 image to symmetric standard image

        outputspec.highres2symmstandard_mat : string (affine transformation .mat file)
            An affine transformation .mat file from linear registration, used in nonlinear registration

        outputspec.highres2symmstandard_warp : string (nifti file)
            Warp file from nonlinear registration of T1 to the symmetric standard brain

        outputspec.fnirt_highres2symmstandard : string (nifti file)
            Nonlinear registration of T1 to the symmetric standard brain

        outputspec.highres2symmstandard_jac : string (nifti file)
            Jacobian determinant image from nonlinear registration of T1 to the symmetric standard brain

        outputspec.rest_res_2symmstandard : string (nifti file)
            Nonlinear registration (func to standard) image

        outputspec.VMHC_FWHM_img : string (nifti file)
            pearson correlation between res2standard and flipped res2standard

        outputspec.VMHC_Z_FWHM_img : string (nifti file)
            Fisher Z transform map

        outputspec.VMHC_Z_stat_FWHM_img : string (nifti file)
            Z statistic map

    Order of commands:

    - Perform linear registration of Anatomical brain in T1 space to symmetric standard space. For details see `flirt <http://www.fmrib.ox.ac.uk/fsl/flirt/index.html>`_::

        flirt
        -ref MNI152_T1_2mm_symmetric_brain.nii.gz
        -in mprage_brain.nii.gz
        -out highres2symmstandard.nii.gz
        -omat highres2symmstandard.mat
        -cost corratio
        -searchcost corratio
        -dof 12
        -interp trilinear    
        
    - Perform nonlinear registration (highres to standard) to symmetric standard brain. For details see `fnirt <http://fsl.fmrib.ox.ac.uk/fsl/fnirt/>`_::
    
        fnirt
        --in=head.nii.gz
        --aff=highres2symmstandard.mat
        --cout=highres2symmstandard_warp.nii.gz
        --iout=fnirt_highres2symmstandard.nii.gz
        --jout=highres2symmstandard_jac.nii.gz
        --config=T1_2_MNI152_2mm_symmetric.cnf
        --ref=MNI152_T1_2mm_symmetric.nii.gz
        --refmask=MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz
        --warpres=10,10,10 

    - Perform spatial smoothing on the input functional image(inputspec.rest_res_filt).  For details see `PrinciplesSmoothing <http://imaging.mrc-cbu.cam.ac.uk/imaging/PrinciplesSmoothing>`_ `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm>`_::

        fslmaths rest_res_filt.nii.gz
        -kernel gauss FWHM / sqrt(8 * ln(2))
        -fmean -mas rest_mask.nii.gz
        rest_res_filt_FWHM.nii.gz
        
    - Apply nonlinear registration (func to standard). For details see  `applywarp <http://www.fmrib.ox.ac.uk/fsl/fnirt/warp_utils.html#applywarp>`_::
        
        applywarp
        --ref=MNI152_T1_2mm_symmetric.nii.gz
        --in=rest_res_filt_FWHM.nii.gz
        --out=rest_res_2symmstandard.nii.gz
        --warp=highres2symmstandard_warp.nii.gz
        --premat=example_func2highres.mat
        
        
    - Copy and L/R swap the output of applywarp command (rest_res_2symmstandard.nii.gz). For details see  `fslswapdim <http://fsl.fmrib.ox.ac.uk/fsl/fsl4.0/avwutils/index.html>`_::

        fslswapdim
        rest_res_2symmstandard.nii.gz
        -x y z
        tmp_LRflipped.nii.gz


    - Calculate pearson correlation between rest_res_2symmstandard.nii.gz and flipped rest_res_2symmstandard.nii.gz(tmp_LRflipped.nii.gz). For details see  `3dTcorrelate <http://afni.nimh.nih.gov/pub/dist/doc/program_help/3dTcorrelate.html>`_::
        
        3dTcorrelate
        -pearson
        -polort -1
        -prefix VMHC_FWHM.nii.gz
        rest_res_2symmstandard.nii.gz
        tmp_LRflipped.nii.gz
    
    
    - Fisher Z Transform the correlation. For details see `3dcalc <http://afni.nimh.nih.gov/pub/dist/doc/program_help/3dcalc.html>`_::
        
        3dcalc
        -a VMHC_FWHM.nii.gz
        -expr 'log((a+1)/(1-a))/2'
        -prefix VMHC_FWHM_Z.nii.gz
    
        
    - Calculate the number of volumes (nvols) in the flipped rest_res_2symmstandard.nii.gz (tmp_LRflipped.nii.gz) ::

        Use Nibabel to do this
        
        
    - Compute the Z statistic map ::
        
        3dcalc
        -a VMHC_FWHM_Z.nii.gz
        -expr 'a*sqrt('${nvols}'-3)'
        -prefix VMHC_FWHM_Z_stat.nii.gz
    
    
    Workflow:
    
    .. image:: ../images/vmhc_graph.dot.png
        :width: 500 
    
    Workflow Detailed:
    
    .. image:: ../images/vmhc_detailed_graph.dot.png
        :width: 500 
    

    References
    ----------
    
    .. [1] Zuo, X.-N., Kelly, C., Di Martino, A., Mennes, M., Margulies, D. S., Bangaru, S., Grzadzinski, R., et al. (2010). Growing together and growing apart: regional and sex differences in the lifespan developmental trajectories of functional homotopy. The Journal of neuroscience : the official journal of the Society for Neuroscience, 30(45), 15034-43. doi:10.1523/JNEUROSCI.2612-10.2010


    Examples
    --------
    
    >>> vmhc_w = create_vmhc()
    >>> vmhc_w.inputs.inputspec.symmetric_brain = 'MNI152_T1_2mm_symmetric_brain.nii.gz'
    >>> vmhc_w.inputs.inputspec.symmetric_skull = 'MNI152_T1_2mm_symmetric.nii.gz'
    >>> vmhc_w.inputs.inputspec.twomm_brain_mask_dil = 'MNI152_T1_2mm_brain_mask_symmetric_dil.nii.gz'
    >>> vmhc_w.inputs.inputspec.config_file_twomm = 'T1_2_MNI152_2mm_symmetric.cnf'
    >>> vmhc_w.inputs.inputspec.standard_for_func= 'MNI152_T1_2mm.nii.gz'
    >>> vmhc_w.inputs.fwhm_input.fwhm = [4.5, 6]
    >>> vmhc_w.get_node('fwhm_input').iterables = ('fwhm', [4.5, 6])
    >>> vmhc_w.inputs.inputspec.rest_res = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/rest_res_filt.nii.gz')
    >>> vmhc_w.inputs.inputspec.reorient = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/anat/mprage_RPI.nii.gz')
    >>> vmhc_w.inputs.inputspec.brain = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/anat/mprage_brain.nii.gz')
    >>> vmhc_w.inputs.inputspec.example_func2highres_mat = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/reg/example_func2highres.mat')
    >>> vmhc_w.inputs.inputspec.rest_mask = os.path.abspath('/home/data/Projects/Pipelines_testing/Dickstein/subjects/s1001/func/original/rest_mask.nii.gz')
    >>> vmhc_w.run() # doctest: +SKIP

    """

    vmhc = pe.Workflow(name=name)

    inputNode = pe.Node(util.IdentityInterface(fields=[
        'rest_res', 'example_func2highres_mat', 'rest_mask',
        'standard_for_func', 'mean_functional', 'brain',
        'fnirt_nonlinear_warp', 'ants_symm_initial_xfm', 'ants_symm_rigid_xfm',
        'ants_symm_affine_xfm', 'ants_symm_warp_field'
    ]),
                        name='inputspec')

    outputNode = pe.Node(util.IdentityInterface(fields=[
        'rest_res_2symmstandard', 'VMHC_FWHM_img', 'VMHC_Z_FWHM_img',
        'VMHC_Z_stat_FWHM_img'
    ]),
                         name='outputspec')

    inputnode_fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']),
                             name='fwhm_input')

    if use_ants == False:
        # Apply nonlinear registration (func to standard)
        nonlinear_func_to_standard = pe.Node(interface=fsl.ApplyWarp(),
                                             name='nonlinear_func_to_standard')

    elif use_ants == True:
        # ANTS warp image etc.
        fsl_to_itk_vmhc = create_wf_c3d_fsl_to_itk(0, name='fsl_to_itk_vmhc')

        collect_transforms_vmhc = create_wf_collect_transforms(
            0, name='collect_transforms_vmhc')

        apply_ants_xfm_vmhc = create_wf_apply_ants_warp(
            0, name='apply_ants_xfm_vmhc', ants_threads=ants_threads)

        # this has to be 3 instead of default 0 because it is a 4D file
        apply_ants_xfm_vmhc.inputs.inputspec.input_image_type = 3

    # copy and L/R swap file
    copy_and_L_R_swap = pe.Node(interface=fsl.SwapDimensions(),
                                name='copy_and_L_R_swap')
    copy_and_L_R_swap.inputs.new_dims = ('-x', 'y', 'z')

    # calculate vmhc
    pearson_correlation = pe.Node(interface=preprocess.TCorrelate(),
                                  name='pearson_correlation')
    pearson_correlation.inputs.pearson = True
    pearson_correlation.inputs.polort = -1
    pearson_correlation.inputs.outputtype = 'NIFTI_GZ'

    try:
        z_trans = pe.Node(interface=preprocess.Calc(), name='z_trans')
        z_stat = pe.Node(interface=preprocess.Calc(), name='z_stat')
    except AttributeError:
        from nipype.interfaces.afni import utils as afni_utils
        z_trans = pe.Node(interface=afni_utils.Calc(), name='z_trans')
        z_stat = pe.Node(interface=afni_utils.Calc(), name='z_stat')

    z_trans.inputs.expr = 'log((1+a)/(1-a))/2'
    z_trans.inputs.outputtype = 'NIFTI_GZ'
    z_stat.inputs.outputtype = 'NIFTI_GZ'

    NVOLS = pe.Node(util.Function(input_names=['in_files'],
                                  output_names=['nvols'],
                                  function=get_img_nvols),
                    name='NVOLS')

    generateEXP = pe.Node(util.Function(input_names=['nvols'],
                                        output_names=['expr'],
                                        function=get_operand_expression),
                          name='generateEXP')

    smooth = pe.Node(interface=fsl.MultiImageMaths(), name='smooth')

    if use_ants == False:
        vmhc.connect(inputNode, 'rest_res', smooth, 'in_file')
        vmhc.connect(inputnode_fwhm, ('fwhm', set_gauss), smooth, 'op_string')
        vmhc.connect(inputNode, 'rest_mask', smooth, 'operand_files')
        vmhc.connect(smooth, 'out_file', nonlinear_func_to_standard, 'in_file')
        vmhc.connect(inputNode, 'standard_for_func',
                     nonlinear_func_to_standard, 'ref_file')
        vmhc.connect(inputNode, 'fnirt_nonlinear_warp',
                     nonlinear_func_to_standard, 'field_file')
        ## func->anat matrix (bbreg)
        vmhc.connect(inputNode, 'example_func2highres_mat',
                     nonlinear_func_to_standard, 'premat')
        vmhc.connect(nonlinear_func_to_standard, 'out_file', copy_and_L_R_swap,
                     'in_file')
        vmhc.connect(nonlinear_func_to_standard, 'out_file',
                     pearson_correlation, 'xset')

    elif use_ants == True:
        # connections for ANTS stuff

        # functional apply warp stuff
        vmhc.connect(inputNode, 'rest_res', smooth, 'in_file')
        vmhc.connect(inputnode_fwhm, ('fwhm', set_gauss), smooth, 'op_string')
        vmhc.connect(inputNode, 'rest_mask', smooth, 'operand_files')

        vmhc.connect(smooth, 'out_file', apply_ants_xfm_vmhc,
                     'inputspec.input_image')

        vmhc.connect(inputNode, 'ants_symm_initial_xfm',
                     collect_transforms_vmhc, 'inputspec.linear_initial')

        vmhc.connect(inputNode, 'ants_symm_rigid_xfm', collect_transforms_vmhc,
                     'inputspec.linear_rigid')

        vmhc.connect(inputNode, 'ants_symm_affine_xfm',
                     collect_transforms_vmhc, 'inputspec.linear_affine')

        vmhc.connect(inputNode, 'ants_symm_warp_field',
                     collect_transforms_vmhc, 'inputspec.warp_file')

        # func->anat matrix (bbreg)
        vmhc.connect(inputNode, 'example_func2highres_mat', fsl_to_itk_vmhc,
                     'inputspec.affine_file')

        vmhc.connect(inputNode, 'brain', fsl_to_itk_vmhc,
                     'inputspec.reference_file')

        vmhc.connect(inputNode, 'mean_functional', fsl_to_itk_vmhc,
                     'inputspec.source_file')

        vmhc.connect(fsl_to_itk_vmhc, 'outputspec.itk_transform',
                     collect_transforms_vmhc, 'inputspec.fsl_to_itk_affine')

        vmhc.connect(inputNode, 'standard_for_func', apply_ants_xfm_vmhc,
                     'inputspec.reference_image')

        vmhc.connect(collect_transforms_vmhc,
                     'outputspec.transformation_series', apply_ants_xfm_vmhc,
                     'inputspec.transforms')

        vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
                     copy_and_L_R_swap, 'in_file')

        vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
                     pearson_correlation, 'xset')

    vmhc.connect(copy_and_L_R_swap, 'out_file', pearson_correlation, 'yset')
    vmhc.connect(pearson_correlation, 'out_file', z_trans, 'in_file_a')
    vmhc.connect(copy_and_L_R_swap, 'out_file', NVOLS, 'in_files')
    vmhc.connect(NVOLS, 'nvols', generateEXP, 'nvols')
    vmhc.connect(z_trans, 'out_file', z_stat, 'in_file_a')
    vmhc.connect(generateEXP, 'expr', z_stat, 'expr')

    if use_ants == False:
        vmhc.connect(nonlinear_func_to_standard, 'out_file', outputNode,
                     'rest_res_2symmstandard')

    elif use_ants == True:
        # ANTS warp outputs to outputnode
        vmhc.connect(apply_ants_xfm_vmhc, 'outputspec.output_image',
                     outputNode, 'rest_res_2symmstandard')

    vmhc.connect(pearson_correlation, 'out_file', outputNode, 'VMHC_FWHM_img')
    vmhc.connect(z_trans, 'out_file', outputNode, 'VMHC_Z_FWHM_img')
    vmhc.connect(z_stat, 'out_file', outputNode, 'VMHC_Z_stat_FWHM_img')

    return vmhc
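
# create_vmhc() relies on two small helpers that are referenced but not shown
# here: get_img_nvols (count time points in the flipped 4D image) and
# get_operand_expression (build the 3dcalc expression 'a*sqrt(nvols-3)' from
# the docstring above).  Minimal sketches under those assumptions -- they may
# differ from the actual C-PAC definitions:
def get_img_nvols(in_files):
    # count volumes in a single 4D NIfTI file with nibabel
    import nibabel as nib
    shape = nib.load(in_files).shape
    return int(shape[3]) if len(shape) > 3 else 1

def get_operand_expression(nvols):
    # Z statistic map: multiply the Fisher-Z image by sqrt(nvols - 3)
    return 'a*sqrt(%d-3)' % int(nvols)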
Example #26
0
def create_CT_seg_wf(name="CT_seg_wf"):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(interface=util.IdentityInterface(fields=["CT"]),
                        name="inputnode")
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        "skin_mask", "skull_mask", "spongiform_mask", "instrument_mask",
        "implant_mask", "volumes"
    ]),
                         name="outputnode")

    skull_low = 200
    skull_high = 2000

    instrument_low = 1700

    implant_low = 155
    implant_high = 195

    sponge_low = 500
    sponge_high = 700

    skull_seg = pe.Node(interface=fsl.ImageMaths(), name="skull_seg")
    skull_seg.inputs.op_string = "-thr %f -uthr %f" % (skull_low, skull_high)
    skull_seg.inputs.out_file = "skull.nii.gz"

    spongiform_seg = pe.Node(interface=fsl.ImageMaths(), name="spongiform_seg")
    spongiform_seg.inputs.op_string = "-thr %f -uthr %f -fmedian" % (
        sponge_low, sponge_high)
    spongiform_seg.inputs.out_file = "spongiform_bone.nii.gz"

    instrument_seg = pe.Node(interface=fsl.ImageMaths(), name="instrument_seg")
    instrument_seg.inputs.op_string = "-thr %f" % instrument_low
    instrument_seg.inputs.out_file = "instruments.nii.gz"

    implant_seg = pe.Node(interface=fsl.ImageMaths(), name="implant_seg")
    implant_seg.inputs.op_string = "-thr %f -uthr %f -fmedian -fmedian" % (
        implant_low, implant_high)
    implant_seg.inputs.out_file = "implant.nii.gz"

    init_high_thresh = 150
    skin_low = -705
    skin_high = -8

    tissue_low = -1000

    create_head_mask = pe.Node(interface=fsl.ImageMaths(),
                               name="create_head_mask")
    create_head_mask.inputs.op_string = "-thr %f -bin -fillh26 -dilM -dilM -dilM -dilM" % tissue_low

    step1_remove_high_val = pe.Node(interface=fsl.ImageMaths(),
                                    name="step1_remove_high_val")
    step1_remove_high_val.inputs.op_string = "-uthr %f -fmedian" % init_high_thresh

    step2_seg_skin = pe.Node(interface=fsl.ImageMaths(), name="step2_seg_skin")
    step2_seg_skin.inputs.op_string = "-thr %f -uthr %f" % (skin_low,
                                                            skin_high)

    create_skin_mask = pe.Node(interface=fsl.MultiImageMaths(),
                               name="create_skin_mask")
    create_skin_mask.inputs.op_string = "-mul %s"
    create_skin_mask.inputs.out_file = "skin.nii.gz"

    merge_outputs = pe.Node(interface=util.Merge(5),
                            name='merge_for_dilate_subcortical_mask')

    workflow.connect([(inputnode, skull_seg, [("CT", "in_file")])])
    workflow.connect([(inputnode, spongiform_seg, [("CT", "in_file")])])
    workflow.connect([(inputnode, implant_seg, [("CT", "in_file")])])
    workflow.connect([(inputnode, instrument_seg, [("CT", "in_file")])])
    workflow.connect([(inputnode, step1_remove_high_val, [("CT", "in_file")])])
    workflow.connect([(inputnode, create_head_mask, [("CT", "in_file")])])
    workflow.connect([(step1_remove_high_val, step2_seg_skin, [("out_file",
                                                                "in_file")])])
    workflow.connect([(step2_seg_skin, create_skin_mask, [("out_file",
                                                           "in_file")])])
    workflow.connect([(create_head_mask, create_skin_mask,
                       [("out_file", "operand_files")])])
    workflow.connect([(create_skin_mask, outputnode, [("out_file", "skin_mask")
                                                      ])])
    workflow.connect([(skull_seg, outputnode, [("out_file", "skull_mask")])])
    workflow.connect([(spongiform_seg, outputnode, [("out_file",
                                                     "spongiform_mask")])])
    workflow.connect([(instrument_seg, outputnode, [("out_file",
                                                     "instrument_mask")])])
    workflow.connect([(implant_seg, outputnode, [("out_file", "implant_mask")])
                      ])
    workflow.connect([(create_skin_mask, merge_outputs, [("out_file", "in1")])
                      ])
    workflow.connect([(skull_seg, merge_outputs, [("out_file", "in2")])])
    workflow.connect([(instrument_seg, merge_outputs, [("out_file", "in3")])])
    workflow.connect([(implant_seg, merge_outputs, [("out_file", "in4")])])
    workflow.connect([(create_head_mask, merge_outputs, [("out_file", "in5")])
                      ])
    workflow.connect([(merge_outputs, outputnode, [("out", "volumes")])])

    return workflow
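
# Minimal usage sketch for the CT segmentation workflow above (the file name
# is a placeholder):
ct_seg = create_CT_seg_wf(name="CT_seg_wf")
ct_seg.inputs.inputnode.CT = "head_ct.nii.gz"
# ct_seg.run()  # requires FSL and a real CT volume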
Example #27
0
def create_anat_preproc(method='afni',
                        already_skullstripped=False,
                        c=None,
                        wf_name='anat_preproc'):
    """The main purpose of this workflow is to process T1 scans. Raw mprage file is deobliqued, reoriented
    into RPI and skullstripped. Also, a whole brain only mask is generated from the skull stripped image
    for later use in registration.

    Returns
    -------
    anat_preproc : workflow
        Anatomical Preprocessing Workflow

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/anat_preproc/anat_preproc.py>`_

    Workflow Inputs::
        inputspec.anat : string
            User input anatomical (T1) Image, in any of the 8 orientations

    Workflow Outputs::

        outputspec.refit : string
            Path to deobliqued anatomical image

        outputspec.reorient : string
            Path to RPI oriented anatomical image

        outputspec.skullstrip : string
            Path to skull-stripped, RPI-oriented mprage file with normalized intensities.

        outputspec.brain : string
            Path to skull-stripped RPI brain image with its original intensity values (not normalized or scaled).

    Order of commands:
    - Deobliqing the scans. ::
        3drefit -deoblique mprage.nii.gz

    - Re-orienting the image into Right-to-Left, Posterior-to-Anterior, Inferior-to-Superior (RPI) orientation ::
        3dresample -orient RPI
                   -prefix mprage_RPI.nii.gz
                   -inset mprage.nii.gz

    - Skull-Stripping the image ::
        Using AFNI ::
            3dSkullStrip -input mprage_RPI.nii.gz
                         -o_ply mprage_RPI_3dT.nii.gz
        or using BET ::
            bet mprage_RPI.nii.gz

    - The skull-stripping step modifies the intensity values. To get back the original intensity values, we do an element-wise product of the RPI data with the step function of the skull-stripped data ::
        3dcalc -a mprage_RPI.nii.gz
               -b mprage_RPI_3dT.nii.gz
               -expr 'a*step(b)'
               -prefix mprage_RPI_3dc.nii.gz

    High Level Workflow Graph:
    .. image:: ../images/anatpreproc_graph.dot.png
       :width: 500

    Detailed Workflow Graph:
    .. image:: ../images/anatpreproc_graph_detailed.dot.png
       :width: 500

    Examples
    --------
    >>> from CPAC.anat_preproc import create_anat_preproc
    >>> preproc = create_anat_preproc()
    >>> preproc.inputs.inputspec.anat = 'sub1/anat/mprage.nii.gz'
    >>> preproc.run() #doctest: +SKIP
    """

    preproc = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['anat', 'brain_mask']),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(
        fields=['refit', 'reorient', 'skullstrip', 'brain', 'brain_mask']),
                         name='outputspec')

    anat_deoblique = pe.Node(interface=afni.Refit(), name='anat_deoblique')
    anat_deoblique.inputs.deoblique = True
    preproc.connect(inputnode, 'anat', anat_deoblique, 'in_file')

    preproc.connect(anat_deoblique, 'out_file', outputnode, 'refit')
    # Disable non_local_means_filtering and n4_bias_field_correction when run niworkflows-ants
    if method == 'niworkflows-ants':
        c.non_local_means_filtering = False
        c.n4_bias_field_correction = False

    if c.non_local_means_filtering and c.n4_bias_field_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(denoise, 'output_image', n4, 'input_image')
    elif c.non_local_means_filtering and not c.n4_bias_field_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
    elif not c.non_local_means_filtering and c.n4_bias_field_correction:
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(anat_deoblique, 'out_file', n4, 'input_image')

    # Anatomical reorientation
    anat_reorient = pe.Node(interface=afni.Resample(), name='anat_reorient')
    anat_reorient.inputs.orientation = 'RPI'
    anat_reorient.inputs.outputtype = 'NIFTI_GZ'

    if c.n4_bias_field_correction:
        preproc.connect(n4, 'output_image', anat_reorient, 'in_file')
    elif c.non_local_means_filtering and not c.n4_bias_field_correction:
        preproc.connect(denoise, 'output_image', anat_reorient, 'in_file')
    else:
        preproc.connect(anat_deoblique, 'out_file', anat_reorient, 'in_file')

    preproc.connect(anat_reorient, 'out_file', outputnode, 'reorient')

    if already_skullstripped:

        anat_skullstrip = pe.Node(
            interface=util.IdentityInterface(fields=['out_file']),
            name='anat_skullstrip')

        preproc.connect(anat_reorient, 'out_file', anat_skullstrip, 'out_file')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'skullstrip')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'brain')

    else:

        if method == 'afni':
            # Skull-stripping using AFNI 3dSkullStrip
            inputnode_afni = pe.Node(util.IdentityInterface(fields=[
                'mask_vol', 'shrink_factor', 'var_shrink_fac',
                'shrink_fac_bot_lim', 'avoid_vent', 'niter', 'pushout',
                'touchup', 'fill_hole', 'avoid_eyes', 'use_edge', 'exp_frac',
                'smooth_final', 'push_to_edge', 'use_skull', 'perc_int',
                'max_inter_iter', 'blur_fwhm', 'fac', 'monkey'
            ]),
                                     name='AFNI_options')

            skullstrip_args = pe.Node(util.Function(
                input_names=[
                    'spat_norm', 'spat_norm_dxyz', 'mask_vol', 'shrink_fac',
                    'var_shrink_fac', 'shrink_fac_bot_lim', 'avoid_vent',
                    'niter', 'pushout', 'touchup', 'fill_hole', 'avoid_eyes',
                    'use_edge', 'exp_frac', 'smooth_final', 'push_to_edge',
                    'use_skull', 'perc_int', 'max_inter_iter', 'blur_fwhm',
                    'fac', 'monkey'
                ],
                output_names=['expr'],
                function=create_3dskullstrip_arg_string),
                                      name='anat_skullstrip_args')

            preproc.connect([(inputnode_afni, skullstrip_args,
                              [('mask_vol', 'mask_vol'),
                               ('shrink_factor', 'shrink_fac'),
                               ('var_shrink_fac', 'var_shrink_fac'),
                               ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'),
                               ('avoid_vent', 'avoid_vent'),
                               ('niter', 'niter'), ('pushout', 'pushout'),
                               ('touchup', 'touchup'),
                               ('fill_hole', 'fill_hole'),
                               ('avoid_eyes', 'avoid_eyes'),
                               ('use_edge', 'use_edge'),
                               ('exp_frac', 'exp_frac'),
                               ('smooth_final', 'smooth_final'),
                               ('push_to_edge', 'push_to_edge'),
                               ('use_skull', 'use_skull'),
                               ('perc_int', 'perc_int'),
                               ('max_inter_iter', 'max_inter_iter'),
                               ('blur_fwhm', 'blur_fwhm'), ('fac', 'fac'),
                               ('monkey', 'monkey')])])

            anat_skullstrip = pe.Node(interface=afni.SkullStrip(),
                                      name='anat_skullstrip')

            anat_skullstrip.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')
            preproc.connect(skullstrip_args, 'expr', anat_skullstrip, 'args')

            # Generate anatomical brain mask

            anat_brain_mask = pe.Node(interface=afni.Calc(),
                                      name='anat_brain_mask')

            anat_brain_mask.inputs.expr = 'step(a)'
            anat_brain_mask.inputs.outputtype = 'NIFTI_GZ'
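
            # In AFNI 3dcalc, step(a) evaluates to 1 wherever the skull-stripped
            # image is positive and 0 elsewhere, yielding a binary brain mask.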

            preproc.connect(anat_skullstrip, 'out_file', anat_brain_mask,
                            'in_file_a')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'
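
            # 'a*step(b)' keeps the reoriented head intensities (a) wherever the
            # mask volume (b) is positive, and zeroes everything else.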

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_brain_mask, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_brain_mask, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'fsl':
            # Skull-stripping using FSL BET
            inputnode_bet = pe.Node(util.IdentityInterface(fields=[
                'frac', 'mask_boolean', 'mesh_boolean', 'outline', 'padding',
                'radius', 'reduce_bias', 'remove_eyes', 'robust', 'skull',
                'surfaces', 'threshold', 'vertical_gradient'
            ]),
                                    name='BET_options')

            anat_skullstrip = pe.Node(interface=fsl.BET(),
                                      name='anat_skullstrip')
            anat_skullstrip.inputs.output_type = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')

            preproc.connect([(inputnode_bet, anat_skullstrip, [
                ('frac', 'frac'),
                ('mask_boolean', 'mask'),
                ('mesh_boolean', 'mesh'),
                ('outline', 'outline'),
                ('padding', 'padding'),
                ('radius', 'radius'),
                ('reduce_bias', 'reduce_bias'),
                ('remove_eyes', 'remove_eyes'),
                ('robust', 'robust'),
                ('skull', 'skull'),
                ('surfaces', 'surfaces'),
                ('threshold', 'threshold'),
                ('vertical_gradient', 'vertical_gradient'),
            ])])

            preproc.connect(anat_skullstrip, 'out_file', outputnode,
                            'skullstrip')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_skullstrip, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_skullstrip, 'mask_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'niworkflows-ants':
            # Skull-stripping using niworkflows-ants
            anat_skullstrip_ants = init_brain_extraction_wf(
                tpl_target_path=c.niworkflows_ants_template_path,
                tpl_mask_path=c.niworkflows_ants_mask_path,
                tpl_regmask_path=c.niworkflows_ants_regmask_path,
                name='anat_skullstrip_ants')

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip_ants,
                            'inputnode.in_files')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'skullstrip')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'brain')

            preproc.connect(anat_skullstrip_ants,
                            'atropos_wf.copy_xform.out_mask', outputnode,
                            'brain_mask')

        elif method == 'mask':

            brain_mask_deoblique = pe.Node(interface=afni.Refit(),
                                           name='brain_mask_deoblique')
            brain_mask_deoblique.inputs.deoblique = True
            preproc.connect(inputnode, 'brain_mask', brain_mask_deoblique,
                            'in_file')

            brain_mask_reorient = pe.Node(interface=afni.Resample(),
                                          name='brain_mask_reorient')
            brain_mask_reorient.inputs.orientation = 'RPI'
            brain_mask_reorient.inputs.outputtype = 'NIFTI_GZ'
            preproc.connect(brain_mask_deoblique, 'out_file',
                            brain_mask_reorient, 'in_file')

            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')
            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(brain_mask_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(brain_mask_reorient, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'unet':
            """
            UNet
            options (the values below are the defaults):
            input_slice: 3
            conv_block: 5
            kernel_root: 16
            rescale_dim: 256
            """
            # TODO: add options to pipeline_config
            train_model = UNet2d(dim_in=3, num_conv_block=5, kernel_root=16)
            unet_path = check_for_s3(c.unet_model)
            checkpoint = torch.load(unet_path, map_location={'cuda:0': 'cpu'})
            train_model.load_state_dict(checkpoint['state_dict'])
            model = nn.Sequential(train_model, nn.Softmax2d())
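            # wrapping the trained UNet with Softmax2d makes the node emit
            # per-voxel class probabilities rather than raw logits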

            # create a node called unet_mask
            unet_mask = pe.Node(util.Function(input_names=['model', 'cimg_in'],
                                              output_names=['out_path'],
                                              function=predict_volumes),
                                name='unet_mask')

            unet_mask.inputs.model = model
            preproc.connect(anat_reorient, 'out_file', unet_mask, 'cimg_in')
            """
            Revised mask with ANTs
            """
            # fslmaths <whole head> -mul <mask> brain.nii.gz
            unet_masked_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                        name='unet_masked_brain')
            unet_masked_brain.inputs.op_string = "-mul %s"
            preproc.connect(anat_reorient, 'out_file', unet_masked_brain,
                            'in_file')
            preproc.connect(unet_mask, 'out_path', unet_masked_brain,
                            'operand_files')

            # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
            # TODO change it to ANTs linear transform
            native_brain_to_template_brain = pe.Node(
                interface=fsl.FLIRT(), name='native_brain_to_template_brain')
            native_brain_to_template_brain.inputs.reference = c.template_brain_only_for_anat
            native_brain_to_template_brain.inputs.dof = 6
            native_brain_to_template_brain.inputs.interp = 'sinc'
            preproc.connect(unet_masked_brain, 'out_file',
                            native_brain_to_template_brain, 'in_file')

            # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
            # TODO change it to ANTs linear transform
            native_head_to_template_head = pe.Node(
                interface=fsl.FLIRT(), name='native_head_to_template_head')
            native_head_to_template_head.inputs.reference = c.template_skull_for_anat
            native_head_to_template_head.inputs.apply_xfm = True
            preproc.connect(anat_reorient, 'out_file',
                            native_head_to_template_head, 'in_file')
            preproc.connect(native_brain_to_template_brain, 'out_matrix_file',
                            native_head_to_template_head, 'in_matrix_file')

            # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
            template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(),
                                          name='template_brain_mask')
            template_brain_mask.inputs.in_file = c.template_brain_only_for_anat
            template_brain_mask.inputs.args = '-bin'

            # ANTS 3 -m  CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching  --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
            ants_template_head_to_template = pe.Node(
                interface=ants.Registration(),
                name='template_head_to_template')
            ants_template_head_to_template.inputs.metric = ['CC']
            # CC[fixed, moving, 1, 5] in the command above: metric weight 1, radius 5
            ants_template_head_to_template.inputs.metric_weight = [1]
            ants_template_head_to_template.inputs.radius_or_number_of_bins = [5]
            ants_template_head_to_template.inputs.moving_image = c.template_skull_for_anat
            ants_template_head_to_template.inputs.transforms = ['SyN']
            ants_template_head_to_template.inputs.transform_parameters = [
                (0.25, )
            ]
            ants_template_head_to_template.inputs.interpolation = 'NearestNeighbor'
            ants_template_head_to_template.inputs.number_of_iterations = [[
                60, 50, 20
            ]]
            ants_template_head_to_template.inputs.smoothing_sigmas = [[
                0.6, 0.2, 0.0
            ]]
            ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]]
            ants_template_head_to_template.inputs.convergence_threshold = [
                1.e-8
            ]
            preproc.connect(native_head_to_template_head, 'out_file',
                            ants_template_head_to_template, 'fixed_image')

            # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz
            template_head_transform_to_template = pe.Node(
                interface=ants.ApplyTransforms(),
                name='template_head_transform_to_template')
            template_head_transform_to_template.inputs.dimension = 3
            preproc.connect(template_brain_mask, 'out_file',
                            template_head_transform_to_template, 'input_image')
            preproc.connect(native_brain_to_template_brain, 'out_file',
                            template_head_transform_to_template,
                            'reference_image')
            preproc.connect(ants_template_head_to_template,
                            'forward_transforms',
                            template_head_transform_to_template, 'transforms')

            # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat
            invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm')
            invt.inputs.invert_xfm = True
            preproc.connect(native_brain_to_template_brain, 'out_matrix_file',
                            invt, 'in_file')

            # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat
            template_brain_to_native_brain = pe.Node(
                interface=fsl.FLIRT(), name='template_brain_to_native_brain')
            template_brain_to_native_brain.inputs.apply_xfm = True
            preproc.connect(template_head_transform_to_template,
                            'output_image', template_brain_to_native_brain,
                            'in_file')
            preproc.connect(unet_masked_brain, 'out_file',
                            template_brain_to_native_brain, 'reference')
            preproc.connect(invt, 'out_file', template_brain_to_native_brain,
                            'in_matrix_file')

            # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz
            refined_mask = pe.Node(interface=fsl.Threshold(),
                                   name='refined_mask')
            refined_mask.inputs.thresh = 0.5
            preproc.connect(template_brain_to_native_brain, 'out_file',
                            refined_mask, 'in_file')

            # get a new brain with mask
            refined_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                    name='refined_brain')
            refined_brain.inputs.op_string = "-mul %s"
            preproc.connect(anat_reorient, 'out_file', refined_brain,
                            'in_file')
            preproc.connect(refined_mask, 'out_file', refined_brain,
                            'operand_files')

            preproc.connect(refined_mask, 'out_file', outputnode, 'brain_mask')
            preproc.connect(refined_brain, 'out_file', outputnode, 'brain')

    return preproc
Example #28
0
# assumed enclosing node: ScanSource (configured below); the snippet starts mid-call
ScanSource = pe.Node(name='ScanSource',
                     interface=Function(input_names=[
                         'ADClist', 'TumorValueList', 'NormalValueList', 'tmln'
                     ],
                                        output_names=[
                                            'time1', 'TumorValue1',
                                            'NormalValue1', 'time2',
                                            'TumorValue2', 'NormalValue2'
                                        ],
                                        function=getScans))

ScanSource.inputs.tmln = tmln

outstring = '-add %s ' * (len(Tumor) - 1)
outstring = outstring[:-1]
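# e.g. with four tumor maps this builds '-add %s -add %s -add %s'
# (one -add per operand file beyond the first); the trailing space is stripped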

CreateTumorMask = pe.Node(interface=fsl.MultiImageMaths(),
                          name='CreateTumorMask')
CreateTumorMask.inputs.in_file = Tumor[0]
CreateTumorMask.inputs.operand_files = Tumor[1:]
CreateTumorMask.inputs.op_string = outstring

ThresholdTumor = pe.Node(interface=fsl.Threshold(), name='ThresholdTumor')
ThresholdTumor.inputs.thresh = 1

CreateNormalMask = pe.Node(interface=fsl.MultiImageMaths(),
                           name='CreateNormalMask')
CreateNormalMask.inputs.in_file = Normal[0]
CreateNormalMask.inputs.operand_files = Normal[1:]
CreateNormalMask.inputs.op_string = outstring

ThresholdNormal = pe.Node(interface=fsl.Threshold(), name='ThresholdNormal')
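
# A minimal sketch of how these nodes would typically be wired together; the
# workflow name and connections below are assumptions, not part of the original snippet.
MaskWorkflow = pe.Workflow(name='tumor_normal_masks')
MaskWorkflow.connect(CreateTumorMask, 'out_file', ThresholdTumor, 'in_file')
MaskWorkflow.connect(CreateNormalMask, 'out_file', ThresholdNormal, 'in_file')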
Example #29
0
def process_segment_map(wf_name, use_ants):
    """
    This is a sub-workflow used inside the segmentation workflow to process
    probability maps obtained from segmentation. Steps include overlapping
    the tissue prior with the probability maps, thresholding and binarizing
    them, and creating a mask that is used in further analysis.


    Parameters
    ----------
    wf_name : string
        Workflow name
    use_ants : boolean
        Whether ANTs (True) or FSL FLIRT (False) is used to bring the tissue
        prior from MNI space into T1 space

    Returns
    -------
    preproc : workflow
        Workflow Object for process_segment_map Workflow


    Notes
    -----

    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/seg_preproc/seg_preproc.py>`_ 
    

    Workflow Inputs::
    
        inputspec.brain : string (existing nifti file)
            Anatomical image (without skull)

        inputspec.standard2highres_mat : string (existing affine transformation .mat file)
            Path to the transformation matrix from MNI space to anatomical space

        inputspec.tissue_prior : string (existing nifti file)
            Path to the FSL standard tissue prior image

        inputspec.threshold : string (float)
            Threshold for the segmentation probability maps

        inputspec.probability_map : string (nifti file)
            Tissue probability map obtained from FSL FAST
        
    Workflow Outputs::

        outputspec.tissueprior_mni2t1 : string (nifti file)
            Path to the tissue prior template (originally in MNI space) registered to anatomical space

        outputspec.segment_combo : string (nifti file)
            Path to the image containing the overlap between the probability map and tissueprior_mni2t1

        outputspec.segment_bin : string (nifti file)
            Path to the image obtained by thresholding and binarizing segment_combo

        outputspec.segment_mask : string (nifti file)
            Path to the image obtained by masking segment_bin with the tissue prior in T1 space
        
        
    Order of commands:
 
    - Register tissue prior in MNI space to t1 space. 
    
    - Find overlap between segment probability map and tissue prior in t1 native space.
    
    - Threshold and binarize segment probability map 
    
    - Generate segment mask, by applying tissue prior in t1 space to thresholded binarized segment probability map

    
    High Level Graph:
    
    .. image:: ../images/process_segment_map.dot.png
        :width: 1100
        :height: 480
        
    Detailed Graph:
    
    .. image:: ../images/process_segment_map_detailed.dot.png
        :width: 1100
        :height: 480
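
    Example
    -------
    A minimal usage sketch; the file names below are placeholders.

    >>> from CPAC.seg_preproc.seg_preproc import process_segment_map  # doctest: +SKIP
    >>> seg = process_segment_map('seg_csf', use_ants=False)  # doctest: +SKIP
    >>> seg.inputs.inputspec.brain = 'brain.nii.gz'  # doctest: +SKIP
    >>> seg.inputs.inputspec.tissue_prior = 'csf_prior.nii.gz'  # doctest: +SKIP
    >>> seg.inputs.inputspec.probability_map = 'segment_prob_0.nii.gz'  # doctest: +SKIP
    >>> seg.inputs.inputspec.standard2highres_mat = 'standard2highres.mat'  # doctest: +SKIP
    >>> seg.inputs.inputspec.threshold = 0.95  # doctest: +SKIP
    >>> seg.run()  # doctest: +SKIP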
        
    """

    import nipype.interfaces.utility as util

    preproc = pe.Workflow(name=wf_name)

    inputNode = pe.Node(util.IdentityInterface(fields=[
        'tissue_prior', 'threshold', 'brain', 'probability_map',
        'standard2highres_init', 'standard2highres_mat', 'standard2highres_rig'
    ]),
                        name='inputspec')

    outputNode = pe.Node(util.IdentityInterface(fields=[
        'tissueprior_mni2t1', 'segment_combo', 'segment_bin', 'segment_mask'
    ]),
                         name='outputspec')

    def form_threshold_string(threshold):
        return '-thr %f -bin ' % (threshold)
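    # e.g. form_threshold_string(0.95) returns '-thr 0.950000 -bin '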

    if use_ants:

        collect_linear_transforms = pe.Node(
            util.Merge(3), name='%s_collect_linear_transforms' % (wf_name))

        tissueprior_mni_to_t1 = pe.Node(interface=ants.ApplyTransforms(),
                                        name='%s_prior_mni_to_t1' % (wf_name))
        tissueprior_mni_to_t1.inputs.invert_transform_flags = [
            True, True, True
        ]
        tissueprior_mni_to_t1.inputs.interpolation = 'NearestNeighbor'

        overlap_segmentmap_with_prior = pe.Node(
            interface=fsl.MultiImageMaths(),
            name='overlap_%s_map_with_prior' % (wf_name))
        overlap_segmentmap_with_prior.inputs.op_string = '-mas %s '
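        # fslmaths '-mas' masks the probability map with the tissue prior
        # resampled into T1 space (the operand file connected below)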

        binarize_threshold_segmentmap = pe.Node(interface=fsl.ImageMaths(),
                                                name='binarize_threshold_%s' %
                                                (wf_name))

        segment_mask = pe.Node(interface=fsl.MultiImageMaths(),
                               name='%s_mask' % (wf_name))
        segment_mask.inputs.op_string = '-mas %s '

        #mni to t1
        preproc.connect(inputNode, 'tissue_prior', tissueprior_mni_to_t1,
                        'input_image')

        preproc.connect(inputNode, 'brain', tissueprior_mni_to_t1,
                        'reference_image')

        preproc.connect(inputNode, 'standard2highres_init',
                        collect_linear_transforms, 'in1')
        preproc.connect(inputNode, 'standard2highres_rig',
                        collect_linear_transforms, 'in2')
        preproc.connect(inputNode, 'standard2highres_mat',
                        collect_linear_transforms, 'in3')

        preproc.connect(collect_linear_transforms, 'out',
                        tissueprior_mni_to_t1, 'transforms')

        #overlapping
        preproc.connect(inputNode, 'probability_map',
                        overlap_segmentmap_with_prior, 'in_file')
        preproc.connect(tissueprior_mni_to_t1, 'output_image',
                        overlap_segmentmap_with_prior, 'operand_files')

        #binarize
        preproc.connect(overlap_segmentmap_with_prior, 'out_file',
                        binarize_threshold_segmentmap, 'in_file')
        preproc.connect(inputNode, ('threshold', form_threshold_string),
                        binarize_threshold_segmentmap, 'op_string')

        #create segment mask
        preproc.connect(binarize_threshold_segmentmap, 'out_file',
                        segment_mask, 'in_file')
        preproc.connect(tissueprior_mni_to_t1, 'output_image', segment_mask,
                        'operand_files')

        #connect to output nodes
        preproc.connect(tissueprior_mni_to_t1, 'output_image', outputNode,
                        'tissueprior_mni2t1')

        preproc.connect(overlap_segmentmap_with_prior, 'out_file', outputNode,
                        'segment_combo')

        preproc.connect(binarize_threshold_segmentmap, 'out_file', outputNode,
                        'segment_bin')

        preproc.connect(segment_mask, 'out_file', outputNode, 'segment_mask')

    else:

        tissueprior_mni_to_t1 = pe.Node(interface=fsl.FLIRT(),
                                        name='%s_prior_mni_to_t1' % (wf_name))
        tissueprior_mni_to_t1.inputs.apply_xfm = True
        tissueprior_mni_to_t1.inputs.interp = 'nearestneighbour'

        overlap_segmentmap_with_prior = pe.Node(
            interface=fsl.MultiImageMaths(),
            name='overlap_%s_map_with_prior' % (wf_name))
        overlap_segmentmap_with_prior.inputs.op_string = '-mas %s '

        binarize_threshold_segmentmap = pe.Node(interface=fsl.ImageMaths(),
                                                name='binarize_threshold_%s' %
                                                (wf_name))

        segment_mask = pe.Node(interface=fsl.MultiImageMaths(),
                               name='%s_mask' % (wf_name))
        segment_mask.inputs.op_string = '-mas %s '

        #mni to t1
        preproc.connect(inputNode, 'tissue_prior', tissueprior_mni_to_t1,
                        'in_file')
        preproc.connect(inputNode, 'brain', tissueprior_mni_to_t1, 'reference')
        preproc.connect(inputNode, 'standard2highres_mat',
                        tissueprior_mni_to_t1, 'in_matrix_file')

        #overlapping
        preproc.connect(inputNode, 'probability_map',
                        overlap_segmentmap_with_prior, 'in_file')
        preproc.connect(tissueprior_mni_to_t1, 'out_file',
                        overlap_segmentmap_with_prior, 'operand_files')

        #binarize
        preproc.connect(overlap_segmentmap_with_prior, 'out_file',
                        binarize_threshold_segmentmap, 'in_file')
        preproc.connect(inputNode, ('threshold', form_threshold_string),
                        binarize_threshold_segmentmap, 'op_string')

        #create segment mask
        preproc.connect(binarize_threshold_segmentmap, 'out_file',
                        segment_mask, 'in_file')
        preproc.connect(tissueprior_mni_to_t1, 'out_file', segment_mask,
                        'operand_files')

        #connect to output nodes
        preproc.connect(tissueprior_mni_to_t1, 'out_file', outputNode,
                        'tissueprior_mni2t1')
        preproc.connect(overlap_segmentmap_with_prior, 'out_file', outputNode,
                        'segment_combo')
        preproc.connect(binarize_threshold_segmentmap, 'out_file', outputNode,
                        'segment_bin')
        preproc.connect(segment_mask, 'out_file', outputNode, 'segment_mask')

    return preproc
Example #30
0
def fmri_cleanup_wf(wf_name="fmri_cleanup"):
    """ Run the resting-state fMRI pre-processing workflow against the rest files in `data_dir`.

    Tasks:
    - Trim first 6 volumes of the rs-fMRI file.
    - Slice Timing correction.
    - Motion and nuisance correction.
    - Calculate brain mask in fMRI space.
    - Bandpass frequency filtering for resting-state fMRI.
    - Smoothing.
    - Tissue maps co-registration to fMRI space.

    Parameters
    ----------
    wf_name: str

    Nipype Inputs
    -------------
    rest_input.in_file: traits.File
        The resting-state fMRI file.

    rest_input.anat: traits.File
        Path to the high-contrast anatomical image.

    rest_input.tissues: list of traits.File
        Paths to the tissue segmentations in anatomical space.
        Expected to have this order: GM, WM and CSF.

    rest_input.highpass_freq: traits.Float
        Band-pass timeseries filter lower bound in Hz.

    rest_input.lowpass_freq: traits.Float
        Band-pass timeseries filter upper bound in Hz.

    Nipype Outputs
    --------------
    rest_output.smooth: traits.File
        The isotropically smoothed time filtered nuisance corrected image.

    rest_output.nuis_corrected: traits.File
        The nuisance corrected fMRI file.

    rest_output.motion_params: traits.File
        The affine transformation file.

    rest_output.time_filtered: traits.File
        The bandpass time filtered fMRI file.

    rest_output.epi_brain_mask: traits.File
        An estimated brain mask from mean EPI volume.

    rest_output.tissues_brain_mask: traits.File
        A brain mask calculated from the addition of coregistered
        GM, WM and CSF segmentation volumes from the anatomical
        segmentation.

    rest_output.tissues: list of traits.File
        The tissues segmentation volume in fMRI space.
        Expected to have this order: GM, WM and CSF.

    rest_output.anat: traits.File
        The T1w image in fMRI space.

    rest_output.avg_epi: traits.File
        The average EPI image in fMRI space after slice-time and motion correction.

    rest_output.motion_regressors: traits.File

    rest_output.compcor_regressors: traits.File

    rest_output.art_displacement_files
        One image file containing the voxel-displacement timeseries.

    rest_output.art_intensity_files
        One file containing the global intensity values determined from the brainmask.

    rest_output.art_norm_files
        One file containing the composite norm.

    rest_output.art_outlier_files
         One file containing a list of 0-based indices corresponding to outlier volumes.

    rest_output.art_plot_files
        One image file containing the detected outliers.

    rest_output.art_statistic_files
        One file containing information about the different types of artifacts and, if design info is
        provided, details of stimulus-correlated motion and a listing of artifacts by event type.

    Returns
    -------
    wf: nipype Workflow
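
    Example
    -------
    A minimal usage sketch; the input paths are placeholders and the
    neuro_pypes configuration is assumed to be set up.

    >>> wf = fmri_cleanup_wf('fmri_cleanup')  # doctest: +SKIP
    >>> wf.inputs.rest_input.in_file = 'rest.nii.gz'  # doctest: +SKIP
    >>> wf.inputs.rest_input.anat = 'anat.nii.gz'  # doctest: +SKIP
    >>> wf.inputs.rest_input.tissues = ['gm.nii.gz', 'wm.nii.gz', 'csf.nii.gz']  # doctest: +SKIP
    >>> wf.inputs.rest_input.highpass_freq = 0.01  # doctest: +SKIP
    >>> wf.inputs.rest_input.lowpass_freq = 0.1  # doctest: +SKIP
    >>> wf.run()  # doctest: +SKIP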
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = [
        "in_file",
        "anat",
        "atlas_anat",
        "coreg_target",
        "tissues",
        "lowpass_freq",
        "highpass_freq",
    ]

    out_fields = [
        "motion_corrected",
        "motion_params",
        "tissues",
        "anat",
        "avg_epi",
        "time_filtered",
        "smooth",
        "tsnr_file",
        "epi_brain_mask",
        "tissues_brain_mask",
        "motion_regressors",
        "compcor_regressors",
        "gsr_regressors",
        "nuis_corrected",
        "art_displacement_files",
        "art_intensity_files",
        "art_norm_files",
        "art_outlier_files",
        "art_plot_files",
        "art_statistic_files",
    ]

    # input identities
    rest_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True),
                            name="rest_input")

    # rs-fMRI preprocessing nodes
    trim = setup_node(Trim(), name="trim")

    stc_wf = auto_spm_slicetime()
    realign = setup_node(nipy_motion_correction(), name='realign')

    # average
    average = setup_node(
        Function(
            function=mean_img,
            input_names=["in_file"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']
        ),
        name='average_epi'
    )

    mean_gunzip = setup_node(Gunzip(), name="mean_gunzip")

    # co-registration nodes
    coreg = setup_node(spm_coregister(cost_function="mi"), name="coreg_fmri")
    brain_sel = setup_node(Select(index=[0, 1, 2]), name="brain_sel")

    # brain mask made with EPI
    epi_mask = setup_node(ComputeMask(), name='epi_mask')

    # brain mask made with the merge of the tissue segmentations
    tissue_mask = setup_node(fsl.MultiImageMaths(), name='tissue_mask')
    tissue_mask.inputs.op_string = "-add %s -add %s -abs -kernel gauss 4 -dilM -ero -kernel gauss 1 -dilM -bin"
    tissue_mask.inputs.out_file = "tissue_brain_mask.nii.gz"
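    # The op_string adds the WM and CSF maps to the GM map, takes the absolute
    # value, dilates and erodes with Gaussian kernels (4 then 1) to close holes,
    # and binarizes the result into a single brain mask.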

    # select tissues
    gm_select = setup_node(Select(index=[0]), name="gm_sel")
    wmcsf_select = setup_node(Select(index=[1, 2]), name="wmcsf_sel")

    # noise filter
    noise_wf = rest_noise_filter_wf()
    wm_select = setup_node(Select(index=[1]), name="wm_sel")
    csf_select = setup_node(Select(index=[2]), name="csf_sel")

    # bandpass filtering
    bandpass = setup_node(
        Function(
            input_names=['files', 'lowpass_freq', 'highpass_freq', 'tr'],
            output_names=['out_files'],
            function=bandpass_filter
        ),
        name='bandpass'
    )

    # smooth
    smooth = setup_node(
        Function(
            function=smooth_img,
            input_names=["in_file", "fwhm"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']
        ),
        name="smooth"
    )
    smooth.inputs.fwhm = get_config_setting('fmri_smooth.fwhm', default=8)
    smooth.inputs.out_file = "smooth_std_{}.nii.gz".format(wf_name)

    # output identities
    rest_output = setup_node(IdentityInterface(fields=out_fields), name="rest_output")

    # Connect the nodes
    wf.connect([
        # trim
        (rest_input, trim, [("in_file", "in_file")]),

        # slice time correction
        (trim, stc_wf, [("out_file", "stc_input.in_file")]),

        # motion correction
        (stc_wf, realign, [("stc_output.timecorrected_files", "in_file")]),

        # coregistration target
        (realign, average, [("out_file", "in_file")]),
        (average, mean_gunzip, [("out_file", "in_file")]),
        (mean_gunzip, coreg, [("out_file", "target")]),

        # epi brain mask
        (average, epi_mask, [("out_file", "mean_volume")]),

        # coregistration
        (rest_input, coreg, [("anat", "source")]),
        (rest_input, brain_sel, [("tissues", "inlist")]),
        (brain_sel, coreg, [(("out", flatten_list), "apply_to_files")]),

        # tissue brain mask
        (coreg, gm_select, [("coregistered_files", "inlist")]),
        (coreg, wmcsf_select, [("coregistered_files", "inlist")]),
        (gm_select, tissue_mask, [(("out", flatten_list), "in_file")]),
        (wmcsf_select, tissue_mask, [(("out", flatten_list), "operand_files")]),

        # nuisance correction
        (coreg, wm_select, [("coregistered_files", "inlist",)]),
        (coreg, csf_select, [("coregistered_files", "inlist",)]),
        (realign, noise_wf, [("out_file", "rest_noise_input.in_file",)]),
        (tissue_mask, noise_wf, [("out_file", "rest_noise_input.brain_mask")]),
        (wm_select, noise_wf, [(("out", flatten_list), "rest_noise_input.wm_mask")]),
        (csf_select, noise_wf, [(("out", flatten_list), "rest_noise_input.csf_mask")]),

        (realign, noise_wf, [("par_file", "rest_noise_input.motion_params",)]),

        # temporal filtering
        (noise_wf, bandpass, [("rest_noise_output.nuis_corrected", "files")]),
        # (realign,     bandpass,    [("out_file", "files")]),
        (stc_wf, bandpass, [("stc_output.time_repetition", "tr")]),
        (rest_input, bandpass, [
            ("lowpass_freq", "lowpass_freq"),
            ("highpass_freq", "highpass_freq"),
        ]),
        (bandpass, smooth, [("out_files", "in_file")]),

        # output
        (epi_mask, rest_output, [("brain_mask", "epi_brain_mask")]),
        (tissue_mask, rest_output, [("out_file", "tissues_brain_mask")]),
        (realign, rest_output, [
            ("out_file", "motion_corrected"),
            ("par_file", "motion_params"),
        ]),
        (coreg, rest_output, [
            ("coregistered_files", "tissues"),
            ("coregistered_source", "anat"),
        ]),
        (noise_wf, rest_output, [
            ("rest_noise_output.motion_regressors", "motion_regressors"),
            ("rest_noise_output.compcor_regressors", "compcor_regressors"),
            ("rest_noise_output.gsr_regressors", "gsr_regressors"),
            ("rest_noise_output.nuis_corrected", "nuis_corrected"),
            ("rest_noise_output.tsnr_file", "tsnr_file"),
            ("rest_noise_output.art_displacement_files", "art_displacement_files"),
            ("rest_noise_output.art_intensity_files", "art_intensity_files"),
            ("rest_noise_output.art_norm_files", "art_norm_files"),
            ("rest_noise_output.art_outlier_files", "art_outlier_files"),
            ("rest_noise_output.art_plot_files", "art_plot_files"),
            ("rest_noise_output.art_statistic_files", "art_statistic_files"),
        ]),
        (average, rest_output, [("out_file", "avg_epi")]),
        (bandpass, rest_output, [("out_files", "time_filtered")]),
        (smooth, rest_output, [("out_file", "smooth")]),
    ])

    return wf