Example #1
def init_prepare_epi_wf(omp_nthreads, matched_pe=False,
                        name="prepare_epi_wf"):
    """
    Prepare opposed-PE EPI images for PE-POLAR SDC.

    This workflow takes in a set of EPI files and returns two 3D volumes with
    matching and opposed PE directions, ready to be used in field distortion
    estimation.

    The procedure involves: estimation of a robust template using FreeSurfer's
    ``mri_robust_template``, bias field correction using ANTs ``N4BiasFieldCorrection``
    and AFNI ``3dUnifize``, skull-stripping using FSL BET and AFNI ``3dAutomask``,
    and rigid coregistration to the reference using ANTs.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.pepolar import init_prepare_epi_wf
            wf = init_prepare_epi_wf(omp_nthreads=8)

    Parameters
    ----------
    matched_pe : bool
        Whether the input ``maps_pe`` will contain images with matched
        PE blips or not. Please use :func:`sdcflows.workflows.pepolar.check_pes`
        to determine whether matched PE blips are present.
    name : str
        Name for this workflow
    omp_nthreads : int
        Parallelize internal tasks across the number of CPUs given by this option.

    Inputs
    ------
    epi_pe : str
        Phase-encoding direction of the EPI image to be corrected.
    maps_pe : list of tuple(pathlike, str)
        List of 3D or 4D NIfTI images with their PE directions.
    ref_brain : pathlike
        Coregistration reference (skull-stripped and bias field corrected).

    Outputs
    -------
    opposed_pe : pathlike
        single 3D NIfTI file
    matched_pe : pathlike
        single 3D NIfTI file

    """
    inputnode = pe.Node(niu.IdentityInterface(fields=['epi_pe', 'maps_pe', 'ref_brain']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['opposed_pe', 'matched_pe']),
                         name='outputnode')

    ants_settings = pkgr.resource_filename('sdcflows',
                                           'data/translation_rigid.json')

    split = pe.Node(niu.Function(function=_split_epi_lists), name='split')

    merge_op = pe.Node(
        StructuralReference(auto_detect_sensitivity=True,
                            initial_timepoint=1,
                            fixed_timepoint=True,  # Align to first image
                            intensity_scaling=True,
                            # 7-DOF (rigid + intensity)
                            no_iteration=True,
                            subsample_threshold=200,
                            out_file='template.nii.gz'),
        name='merge_op')

    ref_op_wf = init_enhance_and_skullstrip_bold_wf(
        omp_nthreads=omp_nthreads, name='ref_op_wf')

    op2ref_reg = pe.Node(ants.Registration(
        from_file=ants_settings, output_warped_image=True),
        name='op2ref_reg', n_procs=omp_nthreads)

    workflow = Workflow(name=name)
    workflow.connect([
        (inputnode, split, [('maps_pe', 'in_files'),
                            ('epi_pe', 'pe_dir')]),
        (split, merge_op, [(('out', _front), 'in_files')]),
        (merge_op, ref_op_wf, [('out_file', 'inputnode.in_file')]),
        (ref_op_wf, op2ref_reg, [
            ('outputnode.skull_stripped_file', 'moving_image')]),
        (inputnode, op2ref_reg, [('ref_brain', 'fixed_image')]),
        (op2ref_reg, outputnode, [('warped_image', 'opposed_pe')]),
    ])

    if not matched_pe:
        workflow.connect([
            (inputnode, outputnode, [('ref_brain', 'matched_pe')]),
        ])
        return workflow

    merge_ma = pe.Node(
        StructuralReference(auto_detect_sensitivity=True,
                            initial_timepoint=1,
                            fixed_timepoint=True,  # Align to first image
                            intensity_scaling=True,
                            # 7-DOF (rigid + intensity)
                            no_iteration=True,
                            subsample_threshold=200,
                            out_file='template.nii.gz'),
        name='merge_ma')

    ref_ma_wf = init_enhance_and_skullstrip_bold_wf(
        omp_nthreads=omp_nthreads, name='ref_ma_wf')

    ma2ref_reg = pe.Node(ants.Registration(
        from_file=ants_settings, output_warped_image=True),
        name='ma2ref_reg', n_procs=omp_nthreads)

    workflow.connect([
        (split, merge_ma, [(('out', _last), 'in_files')]),
        (merge_ma, ref_ma_wf, [('out_file', 'inputnode.in_file')]),
        (ref_ma_wf, ma2ref_reg, [
            ('outputnode.skull_stripped_file', 'moving_image')]),
        (inputnode, ma2ref_reg, [('ref_brain', 'fixed_image')]),
        (ma2ref_reg, outputnode, [('warped_image', 'matched_pe')]),
    ])
    return workflow
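
# A minimal usage sketch (hypothetical file names, not part of the original
# example): wire the opposed-PE EPI maps and the skull-stripped reference into
# the workflow inputs before running.
prep_wf = init_prepare_epi_wf(omp_nthreads=4, matched_pe=True)
prep_wf.inputs.inputnode.epi_pe = 'j-'
prep_wf.inputs.inputnode.maps_pe = [('epi_ap.nii.gz', 'j-'),
                                    ('epi_pa.nii.gz', 'j')]
prep_wf.inputs.inputnode.ref_brain = 'ref_brain.nii.gz'
prep_wf.run()  # doctest: +SKIP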
Example #2
def prepare_b0(num_b0s, low_bval=5, name='prepare_b0'):
    """
    Create a pipeline that prepares the data for further corrections. This
    pipeline coregisters the B0 images and then averages them in order to
    obtain a single average B0 image.
    The b-vectors and b-values are updated according to the modifications.

    Args:
        num_b0s (int): Number of b0 in the DWI dataset
        low_bval (Optional[int]): b-value threshold (in s/mm²) below which a
            volume is considered a b0 (default: 5).
        name (Optional[str]): Name of the workflow

    Inputnode:
        in_dwi (str): Input DWI file.
        in_bvec (str): Vector file of the diffusion directions
            of the dwi dataset.
        in_bval (str): B-values file.

    Outputnode:
        dwi_b0_merge: Average of B0 images merged to the DWIs
        b0_reference: Average of the B0 images or the only B0 image
        out_bvec: Updated gradient vectors table
        out_bvals: Updated gradient values table
        mask_b0: Binary mask obtained from the average of the B0 images

    Returns:
        The workflow
    """
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe
    from nipype.workflows.dmri.fsl.utils import b0_average

    from clinica.utils.dwi import b0_dwi_split, insert_b0_into_dwi

    inputnode = pe.Node(interface=niu.IdentityInterface(
        fields=["in_dwi", "in_bvec", "in_bval"]),
        name="inputnode")

    b0_dwi_split = pe.Node(niu.Function(
        input_names=['in_file', 'in_bvals', 'in_bvecs', 'low_bval'],
        output_names=['out_b0', 'out_dwi', 'out_bvals', 'out_bvecs'],
        function=b0_dwi_split), name='b0_dwi_split')
    b0_dwi_split.inputs.low_bval = low_bval

    b0_flirt = b0_flirt_pipeline(num_b0s=num_b0s, name='b0_co_registration')

    b0_avg = pe.Node(niu.Function(
        input_names=['in_file'], output_names=['out_file'],
        function=b0_average), name='b0_average')

    mask_b0 = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
                      name='mask_b0')

    insert_b0_into_dwi = pe.Node(niu.Function(
        input_names=['in_b0', 'in_dwi', 'in_bvals', 'in_bvecs'],
        output_names=['out_dwi', 'out_bvals', 'out_bvecs'],
        function=insert_b0_into_dwi), name='insert_b0avg_into_dwi')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['mask_b0', 'b0_reference', 'dwi_b0_merge',
                'out_bvecs', 'out_bvals']),
        name='outputnode')

    wf = pe.Workflow(name=name)

    if num_b0s == 1:
        wf.connect([
            # Split dataset into two datasets (b0, b>low_bval)
            (inputnode,          b0_dwi_split, [('in_bval',       'in_bvals'),
                                                ('in_bvec',       'in_bvecs'),
                                                ('in_dwi',       'in_file')]),
            # Merge datasets such that bval(DWI) = (0 b1 ... bn)
            (b0_dwi_split, insert_b0_into_dwi, [('out_b0',           'in_b0'),
                                                ('out_dwi',         'in_dwi'),
                                                ('out_bvals',     'in_bvals'),
                                                ('out_bvecs',   'in_bvecs')]),
            # Compute b0 mask
            (b0_dwi_split,            mask_b0, [('out_b0',       'in_file')]),
            # Outputnode
            (insert_b0_into_dwi,   outputnode, [('out_dwi',   'dwi_b0_merge'),
                                                ('out_bvals',    'out_bvals'),
                                                ('out_bvecs',  'out_bvecs')]),
            (mask_b0,              outputnode, [('mask_file',    'mask_b0')]),
            (b0_dwi_split,         outputnode, [('out_b0',  'b0_reference')])
        ])
    elif num_b0s > 1:
        wf.connect([
            # Split dataset into two datasets (b0s, b>low_bval)
            (inputnode,             b0_dwi_split, [('in_bval',              'in_bvals'),  # noqa
                                                   ('in_bvec',              'in_bvecs'),  # noqa
                                                   ('in_dwi',              'in_file')]),  # noqa
            # Register the b0 onto the first b0
            (b0_dwi_split,              b0_flirt, [('out_b0',    'inputnode.in_file')]),  # noqa
            # Average the b0s
            (b0_flirt,                    b0_avg, [('outputnode.out_file', 'in_file')]),  # noqa
            # Compute b0 mask from b0avg
            (b0_avg,                     mask_b0, [('out_file',            'in_file')]),  # noqa
            # Merge datasets such that bval(DWI) = (0 b1 ... bn)
            (b0_avg,          insert_b0_into_dwi, [('out_file',              'in_b0')]),  # noqa
            (b0_dwi_split,    insert_b0_into_dwi, [('out_dwi',                'in_dwi'),  # noqa
                                                   ('out_bvals',            'in_bvals'),  # noqa
                                                   ('out_bvecs',          'in_bvecs')]),  # noqa
            # Outputnode
            (insert_b0_into_dwi,      outputnode, [('out_dwi',          'dwi_b0_merge'),  # noqa
                                                   ('out_bvals',           'out_bvals'),  # noqa
                                                   ('out_bvecs',         'out_bvecs')]),  # noqa
            (mask_b0,                 outputnode, [('mask_file',           'mask_b0')]),  # noqa
            (b0_avg,                  outputnode, [('out_file',       'b0_reference')])   # noqa
        ])
    else:
        raise ValueError('The number of b0s should be strictly positive.')

    return wf
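
# A minimal usage sketch (placeholder paths, not part of the original
# example): prepare a DWI dataset containing three b0 volumes.
b0_prep = prepare_b0(num_b0s=3, low_bval=5)
b0_prep.inputs.inputnode.in_dwi = 'dwi.nii.gz'
b0_prep.inputs.inputnode.in_bvec = 'dwi.bvec'
b0_prep.inputs.inputnode.in_bval = 'dwi.bval'
b0_prep.run()  # doctest: +SKIP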
Example #3
def epi_pipeline(name='susceptibility_distortion_correction_using_t1'):
    """
    This workflow corrects for echo-planar imaging (EPI)-induced susceptibility
    artifacts without a fieldmap (e.g. ADNI database) by elastically registering
    DWIs to their respective baseline T1-weighted structural scans using an
    inverse-consistent registration algorithm with a mutual information cost
    function (SyN algorithm). This workflow also coregisters DWIs with their
    respective baseline T1-weighted structural scans in order to later combine
    tracts and cortical parcellations.

    ..  warning:: This workflow rotates the `b`-vectors.

    .. admonition:: References

      .. Nir et al. (Neurobiology of Aging 2015) - Connectivity network measures
        predict volumetric atrophy in mild cognitive impairment.

      .. Leow et al. (IEEE Trans Med Imaging 2007) - Statistical Properties of
        Jacobian Maps and the Realization of Unbiased Large Deformation
        Nonlinear Image Registration.
    Example
    -------
    >>> epi = epi_pipeline()
    >>> epi.inputs.inputnode.DWI = 'DWI.nii'
    >>> epi.inputs.inputnode.bvec = 'bvec.txt'
    >>> epi.inputs.inputnode.T1 = 'T1.nii'
    >>> epi.run() # doctest: +SKIP
    """

    from clinica.pipelines.dwi_preprocessing_using_t1.dwi_preprocessing_using_t1_utils import (
        create_jacobian_determinant_image, change_itk_transform_type,
        expend_matrix_list, rotate_bvecs, ants_registration_syn_quick,
        ants_warp_image_multi_transform, ants_combin_transform)
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.c3 as c3

    inputnode = pe.Node(niu.IdentityInterface(fields=['T1', 'DWI', 'bvec']), name='inputnode')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    pick_ref = pe.Node(niu.Select(), name='Pick_b0')
    pick_ref.inputs.index = [0]

    flirt_b0_2_T1 = pe.Node(interface=fsl.FLIRT(dof=6), name='flirt_B0_2_T1')
    flirt_b0_2_T1.inputs.interp = "spline"
    flirt_b0_2_T1.inputs.cost = 'normmi'
    flirt_b0_2_T1.inputs.cost_func = 'normmi'

    apply_xfm = pe.Node(interface=fsl.preprocess.ApplyXFM(), name='apply_xfm')
    apply_xfm.inputs.apply_xfm = True

    expend_matrix = pe.Node(
            interface=niu.Function(
                input_names=['in_matrix', 'in_bvec'],
                output_names=['out_matrix_list'],
                function=expend_matrix_list),
            name='expend_matrix')

    rot_bvec = pe.Node(
            niu.Function(
                input_names=['in_matrix', 'in_bvec'],
                output_names=['out_file'],
                function=rotate_bvecs),
            name='Rotate_Bvec')

    antsRegistrationSyNQuick = pe.Node(
            interface=niu.Function(
                input_names=['fix_image', 'moving_image'],
                output_names=['image_warped',
                              'affine_matrix',
                              'warp',
                              'inverse_warped',
                              'inverse_warp'],
                function=ants_registration_syn_quick),
            name='antsRegistrationSyNQuick')

    c3d_flirt2ants = pe.Node(c3.C3dAffineTool(), name='fsl_reg_2_itk')
    c3d_flirt2ants.inputs.itk_transform = True
    c3d_flirt2ants.inputs.fsl2ras = True

    change_transform = pe.Node(niu.Function(
            input_names=['input_affine_file'],
            output_names=['updated_affine_file'],
            function=change_itk_transform_type),
            name='change_transform_type')

    merge_transform = pe.Node(niu.Merge(3), name='MergeTransforms')

    apply_transform = pe.MapNode(interface=niu.Function(input_names=['fix_image', 'moving_image', 'ants_warp_affine'],
                                                        output_names=['out_warp_field', 'out_warped'],
                                                        function=ants_combin_transform),
                                 iterfield=['moving_image'],
                                 name='warp_field')

    jacobian = pe.MapNode(interface=niu.Function(input_names=['imageDimension', 'deformationField', 'outputImage'],
                                                 output_names=['outputImage'],
                                                 function=create_jacobian_determinant_image),
                          iterfield=['deformationField'],
                          name='jacobian')

    jacobian.inputs.imageDimension = 3
    jacobian.inputs.outputImage = 'Jacobian_image.nii.gz'

    jacmult = pe.MapNode(fsl.MultiImageMaths(op_string='-mul %s'),
                         iterfield=['in_file', 'operand_files'],
                         name='ModulateDWIs')

    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')

    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')

    outputnode = pe.Node(niu.IdentityInterface(fields=['DWI_2_T1_Coregistration_matrix',
                                                       'epi_correction_deformation_field',
                                                       'epi_correction_affine_transform',
                                                       'epi_correction_image_warped',
                                                       'DWIs_epicorrected',
                                                       'warp_epi',
                                                       'out_bvec'
                                                       ]), name='outputnode')

    wf = pe.Workflow(name=name)

    wf.connect([(inputnode, split, [('DWI', 'in_file')])])
    wf.connect([(split, pick_ref, [('out_files', 'inlist')])])
    wf.connect([(pick_ref, flirt_b0_2_T1, [('out', 'in_file')])])
    wf.connect([(inputnode, flirt_b0_2_T1, [('T1', 'reference')])])
    wf.connect([(inputnode, rot_bvec, [('bvec', 'in_bvec')])])
    wf.connect([(flirt_b0_2_T1, expend_matrix, [('out_matrix_file', 'in_matrix')])])
    wf.connect([(inputnode, expend_matrix, [('bvec', 'in_bvec')])])
    wf.connect([(expend_matrix, rot_bvec, [('out_matrix_list', 'in_matrix')])])
    wf.connect([(inputnode, antsRegistrationSyNQuick, [('T1', 'fix_image')])])
    wf.connect([(flirt_b0_2_T1, antsRegistrationSyNQuick, [('out_file', 'moving_image')])])

    wf.connect([(inputnode, c3d_flirt2ants, [('T1', 'reference_file')])])
    wf.connect([(pick_ref, c3d_flirt2ants, [('out', 'source_file')])])
    wf.connect([(flirt_b0_2_T1, c3d_flirt2ants, [('out_matrix_file', 'transform_file')])])
    wf.connect([(c3d_flirt2ants, change_transform, [('itk_transform', 'input_affine_file')])])

    wf.connect([(antsRegistrationSyNQuick, merge_transform, [('warp', 'in1')])])
    wf.connect([(antsRegistrationSyNQuick, merge_transform, [('affine_matrix', 'in2')])])
    wf.connect([(change_transform, merge_transform, [('updated_affine_file', 'in3')])])
    wf.connect([(inputnode, apply_transform, [('T1', 'fix_image')])])
    wf.connect([(split, apply_transform, [('out_files', 'moving_image')])])

    wf.connect([(merge_transform, apply_transform, [('out', 'ants_warp_affine')])])
    wf.connect([(apply_transform, jacobian, [('out_warp_field', 'deformationField')])])
    wf.connect([(apply_transform, jacmult, [('out_warped', 'operand_files')])])
    wf.connect([(jacobian, jacmult, [('outputImage', 'in_file')])])
    wf.connect([(jacmult, thres, [('out_file', 'in_file')])])
    wf.connect([(thres, merge, [('out_file', 'in_files')])])

    wf.connect([(merge, outputnode, [('merged_file', 'DWIs_epicorrected')])])
    wf.connect([(flirt_b0_2_T1, outputnode, [('out_matrix_file', 'DWI_2_T1_Coregistration_matrix')])])
    wf.connect([(antsRegistrationSyNQuick, outputnode, [('warp', 'epi_correction_deformation_field'),
                                                        ('affine_matrix', 'epi_correction_affine_transform'),
                                                        ('image_warped', 'epi_correction_image_warped')])])
    wf.connect([(merge_transform, outputnode, [('out', 'warp_epi')])])
    wf.connect([(rot_bvec, outputnode, [('out_file', 'out_bvec')])])

    return wf
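
# The warning above notes that the b-vectors are rotated. A standalone sketch
# of the underlying idea (illustrative only; this is not clinica's
# rotate_bvecs): only the rotational part of the rigid b0-to-T1 transform
# should be applied to each gradient direction, re-normalised to unit length.
import numpy as np

def rotate_bvec_sketch(bvec, affine):
    """Apply only the rotational part of a 4x4 affine to one gradient vector."""
    linear = np.asarray(affine)[:3, :3]
    # Polar decomposition via SVD: U @ Vt is the closest pure rotation.
    u, _, vt = np.linalg.svd(linear)
    rotated = (u @ vt) @ np.asarray(bvec, dtype=float)
    norm = np.linalg.norm(rotated)
    return rotated / norm if norm > 0 else rotated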
Example #4
def b0_flirt_pipeline(num_b0s, name='b0_coregistration'):
    """
    Rigid registration of the B0 dataset onto the first volume. Rigid
    registration is achieved using FLIRT and the correlation ratio cost
    function.

    Args:
        num_b0s (int): Number of the B0 volumes in the dataset.
        name (str): Name of the workflow.

    Inputnode:
        in_file(str): B0 dataset.

    Outputnode:
        out_file (str): The set of B0 volumes registered to the first volume.
        out_xfms: The rigid transformation matrices, one per registered volume.

    Returns:
        The workflow
    """
    import nipype.pipeline.engine as pe
    from nipype.interfaces import fsl
    import nipype.interfaces.utility as niu

    from clinica.utils.dwi import merge_volumes_tdim

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    fslroi_ref = pe.Node(fsl.ExtractROI(args='0 1'), name='b0_reference')
    tsize = num_b0s - 1
    fslroi_moving = pe.Node(fsl.ExtractROI(args='1 '+str(tsize)),
                            name='b0_moving')
    split_moving = pe.Node(fsl.Split(dimension='t'), name='split_b0_moving')

    bet_ref = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
                      name='bet_ref')

    dilate = pe.Node(
            fsl.maths.MathsCommand(
                nan2zeros=True,
                args='-kernel sphere 5 -dilM'),
            name='mask_dilate')

    flirt = pe.MapNode(fsl.FLIRT(
        interp='spline', dof=6, bins=50, save_log=True,
        cost='corratio', cost_func='corratio', padding_size=10,
        searchr_x=[-4, 4], searchr_y=[-4, 4], searchr_z=[-4, 4],
        fine_search=1, coarse_search=10),
        name='b0_co_registration', iterfield=['in_file'])

    merge = pe.Node(fsl.Merge(dimension='t'), name='merge_registered_b0s')
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='remove_negative')
    insert_ref = pe.Node(niu.Function(input_names=['in_file1', 'in_file2'],
                                      output_names=['out_file'],
                                      function=merge_volumes_tdim),
                         name='concat_ref_moving')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_xfms']),
        name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode,  fslroi_ref,   [('in_file', 'in_file')]),
        (inputnode,  fslroi_moving,   [('in_file', 'in_file')]),
        (fslroi_moving, split_moving,   [('roi_file', 'in_file')]),
        (fslroi_ref, bet_ref, [('roi_file', 'in_file')]),
        (bet_ref, dilate, [('mask_file', 'in_file')]),
        (dilate, flirt, [('out_file', 'ref_weight'),
                         ('out_file', 'in_weight')]),
        (fslroi_ref, flirt, [('roi_file', 'reference')]),
        (split_moving, flirt, [('out_files', 'in_file')]),
        (flirt, thres, [('out_file', 'in_file')]),
        (thres, merge, [('out_file', 'in_files')]),
        (merge, insert_ref, [('merged_file', 'in_file2')]),
        (fslroi_ref, insert_ref, [('roi_file', 'in_file1')]),
        (insert_ref, outputnode, [('out_file', 'out_file')]),
        (flirt, outputnode, [('out_matrix_file', 'out_xfms')])
    ])
    return wf
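
# A minimal usage sketch (placeholder path, not part of the original example):
# coregister a 4D series of five b0 volumes onto its first volume.
b0_wf = b0_flirt_pipeline(num_b0s=5)
b0_wf.inputs.inputnode.in_file = 'b0s.nii.gz'
b0_wf.run()  # doctest: +SKIP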
Example #5
def hmc_pipeline(name='motion_correct'):
    """
    HMC stands for head-motion correction.

    Creates a pipeline that corrects for head motion artifacts in dMRI
    sequences. It takes a series of diffusion weighted images and
    rigidly co-registers them to one reference image (FLIRT normalised
    mutual information). Finally, the `b`-matrix is rotated
    accordingly [Leemans09]_ making use of the rotation matrix
    obtained by FLIRT.

    A list of rigid transformation matrices is provided, so that transforms
    can be chained.
    This is useful to correct for artifacts with only one interpolation process
    and also to compute nuisance regressors as proposed by [Yendiki13]_.

    .. warning:: This workflow rotates the `b`-vectors, so please be advised
      that not all the dicom converters ensure the consistency between the
      resulting nifti orientation and the gradients table (e.g. dcm2nii
      checks it).

    .. admonition:: References

      .. [Leemans09] Leemans A, and Jones DK, `The B-matrix must be rotated
        when correcting for subject motion in DTI data
        <http://dx.doi.org/10.1002/mrm.21890>`_,
        Magn Reson Med. 61(6):1336-49. 2009. doi: 10.1002/mrm.21890.

      .. [Yendiki13] Yendiki A et al., `Spurious group differences due to head
        motion in a diffusion MRI study
        <http://dx.doi.org/10.1016/j.neuroimage.2013.11.027>`_.
        Neuroimage. 21(88C):79-90. 2013. doi: 10.1016/j.neuroimage.2013.11.027

    Inputnode
    ---------
    in_file : FILE
      Mandatory input. Input dwi file.
    in_bvec : FILE
      Mandatory input. Vector file of the diffusion directions of the dwi dataset.
    in_bval : FILE
      Mandatory input. B values file.
    in_mask : FILE
      Mandatory input. Weights mask of reference image (a file with data
      range in [0.0, 1.0], indicating the weight of each voxel when
      computing the metric).
    ref_num : INT
      Optional input. Default=0. Index of the b0 volume that should be taken as reference.

    Outputnode
    ----------
        outputnode.out_file - corrected dwi file
        outputnode.out_bvec - rotated gradient vectors table
        outputnode.out_xfms - list of transformation matrices

    """
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe

    from nipype.workflows.data import get_flirt_schedule
    from nipype.workflows.dmri.fsl.utils import insert_mat
    from nipype.workflows.dmri.fsl.utils import rotate_bvecs

    from clinica.utils.dwi import merge_volumes_tdim
    from clinica.utils.dwi import hmc_split
    from clinica.workflows.dwi_preprocessing import dwi_flirt

    params = dict(dof=6, bgvalue=0, save_log=True, no_search=True,
                  # cost='mutualinfo', cost_func='mutualinfo', bins=64,
                  schedule=get_flirt_schedule('hmc'))

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'in_bvec', 'in_bval', 'in_mask', 'ref_num']),
        name='inputnode')

    split = pe.Node(
            niu.Function(
                function=hmc_split,
                input_names=['in_file', 'in_bval', 'ref_num'],
                output_names=['out_ref', 'out_mov', 'out_bval', 'volid']),
            name='split_ref_moving')

    flirt = dwi_flirt(flirt_param=params)

    insmat = pe.Node(niu.Function(
        input_names=['inlist', 'volid'],
        output_names=['out'],
        function=insert_mat), name='insert_ref_matrix')

    rot_bvec = pe.Node(niu.Function(
        input_names=['in_bvec', 'in_matrix'],
        output_names=['out_file'],
        function=rotate_bvecs), name='Rotate_Bvec')

    merged_volumes = pe.Node(niu.Function(
        input_names=['in_file1', 'in_file2'],
        output_names=['out_file'],
        function=merge_volumes_tdim), name='merge_reference_moving')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_bvec', 'out_xfms', 'mask_B0']),
        name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode,          split,  [('in_file',                'in_file'),
                                      ('in_bval',                'in_bval'),
                                      ('ref_num',              'ref_num')]),
        (inputnode,          flirt,  [('in_mask',   'inputnode.ref_mask')]),
        (split,              flirt,  [('out_ref',    'inputnode.reference'),
                                      ('out_mov',      'inputnode.in_file'),
                                      ('out_bval',   'inputnode.in_bval')]),
        (flirt,              insmat, [('outputnode.out_xfms',   'inlist')]),
        (split,              insmat, [('volid',                  'volid')]),
        (inputnode,        rot_bvec, [('in_bvec',              'in_bvec')]),
        (insmat,           rot_bvec, [('out',                'in_matrix')]),
        (rot_bvec,       outputnode, [('out_file',            'out_bvec')]),
        (flirt,      merged_volumes, [('outputnode.out_ref',    'in_file1'),
                                      ('outputnode.out_file', 'in_file2')]),
        (merged_volumes, outputnode, [('out_file',            'out_file')]),
        (insmat,         outputnode, [('out',                 'out_xfms')]),
        (flirt,          outputnode, [('outputnode.out_ref',   'mask_B0')])
    ])
    return wf
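
# A minimal usage sketch (placeholder paths; hypothetical, not part of the
# original example), mirroring the doctest style of epi_pipeline:
hmc = hmc_pipeline()
hmc.inputs.inputnode.in_file = 'dwi.nii.gz'
hmc.inputs.inputnode.in_bvec = 'dwi.bvec'
hmc.inputs.inputnode.in_bval = 'dwi.bval'
hmc.inputs.inputnode.in_mask = 'b0_mask.nii.gz'
hmc.inputs.inputnode.ref_num = 0
hmc.run()  # doctest: +SKIP
Example #6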
def baw_ants_registration_template_build_single_iteration_wf(
    iterationPhasePrefix, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG
):
    """

    Inputs::

           inputspec.images :
           inputspec.fixed_image :
           inputspec.ListOfPassiveImagesDictionaries :
           inputspec.interpolationMapping :

    Outputs::

           outputspec.template :
           outputspec.transforms_list :
           outputspec.passive_deformed_templates :
    """
    TemplateBuildSingleIterationWF = pe.Workflow(
        name="antsRegistrationTemplateBuildSingleIterationWF_"
        + str(iterationPhasePrefix)
    )

    inputSpec = pe.Node(
        interface=util.IdentityInterface(
            fields=[
                "ListOfImagesDictionaries",
                "registrationImageTypes",
                # 'maskRegistrationImageType',
                "interpolationMapping",
                "fixed_image",
            ]
        ),
        run_without_submitting=True,
        name="inputspec",
    )
    ## HACK: INFO: We need to have the AVG_AIR.nii.gz be warped with a default voxel value of 1.0
    ## HACK: INFO: Need to move all local functions to a common utility file, or at the top of the file so that
    ##             they do not change due to re-indenting.  Otherwise re-indenting for flow control will trigger
    ##             their hash to change.
    ## HACK: INFO: REMOVE 'transforms_list' it is not used.  That will change all the hashes
    ## HACK: INFO: Need to run all python files through the code beautifiers.  It has gotten pretty ugly.
    outputSpec = pe.Node(
        interface=util.IdentityInterface(
            fields=["template", "transforms_list", "passive_deformed_templates"]
        ),
        run_without_submitting=True,
        name="outputspec",
    )

    ### NOTE MAP NODE! warp each of the original images to the provided fixed_image as the template
    BeginANTS = pe.MapNode(
        interface=Registration(), name="BeginANTS", iterfield=["moving_image"]
    )
    # SEE template.py many_cpu_BeginANTS_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,4,2,8), 'overwrite': True}
    ## This is set in the template.py file BeginANTS.plugin_args = BeginANTS_cpu_sge_options_dictionary
    common_ants_registration_settings(
        antsRegistrationNode=BeginANTS,
        registrationTypeDescription="SixStageAntsRegistrationT1Only",
        output_transform_prefix=str(iterationPhasePrefix) + "_tfm",
        output_warped_image="atlas2subject.nii.gz",
        output_inverse_warped_image="subject2atlas.nii.gz",
        save_state="SavedantsRegistrationNodeSyNState.h5",
        invert_initial_moving_transform=False,
        initial_moving_transform=None,
    )

    GetMovingImagesNode = pe.Node(
        interface=util.Function(
            function=get_moving_images,
            input_names=[
                "ListOfImagesDictionaries",
                "registrationImageTypes",
                "interpolationMapping",
            ],
            output_names=["moving_images", "moving_interpolation_type"],
        ),
        run_without_submitting=True,
        name="99_GetMovingImagesNode",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "ListOfImagesDictionaries",
        GetMovingImagesNode,
        "ListOfImagesDictionaries",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "registrationImageTypes",
        GetMovingImagesNode,
        "registrationImageTypes",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec, "interpolationMapping", GetMovingImagesNode, "interpolationMapping"
    )

    TemplateBuildSingleIterationWF.connect(
        GetMovingImagesNode, "moving_images", BeginANTS, "moving_image"
    )
    TemplateBuildSingleIterationWF.connect(
        GetMovingImagesNode, "moving_interpolation_type", BeginANTS, "interpolation"
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec, "fixed_image", BeginANTS, "fixed_image"
    )

    ## Now warp all the input_images images
    wimtdeformed = pe.MapNode(
        interface=ApplyTransforms(),
        iterfield=["transforms", "input_image"],
        # iterfield=['transforms', 'invert_transform_flags', 'input_image'],
        name="wimtdeformed",
    )
    wimtdeformed.inputs.interpolation = "Linear"
    wimtdeformed.default_value = 0
    # HACK: Should try using forward_composite_transform
    ##PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transform', wimtdeformed, 'transforms')
    TemplateBuildSingleIterationWF.connect(
        BeginANTS, "composite_transform", wimtdeformed, "transforms"
    )
    ##PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', wimtdeformed, 'invert_transform_flags')
    ## NOTE: forward_invert_flags:: List of flags corresponding to the forward transforms
    # wimtdeformed.inputs.invert_transform_flags = [False,False,False,False,False]
    TemplateBuildSingleIterationWF.connect(
        GetMovingImagesNode, "moving_images", wimtdeformed, "input_image"
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec, "fixed_image", wimtdeformed, "reference_image"
    )

    ##  Shape Update Next =====
    ## Now  Average All input_images deformed images together to create an updated template average
    AvgDeformedImages = pe.Node(interface=AverageImages(), name="AvgDeformedImages")
    AvgDeformedImages.inputs.dimension = 3
    AvgDeformedImages.inputs.output_average_image = (
        str(iterationPhasePrefix) + ".nii.gz"
    )
    AvgDeformedImages.inputs.normalize = True
    TemplateBuildSingleIterationWF.connect(
        wimtdeformed, "output_image", AvgDeformedImages, "images"
    )

    ## Now average all affine transforms together
    AvgAffineTransform = pe.Node(
        interface=AverageAffineTransform(), name="AvgAffineTransform"
    )
    AvgAffineTransform.inputs.dimension = 3
    AvgAffineTransform.inputs.output_affine_transform = (
        "Avererage_" + str(iterationPhasePrefix) + "_Affine.h5"
    )

    SplitCompositeTransform = pe.MapNode(
        interface=util.Function(
            function=split_composite_to_component_transform,
            input_names=["transformFilename"],
            output_names=["affine_component_list", "warp_component_list"],
        ),
        iterfield=["transformFilename"],
        run_without_submitting=True,
        name="99_SplitCompositeTransform",
    )
    TemplateBuildSingleIterationWF.connect(
        BeginANTS, "composite_transform", SplitCompositeTransform, "transformFilename"
    )
    ## PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', SplitCompositeTransform, 'transformFilename')
    TemplateBuildSingleIterationWF.connect(
        SplitCompositeTransform,
        "affine_component_list",
        AvgAffineTransform,
        "transforms",
    )

    ## Now average the warp fields together
    AvgWarpImages = pe.Node(interface=AverageImages(), name="AvgWarpImages")
    AvgWarpImages.inputs.dimension = 3
    AvgWarpImages.inputs.output_average_image = (
        str(iterationPhasePrefix) + "warp.nii.gz"
    )
    AvgWarpImages.inputs.normalize = True
    TemplateBuildSingleIterationWF.connect(
        SplitCompositeTransform, "warp_component_list", AvgWarpImages, "images"
    )

    ## Now scale the average warp field by the negated gradient step
    ## INFO:  For now GradientStep is set to 0.25 as a hard coded default value.
    GradientStep = 0.25
    GradientStepWarpImage = pe.Node(
        interface=MultiplyImages(), name="GradientStepWarpImage"
    )
    GradientStepWarpImage.inputs.dimension = 3
    GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep
    GradientStepWarpImage.inputs.output_product_image = (
        "GradientStep0.25_" + str(iterationPhasePrefix) + "_warp.nii.gz"
    )
    TemplateBuildSingleIterationWF.connect(
        AvgWarpImages, "output_average_image", GradientStepWarpImage, "first_input"
    )

    ## Now create the new template shape based on the average of all deformed images
    UpdateTemplateShape = pe.Node(
        interface=ApplyTransforms(), name="UpdateTemplateShape"
    )
    UpdateTemplateShape.inputs.invert_transform_flags = [True]
    UpdateTemplateShape.inputs.interpolation = "Linear"
    UpdateTemplateShape.default_value = 0

    TemplateBuildSingleIterationWF.connect(
        AvgDeformedImages,
        "output_average_image",
        UpdateTemplateShape,
        "reference_image",
    )
    TemplateBuildSingleIterationWF.connect(
        [
            (
                AvgAffineTransform,
                UpdateTemplateShape,
                [(("affine_transform", make_list_of_one_element), "transforms")],
            )
        ]
    )
    TemplateBuildSingleIterationWF.connect(
        GradientStepWarpImage,
        "output_product_image",
        UpdateTemplateShape,
        "input_image",
    )

    ApplyInvAverageAndFourTimesGradientStepWarpImage = pe.Node(
        interface=util.Function(
            function=make_transform_list_with_gradient_warps,
            input_names=["averageAffineTranform", "gradientStepWarp"],
            output_names=["TransformListWithGradientWarps"],
        ),
        run_without_submitting=True,
        name="99_MakeTransformListWithGradientWarps",
    )
    # ApplyInvAverageAndFourTimesGradientStepWarpImage.inputs.ignore_exception = True

    TemplateBuildSingleIterationWF.connect(
        AvgAffineTransform,
        "affine_transform",
        ApplyInvAverageAndFourTimesGradientStepWarpImage,
        "averageAffineTranform",
    )
    TemplateBuildSingleIterationWF.connect(
        UpdateTemplateShape,
        "output_image",
        ApplyInvAverageAndFourTimesGradientStepWarpImage,
        "gradientStepWarp",
    )

    ReshapeAverageImageWithShapeUpdate = pe.Node(
        interface=ApplyTransforms(), name="ReshapeAverageImageWithShapeUpdate"
    )
    ReshapeAverageImageWithShapeUpdate.inputs.invert_transform_flags = [
        True,
        False,
        False,
        False,
        False,
    ]
    ReshapeAverageImageWithShapeUpdate.inputs.interpolation = "Linear"
    ReshapeAverageImageWithShapeUpdate.default_value = 0
    ReshapeAverageImageWithShapeUpdate.inputs.output_image = (
        "ReshapeAverageImageWithShapeUpdate.nii.gz"
    )
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedImages,
        "output_average_image",
        ReshapeAverageImageWithShapeUpdate,
        "input_image",
    )
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedImages,
        "output_average_image",
        ReshapeAverageImageWithShapeUpdate,
        "reference_image",
    )
    TemplateBuildSingleIterationWF.connect(
        ApplyInvAverageAndFourTimesGradientStepWarpImage,
        "TransformListWithGradientWarps",
        ReshapeAverageImageWithShapeUpdate,
        "transforms",
    )
    TemplateBuildSingleIterationWF.connect(
        ReshapeAverageImageWithShapeUpdate, "output_image", outputSpec, "template"
    )

    ######
    ######
    ######  Process all the passive deformed images in a way similar to the main image used for registration
    ######
    ######
    ######
    ##############################################
    ## Now warp all the ListOfPassiveImagesDictionaries images
    FlattenTransformAndImagesListNode = pe.Node(
        Function(
            function=flatten_transform_and_images_list,
            input_names=[
                "ListOfPassiveImagesDictionaries",
                "transforms",
                "interpolationMapping",
                "invert_transform_flags",
            ],
            output_names=[
                "flattened_images",
                "flattened_transforms",
                "flattened_invert_transform_flags",
                "flattened_image_nametypes",
                "flattened_interpolation_type",
            ],
        ),
        run_without_submitting=True,
        name="99_FlattenTransformAndImagesList",
    )

    GetPassiveImagesNode = pe.Node(
        interface=util.Function(
            function=get_passive_images,
            input_names=["ListOfImagesDictionaries", "registrationImageTypes"],
            output_names=["ListOfPassiveImagesDictionaries"],
        ),
        run_without_submitting=True,
        name="99_GetPassiveImagesNode",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "ListOfImagesDictionaries",
        GetPassiveImagesNode,
        "ListOfImagesDictionaries",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "registrationImageTypes",
        GetPassiveImagesNode,
        "registrationImageTypes",
    )

    TemplateBuildSingleIterationWF.connect(
        GetPassiveImagesNode,
        "ListOfPassiveImagesDictionaries",
        FlattenTransformAndImagesListNode,
        "ListOfPassiveImagesDictionaries",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "interpolationMapping",
        FlattenTransformAndImagesListNode,
        "interpolationMapping",
    )
    TemplateBuildSingleIterationWF.connect(
        BeginANTS,
        "composite_transform",
        FlattenTransformAndImagesListNode,
        "transforms",
    )
    ## FlattenTransformAndImagesListNode.inputs.invert_transform_flags = [False,False,False,False,False,False]
    ## INFO: Please check whether invert_transform_flags has a fixed number.
    ## PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', FlattenTransformAndImagesListNode, 'invert_transform_flags')
    wimtPassivedeformed = pe.MapNode(
        interface=ApplyTransforms(),
        iterfield=[
            "transforms",
            "invert_transform_flags",
            "input_image",
            "interpolation",
        ],
        name="wimtPassivedeformed",
    )
    wimtPassivedeformed.default_value = 0
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedImages,
        "output_average_image",
        wimtPassivedeformed,
        "reference_image",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_interpolation_type",
        wimtPassivedeformed,
        "interpolation",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_images",
        wimtPassivedeformed,
        "input_image",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_transforms",
        wimtPassivedeformed,
        "transforms",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_invert_transform_flags",
        wimtPassivedeformed,
        "invert_transform_flags",
    )

    RenestDeformedPassiveImagesNode = pe.Node(
        Function(
            function=renest_deformed_passive_images,
            input_names=[
                "deformedPassiveImages",
                "flattened_image_nametypes",
                "interpolationMapping",
            ],
            output_names=[
                "nested_imagetype_list",
                "outputAverageImageName_list",
                "image_type_list",
                "nested_interpolation_type",
            ],
        ),
        run_without_submitting=True,
        name="99_RenestDeformedPassiveImages",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "interpolationMapping",
        RenestDeformedPassiveImagesNode,
        "interpolationMapping",
    )
    TemplateBuildSingleIterationWF.connect(
        wimtPassivedeformed,
        "output_image",
        RenestDeformedPassiveImagesNode,
        "deformedPassiveImages",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_image_nametypes",
        RenestDeformedPassiveImagesNode,
        "flattened_image_nametypes",
    )
    ## Now  Average All passive input_images deformed images together to create an updated template average
    AvgDeformedPassiveImages = pe.MapNode(
        interface=AverageImages(),
        iterfield=["images", "output_average_image"],
        name="AvgDeformedPassiveImages",
    )
    AvgDeformedPassiveImages.inputs.dimension = 3
    AvgDeformedPassiveImages.inputs.normalize = False
    TemplateBuildSingleIterationWF.connect(
        RenestDeformedPassiveImagesNode,
        "nested_imagetype_list",
        AvgDeformedPassiveImages,
        "images",
    )
    TemplateBuildSingleIterationWF.connect(
        RenestDeformedPassiveImagesNode,
        "outputAverageImageName_list",
        AvgDeformedPassiveImages,
        "output_average_image",
    )

    ## -- INFO:  Now need to reshape all the passive images as well
    ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(
        interface=ApplyTransforms(),
        iterfield=["input_image", "reference_image", "output_image", "interpolation"],
        name="ReshapeAveragePassiveImageWithShapeUpdate",
    )
    ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_transform_flags = [
        True,
        False,
        False,
        False,
        False,
    ]
    ReshapeAveragePassiveImageWithShapeUpdate.default_value = 0
    TemplateBuildSingleIterationWF.connect(
        RenestDeformedPassiveImagesNode,
        "nested_interpolation_type",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "interpolation",
    )
    TemplateBuildSingleIterationWF.connect(
        RenestDeformedPassiveImagesNode,
        "outputAverageImageName_list",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "output_image",
    )
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedPassiveImages,
        "output_average_image",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "input_image",
    )
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedPassiveImages,
        "output_average_image",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "reference_image",
    )
    TemplateBuildSingleIterationWF.connect(
        ApplyInvAverageAndFourTimesGradientStepWarpImage,
        "TransformListWithGradientWarps",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "transforms",
    )
    TemplateBuildSingleIterationWF.connect(
        ReshapeAveragePassiveImageWithShapeUpdate,
        "output_image",
        outputSpec,
        "passive_deformed_templates",
    )

    return TemplateBuildSingleIterationWF
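
# The shape update implemented above can be summarised arithmetically: the
# subjects' warps are averaged and the mean warp is scaled by the negated
# gradient step (-0.25), then the averaged image is pulled back along that
# field. A numpy sketch of the scaling step only (illustrative; the real
# workflow composes ANTs transforms with ApplyTransforms):
import numpy as np

def scale_mean_warp_sketch(warp_fields, gradient_step=0.25):
    """Average a list of displacement fields and scale by -gradient_step."""
    mean_warp = np.mean(np.asarray(warp_fields), axis=0)
    return -gradient_step * mean_warp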
Example #7
def init_phdiff_wf(omp_nthreads, name='phdiff_wf'):
    """
    Estimates the fieldmap using a phase-difference image and one or more
    magnitude images corresponding to two or more :abbr:`GRE (Gradient Echo sequence)`
    acquisitions. The `original code was taken from nipype
    <https://github.com/nipy/nipype/blob/master/nipype/workflows/dmri/fsl/artifacts.py#L514>`_.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from sdcflows.workflows.phdiff import init_phdiff_wf
        wf = init_phdiff_wf(omp_nthreads=1)


    Outputs::

      outputnode.fmap_ref - The average magnitude image, skull-stripped
      outputnode.fmap_mask - The brain mask applied to the fieldmap
      outputnode.fmap - The estimated fieldmap in Hz


    """

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on a field map that was co-registered to the BOLD reference,
using a custom workflow of *fMRIPrep* derived from D. Greve's `epidewarp.fsl`
[script](http://www.nmr.mgh.harvard.edu/~greve/fbirn/b0/epidewarp.fsl) and
further improvements of HCP Pipelines [@hcppipelines].
"""

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['magnitude', 'phasediff']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['fmap', 'fmap_ref', 'fmap_mask']),
        name='outputnode')

    def _pick1st(inlist):
        return inlist[0]

    # Read phasediff echo times
    meta = pe.Node(ReadSidecarJSON(bids_validate=False),
                   name='meta',
                   mem_gb=0.01)

    # Merge input magnitude images
    magmrg = pe.Node(IntraModalMerge(), name='magmrg')

    # de-gradient the fields ("bias/illumination artifact")
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                 name='n4',
                 n_procs=omp_nthreads)
    bet = pe.Node(BETRPT(generate_report=True, frac=0.6, mask=True),
                  name='bet')
    ds_report_fmap_mask = pe.Node(DerivativesDataSink(desc='brain',
                                                      suffix='mask'),
                                  name='ds_report_fmap_mask',
                                  mem_gb=0.01,
                                  run_without_submitting=True)
    # uses mask from bet; outputs a mask
    # dilate = pe.Node(fsl.maths.MathsCommand(
    #     nan2zeros=True, args='-kernel sphere 5 -dilM'), name='MskDilate')

    # phase diff -> radians
    pha2rads = pe.Node(niu.Function(function=siemens2rads), name='pha2rads')

    # FSL PRELUDE will perform phase-unwrapping
    prelude = pe.Node(fsl.PRELUDE(), name='prelude')

    denoise = pe.Node(fsl.SpatialFilter(operation='median',
                                        kernel_shape='sphere',
                                        kernel_size=3),
                      name='denoise')

    demean = pe.Node(niu.Function(function=demean_image), name='demean')

    cleanup_wf = cleanup_edge_pipeline(name="cleanup_wf")

    compfmap = pe.Node(Phasediff2Fieldmap(), name='compfmap')

    # The phdiff2fmap interface is equivalent to:
    # rad2rsec (using rads2radsec from nipype.workflows.dmri.fsl.utils)
    # pre_fugue = pe.Node(fsl.FUGUE(save_fmap=True), name='ComputeFieldmapFUGUE')
    # rsec2hz (divide by 2pi)

    workflow.connect([
        (inputnode, meta, [('phasediff', 'in_file')]),
        (inputnode, magmrg, [('magnitude', 'in_files')]),
        (magmrg, n4, [('out_avg', 'input_image')]),
        (n4, prelude, [('output_image', 'magnitude_file')]),
        (n4, bet, [('output_image', 'in_file')]),
        (bet, prelude, [('mask_file', 'mask_file')]),
        (inputnode, pha2rads, [('phasediff', 'in_file')]),
        (pha2rads, prelude, [('out', 'phase_file')]),
        (meta, compfmap, [('out_dict', 'metadata')]),
        (prelude, denoise, [('unwrapped_phase_file', 'in_file')]),
        (denoise, demean, [('out_file', 'in_file')]),
        (demean, cleanup_wf, [('out', 'inputnode.in_file')]),
        (bet, cleanup_wf, [('mask_file', 'inputnode.in_mask')]),
        (cleanup_wf, compfmap, [('outputnode.out_file', 'in_file')]),
        (compfmap, outputnode, [('out_file', 'fmap')]),
        (bet, outputnode, [('mask_file', 'fmap_mask'),
                           ('out_file', 'fmap_ref')]),
        (inputnode, ds_report_fmap_mask, [('phasediff', 'source_file')]),
        (bet, ds_report_fmap_mask, [('out_report', 'in_file')]),
    ])

    return workflow
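
# Illustrative sketch of the conversion performed by the Phasediff2Fieldmap
# node (hypothetical helper, not the sdcflows implementation): an unwrapped
# phase difference in radians divided by 2*pi times the echo-time difference
# yields a fieldmap in Hz.
import numpy as np

def phasediff_to_hz_sketch(phasediff_rad, delta_te_sec):
    # fieldmap[Hz] = delta_phase[rad] / (2 * pi * delta_TE[s])
    return phasediff_rad / (2.0 * np.pi * delta_te_sec)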
Example #8
        'sub-{subject_id}_ses-1_task-{task_id}_desc-confounds_regressors.tsv'),
    'events':
    os.path.join(out_root, 'event_files',
                 'sub-{subject_id}_task-{task_id}_cond_v3.csv')
}

# Flexibly collect data from disk to feed into flows.
selectfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_root),
                      name="selectfiles")

selectfiles.inputs.task_id = [1, 2, 3, 4, 5, 6, 7, 8]

# Extract motion parameters from regressors file
runinfo = MapNode(util.Function(input_names=[
    'in_file', 'events_file', 'regressors_file', 'regressors_names',
    'motion_columns'
],
                                function=_bids2nipypeinfo,
                                output_names=['info', 'realign_file']),
                  name='runinfo',
                  iterfield=['in_file', 'events_file', 'regressors_file'])

# Set the column names to be used from the confounds file
# (the selection follows a paper from the Poldrack lab)
runinfo.inputs.regressors_names = ['std_dvars', 'framewise_displacement'] + \
                                  ['a_comp_cor_%02d' % i for i in range(6)]


runinfo.inputs.motion_columns   = ['trans_x', 'trans_x_derivative1', 'trans_x_derivative1_power2', 'trans_x_power2'] + \
                                  ['trans_y', 'trans_y_derivative1', 'trans_y_derivative1_power2', 'trans_y_power2'] + \
                                  ['trans_z', 'trans_z_derivative1', 'trans_z_derivative1_power2', 'trans_z_power2'] + \
                                  ['rot_x', 'rot_x_derivative1', 'rot_x_derivative1_power2', 'rot_x_power2'] + \
                                  ['rot_y', 'rot_y_derivative1', 'rot_y_derivative1_power2', 'rot_y_power2'] + \
                                  ['rot_z', 'rot_z_derivative1', 'rot_z_derivative1_power2', 'rot_z_power2']
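
# A sketch of how the selected confound columns could be read from an
# fMRIPrep regressors TSV (illustrative; the body of _bids2nipypeinfo is not
# shown in this fragment and may differ):
import pandas as pd

def load_confounds_sketch(regressors_file, columns):
    df = pd.read_csv(regressors_file, sep='\t')
    # fMRIPrep writes n/a for derivatives at the first timepoint; zero-fill.
    return df[columns].fillna(0.0).values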
Example #9
def calc_local_metrics(brain_mask, preprocessed_data_dir, subject_id,
                       parcellations_dict, bp_freq_list, TR,
                       selectfiles_templates, working_dir, ds_dir, use_n_procs,
                       plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    from nipype.interfaces.freesurfer.preprocess import MRIConvert

    import CPAC.alff.alff as cpac_alff
    import CPAC.reho.reho as cpac_reho
    import CPAC.utils.utils as cpac_utils

    import utils as calc_metrics_utils
    from motion import calculate_FD_P, calculate_FD_J

    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    wf = Workflow(name='LeiCA_LIFE_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'),
                      execution={
                          'stop_on_first_crash': True,
                          'remove_unnecessary_outputs': True,
                          'job_finished_timeout': 15
                      })
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(
        working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.regexp_substitutions = [
        ('MNI_resampled_brain_mask_calc.nii.gz', 'falff.nii.gz'),
        ('residual_filtered_3dT.nii.gz', 'alff.nii.gz'),
        ('_parcellation_', ''),
        ('_bp_freqs_', 'bp_'),
    ]

    #####################
    # ITERATORS
    #####################
    # PARCELLATION ITERATOR
    parcellation_infosource = Node(
        util.IdentityInterface(fields=['parcellation']),
        name='parcellation_infosource')
    parcellation_infosource.iterables = ('parcellation',
                                         list(parcellations_dict.keys()))

    # BP FILTER ITERATOR
    bp_filter_infosource = Node(util.IdentityInterface(fields=['bp_freqs']),
                                name='bp_filter_infosource')
    bp_filter_infosource.iterables = ('bp_freqs', bp_freq_list)

    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name='selectfiles')
    selectfiles.inputs.subject_id = subject_id

    ##############
    ## CON MATS
    ##############
    ##############
    ## extract ts
    ##############
    parcellated_ts = Node(util.Function(
        input_names=[
            'in_data', 'parcellation_name', 'parcellations_dict', 'bp_freqs',
            'tr'
        ],
        output_names=[
            'parcellation_time_series', 'parcellation_time_series_file',
            'masker_file'
        ],
        function=calc_metrics_utils.extract_parcellation_time_series),
                          name='parcellated_ts')

    parcellated_ts.inputs.parcellations_dict = parcellations_dict
    parcellated_ts.inputs.tr = TR
    wf.connect(selectfiles, 'epi_MNI_fullspectrum', parcellated_ts, 'in_data')
    wf.connect(parcellation_infosource, 'parcellation', parcellated_ts,
               'parcellation_name')
    wf.connect(bp_filter_infosource, 'bp_freqs', parcellated_ts, 'bp_freqs')

    ##############
    ## get conmat
    ##############
    con_mat = Node(util.Function(
        input_names=['in_data', 'extraction_method'],
        output_names=['matrix', 'matrix_file'],
        function=calc_metrics_utils.calculate_connectivity_matrix),
                   name='con_mat')
    con_mat.inputs.extraction_method = 'correlation'
    wf.connect(parcellated_ts, 'parcellation_time_series', con_mat, 'in_data')

    ##############
    ## ds
    ##############

    wf.connect(parcellated_ts, 'parcellation_time_series_file', ds,
               'con_mat.parcellated_time_series.@parc_ts')
    wf.connect(parcellated_ts, 'masker_file', ds,
               'con_mat.parcellated_time_series.@masker')
    wf.connect(con_mat, 'matrix_file', ds, 'con_mat.matrix.@mat')

    wf.write_graph(dotfilename=wf.name, graph2use='colored',
                   format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name,
               plugin_args={'initial_specs': 'request_memory = 1500'})
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
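
# The con_mat node above requests 'correlation' extraction; a minimal numpy
# sketch of that computation (illustrative, not the CPAC/LeiCA implementation):
import numpy as np

def correlation_matrix_sketch(parcellation_time_series):
    """Pearson correlations between parcels; input shape (n_timepoints, n_parcels)."""
    return np.corrcoef(parcellation_time_series, rowvar=False)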
Example #10
def init_timeseries_wf(
    out_dir,
    out_path_base,
    source_file,
    dt,
    work_dir=None,
    name='timeseries_wf',
):
    """
    Calculate timeseries of interest for a bold image in standard space.

    Parameters
    ----------

    out_dir: str
        the output directory
    out_path_base: str
        the new directory for the output, to be created within out_dir
    source_file: str
        a filename for output naming purposes
    dt: float
        repetition time
    work_dir: str
        the working directory for the workflow
    name: str
        the workflow name

    Returns
    -------

    workflow: nipype workflow

    Inputs
    ------

    bold_std
        BOLD series NIfTI file in MNI152NLin6Asym space
    bold_mask_std
        BOLD mask for MNI152NLin6Asym space
    movpar_file
        movement parameter file
    skip_vols
        number of non steady state volumes
    csf_mask
        csf mask in MNI 2mm space
    wm_mask
        wm mask in MNI 2mm space
    cortical_gm_mask
        gm mask in MNI 2mm space

    Outputs
    -------

    NONE
    """

    DerivativesDataSink.out_path_base = out_path_base

    workflow = Workflow(name=name, base_dir=work_dir)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_std', 'bold_mask_std', 'movpar_file', 'skip_vols', 'csf_mask',
        'wm_mask', 'cortical_gm_mask'
    ]),
                        name='inputnode')

    bold_confs_wf = init_bold_confs_wf(out_dir,
                                       out_path_base,
                                       source_file,
                                       mem_gb=1,
                                       regressors_all_comps=False,
                                       regressors_dvars_th=1.5,
                                       regressors_fd_th=0.5)

    ica_aroma_wf = init_ica_aroma_wf(dt, err_on_aroma_warn=True)

    join = pe.Node(niu.Function(output_names=["out_file"], function=_to_join),
                   name='aroma_confounds')

    merge_metadata = pe.Node(niu.Merge(2),
                             name='merge_metadata',
                             run_without_submitting=True)

    merge_metadata2 = pe.Node(DictMerge(),
                              name='merge_metadata2',
                              run_without_submitting=True)

    ds_timeseries = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                desc='confounds',
                                                source_file=source_file,
                                                suffix='timeseries'),
                            name='ds_confounds')

    ds_aroma_noise_ics = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                     source_file=source_file,
                                                     suffix='AROMAnoiseICs'),
                                 name='ds_aroma_noise_ics')

    ds_melodic_mix = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                 desc='MELODIC',
                                                 source_file=source_file,
                                                 suffix='mixing'),
                             name='ds_melodic_mix')

    ds_aroma_report = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                  desc='mixing',
                                                  source_file=source_file,
                                                  suffix='reportlet'),
                              name='ds_aroma_report')

    workflow.connect([
        (inputnode, bold_confs_wf, [('bold_std', 'inputnode.bold'),
                                    ('bold_mask_std', 'inputnode.bold_mask'),
                                    ('movpar_file', 'inputnode.movpar_file'),
                                    ('skip_vols', 'inputnode.skip_vols'),
                                    ('csf_mask', 'inputnode.csf_mask'),
                                    ('wm_mask', 'inputnode.wm_mask'),
                                    ('cortical_gm_mask',
                                     'inputnode.cortical_gm_mask')]),
        (inputnode, ica_aroma_wf, [('bold_std', 'inputnode.bold_std'),
                                   ('bold_mask_std',
                                    'inputnode.bold_mask_std'),
                                   ('movpar_file', 'inputnode.movpar_file'),
                                   ('skip_vols', 'inputnode.skip_vols')]),

        # merge tsvs
        (bold_confs_wf, join, [('outputnode.confounds_file', 'in_file')]),
        (ica_aroma_wf, join, [('outputnode.aroma_confounds', 'join_file')]),

        # merge metadata
        (bold_confs_wf, merge_metadata, [('outputnode.confounds_metadata',
                                          'in1')]),
        (ica_aroma_wf, merge_metadata, [('outputnode.aroma_metadata', 'in2')]),
        (merge_metadata, merge_metadata2, [('out', 'in_dicts')]),

        # derivatives
        (join, ds_timeseries, [('out_file', 'in_file')]),
        (merge_metadata2, ds_timeseries, [('out_dict', 'meta_dict')]),
        (ica_aroma_wf, ds_aroma_noise_ics, [('outputnode.aroma_noise_ics',
                                             'in_file')]),
        (ica_aroma_wf, ds_melodic_mix, [('outputnode.melodic_mix', 'in_file')
                                        ]),
        (ica_aroma_wf, ds_aroma_report, [('outputnode.out_report', 'in_file')
                                         ]),
    ])

    return workflow
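
# Usage sketch; the file names and TR value below are hypothetical
# placeholders, not taken from the source:
#
#   wf = init_timeseries_wf(out_dir='/tmp/out', out_path_base='ecp',
#                           source_file='sub-01_task-rest_bold.nii.gz',
#                           dt=0.72)
#   wf.inputs.inputnode.bold_std = 'sub-01_space-MNI152NLin6Asym_bold.nii.gz'
#   wf.inputs.inputnode.skip_vols = 10
#   wf.run()
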
Example #11
def init_bold_reference_wf(
    omp_nthreads,
    bold_file=None,
    sbref_files=None,
    brainmask_thresh=0.85,
    pre_mask=False,
    multiecho=False,
    name="bold_reference_wf",
    gen_report=False,
):
    """
    Build a workflow that generates reference BOLD images for a series.

    The raw reference image is the target of :abbr:`HMC (head motion correction)`, and a
    contrast-enhanced reference is the subject of distortion correction, as well as
    boundary-based registration to T1w and template spaces.

    LIMITATION: If one wants to extract the reference from several SBRefs
    with several echoes each, the first echo should be selected elsewhere,
    and this workflow should be run in ``multiecho = False`` mode. In other
    words, SBRef inputs are assumed to be single-echo.

    LIMITATION: If a list of SBRefs is provided, each can be 3D or 4D, but they
    are assumed to be sampled in the exact same 3D-grid and have the same orientation
    information in their headers so that they can directly be merged into a 4D volume.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from niworkflows.func.util import init_bold_reference_wf
            wf = init_bold_reference_wf(omp_nthreads=1)

    Parameters
    ----------
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use
    bold_file : :obj:`str`
        BOLD series NIfTI file
    sbref_files : :obj:`list` or :obj:`bool`
        Single band (as opposed to multi band) reference NIfTI file.
        If ``True`` is passed, the workflow is built to accommodate SBRefs,
        but the input is left undefined (i.e., it is left open for connection)
    brainmask_thresh: :obj:`float`
        Lower threshold for the probabilistic brainmask to obtain
        the final binary mask (default: 0.85).
    pre_mask : :obj:`bool`
        Indicates whether the ``pre_mask`` input will be set (and thus, step 1
        should be skipped).
    multiecho : :obj:`bool`
        If multiecho data was supplied, data from the first echo will be selected
    name : :obj:`str`
        Name of workflow (default: ``bold_reference_wf``)
    gen_report : :obj:`bool`
        Whether a mask report node should be appended in the end

    Inputs
    ------
    bold_file : str
        BOLD series NIfTI file
    all_bold_files : str
        Validated and header-corrected BOLD series; multiple if multi-echo
    bold_mask : bool
        A tentative brain mask to initialize the workflow (requires ``pre_mask``
        parameter set ``True``).
    dummy_scans : int or None
        Number of non-steady-state volumes specified by user at beginning of ``bold_file``
    sbref_file : str
        single band (as opposed to multi band) reference NIfTI file

    Outputs
    -------
    bold_file : str
        Validated BOLD series NIfTI file
    raw_ref_image : str
        Reference image to which BOLD series is motion corrected
    skip_vols : int
        Number of non-steady-state volumes selected at beginning of ``bold_file``
    algo_dummy_scans : int
        Number of non-steady-state volumes algorithmically detected at
        beginning of ``bold_file``
    ref_image : str
        Contrast-enhanced reference image
    ref_image_brain : str
        Skull-stripped reference image
    bold_mask : str
        Skull-stripping mask of reference image
    validation_report : str
        HTML reportlet indicating whether ``bold_file`` had a valid affine


    Subworkflows
        * :py:func:`~niworkflows.func.util.init_enhance_and_skullstrip_wf`

    """
    from ..utils.connections import pop_file as _pop
    from ..interfaces.bold import NonsteadyStatesDetector
    from ..interfaces.images import RobustAverage

    workflow = Workflow(name=name)
    workflow.__desc__ = f"""\
First, a reference volume and its skull-stripped version were generated
{'from the shortest echo of the BOLD run' * multiecho} using a custom
methodology of *fMRIPrep*.
"""

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=["bold_file", "bold_mask", "dummy_scans", "sbref_file"]
        ),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=[
                "bold_file",
                "all_bold_files",
                "raw_ref_image",
                "skip_vols",
                "algo_dummy_scans",
                "ref_image",
                "ref_image_brain",
                "bold_mask",
                "validation_report",
                "mask_report",
            ]
        ),
        name="outputnode",
    )

    # Simplify manually setting input image
    if bold_file is not None:
        inputnode.inputs.bold_file = bold_file

    val_bold = pe.MapNode(
        ValidateImage(),
        name="val_bold",
        mem_gb=DEFAULT_MEMORY_MIN_GB,
        iterfield=["in_file"],
    )

    get_dummy = pe.Node(NonsteadyStatesDetector(), name="get_dummy")
    gen_avg = pe.Node(RobustAverage(), name="gen_avg", mem_gb=1)

    enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf(
        brainmask_thresh=brainmask_thresh,
        omp_nthreads=omp_nthreads,
        pre_mask=pre_mask,
    )

    calc_dummy_scans = pe.Node(
        niu.Function(function=_pass_dummy_scans, output_names=["skip_vols_num"]),
        name="calc_dummy_scans",
        run_without_submitting=True,
        mem_gb=DEFAULT_MEMORY_MIN_GB,
    )

    # fmt: off
    workflow.connect([
        (inputnode, val_bold, [(("bold_file", listify), "in_file")]),
        (inputnode, get_dummy, [(("bold_file", _pop), "in_file")]),
        (inputnode, enhance_and_skullstrip_bold_wf, [("bold_mask", "inputnode.pre_mask")]),
        (inputnode, calc_dummy_scans, [("dummy_scans", "dummy_scans")]),
        (gen_avg, enhance_and_skullstrip_bold_wf, [("out_file", "inputnode.in_file")]),
        (get_dummy, calc_dummy_scans, [("n_dummy", "algo_dummy_scans")]),
        (calc_dummy_scans, outputnode, [("skip_vols_num", "skip_vols")]),
        (gen_avg, outputnode, [("out_file", "raw_ref_image")]),
        (get_dummy, outputnode, [("n_dummy", "algo_dummy_scans")]),
        (val_bold, outputnode, [(("out_file", _pop), "bold_file"),
                                ("out_file", "all_bold_files"),
                                (("out_report", _pop), "validation_report")]),
        (enhance_and_skullstrip_bold_wf, outputnode, [
            ("outputnode.bias_corrected_file", "ref_image"),
            ("outputnode.mask_file", "bold_mask"),
            ("outputnode.skull_stripped_file", "ref_image_brain"),
        ]),
    ])
    # fmt: on

    if gen_report:
        mask_reportlet = pe.Node(SimpleShowMaskRPT(), name="mask_reportlet")
        # fmt: off
        workflow.connect([
            (enhance_and_skullstrip_bold_wf, mask_reportlet, [
                ("outputnode.bias_corrected_file", "background_file"),
                ("outputnode.mask_file", "mask_file"),
            ]),
        ])
        # fmt: on

    if not sbref_files:
        # fmt: off
        workflow.connect([
            (val_bold, gen_avg, [(("out_file", _pop), "in_file")]),  # pop first echo of ME-EPI
            (get_dummy, gen_avg, [("t_mask", "t_mask")]),
        ])
        # fmt: on
        return workflow

    from ..interfaces.nibabel import MergeSeries

    nsbrefs = 0
    if sbref_files is not True:
        # If not boolean, then it is a list-of or pathlike.
        inputnode.inputs.sbref_file = sbref_files
        nsbrefs = 1 if isinstance(sbref_files, str) else len(sbref_files)

    val_sbref = pe.MapNode(
        ValidateImage(),
        name="val_sbref",
        mem_gb=DEFAULT_MEMORY_MIN_GB,
        iterfield=["in_file"],
    )
    merge_sbrefs = pe.Node(MergeSeries(), name="merge_sbrefs")

    # fmt: off
    workflow.connect([
        (inputnode, val_sbref, [(("sbref_file", listify), "in_file")]),
        (val_sbref, merge_sbrefs, [("out_file", "in_files")]),
        (merge_sbrefs, gen_avg, [("out_file", "in_file")]),
    ])
    # fmt: on

    # Edit the boilerplate as the SBRef will be the reference
    workflow.__desc__ = f"""\
First, a reference volume and its skull-stripped version were generated
by aligning and averaging {nsbrefs or ''} single-band references (SBRefs).
"""

    return workflow
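
# Usage sketch; file names are hypothetical. When a list of SBRefs is passed,
# the reference is averaged from the merged SBRef series rather than from the
# BOLD run itself:
#
#   wf = init_bold_reference_wf(
#       omp_nthreads=4,
#       bold_file='sub-01_task-rest_bold.nii.gz',
#       sbref_files=['sub-01_task-rest_sbref.nii.gz'])
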
Example #12
def init_ica_aroma_wf(
    dt,
    aroma_melodic_dim=-200,
    err_on_aroma_warn=False,
    susan_fwhm=6.0,
    name='ica_aroma_wf',
):
    """
    Build a workflow that runs `ICA-AROMA`_.

    This workflow wraps `ICA-AROMA`_ to identify and remove motion-related
    independent components from a BOLD time series.

    The following steps are performed:

    #. Remove non-steady state volumes from the bold series.
    #. Smooth data using FSL `susan`, with a kernel width FWHM=6.0mm.
    #. Run FSL `melodic` outside of ICA-AROMA to generate the report
    #. Run ICA-AROMA
    #. Aggregate identified motion components (aggressive) to TSV
    #. Return ``classified_motion_ICs`` and ``melodic_mix`` for user to complete
       non-aggressive denoising in T1w space
    #. Calculate ICA-AROMA-identified noise components
       (columns named ``AROMAAggrCompXX``)

    There is a current discussion on whether other confounds should be extracted
    before or after denoising `here
    <http://nbviewer.jupyter.org/github/nipreps/fmriprep-notebooks/blob/922e436429b879271fa13e76767a6e73443e74d9/issue-817_aroma_confounds.ipynb>`__.

    .. _ICA-AROMA: https://github.com/maartenmennes/ICA-AROMA

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from ecp.workflows.confounds import init_ica_aroma_wf
            wf = init_ica_aroma_wf(
                dt=1.0)

    Parameters
    ----------
    dt : :obj:`float`
        bold repetition time
    aroma_melodic_dim : :obj:`int`
        Set the dimensionality of the MELODIC ICA decomposition.
        Negative numbers set a maximum on automatic dimensionality estimation.
        Positive numbers set an exact number of components to extract.
        (default: -200, i.e., estimate <=200 components)
    err_on_aroma_warn : :obj:`bool`
        Raise an error, rather than just a warning, on ICA-AROMA failures
    susan_fwhm : :obj:`float`
        Kernel width (FWHM in mm) for the smoothing step with
        FSL ``susan`` (default: 6.0mm)
    name : :obj:`str`
        Name of workflow (default: ``ica_aroma_wf``)

    Inputs
    ------
    bold_std
        BOLD series NIfTI file in MNI152NLin6Asym space
    bold_mask_std
        BOLD mask for MNI152NLin6Asym space
    movpar_file
        movement parameter file
    skip_vols
        number of non steady state volumes
        
    Outputs
    -------
    aroma_confounds
        TSV of confounds identified as noise by ICA-AROMA
    aroma_noise_ics
        CSV of noise components identified by ICA-AROMA
    melodic_mix
        FSL MELODIC mixing matrix
    aroma_metadata
        metadata
    out_report
        aroma out report

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.interfaces.segmentation import ICA_AROMARPT
    from niworkflows.interfaces.utility import KeySelect
    from niworkflows.interfaces.utils import TSV2JSON

    workflow = Workflow(name=name)
    workflow.__postdesc__ = """\
Automatic removal of motion artifacts using independent component analysis
[ICA-AROMA, @aroma] was performed on the *preprocessed BOLD on MNI space*
time-series after removal of non-steady state volumes and spatial smoothing
with an isotropic, Gaussian kernel of 6mm FWHM (full-width half-maximum).
The "aggressive" noise-regressors were collected and placed
in the corresponding confounds file.
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_std',
        'bold_mask_std',
        'movpar_file',
        'skip_vols',
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'aroma_confounds', 'aroma_noise_ics', 'melodic_mix', 'aroma_metadata',
        'out_report'
    ]),
                         name='outputnode')

    # extract out to BOLD base
    rm_non_steady_state = pe.Node(Trim(), name='rm_nonsteady')
    trim_movement = pe.Node(TrimMovement(), name='trim_movement')

    calc_median_val = pe.Node(fsl.ImageStats(op_string='-k %s -p 50'),
                              name='calc_median_val')
    calc_bold_mean = pe.Node(fsl.MeanImage(), name='calc_bold_mean')

    def _getusans_func(image, thresh):
        return [tuple([image, thresh])]

    getusans = pe.Node(niu.Function(function=_getusans_func,
                                    output_names=['usans']),
                       name='getusans',
                       mem_gb=0.01)

    smooth = pe.Node(fsl.SUSAN(fwhm=susan_fwhm), name='smooth')

    # melodic node
    melodic = pe.Node(fsl.MELODIC(no_bet=True,
                                  tr_sec=dt,
                                  mm_thresh=0.5,
                                  out_stats=True,
                                  dim=aroma_melodic_dim),
                      name="melodic")

    # ica_aroma node
    ica_aroma = pe.Node(ICA_AROMARPT(denoise_type='no',
                                     generate_report=True,
                                     TR=dt,
                                     args='-np'),
                        name='ica_aroma')

    # extract the confound ICs from the results
    ica_aroma_confound_extraction = pe.Node(
        ICAConfounds(err_on_aroma_warn=err_on_aroma_warn),
        name='ica_aroma_confound_extraction')

    ica_aroma_metadata_fmt = pe.Node(TSV2JSON(index_column='IC',
                                              output=None,
                                              enforce_case=True,
                                              additional_metadata={
                                                  'Method': {
                                                      'Name':
                                                      'ICA-AROMA',
                                                      'Version':
                                                      getenv(
                                                          'AROMA_VERSION',
                                                          'n/a')
                                                  }
                                              }),
                                     name='ica_aroma_metadata_fmt')

    def _getbtthresh(medianval):
        return 0.75 * medianval
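    # NB: SUSAN needs a brightness threshold; 75% of the in-mask median
    # intensity is the convention FSL's FEAT follows (e.g., a median of 800
    # gives a brightness_threshold of 600).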

    # connect the nodes
    workflow.connect([
        (inputnode, ica_aroma, [('movpar_file', 'motion_parameters')]),
        (inputnode, rm_non_steady_state, [('skip_vols', 'begin_index')]),
        (inputnode, rm_non_steady_state, [('bold_std', 'in_file')]),
        (inputnode, calc_median_val, [('bold_mask_std', 'mask_file')]),
        (inputnode, trim_movement, [('movpar_file', 'movpar_file')]),
        (inputnode, trim_movement, [('skip_vols', 'skip_vols')]),
        (rm_non_steady_state, calc_median_val, [('out_file', 'in_file')]),
        (rm_non_steady_state, calc_bold_mean, [('out_file', 'in_file')]),
        (calc_bold_mean, getusans, [('out_file', 'image')]),
        (calc_median_val, getusans, [('out_stat', 'thresh')]),
        # Connect input nodes to complete smoothing
        (rm_non_steady_state, smooth, [('out_file', 'in_file')]),
        (getusans, smooth, [('usans', 'usans')]),
        (calc_median_val, smooth, [(('out_stat', _getbtthresh),
                                    'brightness_threshold')]),
        # connect smooth to melodic
        (smooth, melodic, [('smoothed_file', 'in_files')]),
        (inputnode, melodic, [('bold_mask_std', 'mask')]),
        # connect nodes to ICA-AROMA
        (smooth, ica_aroma, [('smoothed_file', 'in_file')]),
        (inputnode, ica_aroma, [('bold_mask_std', 'report_mask'),
                                ('bold_mask_std', 'mask')]),
        (melodic, ica_aroma, [('out_dir', 'melodic_dir')]),
        # generate tsvs from ICA-AROMA
        (ica_aroma, ica_aroma_confound_extraction, [('out_dir', 'in_directory')
                                                    ]),
        (inputnode, ica_aroma_confound_extraction, [('skip_vols', 'skip_vols')
                                                    ]),
        (ica_aroma_confound_extraction, ica_aroma_metadata_fmt,
         [('aroma_metadata', 'in_file')]),
        # output for processing and reporting
        (ica_aroma_confound_extraction,
         outputnode, [('aroma_confounds', 'aroma_confounds'),
                      ('aroma_noise_ics', 'aroma_noise_ics'),
                      ('melodic_mix', 'melodic_mix')]),
        (ica_aroma_metadata_fmt, outputnode, [('output', 'aroma_metadata')]),
        (ica_aroma, outputnode, [('out_report', 'out_report')]),
    ])

    return workflow
Example #13
def init_bold_confs_wf(
    out_dir,
    out_path_base,
    source_file,
    mem_gb,
    regressors_all_comps,
    regressors_dvars_th,
    regressors_fd_th,
    dt=None,
    work_dir=None,
    name="bold_confs_wf",
):
    """
    This workflow calculates confounds for a BOLD series, and aggregates them
    into a :abbr:`TSV (tab-separated value)` file, for use as nuisance
    regressors in a :abbr:`GLM (general linear model)`.

    The following confounds are calculated, with column headings in parentheses:

    #. Region-wise average signal (``csf``, ``white_matter``, ``global_signal``)
    #. DVARS - original and standardized variants (``dvars``, ``std_dvars``)
    #. Framewise displacement, based on head-motion parameters
       (``framewise_displacement``)
    #. Temporal CompCor (``t_comp_cor_XX``)
    #. Anatomical CompCor (``a_comp_cor_XX``)
    #. Cosine basis set for high-pass filtering w/ 0.008 Hz cut-off
       (``cosine_XX``)
    #. Non-steady-state volumes (``non_steady_state_XX``)
    #. Estimated head-motion parameters, in mm and rad
       (``trans_x``, ``trans_y``, ``trans_z``, ``rot_x``, ``rot_y``, ``rot_z``)


    Prior to estimating aCompCor and tCompCor, non-steady-state volumes are
    censored and high-pass filtered using a :abbr:`DCT (discrete cosine
    transform)` basis.
    The cosine basis, as well as one regressor per censored volume, are included
    for convenience.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.confounds import init_bold_confs_wf
        wf = init_bold_confs_wf(
            mem_gb=1,
            regressors_all_comps=False,
            regressors_dvars_th=1.5,
            regressors_fd_th=0.5,
            dt=2.0,
        )

    **Parameters**

        mem_gb : float
            Size of BOLD file in GB - please note that this size
            should be calculated after resamplings that may extend
            the FoV
        regressors_all_comps: bool
            Indicates whether CompCor decompositions should return all
            components instead of the minimal number of components necessary
            to explain 50 percent of the variance in the decomposition mask.
        regressors_dvars_th
            Criterion for flagging DVARS outliers
        regressors_fd_th
            Criterion for flagging framewise displacement outliers
        dt: float
            repetition time
        name : str
            Name of workflow (default: ``bold_confs_wf``)


    **Inputs**

        bold
            BOLD image, after the prescribed corrections (STC, HMC and SDC)
            when available.
        bold_mask
            BOLD series mask
        movpar_file
            SPM-formatted motion parameters file
        skip_vols
            number of non steady state volumes
        csf_mask
            csf mask in MNI 2mm space
        wm_mask
            wm mask in MNI 2mm space
        cortical_gm_mask
            gm mask in MNI 2mm space
        

    **Outputs**

        confounds_file
            TSV of all aggregated confounds
        confounds_metadata
            Confounds metadata dictionary.

    """

    DerivativesDataSink.out_path_base = out_path_base

    workflow = Workflow(name=name, base_dir=work_dir)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold', 'bold_mask', 'movpar_file', 'skip_vols', 'csf_mask', 'wm_mask',
        'cortical_gm_mask'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['confounds_file', 'confounds_metadata']),
        name='outputnode')

    # create tcc mask: fslmaths cortical_gm_mask -dilD -mul -1 -add bold_mask -bin
    tcc_roi = pe.Node(fsl.utils.ImageMaths(op_string='-dilD -mul -1 -add',
                                           args='-bin'),
                      name='tcc_roi')

    # create acc mask fslmaths wm_mask -add csf_mask
    acc_roi = pe.Node(fsl.utils.ImageMaths(op_string='-add'), name='acc_roi')
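    # NB: ImageMaths assembles these into fslmaths command lines, roughly
    #   fslmaths <cortical_gm_mask> -dilD -mul -1 -add <bold_mask> <out> -bin
    # for tcc_roi and
    #   fslmaths <wm_mask> -add <csf_mask> <out>
    # for acc_roi (sketched from the op_strings above; the exact argument
    # order is nipype's).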

    # Ensure ROIs don't go off-limits (reduced FoV)
    csf_msk = pe.Node(niu.Function(function=_maskroi), name='csf_msk')
    wm_msk = pe.Node(niu.Function(function=_maskroi), name='wm_msk')
    acc_msk = pe.Node(niu.Function(function=_maskroi), name='acc_msk')
    tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')

    # DVARS
    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True,
                                     save_std=True,
                                     remove_zerovariance=True),
                    name="dvars",
                    mem_gb=mem_gb)

    # Frame displacement
    fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"),
                    name="fdisp",
                    mem_gb=mem_gb)

    # a/t-Compcor
    mrg_lbl_cc = pe.Node(niu.Merge(3),
                         name='merge_rois_cc',
                         run_without_submitting=True)

    tcompcor = pe.Node(TCompCor(components_file='tcompcor.tsv',
                                header_prefix='t_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                percentile_threshold=.05,
                                failure_mode='NaN'),
                       name="tcompcor",
                       mem_gb=mem_gb)

    acompcor = pe.Node(ACompCor(components_file='acompcor.tsv',
                                header_prefix='a_comp_cor_',
                                pre_filter='cosine',
                                save_pre_filter=True,
                                save_metadata=True,
                                mask_names=['combined', 'CSF', 'WM'],
                                merge_method='none',
                                failure_mode='NaN'),
                       name="acompcor",
                       mem_gb=mem_gb)

    # Set number of components
    if regressors_all_comps:
        acompcor.inputs.num_components = 'all'
        tcompcor.inputs.num_components = 'all'
    else:
        acompcor.inputs.variance_threshold = 0.5
        tcompcor.inputs.variance_threshold = 0.5

    # Set TR if present
    if dt:
        tcompcor.inputs.repetition_time = dt
        acompcor.inputs.repetition_time = dt

    # Global and segment regressors
    mrg_lbl = pe.Node(niu.Merge(3),
                      name='merge_rois',
                      run_without_submitting=True)
    signals = pe.Node(SignalExtraction(
        class_labels=["csf", "white_matter", "global_signal"]),
                      name="signals",
                      mem_gb=mem_gb)

    # Arrange confounds
    add_dvars_header = pe.Node(AddTSVHeader(columns=["dvars"]),
                               name="add_dvars_header",
                               mem_gb=0.01,
                               run_without_submitting=True)
    add_std_dvars_header = pe.Node(AddTSVHeader(columns=["std_dvars"]),
                                   name="add_std_dvars_header",
                                   mem_gb=0.01,
                                   run_without_submitting=True)
    add_motion_headers = pe.Node(AddTSVHeader(
        columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]),
                                 name="add_motion_headers",
                                 mem_gb=0.01,
                                 run_without_submitting=True)
    concat = pe.Node(GatherConfounds(),
                     name="concat",
                     mem_gb=0.01,
                     run_without_submitting=True)

    # CompCor metadata
    tcc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component',
        drop_columns=['mask'],
        output=None,
        additional_metadata={'Method': 'tCompCor'},
        enforce_case=True),
                               name='tcc_metadata_fmt')
    acc_metadata_fmt = pe.Node(TSV2JSON(
        index_column='component',
        output=None,
        additional_metadata={'Method': 'aCompCor'},
        enforce_case=True),
                               name='acc_metadata_fmt')
    mrg_conf_metadata = pe.Node(niu.Merge(2),
                                name='merge_confound_metadata',
                                run_without_submitting=True)
    mrg_conf_metadata2 = pe.Node(DictMerge(),
                                 name='merge_confound_metadata2',
                                 run_without_submitting=True)

    # Expand model to include derivatives and quadratics
    model_expand = pe.Node(
        ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'),
        name='model_expansion')
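    # NB on the formula: `rps` are the six realignment parameters; `wm`,
    # `csf` and `gsr` the tissue and global signals; `dd1(...)` adds first
    # temporal derivatives; `^^2` adds quadratic terms of everything inside;
    # and `others` passes the remaining columns through untouched. The result
    # is a 36-parameter-style expansion (9 base regressors, doubled by
    # derivatives, doubled again by quadratics).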

    # Add spike regressors
    spike_regress = pe.Node(SpikeRegressors(fd_thresh=regressors_fd_th,
                                            dvars_thresh=regressors_dvars_th),
                            name='spike_regressors')

    # Generate reportlet (ROIs)
    mrg_compcor = pe.Node(niu.Merge(2),
                          name='merge_compcor',
                          run_without_submitting=True)
    rois_plot = pe.Node(ROIsPlot(colors=['b', 'magenta'],
                                 generate_report=True),
                        name='rois_plot',
                        mem_gb=mem_gb)

    ds_report_bold_rois = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                      desc='rois',
                                                      source_file=source_file,
                                                      suffix='reportlet',
                                                      keep_dtype=True),
                                  name='ds_report_bold_rois',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (CompCor)
    mrg_cc_metadata = pe.Node(niu.Merge(2),
                              name='merge_compcor_metadata',
                              run_without_submitting=True)
    compcor_plot = pe.Node(
        CompCorVariancePlot(metadata_sources=['tCompCor', 'aCompCor']),
        name='compcor_plot')
    ds_report_compcor = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                    desc='compcorvar',
                                                    source_file=source_file,
                                                    keep_dtype=True),
                                name='ds_report_compcor',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Generate reportlet (Confound correlation)
    conf_corr_plot = pe.Node(ConfoundsCorrelationPlot(
        reference_column='global_signal', max_dim=70),
                             name='conf_corr_plot')
    ds_report_conf_corr = pe.Node(DerivativesDataSink(base_directory=out_dir,
                                                      desc='confoundcorr',
                                                      source_file=source_file,
                                                      keep_dtype=True),
                                  name='ds_report_conf_corr',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        # generate tcc and acc rois
        (inputnode, tcc_roi, [('cortical_gm_mask', 'in_file'),
                              ('bold_mask', 'in_file2')]),
        (inputnode, acc_roi, [('wm_mask', 'in_file'),
                              ('csf_mask', 'in_file2')]),
        # Mask ROIs with bold_mask
        (inputnode, csf_msk, [('bold_mask', 'in_mask')]),
        (inputnode, wm_msk, [('bold_mask', 'in_mask')]),
        (inputnode, acc_msk, [('bold_mask', 'in_mask')]),
        (inputnode, tcc_msk, [('bold_mask', 'in_mask')]),
        # connect inputnode to each non-anatomical confound node
        (inputnode, dvars, [('bold', 'in_file'), ('bold_mask', 'in_mask')]),
        (inputnode, fdisp, [('movpar_file', 'in_file')]),

        # tCompCor
        (inputnode, tcompcor, [('bold', 'realigned_file')]),
        (inputnode, tcompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (tcc_roi, tcc_msk, [('out_file', 'roi_file')]),
        (tcc_msk, tcompcor, [('out', 'mask_files')]),

        # aCompCor
        (inputnode, acompcor, [('bold', 'realigned_file')]),
        (inputnode, acompcor, [('skip_vols', 'ignore_initial_volumes')]),
        (acc_roi, acc_msk, [('out_file', 'roi_file')]),
        (acc_msk, mrg_lbl_cc, [('out', 'in1')]),
        (inputnode, mrg_lbl_cc, [('csf_mask', 'in2')]),
        (inputnode, mrg_lbl_cc, [('wm_mask', 'in3')]),
        (mrg_lbl_cc, acompcor, [('out', 'mask_files')]),

        # Global signals extraction (constrained by anatomy)
        (inputnode, signals, [('bold', 'in_file')]),
        (inputnode, csf_msk, [('csf_mask', 'roi_file')]),
        (csf_msk, mrg_lbl, [('out', 'in1')]),
        (inputnode, wm_msk, [('wm_mask', 'roi_file')]),
        (wm_msk, mrg_lbl, [('out', 'in2')]),
        (inputnode, mrg_lbl, [('bold_mask', 'in3')]),
        (mrg_lbl, signals, [('out', 'label_files')]),

        # Collate computed confounds together
        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
        (signals, concat, [('out_file', 'signals')]),
        (fdisp, concat, [('out_file', 'fd')]),
        (tcompcor, concat, [('components_file', 'tcompcor'),
                            ('pre_filter_file', 'cos_basis')]),
        (acompcor, concat, [('components_file', 'acompcor')]),
        (add_motion_headers, concat, [('out_file', 'motion')]),
        (add_dvars_header, concat, [('out_file', 'dvars')]),
        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),

        # Confounds metadata
        (tcompcor, tcc_metadata_fmt, [('metadata_file', 'in_file')]),
        (acompcor, acc_metadata_fmt, [('metadata_file', 'in_file')]),
        (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]),
        (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]),
        (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),

        # Expand the model with derivatives, quadratics, and spikes
        (concat, model_expand, [('confounds_file', 'confounds_file')]),
        (model_expand, spike_regress, [('confounds_file', 'confounds_file')]),

        # Set outputs
        (spike_regress, outputnode, [('confounds_file', 'confounds_file')]),
        (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]),
        (inputnode, rois_plot, [('bold', 'in_file'),
                                ('bold_mask', 'in_mask')]),
        (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]),
        (acc_msk, mrg_compcor, [('out', 'in2')]),
        (mrg_compcor, rois_plot, [('out', 'in_rois')]),
        (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]),
        (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]),
        (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]),
        (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]),
        (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]),
        (concat, conf_corr_plot, [('confounds_file', 'confounds_file')]),
        (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]),
    ])

    return workflow
Example #14
def init_pepolar_unwarp_wf(omp_nthreads=1, matched_pe=False,
                           name="pepolar_unwarp_wf"):
    """
    Create the PE-Polar field estimation workflow.

    This workflow takes in a set of EPI files with opposite phase encoding
    direction than the target file and calculates a displacements field
    (in other words, an ANTs-compatible warp file).

    This procedure works when only one ``_epi`` file is present (as long as
    it has the opposite phase encoding direction to the target file); in that
    case, the target file itself is used to estimate the field distortion.
    However, if another ``_epi`` file is present with a phase encoding
    direction matching the target, it will be used instead.

    Currently, different phase encoding axes in the target file and the
    ``_epi`` file(s) (for example, ``i`` and ``j``) are not supported.

    The warp field correcting for the distortions is estimated using AFNI's
    ``3dQwarp``, with displacement estimation limited to the target file phase
    encoding direction.

    It also calculates a new mask for the input dataset that takes into
    account the distortions.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from sdcflows.workflows.pepolar import init_pepolar_unwarp_wf
            wf = init_pepolar_unwarp_wf()

    Parameters
    ----------
    matched_pe : bool
        Whether the input ``fmaps_epi`` will contain images with matched
        PE blips or not. Please use :func:`sdcflows.workflows.pepolar.check_pes`
        to determine whether they exist or not.
    name : str
        Name for this workflow
    omp_nthreads : int
        Parallelize internal tasks across the number of CPUs given by this option.

    Inputs
    ------
    fmaps_epi : list of tuple(pathlike, str)
        The list of EPI images that will be used in PE-Polar correction, and
        their corresponding ``PhaseEncodingDirection`` metadata.
        The workflow will use the ``epi_pe_dir`` input to separate out those
        EPI acquisitions with opposed PE blips and those with matched PE blips
        (the latter could be none, and ``in_reference_brain`` would then be
        used). The workflow raises a ``ValueError`` when no images with
        opposed PE blips are found.
    epi_pe_dir : str
        The baseline PE direction.
    in_reference : pathlike
        The baseline reference image (must correspond to ``epi_pe_dir``).
    in_reference_brain : pathlike
        The reference image above, but skullstripped.
    in_mask : pathlike
        Not used, present only for consistency across fieldmap estimation
        workflows.

    Outputs
    -------
    out_reference : pathlike
        The ``in_reference`` after unwarping
    out_reference_brain : pathlike
        The ``in_reference`` after unwarping and skullstripping
    out_warp : pathlike
        The corresponding :abbr:`DFM (displacements field map)` compatible with
        ANTs.
    out_mask : pathlike
        Mask of the unwarped input file

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A B0-nonuniformity map (or *fieldmap*) was estimated based on two (or more)
echo-planar imaging (EPI) references with opposing phase-encoding
directions, with `3dQwarp` @afni (AFNI {afni_ver}).
""".format(afni_ver=''.join(['%02d' % v for v in afni.Info().version() or []]))

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['fmaps_epi', 'in_reference', 'in_reference_brain',
                'in_mask', 'epi_pe_dir']), name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_reference', 'out_reference_brain', 'out_warp', 'out_mask']),
        name='outputnode')

    prepare_epi_wf = init_prepare_epi_wf(omp_nthreads=omp_nthreads,
                                         matched_pe=matched_pe,
                                         name="prepare_epi_wf")

    qwarp = pe.Node(afni.QwarpPlusMinus(
        pblur=[0.05, 0.05], blur=[-1, -1], noweight=True, minpatch=9, nopadWARP=True,
        environ={'OMP_NUM_THREADS': '%d' % omp_nthreads}),
        name='qwarp', n_procs=omp_nthreads)
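    # NB: the `args` input wired below (via `_qwarp_args`, not shown in this
    # excerpt) restricts 3dQwarp's displacement estimation to the
    # phase-encoding axis, as the docstring states; e.g., for a PE direction
    # along `j`, only y-displacements would be allowed.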

    to_ants = pe.Node(niu.Function(function=_fix_hdr), name='to_ants',
                      mem_gb=0.01)

    cphdr_warp = pe.Node(CopyHeader(), name='cphdr_warp', mem_gb=0.01)

    unwarp_reference = pe.Node(ANTSApplyTransformsRPT(dimension=3,
                                                      generate_report=False,
                                                      float=True,
                                                      interpolation='LanczosWindowedSinc'),
                               name='unwarp_reference')

    enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf(
        omp_nthreads=omp_nthreads)

    workflow.connect([
        (inputnode, qwarp, [(('epi_pe_dir', _qwarp_args), 'args')]),
        (inputnode, cphdr_warp, [('in_reference', 'hdr_file')]),
        (inputnode, prepare_epi_wf, [
            ('fmaps_epi', 'inputnode.maps_pe'),
            ('epi_pe_dir', 'inputnode.epi_pe'),
            ('in_reference_brain', 'inputnode.ref_brain')]),
        (prepare_epi_wf, qwarp, [('outputnode.opposed_pe', 'base_file'),
                                 ('outputnode.matched_pe', 'in_file')]),
        (qwarp, cphdr_warp, [('source_warp', 'in_file')]),
        (cphdr_warp, to_ants, [('out_file', 'in_file')]),
        (to_ants, unwarp_reference, [('out', 'transforms')]),
        (inputnode, unwarp_reference, [('in_reference', 'reference_image'),
                                       ('in_reference', 'input_image')]),
        (unwarp_reference, enhance_and_skullstrip_bold_wf, [
            ('output_image', 'inputnode.in_file')]),
        (unwarp_reference, outputnode, [('output_image', 'out_reference')]),
        (enhance_and_skullstrip_bold_wf, outputnode, [
            ('outputnode.mask_file', 'out_mask'),
            ('outputnode.skull_stripped_file', 'out_reference_brain')]),
        (to_ants, outputnode, [('out', 'out_warp')]),
    ])

    return workflow
Example #15
def analyze_openfmri_dataset(data_dir,
                             subject=None,
                             model_id=None,
                             task_id=None,
                             output_dir=None,
                             subj_prefix='*',
                             hpcutoff=120.,
                             use_derivatives=True,
                             fwhm=6.0,
                             subjects_dir=None,
                             target=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    work_dir : str
        Nipype working directory (defaults to cwd)
    """
    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()
    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')
    """
    Set up openfmri data specific components
    """

    subjects = sorted([
        path.split(os.path.sep)[-1]
        for path in glob(os.path.join(data_dir, subj_prefix))
    ])

    infosource = pe.Node(
        niu.IdentityInterface(fields=['subject_id', 'model_id', 'task_id']),
        name='infosource')
    if not subject:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]), ('task_id', task_id)]
    else:
        infosource.iterables = [
            ('subject_id',
             [subjects[subjects.index(subj)] for subj in subject]),
            ('model_id', [model_id]), ('task_id', task_id)
        ]

    subjinfo = pe.Node(niu.Function(
        input_names=['subject_id', 'base_dir', 'task_id', 'model_id'],
        output_names=['run_id', 'conds', 'TR'],
        function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir
    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(nio.DataGrabber(
            infields=['subject_id', 'run_id', 'task_id', 'model_id'],
            outfields=['anat', 'bold', 'behav', 'contrasts']),
                             name='datasource')
    else:
        datasource = pe.Node(nio.DataGrabber(
            infields=['subject_id', 'run_id', 'task_id', 'model_id'],
            outfields=['anat', 'bold', 'behav']),
                             name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'

    if has_contrast:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt'),
            'contrasts': ('models/model%03d/'
                          'task_contrasts.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']],
            'contrasts': [['model_id']]
        }
    else:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']]
        }

    datasource.inputs.sort_filelist = True
    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([
        (datasource, preproc, [('bold', 'inputspec.func')]),
    ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2. * TR)
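    # NB: FSL's temporal filter takes its highpass cutoff as a sigma in
    # volumes, hence hpcutoff / (2 * TR): e.g., hpcutoff=120 s at TR=2 s
    # gives an operand of 30 volumes.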

    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')
    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([
                    np.array(row.split()) for row in fp.readlines()
                    if row.strip()
                ])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [
                row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                row[2:].astype(float).tolist()
            ]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(
        input_names=['contrast_file', 'task_id', 'conds'],
        output_names=['contrasts'],
        function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(
        interface=ra.ArtifactDetect(use_differences=[True, False],
                                    use_norm=True,
                                    norm_threshold=1,
                                    zintensity_threshold=3,
                                    parameter_source='FSL',
                                    mask_type='file'),
        iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
        name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        import numpy as np
        num_conds = len(conds)
        if isinstance(behav, (str, bytes)):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        return behav_array.reshape(int(num_elements / num_conds),
                                   num_conds).tolist()
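    # NB: e.g., six onset files for three conditions flatten to a length-6
    # array and come back as a 2 x 3 nested list, one row of condition files
    # per run; `run_id` is unused here but kept for the connection signature.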

    reshape_behav = pe.Node(niu.Function(
        input_names=['behav', 'run_id', 'conds'],
        output_names=['behav'],
        function=check_behav_list),
                            name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')

    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art,
                 [('outputspec.motion_parameters', 'realignment_parameters'),
                  ('outputspec.realigned_files', 'realigned_files'),
                  ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec,
                 [('outputspec.highpassed_files', 'functional_runs'),
                  ('outputspec.motion_parameters', 'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])])

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(CalculateMedian(), name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        outcopes = all_copes.reshape(int(len(all_copes) / num_copes),
                                     num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(int(len(all_varcopes) / num_copes),
                                           num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs
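    # NB: e.g., 2 runs x 3 contrasts arrive as six flattened cope files; the
    # reshape and transpose return three lists of two files each, so each
    # contrast is combined across runs downstream.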

    cope_sorter = pe.Node(niu.Function(
        input_names=['copes', 'varcopes', 'contrasts'],
        output_names=['copes', 'varcopes', 'n_runs'],
        function=sort_copes),
                          name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes', 'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [
                    ('outputspec.dof_file', 'inputspec.dof_files'),
                ])])

    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    if subjects_dir:
        wf.connect(infosource, 'subject_id', registration,
                   'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat', registration,
                   'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(
        input_names=['copes', 'varcopes', 'zstats'],
        output_names=['out_files', 'splits'],
        function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc, [
        ('copes', 'copes'),
        ('varcopes', 'varcopes'),
        ('zstats', 'zstats'),
    ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats
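
    # split_files is the inverse of merge_files: the `splits` lengths recorded
    # there recover the cope/varcope/zstat groups after registration has
    # transformed them as a single list.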

    splitfunc = pe.Node(niu.Function(
        input_names=['in_files', 'splits'],
        output_names=['copes', 'varcopes', 'zstats'],
        function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files', splitfunc,
               'in_files')

    if subjects_dir:
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node('outputspec'), 'copes', get_roi_mean,
                   'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_mean,
                   'segmentation_file')

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_tsnr,
                   'segmentation_file')
    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(
            ('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp', 'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i, '/cope%02d_' % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs
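
    # For instance, get_subs rewrites '_flameo0/cope1.nii.gz' as 'cope01.nii.gz'
    # and '__realign0/' as '/run01_' (for run_id[0] == 1), flattening nipype's
    # node directories into readable datasink paths.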

    subsgen = pe.Node(niu.Function(
        input_names=['subject_id', 'conds', 'run_id', 'model_id', 'task_id'],
        output_names=['substitutions'],
        function=get_subs),
                      name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'), ('copes', 'copes'),
                  ('varcopes', 'varcopes'), ('zstats', 'zstats'),
                  ('tstats', 'tstats')])])
    wf.connect([(modelfit.get_node('modelgen'), datasink, [
        ('design_cov', 'qa.model'),
        ('design_image', 'qa.model.@matrix_image'),
        ('design_file', 'qa.model.@matrix'),
    ])])
    wf.connect([(preproc, datasink,
                 [('outputspec.motion_parameters', 'qa.motion'),
                  ('outputspec.motion_plots', 'qa.motion.plots'),
                  ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask', datasink,
               'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file', datasink,
                   'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'qa.tsnr'),
                                              ('summary_file',
                                               'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink, [('avgwf_txt_file', 'copes.roi'),
                                              ('summary_file',
                                               'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink, [
        ('copes', 'copes.mni'),
        ('varcopes', 'varcopes.mni'),
        ('zstats', 'zstats.mni'),
    ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean', datasink,
               'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink,
               'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink,
               'xfm.anat2target')
    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
Example #16
0
def init_fmap_derivatives_wf(
    *,
    output_dir,
    bids_fmap_id=None,
    custom_entities=None,
    name="fmap_derivatives_wf",
    write_coeff=False,
):
    """
    Set up datasinks to store derivatives in the right location.

    Parameters
    ----------
    output_dir : :obj:`str`
        Directory in which to save derivatives
    bids_fmap_id : :obj:`str`
        Sets the ``B0FieldIdentifier`` metadata field on the outputs.
    custom_entities : :obj:`dict`
        Define extra entities that will be written out in filenames.
    name : :obj:`str`
        Workflow name (default: ``"fmap_derivatives_wf"``)
    write_coeff : :obj:`bool`
        Build the workflow path to map coefficients into target space.

    Inputs
    ------
    source_files
        One or more fieldmap file(s) of the BIDS dataset that will serve as naming reference.
    fieldmap
        The preprocessed fieldmap, in its original space with Hz units.
    fmap_coeff
        Field coefficient(s) file(s)
    fmap_ref
        An anatomical reference (e.g., magnitude file)

    """
    custom_entities = custom_entities or {}
    if bids_fmap_id:
        custom_entities["fmapid"] = bids_fmap_id.replace("_", "")

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "source_files", "fieldmap", "fmap_coeff", "fmap_ref", "fmap_meta"
        ]),
        name="inputnode",
    )

    ds_reference = pe.Node(
        DerivativesDataSink(
            base_directory=output_dir,
            compress=True,
            suffix="fieldmap",
            datatype="fmap",
            dismiss_entities=("fmap", ),
            allowed_entities=tuple(custom_entities.keys()),
        ),
        name="ds_reference",
    )

    ds_fieldmap = pe.Node(
        DerivativesDataSink(
            base_directory=output_dir,
            desc="preproc",
            suffix="fieldmap",
            datatype="fmap",
            compress=True,
            allowed_entities=tuple(custom_entities.keys()),
        ),
        name="ds_fieldmap",
    )
    ds_fieldmap.inputs.Units = "Hz"
    if bids_fmap_id:
        ds_fieldmap.inputs.B0FieldIdentifier = bids_fmap_id

    for k, v in custom_entities.items():
        setattr(ds_reference.inputs, k, v)
        setattr(ds_fieldmap.inputs, k, v)

    # fmt:off
    workflow.connect([
        (inputnode, ds_reference, [("source_files", "source_file"),
                                   ("fmap_ref", "in_file"),
                                   (("source_files", _getsourcetype), "desc")
                                   ]),
        (inputnode, ds_fieldmap, [("source_files", "source_file"),
                                  ("fieldmap", "in_file"),
                                  ("source_files", "RawSources")]),
        (ds_reference, ds_fieldmap, [
            (("out_file", _getname), "AnatomicalReference"),
        ]),
        (inputnode, ds_fieldmap, [(("fmap_meta", _selectintent), "IntendedFor")
                                  ]),
    ])
    # fmt:on

    if not write_coeff:
        return workflow

    ds_coeff = pe.MapNode(
        DerivativesDataSink(
            base_directory=output_dir,
            suffix="fieldmap",
            datatype="fmap",
            compress=True,
            allowed_entities=tuple(custom_entities.keys()),
        ),
        name="ds_coeff",
        iterfield=("in_file", "desc"),
    )

    gen_desc = pe.Node(niu.Function(function=_gendesc), name="gen_desc")

    for k, v in custom_entities.items():
        setattr(ds_coeff.inputs, k, v)

    # fmt:off
    workflow.connect([
        (inputnode, ds_coeff, [("source_files", "source_file"),
                               ("fmap_coeff", "in_file")]),
        (inputnode, gen_desc, [("fmap_coeff", "infiles")]),
        (gen_desc, ds_coeff, [("out", "desc")]),
        (ds_coeff, ds_fieldmap, [(("out_file", _getname),
                                  "AssociatedCoefficients")]),
    ])
    # fmt:on

    return workflow
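
A minimal usage sketch for init_fmap_derivatives_wf (the paths and identifier
below are illustrative placeholders, not from the source):

    fmap_wf = init_fmap_derivatives_wf(
        output_dir="/data/derivatives",
        bids_fmap_id="auto_00000",
        write_coeff=True,
    )
    fmap_wf.inputs.inputnode.source_files = ["sub-01/fmap/sub-01_dir-AP_epi.nii.gz"]
    fmap_wf.inputs.inputnode.fieldmap = "sub-01_fieldmap.nii.gz"
    fmap_wf.inputs.inputnode.fmap_ref = "sub-01_magnitude.nii.gz"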
Example #17
0
def create_pipeline(name="msmt_csd", opt=""):

    parameters = {
        'algorithm': 'dhollander',
        'no_bias': False,
        'preproc': False,
        'bthres': False,
        'mask': 'dwi2mask'
    }

    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=["dwi", "bvecs", "bvals", "t1_dw"]),
                        name="inputnode")

    if opt:
        opt_list = opt.split(',')
        for o in opt_list:
            try:
                [key, value] = o.split(':')
                parameters[key] = value
            except ValueError:
                print(o + ': irregular format, skipping')
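
    # e.g. opt="algorithm:msmt_5tt,bthres:200" overrides the defaults above;
    # entries without a ':' are reported and skipped.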

    mrconvert = pe.Node(interface=mrtrix3.MRConvert(), name='convert')

    bias_correct = pe.Node(interface=mrtrix3.DWIBiasCorrect(),
                           name='bias_correct')
    bias_correct.inputs.out_file = 'dwi_bias_corrected.mif'
    bias_correct.inputs.use_ants = True

    mask = pe.Node(interface=mrtrix3.BrainMask(), name='dwi_mask')

    gen5tt = pe.Node(interface=mrtrix3.Generate5tt(), name='gen5tt')
    gen5tt.inputs.algorithm = 'fsl'
    gen5tt.inputs.out_file = '5tt.mif'

    dwiextract = pe.Node(interface=mrtrix3.DWIExtract(), name='dwiextract')
    dwiextract.inputs.out_file = 'dwi_nobzero.mif'

    bval = pe.Node(name='bval',
                   interface=util.Function(input_names=['bval_file', 'thres'],
                                           output_names=['bval_list'],
                                           function=generate_bval_list))

    resp = pe.Node(interface=mrtrix3.ResponseSD(), name='response')
    resp.inputs.algorithm = parameters['algorithm']
    resp.inputs.gm_file = 'gm.txt'
    resp.inputs.csf_file = 'csf.txt'

    dwi2fod = pe.Node(interface=mrtrix3.EstimateFOD(), name='FOD')
    dwi2fod.inputs.algorithm = 'msmt_csd'
    dwi2fod.inputs.gm_odf = 'gm.mif'
    dwi2fod.inputs.csf_odf = 'csf.mif'

    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    workflow.connect([(inputnode, mrconvert, [("bvecs", "in_bvec"),
                                              ("bvals", "in_bval")])])

    if parameters['algorithm'] == 'msmt_5tt':
        workflow.connect([(inputnode, gen5tt, [['t1_dw', 'in_file']]),
                          (gen5tt, resp, [['out_file', 'mtt_file']])])

    if int(parameters['bthres']) > 0:
        bval.inputs.thres = float(parameters['bthres'])
        workflow.connect([(inputnode, bval, [("bvals", "bval_file")]),
                          (bval, dwiextract, [("bval_list", "shell")])])

    workflow.connect([(inputnode, mrconvert, [['dwi', 'in_file']])])

    if parameters['no_bias'] and parameters['bthres']:
        workflow.connect([(mrconvert, dwiextract, [['out_file', 'in_file']]),
                          (mrconvert, mask, [['out_file', 'in_file']]),
                          (dwiextract, resp, [['out_file', 'in_file']]),
                          (dwiextract, dwi2fod, [['out_file', 'in_file']])])
    elif parameters['bthres']:
        workflow.connect([(mrconvert, bias_correct, [['out_file', 'in_file']]),
                          (bias_correct, dwiextract, [['out_file',
                                                       'in_file']]),
                          (bias_correct, mask, [['out_file', 'in_file']]),
                          (dwiextract, resp, [['out_file', 'in_file']]),
                          (dwiextract, dwi2fod, [['out_file', 'in_file']])])
    elif parameters['no_bias']:
        workflow.connect([(mrconvert, bias_correct, [['out_file', 'in_file']]),
                          (bias_correct, mask, [['out_file', 'in_file']]),
                          (bias_correct, resp, [['out_file', 'in_file']]),
                          (bias_correct, dwi2fod, [['out_file', 'in_file']])])
    else:
        workflow.connect([(mrconvert, mask, [['out_file', 'in_file']]),
                          (mrconvert, resp, [['out_file', 'in_file']]),
                          (mrconvert, dwi2fod, [['out_file', 'in_file']])])

    if parameters['mask'] == 'dwi2mask':
        workflow.connect([(mask, dwi2fod, [['out_file', 'mask_file']])])
    else:
        dwi2fod.inputs.mask_file = os.path.abspath(parameters['mask'])

    workflow.connect([(resp, dwi2fod, [("wm_file", "wm_txt"),
                                       ("gm_file", "gm_txt"),
                                       ("csf_file", "csf_txt")])])

    output_fields = ["odf", "seed"]
    outputnode = pe.Node(
        interface=util.IdentityInterface(fields=output_fields),
        name="outputnode")

    workflow.connect([(dwi2fod, outputnode, [("wm_odf", "odf")]),
                      (mask, outputnode, [("out_file", "seed")])])

    return workflow
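
A hypothetical invocation of create_pipeline (filenames are placeholders):

    csd_wf = create_pipeline(name="msmt_csd", opt="bthres:200")
    csd_wf.inputs.inputnode.dwi = "dwi.mif"
    csd_wf.inputs.inputnode.bvecs = "dwi.bvec"
    csd_wf.inputs.inputnode.bvals = "dwi.bval"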
Example #18
0
def init_surface_recon_wf(omp_nthreads, hires, name='surface_recon_wf'):
    r"""
    Reconstruct anatomical surfaces using FreeSurfer's ``recon-all``.

    Reconstruction is performed in three phases.
    The first phase initializes the subject with T1w and T2w (if available)
    structural images and performs basic reconstruction (``autorecon1``) with the
    exception of skull-stripping.
    For example, a subject with only one session with T1w and T2w images
    would be processed by the following command::

        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -i <bids-root>/sub-<subject_label>/anat/sub-<subject_label>_T1w.nii.gz \
            -T2 <bids-root>/sub-<subject_label>/anat/sub-<subject_label>_T2w.nii.gz \
            -autorecon1 \
            -noskullstrip

    The second phase imports an externally computed skull-stripping mask.
    This workflow refines the external brainmask using the internal mask
    implicit in FreeSurfer's ``aseg.mgz`` segmentation,
    to reconcile ANTs' and FreeSurfer's brain masks.

    First, the ``aseg.mgz`` mask from FreeSurfer is refined in two
    steps, using binary morphological operations:

      1. With a binary closing operation the sulci are included
         into the mask. This results in a smoother brain mask
         that does not exclude deep, wide sulci.

      2. Fill any holes (typically, there could be a hole next to
         the pineal gland and the corpora quadrigemina if the great
         cerebral vein is segmented out).

    Second, the brain mask is grown, including pixels that have a high
    likelihood of belonging to the GM tissue distribution:

      3. Dilate and subtract the brain mask, defining the region to search for candidate
         pixels that likely belong to cortical GM.

      4. Pixels found in the search region that are labeled as GM by ANTs
         (during ``antsBrainExtraction.sh``) are directly added to the new mask.

      5. Otherwise, estimate GM tissue parameters locally in patches of ``ww`` size,
         and test the likelihood of the pixel belonging to the GM distribution.

    This procedure is inspired by mindboggle's solution to the problem:
    https://github.com/nipy/mindboggle/blob/7f91faaa7664d820fe12ccc52ebaf21d679795e2/mindboggle/guts/segment.py#L1660
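
    A rough sketch of steps 1-3, assuming ``scipy.ndimage`` (the actual
    implementation is the ``RefineBrainMask`` interface instantiated below)::

        from scipy import ndimage as ndi
        # aseg_mask: boolean array derived from aseg.mgz (hypothetical input)
        closed = ndi.binary_closing(aseg_mask, iterations=2)  # 1. close sulci
        filled = ndi.binary_fill_holes(closed)                # 2. fill holes
        search = ndi.binary_dilation(filled) ^ filled         # 3. search region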

    The final phase resumes reconstruction, using the T2w image to assist
    in finding the pial surface, if available.
    See :py:func:`~smriprep.workflows.surfaces.init_autorecon_resume_wf` for details.

    Memory annotations for FreeSurfer are based on `their documentation
    <https://surfer.nmr.mgh.harvard.edu/fswiki/SystemRequirements>`_.
    They specify an allocation of 4GB per subject. Here we allocate 5GB
    to leave a margin.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from smriprep.workflows.surfaces import init_surface_recon_wf
            wf = init_surface_recon_wf(omp_nthreads=1, hires=True)

    Parameters
    ----------
    omp_nthreads : int
        Maximum number of threads an individual process may use
    hires : bool
        Enable sub-millimeter preprocessing in FreeSurfer

    Inputs
    ------
    t1w
        List of T1-weighted structural images
    t2w
        List of T2-weighted structural images (only first used)
    flair
        List of FLAIR images
    skullstripped_t1
        Skull-stripped T1-weighted image (or mask of image)
    ants_segs
        Brain tissue segmentation from ANTS ``antsBrainExtraction.sh``
    corrected_t1
        INU-corrected, merged T1-weighted image
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID

    Outputs
    -------
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
    fsnative2t1w_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w
    surfaces
        GIFTI surfaces for gray/white matter boundary, pial surface,
        midthickness (or graymid) surface, and inflated surfaces
    out_brainmask
        Refined brainmask, derived from FreeSurfer's ``aseg`` volume
    out_aseg
        FreeSurfer's aseg segmentation, in native T1w space
    out_aparc
        FreeSurfer's aparc+aseg segmentation, in native T1w space

    See also
    --------
    * :py:func:`~smriprep.workflows.surfaces.init_autorecon_resume_wf`
    * :py:func:`~smriprep.workflows.surfaces.init_gifti_surface_wf`

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
Brain surfaces were reconstructed using `recon-all` [FreeSurfer {fs_ver},
RRID:SCR_001847, @fs_reconall], and the brain mask estimated
previously was refined with a custom variation of the method to reconcile
ANTs-derived and FreeSurfer-derived segmentations of the cortical
gray-matter of Mindboggle [RRID:SCR_002438, @mindboggle].
""".format(fs_ver=fs.Info().looseversion() or '<ver>')

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        't1w', 't2w', 'flair', 'skullstripped_t1', 'corrected_t1', 'ants_segs',
        'subjects_dir', 'subject_id'
    ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'subjects_dir', 'subject_id', 't1w2fsnative_xfm', 'fsnative2t1w_xfm',
        'surfaces', 'out_brainmask', 'out_aseg', 'out_aparc'
    ]),
                         name='outputnode')

    recon_config = pe.Node(FSDetectInputs(hires_enabled=hires),
                           name='recon_config')

    fov_check = pe.Node(niu.Function(function=_check_cw256), name='fov_check')

    autorecon1 = pe.Node(ReconAll(directive='autorecon1', openmp=omp_nthreads),
                         name='autorecon1',
                         n_procs=omp_nthreads,
                         mem_gb=5)
    autorecon1.interface._can_resume = False
    autorecon1.interface._always_run = True

    skull_strip_extern = pe.Node(FSInjectBrainExtracted(),
                                 name='skull_strip_extern')

    fsnative2t1w_xfm = pe.Node(RobustRegister(auto_sens=True,
                                              est_int_scale=True),
                               name='fsnative2t1w_xfm')
    t1w2fsnative_xfm = pe.Node(LTAConvert(out_lta=True, invert=True),
                               name='t1w2fsnative_xfm')

    autorecon_resume_wf = init_autorecon_resume_wf(omp_nthreads=omp_nthreads)
    gifti_surface_wf = init_gifti_surface_wf()

    aseg_to_native_wf = init_segs_to_native_wf()
    aparc_to_native_wf = init_segs_to_native_wf(segmentation='aparc_aseg')
    refine = pe.Node(RefineBrainMask(), name='refine')

    workflow.connect([
        # Configuration
        (inputnode, recon_config, [('t1w', 't1w_list'), ('t2w', 't2w_list'),
                                   ('flair', 'flair_list')]),
        # Passing subjects_dir / subject_id enforces serial order
        (inputnode, autorecon1, [('subjects_dir', 'subjects_dir'),
                                 ('subject_id', 'subject_id')]),
        (autorecon1, skull_strip_extern, [('subjects_dir', 'subjects_dir'),
                                          ('subject_id', 'subject_id')]),
        (skull_strip_extern, autorecon_resume_wf,
         [('subjects_dir', 'inputnode.subjects_dir'),
          ('subject_id', 'inputnode.subject_id')]),
        (autorecon_resume_wf, gifti_surface_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        # Reconstruction phases
        (inputnode, autorecon1, [('t1w', 'T1_files')]),
        (inputnode, fov_check, [('t1w', 'in_files')]),
        (fov_check, autorecon1, [('out', 'flags')]),
        (
            recon_config,
            autorecon1,
            [
                ('t2w', 'T2_file'),
                ('flair', 'FLAIR_file'),
                ('hires', 'hires'),
                # First run only (recon-all saves expert options)
                ('mris_inflate', 'mris_inflate')
            ]),
        (inputnode, skull_strip_extern, [('skullstripped_t1', 'in_brain')]),
        (recon_config, autorecon_resume_wf, [('use_t2w', 'inputnode.use_T2'),
                                             ('use_flair',
                                              'inputnode.use_FLAIR')]),
        # Construct transform from FreeSurfer conformed image to sMRIPrep
        # reoriented image
        (inputnode, fsnative2t1w_xfm, [('t1w', 'target_file')]),
        (autorecon1, fsnative2t1w_xfm, [('T1', 'source_file')]),
        (fsnative2t1w_xfm, gifti_surface_wf, [('out_reg_file',
                                               'inputnode.fsnative2t1w_xfm')]),
        (fsnative2t1w_xfm, t1w2fsnative_xfm, [('out_reg_file', 'in_lta')]),
        # Refine ANTs mask, deriving new mask from FS' aseg
        (inputnode, refine, [('corrected_t1', 'in_anat'),
                             ('ants_segs', 'in_ants')]),
        (inputnode, aseg_to_native_wf, [('corrected_t1', 'inputnode.in_file')
                                        ]),
        (autorecon_resume_wf, aseg_to_native_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        (inputnode, aparc_to_native_wf, [('corrected_t1', 'inputnode.in_file')
                                         ]),
        (autorecon_resume_wf, aparc_to_native_wf,
         [('outputnode.subjects_dir', 'inputnode.subjects_dir'),
          ('outputnode.subject_id', 'inputnode.subject_id')]),
        (aseg_to_native_wf, refine, [('outputnode.out_file', 'in_aseg')]),

        # Output
        (autorecon_resume_wf, outputnode,
         [('outputnode.subjects_dir', 'subjects_dir'),
          ('outputnode.subject_id', 'subject_id')]),
        (gifti_surface_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
        (t1w2fsnative_xfm, outputnode, [('out_lta', 't1w2fsnative_xfm')]),
        (fsnative2t1w_xfm, outputnode, [('out_reg_file', 'fsnative2t1w_xfm')]),
        (refine, outputnode, [('out_file', 'out_brainmask')]),
        (aseg_to_native_wf, outputnode, [('outputnode.out_file', 'out_aseg')]),
        (aparc_to_native_wf, outputnode, [('outputnode.out_file', 'out_aparc')
                                          ]),
    ])

    return workflow
Example #19
0
def create_nuisance(use_ants, name='nuisance'):
    """
    Workflow for the removal of various signals considered to be noise in resting-state
    fMRI data.  Linear regression denoising is performed in a single model;
    therefore the residual time series will be orthogonal to all nuisance signals.
    
    Parameters
    ----------
    name : string, optional
        Name of the workflow.
    
    Returns
    -------
    nuisance : nipype.pipeline.engine.Workflow
        Nuisance workflow.
        
    Notes
    -----
    
    Workflow Inputs::

        inputspec.subject : string (nifti file)
            Path of the subject's realigned nifti file.
        inputspec.wm_mask : string (nifti file)
            Corresponding white matter mask.
        inputspec.csf_mask : string (nifti file)
            Corresponding cerebral spinal fluid mask.
        inputspec.gm_mask : string (nifti file)
            Corresponding grey matter mask.
        inputspec.mni_to_anat_linear_xfm : string (nifti file)
            Corresponding MNI to anatomical linear transformation 
        inputspec.func_to_anat_linear_xfm : string (nifti file)
            Corresponding EPI to anatomical linear transformation
        inputspec.harvard_oxford_mask : string (nifti file)
            Harvard-Oxford parcellation of ventricle locations
        inputspec.motion_components : string (text file)
            Corresponding rigid-body motion parameters.  Matrix in the file should be of shape 
            (`T`, `R`), `T` timepoints and `R` motion parameters.
        inputspec.selector : dictionary
        inputspec.compcor_ncomponents : integer
        
    Workflow Outputs::

        outputspec.subject : string (nifti file)
            Path of residual file in nifti format
        outputspec.regressors : string (csv file)
            Path of the csv file of regressors used.  The column headers give
            the name of each regressor.
            
    Nuisance Procedure:
    
    1. Compute nuisance regressors based on input selections.
    2. Calculate residuals with respect to these nuisance regressors in a
       single model for every voxel.
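
    For step 2, a one-voxel sketch assuming ``numpy`` (with ``X`` the (T, R)
    nuisance-regressor matrix assembled in step 1 and ``y`` a voxel time
    series)::

        import numpy as np
        beta = np.linalg.lstsq(X, y, rcond=None)[0]
        residual = y - X.dot(beta)  # orthogonal to every column of X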
    
    Workflow Graph:
    
    .. image:: ../images/nuisance.dot.png
        :width: 500
    
    Detailed Workflow Graph:
    
    .. image:: ../images/nuisance_detailed.dot.png
        :width: 500    
    
    """
    nuisance = pe.Workflow(name=name)
    
    inputspec = pe.Node(util.IdentityInterface(fields=['subject',
                                                       'wm_mask',
                                                       'csf_mask',
                                                       'gm_mask',
                                                       'mni_to_anat_linear_xfm',
                                                       'anat_to_mni_initial_xfm',
                                                       'anat_to_mni_rigid_xfm',
                                                       'anat_to_mni_affine_xfm',
                                                       'func_to_anat_linear_xfm',
                                                       'lat_ventricles_mask',
                                                       'motion_components',
                                                       'selector',
                                                       'compcor_ncomponents',
                                                       'template_brain']),
                        name='inputspec')
    outputspec = pe.Node(util.IdentityInterface(fields=['subject',
                                                        'regressors']),
                         name='outputspec')


    # Resampling the masks from 1mm to 2mm, but remaining in subject space
    wm_anat_to_2mm = pe.Node(interface=fsl.FLIRT(), name='wm_anat_to_2mm_flirt_applyxfm')
    wm_anat_to_2mm.inputs.args = '-applyisoxfm 2'
    wm_anat_to_2mm.inputs.interp = 'nearestneighbour'

    nuisance.connect(inputspec, 'wm_mask', wm_anat_to_2mm, 'in_file')
    nuisance.connect(inputspec, 'wm_mask', wm_anat_to_2mm, 'reference')
 

    # Resampling the masks from 1mm to 2mm, but remaining in subject space
    csf_anat_to_2mm = pe.Node(interface=fsl.FLIRT(), name='csf_anat_to_2mm_flirt_applyxfm')
    csf_anat_to_2mm.inputs.args = '-applyisoxfm 2'
    csf_anat_to_2mm.inputs.interp = 'nearestneighbour'

    nuisance.connect(inputspec, 'csf_mask', csf_anat_to_2mm, 'in_file')
    nuisance.connect(inputspec, 'csf_mask', csf_anat_to_2mm, 'reference')

    
    # Resampling the masks from 1mm to 2mm, but remaining in subject space
    gm_anat_to_2mm = pe.Node(interface=fsl.FLIRT(), name='gm_anat_to_2mm_flirt_applyxfm')
    gm_anat_to_2mm.inputs.args = '-applyisoxfm 2'
    gm_anat_to_2mm.inputs.interp = 'nearestneighbour'

    nuisance.connect(inputspec, 'gm_mask', gm_anat_to_2mm, 'in_file')
    nuisance.connect(inputspec, 'gm_mask', gm_anat_to_2mm, 'reference')


    func_to_2mm = pe.Node(interface=fsl.FLIRT(), name='func_to_2mm_flirt_applyxfm')
    func_to_2mm.inputs.args = '-applyisoxfm 2'

    nuisance.connect(inputspec, 'subject', func_to_2mm, 'in_file')
    nuisance.connect(inputspec, 'csf_mask', func_to_2mm, 'reference')
    nuisance.connect(inputspec, 'func_to_anat_linear_xfm', func_to_2mm, 'in_matrix_file')


    if use_ants:

        collect_linear_transforms = pe.Node(util.Merge(3), name='ho_mni_to_2mm_ants_collect_linear_transforms')

        ho_mni_to_2mm = pe.Node(interface=ants.ApplyTransforms(), name='ho_mni_to_2mm_ants_applyxfm')

        ho_mni_to_2mm.inputs.invert_transform_flags = [True, True, True]
        ho_mni_to_2mm.inputs.interpolation = 'NearestNeighbor'
        ho_mni_to_2mm.inputs.dimension = 3
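
        # The three anat->MNI transforms are applied with the invert flags set
        # above, so the composite warp takes the MNI-space ventricle mask back
        # into anatomical space at 2 mm.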

        nuisance.connect(inputspec, 'anat_to_mni_initial_xfm', collect_linear_transforms, 'in1')
        nuisance.connect(inputspec, 'anat_to_mni_rigid_xfm', collect_linear_transforms, 'in2')
        nuisance.connect(inputspec, 'anat_to_mni_affine_xfm', collect_linear_transforms, 'in3')

        nuisance.connect(collect_linear_transforms, 'out', ho_mni_to_2mm, 'transforms')

        nuisance.connect(inputspec, 'lat_ventricles_mask', ho_mni_to_2mm, 'input_image')
        nuisance.connect(csf_anat_to_2mm, 'out_file', ho_mni_to_2mm, 'reference_image')

    else:

        ho_mni_to_2mm = pe.Node(interface=fsl.FLIRT(), name='ho_mni_to_2mm_flirt_applyxfm')
        ho_mni_to_2mm.inputs.args = '-applyisoxfm 2'
        ho_mni_to_2mm.inputs.interp = 'nearestneighbour'

        nuisance.connect(inputspec, 'mni_to_anat_linear_xfm', ho_mni_to_2mm, 'in_matrix_file')
        nuisance.connect(inputspec, 'lat_ventricles_mask', ho_mni_to_2mm, 'in_file')
        nuisance.connect(inputspec, 'csf_mask', ho_mni_to_2mm, 'reference')


    tissue_masks = pe.Node(util.Function(input_names=['data_file',
                                                      'ventricles_mask_file',
                                                      'wm_seg_file', 'csf_seg_file', 'gm_seg_file',
                                                      'wm_threshold', 'csf_threshold', 'gm_threshold'],
                                         output_names=['file_wm', 'file_csf', 'file_gm'],
                                         function=extract_tissue_data),
                           name='tissue_masks')
    


    nuisance.connect(func_to_2mm, 'out_file', tissue_masks, 'data_file')
    nuisance.connect(wm_anat_to_2mm, 'out_file', tissue_masks, 'wm_seg_file')
    nuisance.connect(csf_anat_to_2mm, 'out_file', tissue_masks, 'csf_seg_file')
    nuisance.connect(gm_anat_to_2mm, 'out_file', tissue_masks, 'gm_seg_file')

    if use_ants:
        nuisance.connect(ho_mni_to_2mm, 'output_image', tissue_masks, 'ventricles_mask_file')

    else:
        nuisance.connect(ho_mni_to_2mm, 'out_file', tissue_masks, 'ventricles_mask_file')



    calc_r = pe.Node(util.Function(input_names=['subject',
                                                'selector',
                                                'wm_sig_file',
                                                'csf_sig_file',
                                                'gm_sig_file',
                                                'motion_file',
                                                'compcor_ncomponents'],
                                   output_names=['residual_file',
                                                'regressors_file'],
                                   function=calc_residuals),
                     name='residuals')
    nuisance.connect(inputspec, 'subject',
                     calc_r, 'subject')
    nuisance.connect(tissue_masks, 'file_wm',
                     calc_r, 'wm_sig_file')
    nuisance.connect(tissue_masks, 'file_csf',
                     calc_r, 'csf_sig_file')
    nuisance.connect(tissue_masks, 'file_gm',
                     calc_r, 'gm_sig_file')
    nuisance.connect(inputspec, 'motion_components',
                     calc_r, 'motion_file')
    nuisance.connect(inputspec, 'selector',
                     calc_r, 'selector')
    nuisance.connect(inputspec, 'compcor_ncomponents',
                     calc_r, 'compcor_ncomponents')
    nuisance.connect(calc_r, 'residual_file',
                     outputspec, 'subject')
    nuisance.connect(calc_r, 'regressors_file',
                     outputspec, 'regressors')
    
    return nuisance
Example #20
0
def create_skullstripped_recon_flow(name="skullstripped_recon_all"):
    """Performs recon-all on voulmes that are already skull stripped.
    FreeSurfer failes to perform skullstrippig on some volumes (especially
    MP2RAGE). This can be avoided by doing skullstripping before running
    recon-all (using for example SPECTRE algorithm).

    Example
    -------
    >>> from nipype.workflows.smri.freesurfer import create_skullstripped_recon_flow
    >>> recon_flow = create_skullstripped_recon_flow()
    >>> recon_flow.inputs.inputspec.subject_id = 'subj1'
    >>> recon_flow.inputs.inputspec.T1_files = 'T1.nii.gz'
    >>> recon_flow.run()  # doctest: +SKIP


    Inputs::
           inputspec.T1_files : skullstripped T1_files (mandatory)
           inputspec.subject_id : freesurfer subject id (optional)
           inputspec.subjects_dir : freesurfer subjects directory (optional)

    Outputs::

           outputspec.subject_id : freesurfer subject id
           outputspec.subjects_dir : freesurfer subjects directory
    """
    wf = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['subject_id', 'subjects_dir', 'T1_files']),
                        name='inputspec')

    autorecon1 = pe.Node(fs.ReconAll(), name="autorecon1")
    autorecon1.plugin_args = {'submit_specs': 'request_memory = 2500'}
    autorecon1.inputs.directive = "autorecon1"
    autorecon1.inputs.args = "-noskullstrip"
    autorecon1._interface._can_resume = False

    wf.connect(inputnode, "T1_files", autorecon1, "T1_files")
    wf.connect(inputnode, "subjects_dir", autorecon1, "subjects_dir")
    wf.connect(inputnode, "subject_id", autorecon1, "subject_id")

    def link_masks(subjects_dir, subject_id):
        import os
        os.symlink(
            os.path.join(subjects_dir, subject_id, "mri", "T1.mgz"),
            os.path.join(subjects_dir, subject_id, "mri",
                         "brainmask.auto.mgz"))
        os.symlink(
            os.path.join(subjects_dir, subject_id, "mri",
                         "brainmask.auto.mgz"),
            os.path.join(subjects_dir, subject_id, "mri", "brainmask.mgz"))
        return subjects_dir, subject_id

    masks = pe.Node(niu.Function(input_names=['subjects_dir', 'subject_id'],
                                 output_names=['subjects_dir', 'subject_id'],
                                 function=link_masks),
                    name="link_masks")

    wf.connect(autorecon1, "subjects_dir", masks, "subjects_dir")
    wf.connect(autorecon1, "subject_id", masks, "subject_id")

    autorecon_resume = pe.Node(fs.ReconAll(), name="autorecon_resume")
    autorecon_resume.plugin_args = {'submit_specs': 'request_memory = 2500'}
    autorecon_resume.inputs.args = "-no-isrunning"
    wf.connect(masks, "subjects_dir", autorecon_resume, "subjects_dir")
    wf.connect(masks, "subject_id", autorecon_resume, "subject_id")

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['subject_id', 'subjects_dir']),
        name='outputspec')

    wf.connect(autorecon_resume, "subjects_dir", outputnode, "subjects_dir")
    wf.connect(autorecon_resume, "subject_id", outputnode, "subject_id")
    return wf
Example #21
0
    def _run_function_workflow(self, num_gb, num_threads):
        '''
        Function to run the use_resources() function in a nipype workflow
        and return the runtime stats recorded by the profiler

        Parameters
        ----------
        self : RuntimeProfileTestCase
            a unittest.TestCase-inherited class
        num_gb : float
            number of gigabytes of memory for use_resources() to consume
        num_threads : int
            number of threads for use_resources() to spawn

        Returns
        -------
        start_str, finish_str : string
            json-compatible dictionary strings containing the runtime
            statistics of the nipype node that used system resources
        '''

        # Import packages
        import logging
        import os
        import shutil
        import tempfile

        import nipype.pipeline.engine as pe
        import nipype.interfaces.utility as util
        from nipype.pipeline.plugins.callback_log import log_nodes_cb

        # Init variables
        base_dir = tempfile.mkdtemp()
        log_file = os.path.join(base_dir, 'callback.log')

        # Init logger
        logger = logging.getLogger('callback')
        logger.setLevel(logging.DEBUG)
        handler = logging.FileHandler(log_file)
        logger.addHandler(handler)

        # Declare workflow
        wf = pe.Workflow(name='test_runtime_prof_func')
        wf.base_dir = base_dir

        # Input node
        input_node = pe.Node(
            util.IdentityInterface(fields=['num_gb', 'num_threads']),
            name='input_node')
        input_node.inputs.num_gb = num_gb
        input_node.inputs.num_threads = num_threads

        # Resources used node
        resource_node = pe.Node(util.Function(
            input_names=['num_threads', 'num_gb'],
            output_names=[],
            function=use_resources),
                                name='resource_node')
        resource_node.interface.estimated_memory_gb = num_gb
        resource_node.interface.num_threads = num_threads

        # Connect workflow
        wf.connect(input_node, 'num_gb', resource_node, 'num_gb')
        wf.connect(input_node, 'num_threads', resource_node, 'num_threads')

        # Run workflow
        plugin_args = {
            'n_procs': num_threads,
            'memory': num_gb,
            'status_callback': log_nodes_cb
        }
        wf.run(plugin='MultiProc', plugin_args=plugin_args)

        # Get runtime stats from the callback log (log_nodes_cb writes one
        # JSON line at node start and one at node finish)
        with open(log_file, 'r') as log_handle:
            lines = log_handle.readlines()
        start_str = lines[0].rstrip('\n')
        finish_str = lines[1].rstrip('\n')

        # Delete wf base dir
        shutil.rmtree(base_dir)

        # Return runtime stats
        return start_str, finish_str
Example #22
0
def create_reconall_workflow(name="ReconAll", plugin_args=None):
    """Creates the ReconAll workflow in Nipype. This workflow is designed to
    run the same commands as FreeSurfer's reconall script but with the added
    features that a Nipype workflow provides. Before running this workflow, it
    is necessary to have the FREESURFER_HOME environmental variable set to the
    directory containing the version of FreeSurfer to be used in this workflow.

    Example
    -------
    >>> from nipype.workflows.smri.freesurfer import create_reconall_workflow
    >>> recon_all = create_reconall_workflow()
    >>> recon_all.inputs.inputspec.subject_id = 'subj1'
    >>> recon_all.inputs.inputspec.subjects_dir = '.'
    >>> recon_all.inputs.inputspec.T1_files = 'T1.nii.gz'
    >>> recon_all.run()  # doctest: +SKIP


    Inputs::
           inputspec.subjects_dir : subjects directory (mandatory)
           inputspec.subject_id : name of subject (mandatory)
           inputspec.T1_files : T1 files (mandatory)
           inputspec.T2_file : T2 file (optional)
           inputspec.FLAIR_file : FLAIR file (optional)
           inputspec.cw256 : Conform inputs to 256 FOV (optional)
           inputspec.num_threads: Number of threads on nodes that utilize OpenMP (default=1)
           plugin_args : Dictionary of plugin args to set to nodes that utilize OpenMP (optional)
    Outputs::
           postdatasink_outputspec.subject_id : name of the datasinked output folder in the subjects directory

    Note:
    The input subject_id is not passed to the commands in the workflow. Commands
    that require subject_id read implicit inputs from
    {SUBJECTS_DIR}/{subject_id}. For those commands the subject_id is set to the
    default value and SUBJECTS_DIR is set to the node directory. The implicit
    inputs are then copied to the node directory in order to mimic a SUBJECTS_DIR
    structure. For example, if the command implicitly reads in brainmask.mgz, the
    interface would copy that input file to
    {node_dir}/{subject_id}/mri/brainmask.mgz and set SUBJECTS_DIR to node_dir.
    The workflow only uses the input subject_id to datasink the outputs to
    {subjects_dir}/{subject_id}.
    """
    reconall = pe.Workflow(name=name)

    inputspec = pe.Node(niu.IdentityInterface(fields=[
        'subject_id', 'subjects_dir', 'T1_files', 'T2_file', 'FLAIR_file',
        'num_threads', 'cw256', 'reg_template', 'reg_template_withskull',
        'lh_atlas', 'rh_atlas', 'lh_classifier1', 'rh_classifier1',
        'lh_classifier2', 'rh_classifier2', 'lh_classifier3', 'rh_classifier3',
        'lookup_table', 'wm_lookup_table', 'src_subject_id', 'src_subject_dir',
        'color_table', 'awk_file'
    ]),
                        run_without_submitting=True,
                        name='inputspec')

    # check freesurfer version and set parameters
    fs_version_full = Info.version()
    if fs_version_full and ('v6.0' in fs_version_full
                            or 'dev' in fs_version_full):
        # assuming that dev is 6.0
        fsvernum = 6.0
        fs_version = 'v6.0'
        th3 = True
        shrink = 2
        distance = 200  # 3T should be 50
        stop = 0.0001
        exvivo = True
        entorhinal = True
        rb_date = "2014-08-21"
    else:
        # 5.3 is default
        fsvernum = 5.3
        if fs_version_full:
            if 'v5.3' in fs_version_full:
                fs_version = 'v5.3'
            else:
                fs_version = fs_version_full.split('-')[-1]
                logger.info(("Warning: Workflow may not work properly if "
                             "FREESURFER_HOME environmental variable is not "
                             "set or if you are using an older version of "
                             "FreeSurfer"))
        else:
            fs_version = 5.3  # assume version 5.3
        th3 = False
        shrink = None
        distance = 50
        stop = None
        exvivo = False
        entorhinal = False
        rb_date = "2008-03-26"

    logger.info("FreeSurfer Version: {0}".format(fs_version))

    def setconfig(reg_template=None,
                  reg_template_withskull=None,
                  lh_atlas=None,
                  rh_atlas=None,
                  lh_classifier1=None,
                  rh_classifier1=None,
                  lh_classifier2=None,
                  rh_classifier2=None,
                  lh_classifier3=None,
                  rh_classifier3=None,
                  src_subject_id=None,
                  src_subject_dir=None,
                  color_table=None,
                  lookup_table=None,
                  wm_lookup_table=None,
                  awk_file=None,
                  rb_date=None):
        """Set optional configurations to the default"""
        def checkarg(arg, default):
            """Returns the value if defined; otherwise default"""
            if arg:
                return arg
            else:
                return default

        defaultconfig = getdefaultconfig(exitonfail=True, rb_date=rb_date)
        # set the default template and classifier files
        reg_template = checkarg(reg_template,
                                defaultconfig['registration_template'])
        reg_template_withskull = checkarg(
            reg_template_withskull,
            defaultconfig['registration_template_withskull'])
        lh_atlas = checkarg(lh_atlas, defaultconfig['lh_atlas'])
        rh_atlas = checkarg(rh_atlas, defaultconfig['rh_atlas'])
        lh_classifier1 = checkarg(lh_classifier1,
                                  defaultconfig['lh_classifier'])
        rh_classifier1 = checkarg(rh_classifier1,
                                  defaultconfig['rh_classifier'])
        lh_classifier2 = checkarg(lh_classifier2,
                                  defaultconfig['lh_classifier2'])
        rh_classifier2 = checkarg(rh_classifier2,
                                  defaultconfig['rh_classifier2'])
        lh_classifier3 = checkarg(lh_classifier3,
                                  defaultconfig['lh_classifier3'])
        rh_classifier3 = checkarg(rh_classifier3,
                                  defaultconfig['rh_classifier3'])
        src_subject_id = checkarg(src_subject_id,
                                  defaultconfig['src_subject_id'])
        src_subject_dir = checkarg(src_subject_dir,
                                   defaultconfig['src_subject_dir'])
        color_table = checkarg(color_table, defaultconfig['AvgColorTable'])
        lookup_table = checkarg(lookup_table, defaultconfig['LookUpTable'])
        wm_lookup_table = checkarg(wm_lookup_table,
                                   defaultconfig['WMLookUpTable'])
        awk_file = checkarg(awk_file, defaultconfig['awk_file'])
        return reg_template, reg_template_withskull, lh_atlas, rh_atlas, \
            lh_classifier1, rh_classifier1, lh_classifier2, rh_classifier2, \
            lh_classifier3, rh_classifier3, src_subject_id, src_subject_dir, \
            color_table, lookup_table, wm_lookup_table, awk_file

    # list of params to check
    params = [
        'reg_template', 'reg_template_withskull', 'lh_atlas', 'rh_atlas',
        'lh_classifier1', 'rh_classifier1', 'lh_classifier2', 'rh_classifier2',
        'lh_classifier3', 'rh_classifier3', 'src_subject_id',
        'src_subject_dir', 'color_table', 'lookup_table', 'wm_lookup_table',
        'awk_file'
    ]

    config_node = pe.Node(niu.Function(params + ['rb_date'], params,
                                       setconfig),
                          name="config")

    config_node.inputs.rb_date = rb_date

    for param in params:
        reconall.connect(inputspec, param, config_node, param)

    # create AutoRecon1
    ar1_wf, ar1_outputs = create_AutoRecon1(plugin_args=plugin_args,
                                            stop=stop,
                                            distance=distance,
                                            shrink=shrink,
                                            fsvernum=fsvernum)
    # connect inputs for AutoRecon1
    reconall.connect([
        (inputspec, ar1_wf, [('T1_files', 'inputspec.T1_files'),
                             ('T2_file', 'inputspec.T2_file'),
                             ('FLAIR_file', 'inputspec.FLAIR_file'),
                             ('num_threads', 'inputspec.num_threads'),
                             ('cw256', 'inputspec.cw256')]),
        (config_node, ar1_wf, [('reg_template_withskull',
                                'inputspec.reg_template_withskull'),
                               ('awk_file', 'inputspec.awk_file')])
    ])
    # create AutoRecon2
    ar2_wf, ar2_outputs = create_AutoRecon2(plugin_args=plugin_args,
                                            fsvernum=fsvernum,
                                            stop=stop,
                                            shrink=shrink,
                                            distance=distance)
    # connect inputs for AutoRecon2
    reconall.connect([
        (inputspec, ar2_wf, [('num_threads', 'inputspec.num_threads')]),
        (config_node, ar2_wf, [('reg_template_withskull',
                                'inputspec.reg_template_withskull'),
                               ('reg_template', 'inputspec.reg_template')]),
        (ar1_wf, ar2_wf, [('outputspec.brainmask', 'inputspec.brainmask'),
                          ('outputspec.talairach', 'inputspec.transform'),
                          ('outputspec.orig', 'inputspec.orig')])
    ])

    if fsvernum < 6:
        reconall.connect([(ar1_wf, ar2_wf, [('outputspec.nu', 'inputspec.nu')])
                          ])

    # create AutoRecon3
    ar3_wf, ar3_outputs = create_AutoRecon3(plugin_args=plugin_args,
                                            th3=th3,
                                            exvivo=exvivo,
                                            entorhinal=entorhinal,
                                            fsvernum=fsvernum)
    # connect inputs for AutoRecon3
    reconall.connect([
        (config_node,
         ar3_wf, [('lh_atlas', 'inputspec.lh_atlas'),
                  ('rh_atlas', 'inputspec.rh_atlas'),
                  ('lh_classifier1', 'inputspec.lh_classifier1'),
                  ('rh_classifier1', 'inputspec.rh_classifier1'),
                  ('lh_classifier2', 'inputspec.lh_classifier2'),
                  ('rh_classifier2', 'inputspec.rh_classifier2'),
                  ('lh_classifier3', 'inputspec.lh_classifier3'),
                  ('rh_classifier3', 'inputspec.rh_classifier3'),
                  ('lookup_table', 'inputspec.lookup_table'),
                  ('wm_lookup_table', 'inputspec.wm_lookup_table'),
                  ('src_subject_dir', 'inputspec.src_subject_dir'),
                  ('src_subject_id', 'inputspec.src_subject_id'),
                  ('color_table', 'inputspec.color_table')]),
        (ar1_wf, ar3_wf, [('outputspec.brainmask', 'inputspec.brainmask'),
                          ('outputspec.talairach', 'inputspec.transform'),
                          ('outputspec.orig', 'inputspec.orig_mgz'),
                          ('outputspec.rawavg', 'inputspec.rawavg')]),
        (ar2_wf, ar3_wf,
         [('outputspec.aseg_presurf', 'inputspec.aseg_presurf'),
          ('outputspec.brain_finalsurfs', 'inputspec.brain_finalsurfs'),
          ('outputspec.wm', 'inputspec.wm'),
          ('outputspec.filled', 'inputspec.filled'),
          ('outputspec.norm', 'inputspec.norm')])
    ])
    for hemi in ('lh', 'rh'):
        reconall.connect([(ar2_wf, ar3_wf,
                           [('outputspec.{0}_inflated'.format(hemi),
                             'inputspec.{0}_inflated'.format(hemi)),
                            ('outputspec.{0}_smoothwm'.format(hemi),
                             'inputspec.{0}_smoothwm'.format(hemi)),
                            ('outputspec.{0}_white'.format(hemi),
                             'inputspec.{0}_white'.format(hemi)),
                            ('outputspec.{0}_cortex'.format(hemi),
                             'inputspec.{0}_cortex_label'.format(hemi)),
                            ('outputspec.{0}_area'.format(hemi),
                             'inputspec.{0}_area'.format(hemi)),
                            ('outputspec.{0}_curv'.format(hemi),
                             'inputspec.{0}_curv'.format(hemi)),
                            ('outputspec.{0}_sulc'.format(hemi),
                             'inputspec.{0}_sulc'.format(hemi)),
                            ('outputspec.{0}_orig_nofix'.format(hemi),
                             'inputspec.{0}_orig_nofix'.format(hemi)),
                            ('outputspec.{0}_orig'.format(hemi),
                             'inputspec.{0}_orig'.format(hemi)),
                            ('outputspec.{0}_white_H'.format(hemi),
                             'inputspec.{0}_white_H'.format(hemi)),
                            ('outputspec.{0}_white_K'.format(hemi),
                             'inputspec.{0}_white_K'.format(hemi))])])

    # Add more outputs to outputspec
    outputs = ar1_outputs + ar2_outputs + ar3_outputs
    outputspec = pe.Node(niu.IdentityInterface(fields=outputs,
                                               mandatory_inputs=True),
                         name="outputspec")

    for outfields, wf in [(ar1_outputs, ar1_wf), (ar2_outputs, ar2_wf),
                          (ar3_outputs, ar3_wf)]:
        for field in outfields:
            reconall.connect([(wf, outputspec, [('outputspec.' + field, field)
                                                ])])

    # PreDataSink: switch transforms to the datasinked transform
    # The transforms in the header files of orig.mgz, orig_nu.mgz, and nu.mgz
    # all reference a transform in the cache directory. We need to rewrite the
    # headers to reference the datasinked transform

    # get the filepath to where the transform will be datasinked
    def getDSTransformPath(subjects_dir, subject_id):
        import os
        transform = os.path.join(subjects_dir, subject_id, 'mri', 'transforms',
                                 'talairach.xfm')
        return transform

    dstransform = pe.Node(niu.Function(['subjects_dir', 'subject_id'],
                                       ['transform'], getDSTransformPath),
                          name="PreDataSink_GetTransformPath")
    reconall.connect([(inputspec, dstransform,
                       [('subjects_dir', 'subjects_dir'),
                        ('subject_id', 'subject_id')])])
    # add the datasinked transform location to the headers
    predatasink_orig = pe.Node(AddXFormToHeader(), name="PreDataSink_Orig")
    predatasink_orig.inputs.copy_name = True
    predatasink_orig.inputs.out_file = 'orig.mgz'
    reconall.connect([(outputspec, predatasink_orig, [('orig', 'in_file')]),
                      (dstransform, predatasink_orig, [('transform',
                                                        'transform')])])
    predatasink_orig_nu = pe.Node(AddXFormToHeader(),
                                  name="PreDataSink_Orig_Nu")
    predatasink_orig_nu.inputs.copy_name = True
    predatasink_orig_nu.inputs.out_file = 'orig_nu.mgz'
    reconall.connect([
        (outputspec, predatasink_orig_nu, [('orig_nu', 'in_file')]),
        (dstransform, predatasink_orig_nu, [('transform', 'transform')])
    ])
    predatasink_nu = pe.Node(AddXFormToHeader(), name="PreDataSink_Nu")
    predatasink_nu.inputs.copy_name = True
    predatasink_nu.inputs.out_file = 'nu.mgz'
    reconall.connect([(outputspec, predatasink_nu, [('nu', 'in_file')]),
                      (dstransform, predatasink_nu,
                       [('transform', 'transform')])])

    # Datasink outputs
    datasink = pe.Node(DataSink(), name="DataSink")
    datasink.inputs.parameterization = False

    reconall.connect([(inputspec, datasink, [('subjects_dir',
                                              'base_directory'),
                                             ('subject_id', 'container')])])

    # assign datasink inputs
    reconall.connect([
        (predatasink_orig, datasink, [('out_file', 'mri.@orig')]),
        (predatasink_orig_nu, datasink, [('out_file', 'mri.@orig_nu')]),
        (predatasink_nu, datasink, [('out_file', 'mri.@nu')]),
        (outputspec, datasink, [
            ('origvols', 'mri.orig'),
            ('t2_raw', 'mri.orig.@t2raw'),
            ('flair', 'mri.orig.@flair'),
            ('rawavg', 'mri.@rawavg'),
            ('talairach_auto', 'mri.transforms.@tal_auto'),
            ('talairach', 'mri.transforms.@tal'),
            ('t1', 'mri.@t1'),
            ('brainmask_auto', 'mri.@brainmask_auto'),
            ('brainmask', 'mri.@brainmask'),
            ('braintemplate', 'mri.@braintemplate'),
            ('tal_lta', 'mri.transforms.@tal_lta'),
            ('norm', 'mri.@norm'),
            ('ctrl_pts', 'mri.@ctrl_pts'),
            ('tal_m3z', 'mri.transforms.@tal_m3z'),
            ('nu_noneck', 'mri.@nu_noneck'),
            ('talskull2', 'mri.transforms.@talskull2'),
            ('aseg_noCC', 'mri.@aseg_noCC'),
            ('cc_up', 'mri.transforms.@cc_up'),
            ('aseg_auto', 'mri.@aseg_auto'),
            ('aseg_presurf', 'mri.@aseg_presurf'),
            ('brain', 'mri.@brain'),
            ('brain_finalsurfs', 'mri.@brain_finalsurfs'),
            ('wm_seg', 'mri.@wm_seg'),
            ('wm_aseg', 'mri.@wm_aseg'),
            ('wm', 'mri.@wm'),
            ('filled', 'mri.@filled'),
            ('ponscc_log', 'mri.@ponscc_log'),
            ('lh_orig_nofix', 'surf.@lh_orig_nofix'),
            ('lh_orig', 'surf.@lh_orig'),
            ('lh_smoothwm_nofix', 'surf.@lh_smoothwm_nofix'),
            ('lh_inflated_nofix', 'surf.@lh_inflated_nofix'),
            ('lh_qsphere_nofix', 'surf.@lh_qsphere_nofix'),
            ('lh_white', 'surf.@lh_white'),
            ('lh_curv', 'surf.@lh_curv'),
            ('lh_area', 'surf.@lh_area'),
            ('lh_cortex', 'label.@lh_cortex'),
            ('lh_smoothwm', 'surf.@lh_smoothwm'),
            ('lh_sulc', 'surf.@lh_sulc'),
            ('lh_inflated', 'surf.@lh_inflated'),
            ('lh_white_H', 'surf.@lh_white_H'),
            ('lh_white_K', 'surf.@lh_white_K'),
            ('lh_inflated_H', 'surf.@lh_inflated_H'),
            ('lh_inflated_K', 'surf.@lh_inflated_K'),
            ('lh_curv_stats', 'stats.@lh_curv_stats'),
            ('rh_orig_nofix', 'surf.@rh_orig_nofix'),
            ('rh_orig', 'surf.@rh_orig'),
            ('rh_smoothwm_nofix', 'surf.@rh_smoothwm_nofix'),
            ('rh_inflated_nofix', 'surf.@rh_inflated_nofix'),
            ('rh_qsphere_nofix', 'surf.@rh_qsphere_nofix'),
            ('rh_white', 'surf.@rh_white'),
            ('rh_curv', 'surf.@rh_curv'),
            ('rh_area', 'surf.@rh_area'),
            ('rh_cortex', 'label.@rh_cortex'),
            ('rh_smoothwm', 'surf.@rh_smoothwm'),
            ('rh_sulc', 'surf.@rh_sulc'),
            ('rh_inflated', 'surf.@rh_inflated'),
            ('rh_white_H', 'surf.@rh_white_H'),
            ('rh_white_K', 'surf.@rh_white_K'),
            ('rh_inflated_H', 'surf.@rh_inflated_H'),
            ('rh_inflated_K', 'surf.@rh_inflated_K'),
            ('rh_curv_stats', 'stats.@rh_curv_stats'),
            ('aseg', 'mri.@aseg'),
            ('wmparc', 'mri.@wmparc'),
            ('wmparc_stats', 'stats.@wmparc_stats'),
            ('aseg_stats', 'stats.@aseg_stats'),
            ('aparc_a2009s_aseg', 'mri.@aparc_a2009s_aseg'),
            ('aparc_aseg', 'mri.@aparc_aseg'),
            ('aseg_presurf_hypos', 'mri.@aseg_presurf_hypos'),
            ('ribbon', 'mri.@ribbon'),
            ('rh_ribbon', 'mri.@rh_ribbon'),
            ('lh_ribbon', 'mri.@lh_ribbon'),
            ('lh_sphere', 'surf.@lh_sphere'),
            ('rh_sphere', 'surf.@rh_sphere'),
            ('lh_sphere_reg', 'surf.@lh_sphere_reg'),
            ('rh_sphere_reg', 'surf.@rh_sphere_reg'),
            ('lh_jacobian_white', 'surf.@lh_jacobian_white'),
            ('rh_jacobian_white', 'surf.@rh_jacobian_white'),
            ('lh_avg_curv', 'surf.@lh_avg_curv'),
            ('rh_avg_curv', 'surf.@rh_avg_curv'),
            ('lh_aparc_annot', 'label.@lh_aparc_annot'),
            ('rh_aparc_annot', 'label.@rh_aparc_annot'),
            ('lh_area_pial', 'surf.@lh_area_pial'),
            ('rh_area_pial', 'surf.@rh_area_pial'),
            ('lh_curv_pial', 'surf.@lh_curv_pial'),
            ('rh_curv_pial', 'surf.@rh_curv_pial'),
            ('lh_pial', 'surf.@lh_pial'),
            ('rh_pial', 'surf.@rh_pial'),
            ('lh_thickness_pial', 'surf.@lh_thickness_pial'),
            ('rh_thickness_pial', 'surf.@rh_thickness_pial'),
            ('lh_area_mid', 'surf.@lh_area_mid'),
            ('rh_area_mid', 'surf.@rh_area_mid'),
            ('lh_volume', 'surf.@lh_volume'),
            ('rh_volume', 'surf.@rh_volume'),
            ('lh_aparc_annot_ctab', 'label.@lh_aparc_annot_ctab'),
            ('rh_aparc_annot_ctab', 'label.@rh_aparc_annot_ctab'),
            ('lh_aparc_stats', 'stats.@lh_aparc_stats'),
            ('rh_aparc_stats', 'stats.@rh_aparc_stats'),
            ('lh_aparc_pial_stats', 'stats.@lh_aparc_pial_stats'),
            ('rh_aparc_pial_stats', 'stats.@rh_aparc_pial_stats'),
            ('lh_aparc_a2009s_annot', 'label.@lh_aparc_a2009s_annot'),
            ('rh_aparc_a2009s_annot', 'label.@rh_aparc_a2009s_annot'),
            ('lh_aparc_a2009s_annot_ctab',
             'label.@lh_aparc_a2009s_annot_ctab'),
            ('rh_aparc_a2009s_annot_ctab',
             'label.@rh_aparc_a2009s_annot_ctab'),
            ('lh_aparc_a2009s_annot_stats',
             'stats.@lh_aparc_a2009s_annot_stats'),
            ('rh_aparc_a2009s_annot_stats',
             'stats.@rh_aparc_a2009s_annot_stats'),
            ('lh_aparc_DKTatlas40_annot', 'label.@lh_aparc_DKTatlas40_annot'),
            ('rh_aparc_DKTatlas40_annot', 'label.@rh_aparc_DKTatlas40_annot'),
            ('lh_aparc_DKTatlas40_annot_ctab',
             'label.@lh_aparc_DKTatlas40_annot_ctab'),
            ('rh_aparc_DKTatlas40_annot_ctab',
             'label.@rh_aparc_DKTatlas40_annot_ctab'),
            ('lh_aparc_DKTatlas40_annot_stats',
             'stats.@lh_aparc_DKTatlas40_annot_stats'),
            ('rh_aparc_DKTatlas40_annot_stats',
             'stats.@rh_aparc_DKTatlas40_annot_stats'),
            ('lh_wg_pct_mgh', 'surf.@lh_wg_pct_mgh'),
            ('rh_wg_pct_mgh', 'surf.@rh_wg_pct_mgh'),
            ('lh_wg_pct_stats', 'stats.@lh_wg_pct_stats'),
            ('rh_wg_pct_stats', 'stats.@rh_wg_pct_stats'),
            ('lh_pctsurfcon_log', 'log.@lh_pctsurfcon_log'),
            ('rh_pctsurfcon_log', 'log.@rh_pctsurfcon_log'),
            ('lh_BAMaps_stats', 'stats.@lh_BAMaps_stats'),
            ('lh_color', 'label.@lh_color'),
            ('lh_thresh_BAMaps_stats', 'stats.@lh_thresh_BAMaps_stats'),
            ('lh_thresh_color', 'label.@lh_thresh_color'),
            ('rh_BAMaps_stats', 'stats.@rh_BAMaps_stats'),
            ('rh_color', 'label.@rh_color'),
            ('rh_thresh_BAMaps_stats', 'stats.@rh_thresh_BAMaps_stats'),
            ('rh_thresh_color', 'label.@rh_thresh_color'),
            ('lh_BAMaps_labels', 'label.@lh_BAMaps_labels'),
            ('lh_thresh_BAMaps_labels', 'label.@lh_thresh_BAMaps_labels'),
            ('rh_BAMaps_labels', 'label.@rh_BAMaps_labels'),
            ('rh_thresh_BAMaps_labels', 'label.@rh_thresh_BAMaps_labels'),
            ('lh_BAMaps_annotation', 'label.@lh_BAMaps_annotation'),
            ('lh_thresh_BAMaps_annotation',
             'label.@lh_thresh_BAMaps_annotation'),
            ('rh_BAMaps_annotation', 'label.@rh_BAMaps_annotation'),
            ('rh_thresh_BAMaps_annotation',
             'label.@rh_thresh_BAMaps_annotation'),
        ]),
    ])

    # completion node
    # Since recon-all outputs so many files, a completion node is added
    # that will output the subject_id once the workflow has completed.
    def completemethod(datasinked_files, subject_id):
        print("recon-all has finished executing for subject: {0}".format(
            subject_id))
        return subject_id

    completion = pe.Node(niu.Function(['datasinked_files', 'subject_id'],
                                      ['subject_id'], completemethod),
                         name="Completion")

    # create a special identity interface for outputting the subject_id

    postds_outputspec = pe.Node(niu.IdentityInterface(['subject_id']),
                                name="postdatasink_outputspec")

    reconall.connect([
        (datasink, completion, [('out_file', 'datasinked_files')]),
        (inputspec, completion, [('subject_id', 'subject_id')]),
        (completion, postds_outputspec, [('subject_id', 'subject_id')])
    ])

    return reconall
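
A minimal usage sketch for the workflow returned above (illustrative only: create_reconall is a hypothetical stand-in for the enclosing factory, whose signature is not shown here, and the paths are made up; only the inputspec fields come from the code above):

    reconall = create_reconall()  # hypothetical factory name
    reconall.inputs.inputspec.subjects_dir = '/data/subjects'  # assumed path
    reconall.inputs.inputspec.subject_id = 'sub-01'            # assumed ID
    reconall.run(plugin='MultiProc', plugin_args={'n_procs': 4})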
Example #23
def dwi_flirt(name='DWICoregistration', excl_nodiff=False, flirt_param={}):
    """
    Generates a workflow for linear registration of dwi volumes using flirt.

    Inputnode
    ---------
    reference : FILE
      Mandatory input. Reference data set.
    in_file : FILE
      Mandatory input. Moving data set.
    ref_mask : FILE
      Mandatory input. Binary mask of the reference volume.
    in_xfms : FILE
      Mandatory input. Initialisation matrices for flirt.
    in_bval : FILE
      Mandatory input. B values file.

    """
    import nipype.interfaces.ants as ants
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe

    from nipype.workflows.dmri.fsl.utils import _checkinitxfm

    from nipype.workflows.dmri.fsl.utils import enhance

    inputnode = pe.Node(
            niu.IdentityInterface(
                fields=['reference',
                        'in_file',
                        'ref_mask',
                        'in_xfms',
                        'in_bval']),
            name='inputnode')

    initmat = pe.Node(
            niu.Function(
                input_names=['in_bval',
                             'in_xfms',
                             'excl_nodiff'],
                output_names=['init_xfms'],
                function=_checkinitxfm),
            name='InitXforms')
    initmat.inputs.excl_nodiff = excl_nodiff
    dilate = pe.Node(
            fsl.maths.MathsCommand(
                nan2zeros=True,
                args='-kernel sphere 5 -dilM'),
            name='MskDilate')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias')
    flirt = pe.MapNode(fsl.FLIRT(**flirt_param), name='CoRegistration',
                       iterfield=['in_file', 'in_matrix_file'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')
    outputnode = pe.Node(
            niu.IdentityInterface(
                fields=['out_file',
                        'out_xfms',
                        'out_ref']),
            name='outputnode')
    enhb0 = pe.Node(niu.Function(
        input_names=['in_file', 'in_mask', 'clip_limit'],
        output_names=['out_file'], function=enhance), name='B0Equalize')
    enhb0.inputs.clip_limit = 0.015
    enhdw = pe.MapNode(niu.Function(
        input_names=['in_file', 'in_mask'], output_names=['out_file'],
        function=enhance), name='DWEqualize', iterfield=['in_file'])

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, split, [('in_file', 'in_file')]),
        (inputnode, dilate, [('ref_mask', 'in_file')]),
        (inputnode, n4, [('reference', 'input_image'),
                         ('ref_mask', 'mask_image')]),
        # (inputnode, flirt, [('ref_mask', 'reference')]),
        (n4, enhb0, [('output_image', 'in_file')]),
        (enhb0, flirt, [('out_file', 'reference')]),
        (inputnode, initmat, [('in_xfms', 'in_xfms'),
                              ('in_bval', 'in_bval')]),
        (split, enhdw, [('out_files', 'in_file')]),
        (dilate, enhdw, [('out_file', 'in_mask')]),
        (dilate, flirt, [('out_file', 'ref_weight'),
                         ('out_file', 'in_weight')]),
        (enhdw, flirt, [('out_file', 'in_file')]),
        (initmat, flirt, [('init_xfms', 'in_matrix_file')]),
        (flirt, thres, [('out_file', 'in_file')]),
        (thres, merge, [('out_file', 'in_files')]),
        (merge, outputnode, [('merged_file', 'out_file')]),
        (enhb0, outputnode, [('out_file', 'out_ref')]),
        (flirt, outputnode, [('out_matrix_file', 'out_xfms')])
    ])
    return wf
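
A minimal usage sketch (illustrative): the import path mirrors the one used in Example #25 below, while the file names and FLIRT parameters are hypothetical.

    from clinica.workflows.dwi_preprocessing import dwi_flirt

    flirt_wf = dwi_flirt(flirt_param={'dof': 6, 'interp': 'spline'})
    flirt_wf.inputs.inputnode.reference = 'b0.nii.gz'      # hypothetical
    flirt_wf.inputs.inputnode.in_file = 'dwi.nii.gz'       # hypothetical
    flirt_wf.inputs.inputnode.ref_mask = 'b0_mask.nii.gz'  # hypothetical
    flirt_wf.inputs.inputnode.in_bval = 'dwi.bval'         # hypothetical
    # inputnode.in_xfms can be seeded with matrices from a prior
    # head-motion correction step (see ecc_pipeline below)
    flirt_wf.run()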
Example #24
def calc_local_metrics(preprocessed_data_dir,
                       subject_id,
                       parcellations_dict,
                       bp_freq_list,
                       fd_thresh,
                       working_dir,
                       ds_dir,
                       use_n_procs,
                       plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    import utils as calc_metrics_utils

    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    wf = Workflow(name='LeiCA_LIFE_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 15})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.regexp_substitutions = [('MNI_resampled_brain_mask_calc.nii.gz', 'falff.nii.gz'),
                                      ('residual_filtered_3dT.nii.gz', 'alff.nii.gz'),
                                      ('_parcellation_', ''),
                                      ('_bp_freqs_', 'bp_'),
                                      ]

    #####################
    # ITERATORS
    #####################
    # PARCELLATION ITERATOR
    parcellation_infosource = Node(util.IdentityInterface(fields=['parcellation']), name='parcellation_infosource')
    # iterables must be a list (dict.keys() is a view in Python 3)
    parcellation_infosource.iterables = ('parcellation', list(parcellations_dict.keys()))

    bp_filter_infosource = Node(util.IdentityInterface(fields=['bp_freqs']), name='bp_filter_infosource')
    bp_filter_infosource.iterables = ('bp_freqs', bp_freq_list)

    selectfiles = Node(nio.SelectFiles(
        {
            'parcellation_time_series': '{subject_id}/con_mat/parcellated_time_series/bp_{bp_freqs}/{parcellation}/parcellation_time_series.npy'},
        base_directory=preprocessed_data_dir),
        name='selectfiles')
    selectfiles.inputs.subject_id = subject_id
    wf.connect(parcellation_infosource, 'parcellation', selectfiles, 'parcellation')
    wf.connect(bp_filter_infosource, 'bp_freqs', selectfiles, 'bp_freqs')

    fd_file = Node(nio.SelectFiles({'fd_p': '{subject_id}/QC/FD_P_ts'}, base_directory=preprocessed_data_dir),
                   name='fd_file')
    fd_file.inputs.subject_id = subject_id

    ##############
    ## CON MATS
    ##############
    ##############
    ## extract ts
    ##############

    get_good_trs = Node(util.Function(input_names=['fd_file', 'fd_thresh'],
                                      output_names=['good_trs', 'fd_scrubbed_file'],
                                      function=calc_metrics_utils.get_good_trs),
                        name='get_good_trs')
    wf.connect(fd_file, 'fd_p', get_good_trs, 'fd_file')
    get_good_trs.inputs.fd_thresh = fd_thresh

    parcellated_ts_scrubbed = Node(util.Function(input_names=['parcellation_time_series_file', 'good_trs'],
                                                 output_names=['parcellation_time_series_scrubbed'],
                                                 function=calc_metrics_utils.parcellation_time_series_scrubbing),
                                   name='parcellated_ts_scrubbed')

    wf.connect(selectfiles, 'parcellation_time_series', parcellated_ts_scrubbed, 'parcellation_time_series_file')
    wf.connect(get_good_trs, 'good_trs', parcellated_ts_scrubbed, 'good_trs')

    ##############
    ## get conmat
    ##############
    con_mat = Node(util.Function(input_names=['in_data', 'extraction_method'],
                                 output_names=['matrix', 'matrix_file'],
                                 function=calc_metrics_utils.calculate_connectivity_matrix),
                   name='con_mat')
    con_mat.inputs.extraction_method = 'correlation'
    wf.connect(parcellated_ts_scrubbed, 'parcellation_time_series_scrubbed', con_mat, 'in_data')


    ##############
    ## ds
    ##############

    wf.connect(get_good_trs, 'fd_scrubbed_file', ds, 'QC.@fd_scrubbed_file')
    fd_str = ('%.1f' % fd_thresh).replace('.', '_')
    wf.connect(con_mat, 'matrix_file', ds, 'con_mat.matrix_scrubbed_%s.@mat' % fd_str)

    # wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    # wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    # wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name, plugin_args={'initial_specs': 'request_memory = 1500'})
    elif plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
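
A minimal invocation sketch (illustrative; every path and parameter value below is hypothetical):

    calc_local_metrics(
        preprocessed_data_dir='/data/preprocessed',
        subject_id='sub-01',
        parcellations_dict={'craddock_200': '/data/parc/craddock_200.nii.gz'},
        bp_freq_list=[(0.01, 0.1)],
        fd_thresh=0.5,
        working_dir='/scratch/wd',
        ds_dir='/data/metrics',
        use_n_procs=4,
        plugin_name='MultiProc')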
Example #25
def ecc_pipeline(name='eddy_correct'):
    """
    ECC stands for Eddy currents correction.
    Creates a pipeline that corrects for artifacts induced by eddy currents in
    dMRI sequences.
    It takes a series of diffusion weighted images and linearly co-registers
    them to one reference image (the average of all b0s in the dataset).
    DWIs are also modulated by the determinant of the Jacobian as indicated by
    [Jones10]_ and [Rohde04]_.
    A list of rigid transformation matrices can be provided, sourcing from a
    :func:`.hmc_pipeline` workflow, to initialize registrations in a *motion
    free* framework.
    A list of affine transformation matrices is available as output, so that
    transforms can be chained (discussion
    `here <https://github.com/nipy/nipype/pull/530#issuecomment-14505042>`_).
    .. admonition:: References
      .. [Jones10] Jones DK, `The signal intensity must be modulated by the
        determinant of the Jacobian when correcting for eddy currents in
        diffusion MRI
        <http://cds.ismrm.org/protected/10MProceedings/files/1644_129.pdf>`_,
        Proc. ISMRM 18th Annual Meeting, (2010).
      .. [Rohde04] Rohde et al., `Comprehensive Approach for Correction of
        Motion and Distortion in Diffusion-Weighted MRI
        <http://stbb.nichd.nih.gov/pdf/com_app_cor_mri04.pdf>`_, MRM
        51:103-114 (2004).
    Example
    -------
    >>> from nipype.workflows.dmri.fsl.artifacts import ecc_pipeline
    >>> ecc = ecc_pipeline()
    >>> ecc.inputs.inputnode.in_file = 'diffusion.nii'
    >>> ecc.inputs.inputnode.in_bval = 'diffusion.bval'
    >>> ecc.inputs.inputnode.in_mask = 'mask.nii'
    >>> ecc.run() # doctest: +SKIP
    Inputs::
        inputnode.in_file - input dwi file
        inputnode.in_mask - weights mask of reference image (a file with data \
range in [0.0, 1.0], indicating the weight of each voxel when computing the \
metric)
        inputnode.in_bval - b-values table
        inputnode.in_xfms - list of matrices to initialize registration (from \
head-motion correction)
    Outputs::
        outputnode.out_file - corrected dwi file
        outputnode.out_xfms - list of transformation matrices
    """

    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe
    import nipype.interfaces.fsl as fsl

    from nipype.workflows.data import get_flirt_schedule
    from nipype.workflows.dmri.fsl.utils import extract_bval
    from nipype.workflows.dmri.fsl.utils import recompose_xfm
    from nipype.workflows.dmri.fsl.utils import recompose_dwi
    from nipype.workflows.dmri.fsl.artifacts import _xfm_jacobian

    from clinica.workflows.dwi_preprocessing import dwi_flirt
    from clinica.utils.dwi import merge_volumes_tdim

    params = dict(dof=12, no_search=True, interp='spline', bgvalue=0,
                  schedule=get_flirt_schedule('ecc'))

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'in_bval', 'in_mask', 'in_xfms']), name='inputnode')

    getb0 = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='get_b0')

    pick_dws = pe.Node(niu.Function(
        input_names=['in_dwi', 'in_bval', 'b'], output_names=['out_file'],
        function=extract_bval), name='extract_dwi')
    pick_dws.inputs.b = 'diff'

    flirt = dwi_flirt(flirt_param=params, excl_nodiff=True)

    mult = pe.MapNode(fsl.BinaryMaths(operation='mul'), name='ModulateDWIs',
                      iterfield=['in_file', 'operand_value'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    get_mat = pe.Node(niu.Function(
        input_names=['in_bval', 'in_xfms'], output_names=['out_files'],
        function=recompose_xfm), name='GatherMatrices')
    merge = pe.Node(niu.Function(
        input_names=['in_dwi', 'in_bval', 'in_corrected'],
        output_names=['out_file'], function=recompose_dwi), name='MergeDWIs')

    merged_volumes = pe.Node(niu.Function(
        input_names=['in_file1', 'in_file2'],
        output_names=['out_file'],
        function=merge_volumes_tdim), name='merge_enhanced_ref_dwis')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_xfms']), name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, getb0, [('in_file', 'in_file')]),
        (inputnode, pick_dws, [('in_file', 'in_dwi'),
                               ('in_bval', 'in_bval')]),
        (inputnode, flirt, [('in_mask', 'inputnode.ref_mask'),
                            ('in_xfms', 'inputnode.in_xfms'),
                            ('in_bval', 'inputnode.in_bval')]),
        (inputnode, get_mat, [('in_bval', 'in_bval')]),
        (inputnode, merge, [('in_bval', 'in_bval')]),
        (getb0, flirt, [('roi_file', 'inputnode.reference')]),
        (pick_dws, flirt, [('out_file', 'inputnode.in_file')]),
        (flirt, get_mat, [('outputnode.out_xfms', 'in_xfms')]),
        (flirt, mult, [(('outputnode.out_xfms', _xfm_jacobian),
                        'operand_value')]),
        (flirt, split, [('outputnode.out_file', 'in_file')]),
        (flirt, merged_volumes, [('outputnode.out_ref', 'in_file1'),
                                 ('outputnode.out_file', 'in_file2')]),
        (merged_volumes, merge, [('out_file', 'in_dwi')]),
        (split, mult, [('out_files', 'in_file')]),
        (mult, thres, [('out_file', 'in_file')]),
        (thres, merge, [('out_file', 'in_corrected')]),
        (get_mat, outputnode, [('out_files', 'out_xfms')]),
        (merge, outputnode, [('out_file', 'out_file')])
    ])
    return wf
Example #26
def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp',
                                  mult_input=0,
                                  num_threads=1):
    '''
    Calculates the nonlinear ANTS registration transform. This workflow
    employs the antsRegistration tool:

    http://stnava.github.io/ANTs/


    Parameters
    ----------
    name : string, optional
        Name of the workflow.
    mult_input : integer, optional
        When set to 1, both the skull-stripped and whole-head anatomical and
        reference images are passed to the registration; otherwise the
        skull-stripped image is used for both (default: 0).
    num_threads : integer, optional
        Number of threads used by the registration node (default: 1).

    Returns
    -------
    calc_ants_warp_wf : nipype.pipeline.engine.Workflow

    Notes
    -----

    Some of the inputs listed below are lists or lists of lists. This is
    because antsRegistration can perform multiple stages of calculations
    depending on how the user configures their registration.

    For example, if one wants to employ a different transform (with different
    parameters) at each stage, the lists would be configured like this:

    warp_wf.inputs.inputspec.transforms = ['Rigid','Affine','SyN']
    warp_wf.inputs.inputspec.transform_parameters = [[0.1],[0.1],[0.1,3,0]]

    ...where each element in the first list is a transform to be used at each
    stage, 'Rigid' being for stage 1, 'Affine' for stage 2, etc. The lists
    within the list for transform_parameters would then correspond to each
    stage's transform, with [0.1] applying to 'Rigid' and 'Affine' (stages 1
    and 2), and [0.1,3,0] applying to 'SyN' of stage 3.

    In some cases, when a parameter is not needed for a stage, 'None' must be
    entered in its place if there are other parameters for other stages.

    
    Workflow Inputs::
    
        inputspec.anatomical_brain : string (nifti file)
            File of brain to be normalized (registered)
        inputspec.reference_brain : string (nifti file)
            Target brain file to normalize to
        inputspec.dimension : integer
            Dimension of the image (default: 3)
        inputspec.use_histogram_matching : boolean
            Histogram match the images before registration
        inputspec.winsorize_lower_quantile : float
            Winsorize data based on quantiles (lower range)
        inputspec.winsorize_upper_quantile : float
            Winsorize data based on quantiles (upper range)
        inputspec.metric : list of strings
            Image metric(s) to be used at each stage
        inputspec.metric_weight : list of floats
            Modulate the per-stage weighting of the corresponding metric
        inputspec.radius_or_number_of_bins : list of integers
            Number of bins in each stage for the MI and Mattes metric, the
            radius for other metrics
        inputspec.sampling_strategy : list of strings
            Sampling strategy (or strategies) to use for the metrics
            {None, Regular, or Random}
        inputspec.sampling_percentage : list of floats
            Defines the sampling strategy
            {float value, or None}
        inputspec.number_of_iterations : list of lists of integers
            Determines the convergence
        inputspec.convergence_threshold : list of floats
            Threshold compared to the slope of the line fitted in convergence
        inputspec.convergence_window_size : list of integers
            Window size of convergence calculations
        inputspec.transforms : list of strings
            Selection of transform options. See antsRegistration documentation
            for a full list of options and their descriptions
        inputspec.transform_parameters : list of lists of floats
            Fine-tuning for the different transform options
        inputspec.shrink_factors : list of lists of integers
            Specify the shrink factor for the virtual domain (typically the
            fixed image) at each level
        inputspec.smoothing_sigmas : list of lists of floats
            Specify the sigma of gaussian smoothing at each level

    Workflow Outputs::
    
        outputspec.warp_field : string (nifti file)
            Output warp field of registration
        outputspec.inverse_warp_field : string (nifti file)
            Inverse of the warp field of the registration
        outputspec.ants_affine_xfm : string (.mat file)
            The affine matrix of the registration
        outputspec.ants_inverse_affine_xfm : string (.mat file)
            The affine matrix of the reverse registration
        outputspec.composite_transform : string (nifti file)
            The combined transform including the warp field and rigid & affine
            linear warps
        outputspec.normalized_output_brain : string (nifti file)
            Template-registered version of input brain
            
    Registration Procedure:
    
    1. Calculates a nonlinear anatomical-to-template registration.

    Workflow Graph:

        (workflow graph image not included here)

    Detailed Workflow Graph:

        (detailed workflow graph image not included here)
    '''

    import nipype.interfaces.ants as ants
    from nipype.interfaces.utility import Function
    from CPAC.registration.utils import seperate_warps_list, \
                                        combine_inputs_into_list, \
                                        hardcoded_reg

    calc_ants_warp_wf = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'anatomical_brain', 'reference_brain', 'dimension',
        'use_histogram_matching', 'winsorize_lower_quantile',
        'winsorize_upper_quantile', 'metric', 'metric_weight',
        'radius_or_number_of_bins', 'sampling_strategy', 'sampling_percentage',
        'number_of_iterations', 'convergence_threshold',
        'convergence_window_size', 'transforms', 'transform_parameters',
        'shrink_factors', 'smoothing_sigmas', 'write_composite_transform',
        'anatomical_skull', 'reference_skull'
    ]),
                        name='inputspec')

    # use ANTS to warp the masked anatomical image to a template image
    '''
    calculate_ants_warp = pe.Node(interface=ants.Registration(),
            name='calculate_ants_warp')

    calculate_ants_warp.inputs.output_warped_image = True
    calculate_ants_warp.inputs.initial_moving_transform_com = 0
    '''
    reg_imports = ['import os', 'import subprocess']
    calculate_ants_warp = \
        pe.Node(interface=util.Function(input_names=['anatomical_brain',
                                                     'reference_brain',
                                                     'anatomical_skull',
                                                     'reference_skull'],
                                        output_names=['warp_list',
                                                      'warped_image'],
                                        function=hardcoded_reg,
                                        imports=reg_imports),
                name='calc_ants_warp')
    calculate_ants_warp.interface.num_threads = num_threads

    select_forward_initial = pe.Node(util.Function(
        input_names=['warp_list', 'selection'],
        output_names=['selected_warp'],
        function=seperate_warps_list),
                                     name='select_forward_initial')

    select_forward_initial.inputs.selection = "Initial"

    select_forward_rigid = pe.Node(util.Function(
        input_names=['warp_list', 'selection'],
        output_names=['selected_warp'],
        function=seperate_warps_list),
                                   name='select_forward_rigid')

    select_forward_rigid.inputs.selection = "Rigid"

    select_forward_affine = pe.Node(util.Function(
        input_names=['warp_list', 'selection'],
        output_names=['selected_warp'],
        function=seperate_warps_list),
                                    name='select_forward_affine')

    select_forward_affine.inputs.selection = "Affine"

    select_forward_warp = pe.Node(util.Function(
        input_names=['warp_list', 'selection'],
        output_names=['selected_warp'],
        function=seperate_warps_list),
                                  name='select_forward_warp')

    select_forward_warp.inputs.selection = "3Warp"

    select_inverse_warp = pe.Node(util.Function(
        input_names=['warp_list', 'selection'],
        output_names=['selected_warp'],
        function=seperate_warps_list),
                                  name='select_inverse_warp')

    select_inverse_warp.inputs.selection = "Inverse"

    outputspec = pe.Node(util.IdentityInterface(fields=[
        'ants_initial_xfm', 'ants_rigid_xfm', 'ants_affine_xfm', 'warp_field',
        'inverse_warp_field', 'composite_transform', 'wait',
        'normalized_output_brain'
    ]),
                         name='outputspec')

    # connections from inputspec
    if mult_input == 1:
        '''
        combine_inputs = pe.Node(util.Function(input_names=['input1', 'input2', 'input3'],
                output_names=['inputs_list'], function=combine_inputs_into_list),
                name='ants_reg_combine_inputs')

        combine_refs = pe.Node(util.Function(input_names=['input1', 'input2', 'input3'],
                output_names=['inputs_list'], function=combine_inputs_into_list),
                name='ants_reg_combine_refs')
        '''

        calc_ants_warp_wf.connect(inputspec, 'anatomical_brain',
                                  calculate_ants_warp, 'anatomical_brain')

        calc_ants_warp_wf.connect(inputspec, 'anatomical_skull',
                                  calculate_ants_warp, 'anatomical_skull')

        calc_ants_warp_wf.connect(inputspec, 'reference_brain',
                                  calculate_ants_warp, 'reference_brain')

        calc_ants_warp_wf.connect(inputspec, 'reference_skull',
                                  calculate_ants_warp, 'reference_skull')
        '''
        calc_ants_warp_wf.connect(inputspec, 'anatomical_brain',
                combine_inputs, 'input1')

        calc_ants_warp_wf.connect(inputspec, 'anatomical_brain',
                combine_inputs, 'input2')

        calc_ants_warp_wf.connect(inputspec, 'anatomical_skull',
                combine_inputs, 'input3')

        calc_ants_warp_wf.connect(combine_inputs, 'inputs_list',
                calculate_ants_warp, 'moving_image')

        calc_ants_warp_wf.connect(inputspec, 'reference_brain',
                combine_refs, 'input1')

        calc_ants_warp_wf.connect(inputspec, 'reference_brain',
                combine_refs, 'input2')

        calc_ants_warp_wf.connect(inputspec, 'reference_skull',
                combine_refs, 'input3')

        calc_ants_warp_wf.connect(combine_refs, 'inputs_list',
                calculate_ants_warp, 'fixed_image') 
        '''

    else:
        '''
        calc_ants_warp_wf.connect(inputspec, 'anatomical_brain',
                calculate_ants_warp, 'moving_image')

        calc_ants_warp_wf.connect(inputspec, 'reference_brain',
                calculate_ants_warp, 'fixed_image')
        '''

        calc_ants_warp_wf.connect(inputspec, 'anatomical_brain',
                                  calculate_ants_warp, 'anatomical_brain')

        calc_ants_warp_wf.connect(inputspec, 'anatomical_brain',
                                  calculate_ants_warp, 'anatomical_skull')

        calc_ants_warp_wf.connect(inputspec, 'reference_brain',
                                  calculate_ants_warp, 'reference_brain')

        calc_ants_warp_wf.connect(inputspec, 'reference_brain',
                                  calculate_ants_warp, 'reference_skull')

    calc_ants_warp_wf.connect(inputspec, 'dimension', calculate_ants_warp,
                              'dimension')

    calc_ants_warp_wf.connect(inputspec, 'use_histogram_matching',
                              calculate_ants_warp, 'use_histogram_matching')

    calc_ants_warp_wf.connect(inputspec, 'winsorize_lower_quantile',
                              calculate_ants_warp, 'winsorize_lower_quantile')

    calc_ants_warp_wf.connect(inputspec, 'winsorize_upper_quantile',
                              calculate_ants_warp, 'winsorize_upper_quantile')

    calc_ants_warp_wf.connect(inputspec, 'metric', calculate_ants_warp,
                              'metric')

    calc_ants_warp_wf.connect(inputspec, 'metric_weight', calculate_ants_warp,
                              'metric_weight')

    calc_ants_warp_wf.connect(inputspec, 'radius_or_number_of_bins',
                              calculate_ants_warp, 'radius_or_number_of_bins')

    calc_ants_warp_wf.connect(inputspec, 'sampling_strategy',
                              calculate_ants_warp, 'sampling_strategy')

    calc_ants_warp_wf.connect(inputspec, 'sampling_percentage',
                              calculate_ants_warp, 'sampling_percentage')

    calc_ants_warp_wf.connect(inputspec, 'number_of_iterations',
                              calculate_ants_warp, 'number_of_iterations')

    calc_ants_warp_wf.connect(inputspec, 'convergence_threshold',
                              calculate_ants_warp, 'convergence_threshold')

    calc_ants_warp_wf.connect(inputspec, 'convergence_window_size',
                              calculate_ants_warp, 'convergence_window_size')

    calc_ants_warp_wf.connect(inputspec, 'transforms', calculate_ants_warp,
                              'transforms')

    calc_ants_warp_wf.connect(inputspec, 'transform_parameters',
                              calculate_ants_warp, 'transform_parameters')

    calc_ants_warp_wf.connect(inputspec, 'shrink_factors', calculate_ants_warp,
                              'shrink_factors')

    calc_ants_warp_wf.connect(inputspec, 'smoothing_sigmas',
                              calculate_ants_warp, 'smoothing_sigmas')

    calc_ants_warp_wf.connect(inputspec, 'write_composite_transform',
                              calculate_ants_warp, 'write_composite_transform')

    # inter-workflow connections

    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
                              select_forward_initial, 'warp_list')

    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
                              select_forward_rigid, 'warp_list')

    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
                              select_forward_affine, 'warp_list')

    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
                              select_forward_warp, 'warp_list')

    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
                              select_inverse_warp, 'warp_list')

    # connections to outputspec

    calc_ants_warp_wf.connect(select_forward_initial, 'selected_warp',
                              outputspec, 'ants_initial_xfm')

    calc_ants_warp_wf.connect(select_forward_rigid, 'selected_warp',
                              outputspec, 'ants_rigid_xfm')

    calc_ants_warp_wf.connect(select_forward_affine, 'selected_warp',
                              outputspec, 'ants_affine_xfm')

    calc_ants_warp_wf.connect(select_forward_warp, 'selected_warp', outputspec,
                              'warp_field')

    calc_ants_warp_wf.connect(select_inverse_warp, 'selected_warp', outputspec,
                              'inverse_warp_field')

    calc_ants_warp_wf.connect(calculate_ants_warp, 'warped_image', outputspec,
                              'normalized_output_brain')

    return calc_ants_warp_wf
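
A minimal usage sketch (illustrative; the file names are hypothetical, and only a subset of the inputspec fields listed above is set, reusing the transform configuration from the docstring):

    warp_wf = create_wf_calculate_ants_warp(num_threads=4)
    warp_wf.inputs.inputspec.anatomical_brain = 'anat_brain.nii.gz'     # hypothetical
    warp_wf.inputs.inputspec.reference_brain = 'template_brain.nii.gz'  # hypothetical
    warp_wf.inputs.inputspec.transforms = ['Rigid', 'Affine', 'SyN']
    warp_wf.inputs.inputspec.transform_parameters = [[0.1], [0.1], [0.1, 3, 0]]
    warp_wf.run()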
Example #27
def eddy_fsl_pipeline(epi_param, name='eddy_fsl'):
    """
    Use FSL's eddy for head-motion correction and eddy-current distortion correction.

    """
    from nipype.interfaces.fsl import Eddy
    import nipype.interfaces.utility as niu     # utility
    import nipype.pipeline.engine as pe          # pipeline engine
    from clinica.pipelines.dwi_preprocessing_using_t1.dwi_preprocessing_using_t1_utils import generate_acq, generate_index, b0_indices

    inputnode = pe.Node(
            niu.IdentityInterface(
                fields=['in_file',
                        'in_bvec',
                        'in_bval',
                        'in_mask',
                        'ref_b0']),
            name='inputnode')

    generate_acq = pe.Node(niu.Function(function=generate_acq,
                                        input_names=['in_b0', 'epi_param'],
                                        output_names=['out_file']),
                           name='generate_acq')
    generate_acq.inputs.epi_param = epi_param

    list_b0 = pe.Node(niu.Function(input_names=['in_bval'],
                                   output_names=['out_idx'],
                                   function=b0_indices),
                      name='find_b0_indices')

    generate_index = pe.Node(niu.Function(function=generate_index,
                                          input_names=['in_bval', 'b0_index'],
                                          output_names=['eddy_index']),
                             name='generate_index')

    # head-motion and eddy-current correction with FSL's eddy
    eddy = pe.Node(interface=Eddy(), name='eddy_fsl')
    eddy.inputs.flm = 'linear'

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_parameter',
                                                       'out_corrected',
                                                       'out_rotated_bvecs']),
                         name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, generate_acq, [('ref_b0', 'in_b0')]),
        (inputnode, list_b0, [('in_bval', 'in_bval')]),
        (inputnode, generate_index, [('in_bval', 'in_bval')]),
        (list_b0, generate_index, [('out_idx', 'b0_index')]),

        (inputnode, eddy, [('in_bvec', 'in_bvec'),
                           ('in_bval', 'in_bval'),
                           ('in_file', 'in_file'),
                           ('in_mask', 'in_mask')]),
        (generate_acq, eddy, [('out_file', 'in_acqp')]),
        (generate_index, eddy, [('eddy_index', 'in_index')]),

        (eddy, outputnode, [('out_parameter', 'out_parameter'),
                            ('out_corrected', 'out_corrected'),
                            ('out_rotated_bvecs', 'out_rotated_bvecs')])
    ])
    return wf
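
A minimal usage sketch (illustrative; the contents of epi_param depend on what generate_acq expects, so the dictionary below and all file names are hypothetical):

    eddy_wf = eddy_fsl_pipeline(epi_param={'PhaseEncodingDirection': 'j-',
                                           'TotalReadoutTime': 0.05})
    eddy_wf.inputs.inputnode.in_file = 'dwi.nii.gz'
    eddy_wf.inputs.inputnode.in_bvec = 'dwi.bvec'
    eddy_wf.inputs.inputnode.in_bval = 'dwi.bval'
    eddy_wf.inputs.inputnode.in_mask = 'b0_mask.nii.gz'
    eddy_wf.inputs.inputnode.ref_b0 = 'b0.nii.gz'
    eddy_wf.run()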
Example #28
def create_wf_c3d_fsl_to_itk(map_node,
                             input_image_type=0,
                             name='create_wf_c3d_fsl_to_itk'):
    """
    Converts an FSL-format output matrix to an ITK-format (ANTS) matrix
    for use with ANTS registration tools.

    Parameters
    ----------
    map_node : integer
        Set to 1 to build the conversion as MapNodes iterating over multiple
        source files, or 0 for plain Nodes.
    input_image_type : integer, optional
        ANTS input image type of the source file; if 3 (4D), the source is
        averaged into a 3D file before conversion (default: 0).
    name : string, optional
        Name of the workflow.

    Returns
    -------
    fsl_to_itk_conversion : nipype.pipeline.engine.Workflow

    Notes
    -----
    
    Workflow Inputs::
    
        inputspec.affine_file : string (nifti file)
            Output matrix of FSL-based functional to anatomical registration
        inputspec.reference_file : string (nifti file)
            File of skull-stripped anatomical brain to be used in affine
            conversion
        inputspec.source_file : string (nifti file)
            Should match the input of the apply warp (in_file) unless you are
            applying the warp to a 4-d file, in which case this file should
            be a mean_functional file

    Workflow Outputs::
    
        outputspec.itk_transform : string (text file)
            Converted affine transform in ITK format, usable with ANTS
    
    """

    import nipype.interfaces.c3 as c3
    from nipype.interfaces.utility import Function
    from CPAC.registration.utils import change_itk_transform_type
    from nipype.interfaces.afni import preprocess

    fsl_to_itk_conversion = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(
        fields=['affine_file', 'reference_file', 'source_file']),
                        name='inputspec')

    # converts FSL-format .mat affine xfm into ANTS-format .txt
    # .mat affine comes from Func->Anat registration

    if map_node == 0:
        fsl_reg_2_itk = pe.Node(c3.C3dAffineTool(), name='fsl_reg_2_itk')

    elif map_node == 1:
        fsl_reg_2_itk = pe.MapNode(c3.C3dAffineTool(),
                                   name='fsl_reg_2_itk_mapnode',
                                   iterfield=['source_file'])

    fsl_reg_2_itk.inputs.itk_transform = True
    fsl_reg_2_itk.inputs.fsl2ras = True

    itk_imports = ['import os']

    if map_node == 0:
        change_transform = pe.Node(util.Function(
            input_names=['input_affine_file'],
            output_names=['updated_affine_file'],
            function=change_itk_transform_type,
            imports=itk_imports),
                                   name='change_transform_type')

    elif map_node == 1:
        change_transform = pe.MapNode(util.Function(
            input_names=['input_affine_file'],
            output_names=['updated_affine_file'],
            function=change_itk_transform_type,
            imports=itk_imports),
                                      name='change_transform_type',
                                      iterfield=['input_affine_file'])

    outputspec = pe.Node(util.IdentityInterface(fields=['itk_transform']),
                         name='outputspec')

    fsl_to_itk_conversion.connect(inputspec, 'affine_file', fsl_reg_2_itk,
                                  'transform_file')

    fsl_to_itk_conversion.connect(inputspec, 'reference_file', fsl_reg_2_itk,
                                  'reference_file')

    # source_file input of the conversion must be a 3D file, so if the source
    # file is 4D (input_image_type=3), average it into a 3D file first
    if input_image_type == 0:

        fsl_to_itk_conversion.connect(inputspec, 'source_file', fsl_reg_2_itk,
                                      'source_file')

    elif input_image_type == 3:

        try:
            tstat_source = pe.Node(interface=preprocess.TStat(),
                                   name='fsl_to_itk_tcat_source')
        except AttributeError:
            from nipype.interfaces.afni import utils as afni_utils
            tstat_source = pe.Node(interface=afni_utils.TStat(),
                                   name='fsl_to_itk_tcat_source')

        tstat_source.inputs.outputtype = 'NIFTI_GZ'
        tstat_source.inputs.options = '-mean'

        fsl_to_itk_conversion.connect(inputspec, 'source_file', tstat_source,
                                      'in_file')

        fsl_to_itk_conversion.connect(tstat_source, 'out_file', fsl_reg_2_itk,
                                      'source_file')

    fsl_to_itk_conversion.connect(fsl_reg_2_itk, 'itk_transform',
                                  change_transform, 'input_affine_file')

    fsl_to_itk_conversion.connect(change_transform, 'updated_affine_file',
                                  outputspec, 'itk_transform')

    return fsl_to_itk_conversion
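
A minimal usage sketch (illustrative; all file names are hypothetical):

    fsl2itk = create_wf_c3d_fsl_to_itk(map_node=0)
    fsl2itk.inputs.inputspec.affine_file = 'func2anat.mat'
    fsl2itk.inputs.inputspec.reference_file = 'anat_brain.nii.gz'
    fsl2itk.inputs.inputspec.source_file = 'mean_func.nii.gz'
    fsl2itk.run()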
Example #29
    def build_output_node(self):
        """Build and connect an output node to the pipelines.
        """
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        import nipype.interfaces.io as nio
        from clinica.utils.io import fix_join
        import clinica.pipelines.dwi_preprocessing_using_t1.dwi_preprocessing_using_t1_utils as utils

        # Find container path from DWI filename
        # =====================================
        container_path = npe.Node(nutil.Function(
            input_names=['bids_dwi_filename'],
            output_names=['container'],
            function=utils.dwi_container_from_filename),
                                  name='container_path')

        rename_into_caps = npe.Node(nutil.Function(
            input_names=[
                'in_bids_dwi', 'fname_dwi', 'fname_bval', 'fname_bvec',
                'fname_brainmask'
            ],
            output_names=[
                'out_caps_dwi', 'out_caps_bval', 'out_caps_bvec',
                'out_caps_brainmask'
            ],
            function=utils.rename_into_caps),
                                    name='rename_into_caps')

        # Writing results into CAPS
        # =========================
        write_results = npe.Node(name='write_results',
                                 interface=nio.DataSink())
        write_results.inputs.base_directory = self.caps_directory
        write_results.inputs.parameterization = False

        self.connect([
            (self.input_node, container_path,
             [('dwi', 'bids_dwi_filename')]),  # noqa
            (self.input_node, rename_into_caps,
             [('dwi', 'in_bids_dwi')]),  # noqa
            (
                self.output_node,
                rename_into_caps,
                [
                    ('preproc_dwi', 'fname_dwi'),  # noqa
                    ('preproc_bval', 'fname_bval'),  # noqa
                    ('preproc_bvec', 'fname_bvec'),  # noqa
                    ('b0_mask', 'fname_brainmask')
                ]),  # noqa
            (container_path, write_results, [(('container', fix_join, 'dwi'),
                                              'container')]),  # noqa
            (
                rename_into_caps,
                write_results,
                [
                    ('out_caps_dwi', 'preprocessing.@preproc_dwi'),  # noqa
                    ('out_caps_bval', 'preprocessing.@preproc_bval'),  # noqa
                    ('out_caps_bvec', 'preprocessing.@preproc_bvec'),  # noqa
                    ('out_caps_brainmask', 'preprocessing.@b0_mask')
                ])  # noqa
        ])

    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline.
        """

        import clinica.pipelines.machine_learning_spatial_svm.spatial_svm_utils as utils
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        import nipype.interfaces.io as nio

        fisher_tensor_generation = npe.Node(name="obtain_g_fisher_tensor",
                                            interface=nutil.Function(input_names=['dartel_input', 'FWHM'],
                                                                     output_names=['fisher_tensor', 'fisher_tensor_path'],
                                                                     function=utils.obtain_g_fisher_tensor))
        fisher_tensor_generation.inputs.FWHM = self.parameters['fwhm']

        time_step_generation = npe.Node(name='estimation_time_step',
                                        interface=nutil.Function(input_names=['dartel_input', 'FWHM', 'g'],
                                                                 output_names=['t_step', 'json_file'],
                                                                 function=utils.obtain_time_step_estimation))
        time_step_generation.inputs.FWHM = self.parameters['fwhm']

        heat_solver_equation = npe.MapNode(name='heat_solver_equation',
                                           interface=nutil.Function(input_names=['input_image', 'g',
                                                                                 'FWHM', 't_step', 'dartel_input'],
                                                                    output_names=['regularized_image'],
                                                                    function=utils.heat_solver_equation),
                                           iterfield=['input_image'])
        heat_solver_equation.inputs.FWHM = self.parameters['fwhm']

        datasink = npe.Node(nio.DataSink(),
                            name='sinker')
        datasink.inputs.base_directory = self.caps_directory
        datasink.inputs.parameterization = True
        if self.parameters['image_type'] == 't1':
            datasink.inputs.regexp_substitutions = [
                (r'(.*)/regularized_image/.*/(.*(sub-(.*)_ses-(.*))_T1w(.*)_probability(.*))$',
                 r'\1/subjects/sub-\4/ses-\5/machine_learning/input_spatial_svm/group-' + self.parameters[
                     'group_id'] + r'/\3_T1w\6_spatialregularization\7'),

                (r'(.*)json_file/(output_data.json)$',
                 r'\1/groups/group-' + self.parameters['group_id'] + r'/machine_learning/input_spatial_svm/group-' + self.parameters[
                     'group_id'] + r'_space-Ixi549Space_parameters.json'),

                (r'(.*)fisher_tensor_path/(output_fisher_tensor.npy)$',
                 r'\1/groups/group-' + self.parameters['group_id'] + r'/machine_learning/input_spatial_svm/group-' + self.parameters[
                     'group_id'] + r'_space-Ixi549Space_gram.npy')
            ]

        elif self.parameters['image_type'] == 'pet':
            datasink.inputs.regexp_substitutions = [
                (r'(.*)/regularized_image/.*/(.*(sub-(.*)_ses-(.*))_(task.*)_pet(.*))$',
                 r'\1/subjects/sub-\4/ses-\5/machine_learning/input_spatial_svm/group-' + self.parameters[
                     'group_id'] + r'/\3_\6_spatialregularization\7'),
                (r'(.*)json_file/(output_data.json)$',
                 r'\1/groups/group-' + self.parameters['group_id'] + r'/machine_learning/input_spatial_svm/group-' +
                 self.parameters['group_id'] + r'_space-Ixi549Space_parameters.json'),
                (r'(.*)fisher_tensor_path/(output_fisher_tensor.npy)$',
                 r'\1/groups/group-' + self.parameters['group_id'] + r'/machine_learning/input_spatial_svm/group-' +
                 self.parameters[
                     'group_id'] + r'_space-Ixi549Space_gram.npy')
            ]
        # Connection
        # ==========
        self.connect([
            (self.input_node,      fisher_tensor_generation,    [('dartel_input',    'dartel_input')]),
            (fisher_tensor_generation,      time_step_generation,    [('fisher_tensor',    'g')]),

            (self.input_node, time_step_generation, [('dartel_input', 'dartel_input')]),
            (self.input_node, heat_solver_equation, [('input_image', 'input_image')]),
            (fisher_tensor_generation, heat_solver_equation, [('fisher_tensor', 'g')]),
            (time_step_generation, heat_solver_equation, [('t_step', 't_step')]),
            (self.input_node, heat_solver_equation, [('dartel_input', 'dartel_input')]),

            (fisher_tensor_generation, datasink, [('fisher_tensor_path', 'fisher_tensor_path')]),
            (time_step_generation, datasink, [('json_file', 'json_file')]),
            (heat_solver_equation, datasink, [('regularized_image', 'regularized_image')])
        ])
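
To make the datasink renaming above concrete, here is a small standalone check (not part of the pipeline) of how the first T1 regexp substitution rewrites a path; the sample path and group ID are hypothetical:

    import re

    group_id = 'AD'  # hypothetical
    pattern = (r'(.*)/regularized_image/.*/'
               r'(.*(sub-(.*)_ses-(.*))_T1w(.*)_probability(.*))$')
    replacement = (r'\1/subjects/sub-\4/ses-\5/machine_learning/'
                   r'input_spatial_svm/group-' + group_id +
                   r'/\3_T1w\6_spatialregularization\7')
    sample = ('/caps/regularized_image/_input_image_0/'
              'sub-01_ses-M00_T1w_segm-graymatter_probability.nii.gz')
    print(re.sub(pattern, replacement, sample))
    # -> /caps/subjects/sub-01/ses-M00/machine_learning/input_spatial_svm/
    #    group-AD/sub-01_ses-M00_T1w_segm-graymatter_spatialregularization.nii.gz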