Example No. 1
from nose.tools import assert_equal
from nipype.interfaces.utility import Function


def test_Function_outputs():
    output_map = dict()
    outputs = Function.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example No. 2
from nose.tools import assert_equal
from nipype.interfaces.utility import Function


def test_Function_inputs():
    input_map = dict(
        function_str=dict(mandatory=True),
        ignore_exception=dict(nohash=True, usedefault=True),
    )
    inputs = Function.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
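
# A minimal, hedged illustration of what these auto-generated tests exercise:
# the metadata attached to the traits of the Function interface input spec.
# (Sketch only; the names below are not part of the original test module.)
from nipype.interfaces.utility import Function


def _double(x):
    return 2 * x


func = Function(input_names=['x'], output_names=['y'], function=_double)
# `function_str` is a mandatory input, as the test above asserts.
assert func.inputs.traits()['function_str'].mandatory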
Example No. 3
def spm_anat_preprocessing(wf_name="spm_anat_preproc"):
    """ Run the T1 pre-processing workflow against the anat_hc
    files in `data_dir`.

    It does:
    - N4BiasFieldCorrection
    - SPM12 New Segment
    - SPM12 Warp of MPRAGE to MNI

    [Optional: from config]
    - Atlas file warping to MPRAGE
    - Cortical thickness (SPM+DiReCT)

    Nipype Inputs
    -------------
    anat_input.in_file: traits.File
        Path to the anatomical image.

    anat_input.atlas_file: traits.File
        Path to an atlas file in MNI space to be
        warped to the anatomical space.
        Can also be set through the configuration
        setting `atlas_file`.

    Nipype Outputs
    --------------
    anat_output.anat_mni: traits.File
        The bias-field corrected anatomical image, normalized to MNI space.

    anat_output.tissues_warped: traits.File
        The tissue segmentation in MNI space from SPM.

    anat_output.tissues_native: traits.File
        The tissue segmentation in native space from SPM.

    anat_output.affine_transform: traits.File
        The affine transformation file.

    anat_output.warp_forward: traits.File
        The forward (anat to MNI) warp field from SPM.

    anat_output.warp_inverse: traits.File
        The inverse (MNI to anat) warp field from SPM.

    anat_output.anat_biascorr: traits.File
        The bias-field corrected anatomical image.

    anat_output.brain_mask: traits.File
        A brain mask file in anatomical space.
        This is calculated by summing up the maps of the
        segmented tissues (CSF, WM, GM) and then binarising the result.

    anat_output.atlas_anat: traits.File
        The atlas file warped to anatomical space.
        Only produced if `do_atlas` is enabled and the atlas file
        set in the configuration is an existing file in MNI space.

    anat_output.cortical_thickness: traits.File
        If `anat_preproc.do_cortical_thickness` is True.
        The cortical thickness estimations calculated with the
        SPM+DiReCT method (KellyKapowski).

    anat_output.warped_white_matter: traits.File
        If `anat_preproc.do_cortical_thickness` is True.
        The warped white matter image calculated with the
        SPM+DiReCT method (KellyKapowski).

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = ["in_file"]
    out_fields = [
        "anat_mni",
        "tissues_warped",
        "tissues_native",
        "affine_transform",
        "warp_forward",
        "warp_inverse",
        "anat_biascorr",
        "brain_mask",
    ]

    # check if we have to warp an atlas file too.
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_file"]
        out_fields += ["atlas_anat"]

    # check if we have to run the cortical thickness (SPM+DiReCT) method.
    do_cortical_thickness = get_config_setting(
        'anat_preproc.do_cortical_thickness', False)
    if do_cortical_thickness:
        out_fields += [
            "cortical_thickness",
            "warped_white_matter",
        ]

    # input node
    anat_input = pe.Node(IdentityInterface(fields=in_fields,
                                           mandatory_inputs=True),
                         name="anat_input")

    # atlas registration
    if do_atlas and not isdefined(anat_input.inputs.atlas_file):
        anat_input.inputs.set(atlas_file=atlas_file)

    # T1 preprocessing nodes
    biascor = setup_node(biasfield_correct(), name="bias_correction")
    gunzip_anat = setup_node(Gunzip(), name="gunzip_anat")
    segment = setup_node(spm_segment(), name="new_segment")
    warp_anat = setup_node(spm_apply_deformations(), name="warp_anat")

    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()
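
    # Hedged sketch (assumption, not the actual helper imported by this module):
    # `get_bounding_box` presumably derives the SPM-style write_bounding_box
    # ([[min_x, min_y, min_z], [max_x, max_y, max_z]] in mm) from the image
    # affine and shape with nibabel, roughly like this:
    def get_bounding_box_sketch(in_file):
        import numpy as np
        import nibabel as nib

        img = nib.load(in_file)
        ni, nj, nk = (np.array(img.shape[:3]) - 1)
        # map all 8 corners of the voxel grid to world (mm) coordinates
        corners = np.array([[i, j, k] for i in (0, ni) for j in (0, nj) for k in (0, nk)])
        world = nib.affines.apply_affine(img.affine, corners)
        return [list(world.min(axis=0)), list(world.max(axis=0))]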

    # calculate brain mask from tissue maps
    tissues = setup_node(IdentityInterface(fields=["gm", "wm", "csf"],
                                           mandatory_inputs=True),
                         name="tissues")

    brain_mask = setup_node(Function(
        function=math_img,
        input_names=["formula", "out_file", "gm", "wm", "csf"],
        output_names=["out_file"],
        imports=['from pypes.interfaces.nilearn import ni2file']),
                            name='brain_mask')
    brain_mask.inputs.out_file = "tissues_brain_mask.nii.gz"
    brain_mask.inputs.formula = "np.abs(gm + wm + csf) > 0"
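
    # Hedged sketch (assumption) of the `math_img` helper used above: evaluate a
    # numpy expression over the named images with nilearn and save the result.
    def math_img_sketch(formula, out_file, gm, wm, csf):
        import os
        import nilearn.image as niimg

        result = niimg.math_img(formula, gm=gm, wm=wm, csf=csf)
        result.to_filename(out_file)
        return os.path.abspath(out_file)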

    # output node
    anat_output = pe.Node(IdentityInterface(fields=out_fields),
                          name="anat_output")

    # Connect the nodes
    wf.connect([
        # input to biasfieldcorrection
        (anat_input, biascor, [("in_file", "input_image")]),

        # new segment
        (biascor, gunzip_anat, [("output_image", "in_file")]),
        (gunzip_anat, segment, [("out_file", "channel_files")]),

        # Normalize12
        (segment, warp_anat, [("forward_deformation_field", "deformation_file")
                              ]),
        (segment, warp_anat, [("bias_corrected_images", "apply_to_files")]),
        (tpm_bbox, warp_anat, [("bbox", "write_bounding_box")]),

        # brain mask from tissues
        (segment, tissues, [
            (("native_class_images", selectindex, 0), "gm"),
            (("native_class_images", selectindex, 1), "wm"),
            (("native_class_images", selectindex, 2), "csf"),
        ]),
        (tissues, brain_mask, [
            ("gm", "gm"),
            ("wm", "wm"),
            ("csf", "csf"),
        ]),

        # output
        (warp_anat, anat_output, [("normalized_files", "anat_mni")]),
        (segment, anat_output, [("modulated_class_images", "tissues_warped"),
                                ("native_class_images", "tissues_native"),
                                ("transformation_mat", "affine_transform"),
                                ("forward_deformation_field", "warp_forward"),
                                ("inverse_deformation_field", "warp_inverse"),
                                ("bias_corrected_images", "anat_biascorr")]),
        (brain_mask, anat_output, [("out_file", "brain_mask")]),
    ])

    # atlas warping nodes
    if do_atlas:
        gunzip_atlas = pe.Node(Gunzip(), name="gunzip_atlas")
        warp_atlas = setup_node(spm_apply_deformations(), name="warp_atlas")
        anat_bbox = setup_node(Function(function=get_bounding_box,
                                        input_names=["in_file"],
                                        output_names=["bbox"]),
                               name="anat_bbox")

        # set the warping interpolation to nearest neighbour.
        warp_atlas.inputs.write_interp = 0

        # connect the atlas registration nodes
        wf.connect([
            (anat_input, gunzip_atlas, [("atlas_file", "in_file")]),
            (gunzip_anat, anat_bbox, [("out_file", "in_file")]),
            (gunzip_atlas, warp_atlas, [("out_file", "apply_to_files")]),
            (segment, warp_atlas, [("inverse_deformation_field",
                                    "deformation_file")]),
            (anat_bbox, warp_atlas, [("bbox", "write_bounding_box")]),
            (warp_atlas, anat_output, [("normalized_files", "atlas_anat")]),
        ])

    # cortical thickness (SPM+DiReCT) method
    if do_cortical_thickness:
        from ..interfaces.ants import KellyKapowski

        segm_img = setup_node(Function(
            function=math_img,
            input_names=["formula", "out_file", "gm", "wm"],
            output_names=["out_file"],
            imports=['from pypes.interfaces.nilearn import ni2file']),
                              name='gm-wm_image')
        segm_img.inputs.out_file = "gm_wm.nii.gz"
        segm_img.inputs.formula = '((gm >= 0.5)*2 + (wm > 0.5)*3).astype(np.uint8)'

        # copy the header from the GM tissue image to the result from `gm-wm_image`.
        # this is necessary because the `gm-wm_image` operation sometimes modifies the
        # offset of the image, which can trigger an ANTs exception due to
        # ITK tolerance in ImageToImageFilter
        # https://github.com/stnava/ANTs/issues/74
        cp_hdr = setup_node(Function(
            function=copy_header,
            input_names=["in_file", "data_file"],
            output_names=["out_file"],
            imports=['from pypes.interfaces.nilearn import ni2file']),
                            name='copy_header')
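
        # Hedged sketch (assumption) of a `copy_header`-style function: write the
        # voxel data of `data_file` into a new image that reuses the affine and
        # header of `in_file`, so ANTs/ITK see exactly matching geometries.
        def copy_header_sketch(in_file, data_file):
            import os
            import numpy as np
            import nibabel as nib

            ref = nib.load(in_file)
            data = np.asanyarray(nib.load(data_file).dataobj)
            out_file = os.path.abspath('gm_wm_fixed_hdr.nii.gz')
            nib.Nifti1Image(data, ref.affine, ref.header).to_filename(out_file)
            return out_file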

        kk = setup_node(KellyKapowski(), name='direct')
        kk.inputs.cortical_thickness = 'direct_cortical_thickness.nii.gz'
        kk.inputs.warped_white_matter = 'direct_warped_white_matter.nii.gz'

        # connect the cortical thickness (SPM+DiReCT) method
        wf.connect([
            # create segmentation GM+WM file
            (tissues, segm_img, [("gm", "gm"), ("wm", "wm")]),
            (segm_img, cp_hdr, [("out_file", "data_file")]),
            (tissues, cp_hdr, [("gm", "in_file")]),

            # kellykapowski
            (cp_hdr, kk, [("out_file", "segmentation_image")]),
            (tissues, kk, [("gm", "gray_matter_prob_image"),
                           ("wm", "white_matter_prob_image")]),
            (kk, anat_output, [("cortical_thickness", "cortical_thickness"),
                               ("warped_white_matter", "warped_white_matter")
                               ]),
        ])
    return wf
Example No. 4
psb6351_wf.base_dir = work_dir + f'/psb6351workdir/sub-{sids[0]}'  # I define the working directory where I want preliminary files to be written
psb6351_wf.config['execution'][
    'use_relative_paths'] = True  # I assign an execution variable to use relative paths...TRYING TO USE THIS TO FIX A BUG?

#######################################################################################
# Create a Function node to substitute names of files created during pipeline
# In nipype you create nodes using the pipeline engine that was imported earlier.
# In this case I am specifically creating a function node with an input called func_files
# and an output (what the function returns) called subs.  The actual function,
# which was created above, is called get_subs.
# I can assign the input either through a workflow connect syntax or by simply hardcoding it.
# In this case I hard coded it by saying that .inputs.func_files = func_files
#######################################################################################

getsubs = pe.Node(Function(input_names=['func_files'],
                           output_names=['subs'],
                           function=get_subs),
                  name='getsubs')
getsubs.inputs.func_files = func_files
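
# Hedged sketch (assumption, not the author's actual helper defined above this
# snippet): a `get_subs`-style function usually returns (old, new) string pairs
# that a DataSink node uses to rename files written by the pipeline.
def get_subs_sketch(func_files):
    import os
    subs = []
    for i, func_file in enumerate(func_files):
        run_name = os.path.basename(func_file).split('.')[0]
        # '_volreg0', '_volreg1', ... are hypothetical folder names here
        subs.append(('_volreg%d' % i, run_name))
    return subs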

#######################################################################################
# Here I am inputting just the first functional run's data.
# I want to use AFNI's 3dToutcount to find the number of
# outliers at each volume.  I will use this information to
# later select the earliest volume with the fewest outliers
# to serve as the base for the motion correction
#######################################################################################

id_outliers = pe.Node(afni.OutlierCount(), name='id_outliers')
# Mandatory Inputs
id_outliers.inputs.in_file = func_files[0]  # first functional run only
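
# Hedged sketch (assumption) of how the base volume described above could be
# chosen later on: read the outlier counts written by 3dToutcount and return
# the index of the earliest volume with the fewest outliers.
def pick_base_volume_sketch(outlier_count_file):
    import numpy as np
    counts = np.loadtxt(outlier_count_file)
    return int(np.argmin(counts))  # argmin returns the first (earliest) minimum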
Example No. 5
def create_seg_preproc(use_ants,
                       use_priors,
                       use_threshold,
                       csf_use_erosion=False,
                       wm_use_erosion=False,
                       gm_use_erosion=False,
                       wf_name='seg_preproc'):
    """Segment the subject's anatomical brain into cerebral spinal fluids,
    white matter and gray matter and refine them using template-space tissue
    priors, if selected to do so.

    Parameters
    ----------
    use_ants: boolean
        Whether we are using ANTs or FSL-FNIRT for registration purposes.
    wf_name : string
        name of the workflow

    Returns
    -------
    seg_preproc : workflow
        Workflow Object for Segmentation Workflow

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/seg_preproc/seg_preproc.py>`_

    Workflow Inputs: ::

        csf_threshold.csf_threshold : list (float)
            Threshold of Cerebral Spinal Fluid probabilities

        wm_threshold.wm_threshold : list (float)
            Threshold of White Matter probabilities

        gm_threshold.gm_threshold : list (float)
            Threshold of Gray Matter probabilities

        inputspec.brain : string (existing nifti file)
            Anatomical image (without skull)

        inputspec.standard2highres_mat : string (existing affine transformation .mat file)
            File for the transformation from MNI space to anatomical space

        inputspec.PRIOR_CSF : string (existing nifti file)
            FSL Standard CSF Tissue prior image, binarized with a threshold of 0.4

        inputspec.PRIOR_GRAY : string (existing nifti file)
            FSL Standard GRAY Matter Tissue prior image, binarized with a threshold of 0.66

        inputspec.PRIOR_WHITE : string (existing nifti file)
            FSL Standard White Matter Tissue prior image, binarized with a threshold of 0.2

    Workflow Outputs: ::

        outputspec.csf_mni2t1 : string (nifti file)
            outputs the CSF prior template (in MNI space) registered to anatomical space

        outputspec.gm_mni2t1 : string (nifti file)
            outputs gray matter prior template registered to anatomical space

        outputspec.gm_mask : string (nifti file)
            outputs image after masking gm_combo with gm prior in t1 space

        outputspec.wm_mni2t1 : string (nifti file)
            outputs the White Matter prior template (in MNI space) registered to anatomical space

        outputspec.wm_mask : string (nifti file)
            outputs image after masking wm_combo with white matter(wm) prior in t1 space

        outputspec.probability_maps : string (nifti file)
            outputs individual probability maps (output from brain segmentation using FAST)

        outputspec.mixeltype : string (nifti file)
            outputs mixeltype volume file _mixeltype (output from brain segmentation using FAST)

        outputspec.partial_volume_map : string (nifti file)
            outputs partial volume file _pveseg (output from brain segmentation using FAST)

        outputspec.partial_volume_files : string (nifti file)
            outputs partial volume estimate files _pve_ (output from brain segmentation using FAST)


    Order of commands:

    - Segment the Anatomical brain. For details see `fast <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FAST>`_::

        fast
        -t 1
        -g
        -p
        -o segment
        mprage_brain.nii.gz

    - Register CSF template in template space to t1 space. For details see `flirt <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FLIRT>`_::

        flirt
        -in PRIOR_CSF
        -ref mprage_brain.nii.gz
        -applyxfm
        -init standard2highres_inv.mat
        -out csf_mni2t1

    - Find overlap between csf probability map and csf_mni2t1. For details see  `fslmaths <http://www.fmrib.ox.ac.uk/fslcourse/lectures/practicals/intro/index.htm>`_::

        fslmaths
        segment_prob_0.nii.gz
        -mas csf_mni2t1.nii.gz
        csf_combo.nii.gz

    - Threshold and binarize CSF probability map ::

        fslmaths
        csf_combo.nii.gz
        -thr 0.4 (threshold value can be changed by the user)
        -bin csf_bin.nii.gz

    - Generate the CSF mask by applying the CSF prior in T1 space to the binarized CSF probability map ::

        fslmaths
        csf_bin.nii.gz
        -mas csf_mni2t1
        csf_mask

    - Register WM template in template space to t1 space ::

        flirt
        -in PRIOR_WM
        -ref mprage_brain.nii.gz
        -applyxfm
        -init standard2highres.mat
        -out wm_mni2t1

    - Find overlap between wm probability mask and wm_mni2t1 ::

        fslmaths
        segment_prob_2.nii.gz
        -mas wm_mni2t1.nii.gz
        wm_combo.nii.gz

    - Threshold and binarize WM probability map ::

        fslmaths
        wm_combo.nii.gz
        -thr 0.4 (threshold value can be changed by the user)
        -bin wm_bin.nii.gz

    - Generate the WM mask by applying the WM prior in T1 space to the binarized WM probability map ::

        fslmaths
        wm_bin.nii.gz
        -mas wm_mni2t1
        wm_mask

    - Register GM template in template space to t1 space ::

        flirt
        -in PRIOR_GM
        -ref mprage_brain.nii.gz
        -applyxfm
        -init standard2highres.mat
        -out gm_mni2t1

    - Find overlap between gm probability map and gm_mni2t1 ::

        fslmaths
        segment_prob_1.nii.gz
        -mas gm_mni2t1.nii.gz
        gm_combo.nii.gz

    - Threshold and binarize GM probability map ::

        fslmaths
        gm_combo.nii.gz
        -thr 0.4 (threshold value can be changed by the user)
        -bin gm_bin.nii.gz

    - Generate the GM mask by applying the GM prior in T1 space to the thresholded, binarized GM probability map ::

        fslmaths
        gm_bin.nii.gz
        -mas gm_mni2t1
        gm_mask


    Examples
    --------
    >>> import CPAC.seg_preproc as seg_wflow
    >>> seg = seg_wflow.create_seg_preproc()
    >>> seg.inputs.inputspec.standard2highres_mat = '/home/data/Projects/C-PAC/working_directory/s1001/reg_preproc/standard2highres.mat'
    >>> seg.inputs.inputspec.PRIOR_CSF = '/home/data/Projects/C-PAC/tissuepriors/2mm/avg152T1_csf_bin.nii.gz'
    >>> seg.inputs.inputspec.PRIOR_WHITE = '/home/data/Projects/C-PAC/tissuepriors/2mm/avg152T1_white_bin.nii.gz'
    >>> seg.inputs.inputspec.PRIOR_GRAY = '/home/data/Projects/C-PAC/tissuepriors/2mm/avg152T1_gray_bin.nii.gz'
    >>> seg.inputs.inputspec.brain = '/home/data/Projects/C-PAC/working_directory/s1001/anat_preproc/mprage_brain.nii.gz'
    >>> seg.run() # doctest: +SKIP


    High Level Graph:

    .. image:: ../images/seg_preproc.dot.png
        :width: 1100
        :height: 480

    Detailed Graph:

    .. image:: ../images/seg_preproc_detailed.dot.png
        :width: 1100
        :height: 480
    """

    preproc = pe.Workflow(name=wf_name)
    inputNode = pe.Node(util.IdentityInterface(fields=[
        'brain', 'brain_mask', 'standard2highres_init', 'standard2highres_mat',
        'standard2highres_rig', 'PRIOR_CSF', 'PRIOR_GRAY', 'PRIOR_WHITE'
    ]),
                        name='inputspec')

    inputnode_csf_threshold = pe.Node(
        util.IdentityInterface(fields=['csf_threshold']), name='csf_threshold')

    inputnode_wm_threshold = pe.Node(
        util.IdentityInterface(fields=['wm_threshold']), name='wm_threshold')

    inputnode_gm_threshold = pe.Node(
        util.IdentityInterface(fields=['gm_threshold']), name='gm_threshold')

    inputnode_csf_erosion_prop = pe.Node(
        util.IdentityInterface(fields=['csf_erosion_prop']),
        name='csf_erosion_prop')

    inputnode_wm_erosion_prop = pe.Node(
        util.IdentityInterface(fields=['wm_erosion_prop']),
        name='wm_erosion_prop')

    inputnode_gm_erosion_prop = pe.Node(
        util.IdentityInterface(fields=['gm_erosion_prop']),
        name='gm_erosion_prop')

    inputnode_csf_mask_erosion_mm = pe.Node(
        util.IdentityInterface(fields=['csf_mask_erosion_mm']),
        name='csf_mask_erosion_mm')

    inputnode_wm_mask_erosion_mm = pe.Node(
        util.IdentityInterface(fields=['wm_mask_erosion_mm']),
        name='wm_mask_erosion_mm')

    inputnode_gm_mask_erosion_mm = pe.Node(
        util.IdentityInterface(fields=['gm_mask_erosion_mm']),
        name='gm_mask_erosion_mm')

    inputnode_csf_erosion_mm = pe.Node(
        util.IdentityInterface(fields=['csf_erosion_mm']),
        name='csf_erosion_mm')

    inputnode_wm_erosion_mm = pe.Node(
        util.IdentityInterface(fields=['wm_erosion_mm']), name='wm_erosion_mm')

    inputnode_gm_erosion_mm = pe.Node(
        util.IdentityInterface(fields=['gm_erosion_mm']), name='gm_erosion_mm')

    outputNode = pe.Node(util.IdentityInterface(fields=[
        'csf_mask', 'gm_mask', 'wm_mask', 'csf_probability_map',
        'probability_maps', 'tissue_class_files', 'mixeltype',
        'partial_volume_map', 'partial_volume_files'
    ]),
                         name='outputspec')

    segment = pe.Node(interface=fsl.FAST(), name='segment', mem_gb=1.5)
    segment.inputs.img_type = 1
    segment.inputs.segments = True
    segment.inputs.probability_maps = True
    segment.inputs.out_basename = 'segment'

    check_wm = pe.Node(name='check_wm',
                       interface=Function(function=check_if_file_is_empty,
                                          input_names=['in_file'],
                                          output_names=['out_file']))
    check_gm = pe.Node(name='check_gm',
                       interface=Function(function=check_if_file_is_empty,
                                          input_names=['in_file'],
                                          output_names=['out_file']))
    check_csf = pe.Node(name='check_csf',
                        interface=Function(function=check_if_file_is_empty,
                                           input_names=['in_file'],
                                           output_names=['out_file']))
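
    # Hedged sketch (assumption) of a `check_if_file_is_empty`-style function:
    # fail early if a tissue image has no non-zero voxels, else pass it through.
    def check_if_file_is_empty_sketch(in_file):
        import numpy as np
        import nibabel as nib

        if not np.any(nib.load(in_file).get_fdata()):
            raise ValueError('%s contains only zero-valued voxels' % in_file)
        return in_file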

    preproc.connect(inputNode, 'brain', segment, 'in_files')

    preproc.connect(segment, 'mixeltype', outputNode, 'mixeltype')
    preproc.connect(segment, 'partial_volume_files', outputNode,
                    'partial_volume_files')
    preproc.connect(segment, 'partial_volume_map', outputNode,
                    'partial_volume_map')
    preproc.connect(segment, 'tissue_class_files', outputNode,
                    'tissue_class_files')
    preproc.connect(segment, 'probability_maps', outputNode,
                    'probability_maps')

    process_csf = process_segment_map('CSF',
                                      use_ants,
                                      use_priors,
                                      use_threshold,
                                      use_erosion=csf_use_erosion)

    if use_ants:
        preproc.connect(inputNode, 'standard2highres_init', process_csf,
                        'inputspec.standard2highres_init')
        preproc.connect(inputNode, 'standard2highres_rig', process_csf,
                        'inputspec.standard2highres_rig')

    preproc.connect(inputNode, 'brain', process_csf, 'inputspec.brain')
    preproc.connect(inputNode, 'brain_mask', process_csf,
                    'inputspec.brain_mask')
    preproc.connect(inputnode_csf_threshold, 'csf_threshold', process_csf,
                    'inputspec.threshold')
    preproc.connect(inputnode_csf_erosion_prop, 'csf_erosion_prop',
                    process_csf, 'inputspec.erosion_prop')
    preproc.connect(inputnode_csf_mask_erosion_mm, 'csf_mask_erosion_mm',
                    process_csf, 'inputspec.mask_erosion_mm')
    preproc.connect(inputnode_csf_erosion_mm, 'csf_erosion_mm', process_csf,
                    'inputspec.erosion_mm')
    preproc.connect(inputNode, 'PRIOR_CSF', process_csf,
                    'inputspec.tissue_prior')
    preproc.connect(segment, ('tissue_class_files', pick_wm_class_0),
                    process_csf, 'inputspec.tissue_class_file')
    preproc.connect(segment, ('probability_maps', pick_wm_prob_0), process_csf,
                    'inputspec.probability_tissue_map')
    preproc.connect(inputNode, 'standard2highres_mat', process_csf,
                    'inputspec.standard2highres_mat')
    preproc.connect(process_csf, 'outputspec.segment_mask', outputNode,
                    'csf_mask')
    preproc.connect(process_csf, 'outputspec.probability_tissue_map',
                    outputNode, 'csf_probability_map')
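
    # Hedged sketch (assumption) of the `pick_wm_class_0` / `pick_wm_prob_0`
    # style helpers used in the connections above: FAST returns its tissue
    # files in CSF/GM/WM order, and a (source, function) tuple in `connect`
    # applies the function to the output before passing it downstream.
    def pick_tissue_file_sketch(file_list, index=0):
        return file_list[index]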

    process_wm = process_segment_map('WM',
                                     use_ants,
                                     use_priors,
                                     use_threshold,
                                     use_erosion=wm_use_erosion)

    if use_ants:
        preproc.connect(inputNode, 'standard2highres_init', process_wm,
                        'inputspec.standard2highres_init')
        preproc.connect(inputNode, 'standard2highres_rig', process_wm,
                        'inputspec.standard2highres_rig')

    preproc.connect(inputNode, 'brain', process_wm, 'inputspec.brain')
    preproc.connect(inputNode, 'brain_mask', process_wm,
                    'inputspec.brain_mask')
    preproc.connect(inputnode_wm_threshold, 'wm_threshold', process_wm,
                    'inputspec.threshold')
    preproc.connect(inputnode_wm_erosion_prop, 'wm_erosion_prop', process_wm,
                    'inputspec.erosion_prop')
    preproc.connect(inputnode_wm_mask_erosion_mm, 'wm_mask_erosion_mm',
                    process_wm, 'inputspec.mask_erosion_mm')
    preproc.connect(inputnode_wm_erosion_mm, 'wm_erosion_mm', process_wm,
                    'inputspec.erosion_mm')
    preproc.connect(inputNode, 'PRIOR_WHITE', process_wm,
                    'inputspec.tissue_prior')
    preproc.connect(segment, ('tissue_class_files', pick_wm_class_2),
                    process_wm, 'inputspec.tissue_class_file')
    preproc.connect(segment, ('probability_maps', pick_wm_prob_2), process_wm,
                    'inputspec.probability_tissue_map')
    preproc.connect(inputNode, 'standard2highres_mat', process_wm,
                    'inputspec.standard2highres_mat')
    preproc.connect(process_wm, 'outputspec.segment_mask', outputNode,
                    'wm_mask')

    process_gm = process_segment_map('GM',
                                     use_ants,
                                     use_priors,
                                     use_threshold,
                                     use_erosion=gm_use_erosion)

    if use_ants:
        preproc.connect(inputNode, 'standard2highres_init', process_gm,
                        'inputspec.standard2highres_init')
        preproc.connect(inputNode, 'standard2highres_rig', process_gm,
                        'inputspec.standard2highres_rig')

    preproc.connect(inputNode, 'brain', process_gm, 'inputspec.brain')
    preproc.connect(inputNode, 'brain_mask', process_gm,
                    'inputspec.brain_mask')
    preproc.connect(inputnode_gm_threshold, 'gm_threshold', process_gm,
                    'inputspec.threshold')
    preproc.connect(inputnode_gm_erosion_prop, 'gm_erosion_prop', process_gm,
                    'inputspec.erosion_prop')
    preproc.connect(inputnode_gm_mask_erosion_mm, 'gm_mask_erosion_mm',
                    process_gm, 'inputspec.mask_erosion_mm')
    preproc.connect(inputnode_gm_erosion_mm, 'gm_erosion_mm', process_gm,
                    'inputspec.erosion_mm')
    preproc.connect(inputNode, 'PRIOR_GRAY', process_gm,
                    'inputspec.tissue_prior')
    preproc.connect(segment, ('tissue_class_files', pick_wm_class_1),
                    process_gm, 'inputspec.tissue_class_file')
    preproc.connect(segment, ('probability_maps', pick_wm_prob_1), process_gm,
                    'inputspec.probability_tissue_map')
    preproc.connect(inputNode, 'standard2highres_mat', process_gm,
                    'inputspec.standard2highres_mat')
    preproc.connect(process_gm, 'outputspec.segment_mask', outputNode,
                    'gm_mask')

    return preproc
Example No. 6
from nipype import Node, JoinNode, Function, IdentityInterface

subjects = Node(name='subjects',
                interface=IdentityInterface(fields=['subject_id']))

subjects.iterables = ('subject_id', ['subject1', 'subject2', 'subject3'])

visits = Node(name='visits', interface=IdentityInterface(fields=['visit_id']))

visits.iterables = ('visit_id', ['visit1', 'visit2'])


# merging subjects and visits ids
def merge(subject_id, visit_id):
    return (subject_id, visit_id)


sessions = Node(Function(input_names=["subject_id", "visit_id"],
                         output_names=["pair"],
                         function=merge),
                name="sessions")


# for join node: create a list from all elements
def create_list(pair):
    return list(pair)


join_list = JoinNode(Function(input_names=['pair'],
                              output_names=['pairs_list'],
                              function=create_list),
                     name='list',
                     joinsource='subjects',
                     joinfield=['pair'])
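
# Hedged sketch (assumption) of how the nodes above could be wired together:
# every (subject_id, visit_id) combination flows through `sessions`, and the
# JoinNode collects the resulting pairs back into one list over `subjects`.
from nipype import Workflow

session_wf = Workflow(name='session_join_example')
session_wf.connect(subjects, 'subject_id', sessions, 'subject_id')
session_wf.connect(visits, 'visit_id', sessions, 'visit_id')
session_wf.connect(sessions, 'pair', join_list, 'pair')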
def CreateTractographyWorkflow(WFname):
    ###### UTILITY FUNCTIONS #######
    def computeNumberOfSeedsPerVoxel(inputVolume):
        import operator
        from functools import reduce  # reduce is not a builtin in Python 3
        import SimpleITK as sitk
        inVol = sitk.ReadImage(inputVolume)
        voxelVolume = reduce(operator.mul, inVol.GetSpacing())
        # 10 seeds per voxel are used when the voxel volume is 8 mm^3.
        seedsPerVoxel = round(voxelVolume * 10 / 8)
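        # e.g. 2 x 2 x 2 mm spacing -> 8 mm^3 -> 10 seeds per voxel;
        # 1.25 mm isotropic -> ~1.95 mm^3 -> round(1.95 * 10 / 8) = 2 seeds.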
        return int(seedsPerVoxel)

    #################################
    TractWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(
        fields=['DWI_Corrected_Aligned_CS', 'DWIBrainMask']),
                         name='inputsSpec')

    outputsSpec = pe.Node(interface=IdentityInterface(fields=['ukfTracks']),
                          name='outputsSpec')

    ########
    # Before running the UKF, we need to define the number of seeds per voxel
    # based on the voxel volume of the input DWI scan.
    ########

    # Step1: extract B0 from DWI volume
    EXTRACT_B0 = pe.Node(interface=extractNrrdVectorIndex(), name="EXTRACT_B0")
    EXTRACT_B0.inputs.vectorIndex = 0
    EXTRACT_B0.inputs.outputVolume = 'B0_Image.nrrd'
    TractWF.connect(inputsSpec, 'DWI_Corrected_Aligned_CS', EXTRACT_B0,
                    'inputVolume')

    # Step2: Compute number of seeds per voxel
    computeNumberOfSeedsPerVoxelNode = pe.Node(
        interface=Function(function=computeNumberOfSeedsPerVoxel,
                           input_names=['inputVolume'],
                           output_names=['seedsPerVoxel']),
        name="ComputeNumberOfSeedsPerVoxel")
    TractWF.connect(EXTRACT_B0, 'outputVolume',
                    computeNumberOfSeedsPerVoxelNode, 'inputVolume')

    # Step3: UKF Processing
    UKFNode = pe.Node(interface=UKFTractography(), name="UKFRunRecordStates")
    UKFNode.inputs.tracts = "ukfTracts.vtp"
    UKFNode.inputs.numTensor = '2'
    UKFNode.inputs.freeWater = True  ## default False
    UKFNode.inputs.minFA = 0.06
    UKFNode.inputs.minGA = 0.06
    UKFNode.inputs.seedFALimit = 0.06
    UKFNode.inputs.Ql = 70
    UKFNode.inputs.recordLength = 2
    UKFNode.inputs.recordTensors = True
    UKFNode.inputs.recordFreeWater = True
    UKFNode.inputs.recordFA = True
    UKFNode.inputs.recordTrace = True

    TractWF.connect(inputsSpec, 'DWI_Corrected_Aligned_CS', UKFNode, 'dwiFile')
    TractWF.connect(inputsSpec, 'DWIBrainMask', UKFNode, 'maskFile')
    TractWF.connect(computeNumberOfSeedsPerVoxelNode, 'seedsPerVoxel', UKFNode,
                    'seedsPerVoxel')
    TractWF.connect(UKFNode, 'tracts', outputsSpec, 'ukfTracks')

    return TractWF
# smoothing with SUSAN
susan = Node(
    fsl.SUSAN(brightness_threshold=2000.0,
              fwhm=6.0),  # smoothing filter width (6mm, isotropic)
    name='susan')

# masking the fMRI with a brain mask
applymask = Node(fsl.ApplyMask(), name='applymask')

###########
#
# NODE TO CALL EVENT INFO FUNCTION
#
###########
taskevents = Node(interface=Function(
    input_names=['fileEvent'],
    output_names=['subject_info', 'contrast_list'],
    function=TaskEvents),
                  name='taskevents')

###########
#
# SETTING UP THE FIRST LEVEL ANALYSIS NODES
#
###########

# model specification
modelspec = Node(modelgen.SpecifyModel(input_units='secs',
                                       time_repetition=TR,
                                       high_pass_filter_cutoff=100),
                 name="modelspec")
Example No. 9
def dti_artifact_correction(wf_name="dti_artifact_correction"):
    """ Run the diffusion MRI pre-processing workflow against the diff files in `data_dir`.

    It will resample/regrid the diffusion image to have isometric voxels.
    It corrects for head motion and eddy currents.
    It estimates motion outliers and exports motion reports using nipype.algorithms.rapidart.

    Nipype Inputs
    -------------
    dti_art_input.diff: traits.File
        path to the diffusion MRI image

    dti_art_input.bval: traits.File
        path to the bvals file

    dti_art_input.bvec: traits.File
        path to the bvecs file


    Nipype Outputs
    --------------
    dti_art_output.eddy_corr_file: traits.File
        Eddy currents corrected DTI image.

    dti_art_output.bvec_rotated: traits.File
        Rotated bvecs file

    dti_art_output.brain_mask_1: traits.File
        Brain mask extracted using BET on the first B0 image.

    dti_art_output.brain_mask_2: traits.File
        Brain mask extracted using BET on the average B0 image,
        after motion correction.

    dti_art_output.acqp: traits.File
        Text file with acquisition parameters calculated for Eddy.

    dti_art_output.index: traits.File
        Text file with acquisition indices calculated for Eddy.

    dti_art_output.avg_b0: traits.File
        The average b=0 image extracted from the motion and eddy
        currents corrected diffusion MRI.

    dti_art_output.hmc_corr_file: traits.File

    dti_art_output.hmc_corr_bvec: traits.File

    dti_art_output.hmc_corr_xfms: traits.File

    dti_art_output.art_displacement_files: traits.File

    dti_art_output.art_intensity_files: traits.File

    dti_art_output.art_norm_files: traits.File

    dti_art_output.art_outlier_files: traits.File

    dti_art_output.art_plot_files: traits.File

    dti_art_output.art_statistic_files: traits.File

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields = ["diff", "bval", "bvec"]
    out_fields = [
        "eddy_corr_file",
        "bvec_rotated",
        "brain_mask_1",
        "brain_mask_2",
        "acqp",
        "index",
        "avg_b0",
    ]

    do_rapidart = get_config_setting("dmri.artifact_detect", True)
    if do_rapidart:
        out_fields += [
            "hmc_corr_file",
            "hmc_corr_bvec",
            "hmc_corr_xfms",
            "art_displacement_files",
            "art_intensity_files",
            "art_norm_files",
            "art_outlier_files",
            "art_plot_files",
            "art_statistic_files",
        ]

    # input interface
    dti_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="dti_art_input")

    # resample
    resample = setup_node(Function(
        function=reslice,
        input_names=['in_file', 'new_zooms', 'order', 'out_file'],
        output_names=['out_file']),
                          name='dti_reslice')

    ## extract first b0 for Eddy and HMC brain mask
    list_b0 = pe.Node(Function(
        function=b0_indices,
        input_names=['in_bval'],
        output_names=['out_idx'],
    ),
                      name='b0_indices')
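
    # Hedged sketch (assumption) of a `b0_indices`-style helper: read the bvals
    # file and return the indices of the volumes acquired with b ~ 0.
    def b0_indices_sketch(in_bval, max_b=10.0):
        import numpy as np

        bvals = np.loadtxt(in_bval).ravel()
        return np.where(bvals <= max_b)[0].tolist()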

    extract_b0 = pe.Node(ExtractROI(t_size=1), name="extract_first_b0")

    # For Eddy, the mask is only used for selecting voxels for the estimation of the hyperparameters,
    # so it isn't very critical.
    # Note also that a slightly too conservative (small) mask is better than one that is too big.
    bet_dwi0 = setup_node(BET(frac=0.3, mask=True, robust=True),
                          name='bet_dwi_pre')

    pick_first = lambda lst: lst[0]

    # motion artifacts detection, requires linear co-registration for motion estimation.
    if do_rapidart:
        # head motion correction
        hmc = hmc_pipeline()

        art = setup_node(rapidart_dti_artifact_detection(),
                         name="detect_artifacts")

    # Eddy
    eddy = setup_node(Eddy(method='jac'), name="eddy")

    ## acquisition parameters for Eddy
    write_acqp = setup_node(Function(
        function=dti_acquisition_parameters,
        input_names=["in_file"],
        output_names=["out_acqp", "out_index"],
    ),
                            name="write_acqp")

    ## rotate b-vecs
    rot_bvec = setup_node(Function(
        function=eddy_rotate_bvecs,
        input_names=["in_bvec", "eddy_params"],
        output_names=["out_file"],
    ),
                          name="rot_bvec")

    ## extract all b0s and average them after Eddy correction
    avg_b0_post = pe.Node(Function(
        function=b0_average,
        input_names=['in_dwi', 'in_bval'],
        output_names=['out_file'],
    ),
                          name='b0_avg_post')
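
    # Hedged sketch (assumption) of a `b0_average`-style helper: average all
    # b=0 volumes of the corrected series into one 3D reference image.
    def b0_average_sketch(in_dwi, in_bval, max_b=10.0, out_file='b0_avg.nii.gz'):
        import os
        import numpy as np
        import nibabel as nib

        dwi = nib.load(in_dwi)
        bvals = np.loadtxt(in_bval).ravel()
        b0_data = np.asanyarray(dwi.dataobj)[..., bvals <= max_b].mean(axis=3)
        nib.Nifti1Image(b0_data, dwi.affine).to_filename(out_file)
        return os.path.abspath(out_file)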

    bet_dwi1 = setup_node(BET(frac=0.3, mask=True, robust=True),
                          name='bet_dwi_post')

    # nlmeans denoise
    apply_nlmeans = get_config_setting("dmri.apply_nlmeans", True)
    if apply_nlmeans:
        nlmeans = setup_node(Function(
            function=nlmeans_denoise,
            input_names=['in_file', 'mask_file', 'out_file', 'N'],
            output_names=['out_file']),
                             name='nlmeans_denoise')
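
        # Hedged sketch (assumption) of an `nlmeans_denoise`-style helper built
        # on dipy; exact dipy module paths and signatures vary across versions.
        def nlmeans_denoise_sketch(in_file, mask_file, out_file='denoised.nii.gz', N=4):
            import os
            import nibabel as nib
            from dipy.denoise.nlmeans import nlmeans
            from dipy.denoise.noise_estimate import estimate_sigma

            img = nib.load(in_file)
            data = img.get_fdata()
            mask = nib.load(mask_file).get_fdata() > 0
            sigma = estimate_sigma(data, N=N)
            denoised = nlmeans(data, sigma=sigma, mask=mask)
            nib.Nifti1Image(denoised, img.affine).to_filename(out_file)
            return os.path.abspath(out_file)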

    # output interface
    dti_output = setup_node(IdentityInterface(fields=out_fields),
                            name="dti_art_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # Connect the nodes
    wf.connect([
        # resample to iso-voxel
        (dti_input, resample, [
            ("diff", "in_file"),
        ]),

        # read from input file the acquisition parameters for eddy
        (dti_input, write_acqp, [("diff", "in_file")]),

        # reference mask for hmc and eddy
        (dti_input, list_b0, [("bval", "in_bval")]),
        (resample, extract_b0, [("out_file", "in_file")]),
        (list_b0, extract_b0, [(("out_idx", pick_first), "t_min")]),
        (extract_b0, bet_dwi0, [("roi_file", "in_file")]),

        # Eddy
        (resample, eddy, [("out_file", "in_file")]),
        (bet_dwi0, eddy, [("mask_file", "in_mask")]),
        (dti_input, eddy, [("bval", "in_bval"), ("bvec", "in_bvec")]),
        (write_acqp, eddy, [("out_acqp", "in_acqp"),
                            ("out_index", "in_index")]),

        # rotate bvecs
        (dti_input, rot_bvec, [("bvec", "in_bvec")]),
        (eddy, rot_bvec, [("out_parameter", "eddy_params")]),

        # final avg b0
        (dti_input, avg_b0_post, [("bval", "in_bval")]),
        (eddy, avg_b0_post, [("out_corrected", "in_dwi")]),
        (avg_b0_post, bet_dwi1, [("out_file", "in_file")]),

        # output
        (write_acqp, dti_output, [("out_acqp", "acqp"),
                                  ("out_index", "index")]),
        (bet_dwi0, dti_output, [("mask_file", "brain_mask_1")]),
        (bet_dwi1, dti_output, [("mask_file", "brain_mask_2")]),
        (rot_bvec, dti_output, [("out_file", "bvec_rotated")]),
        (avg_b0_post, dti_output, [("out_file", "avg_b0")]),
    ])

    if apply_nlmeans:
        wf.connect([
            # non-local means
            (eddy, nlmeans, [("out_corrected", "in_file")]),
            (bet_dwi1, nlmeans, [("mask_file", "mask_file")]),

            # output
            (nlmeans, dti_output, [("out_file", "eddy_corr_file")]),
        ])
    else:
        wf.connect([
            # output
            (eddy, dti_output, [("out_corrected", "eddy_corr_file")]),
        ])

    if do_rapidart:
        wf.connect([
            # head motion correction
            (dti_input, hmc, [
                ("bval", "inputnode.in_bval"),
                ("bvec", "inputnode.in_bvec"),
            ]),
            (resample, hmc, [("out_file", "inputnode.in_file")]),
            (bet_dwi0, hmc, [("mask_file", "inputnode.in_mask")]),
            (list_b0, hmc, [
                (("out_idx", pick_first), "inputnode.ref_num"),
            ]),

            # artifact detection
            (hmc, art, [
                ("outputnode.out_file", "realigned_files"),
                ("outputnode.out_xfms", "realignment_parameters"),
            ]),
            (bet_dwi1, art, [
                ("mask_file", "mask_file"),
            ]),

            # output
            (hmc, dti_output, [
                ("outputnode.out_file", "hmc_corr_file"),
                ("outputnode.out_bvec", "hmc_corr_bvec"),
                ("outputnode.out_xfms", "hmc_corr_xfms"),
            ]),
            (art, dti_output, [
                ("displacement_files", "art_displacement_files"),
                ("intensity_files", "art_intensity_files"),
                ("norm_files", "art_norm_files"),
                ("outlier_files", "art_outlier_files"),
                ("plot_files", "art_plot_files"),
                ("statistic_files", "art_statistic_files"),
            ]),
        ])

    return wf
Example No. 10
    lookup = dcmstack.LookupMeta()
    lookup.inputs.meta_keys = {
        'RepetitionTime': 'TR',
        'CsaImage.MosaicRefAcqTimes': 'ST'
    }
    lookup.inputs.in_file = nifti
    lookup.run()
    slicetimes = [int(lookup.result['ST'][0][x])
                  for x in range(0, imdims[2])]  #Converts slice times to ints.
    tr = lookup.result['TR'] / 1000  #Converts tr to seconds.
    ta = tr - (tr / sliceno)
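    # e.g. TR = 2.0 s with 40 slices gives TA = 2.0 - 2.0 / 40 = 1.95 s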
    return (sliceno, slicetimes, tr, ta, mid_slice)


metadata = Node(Function(
    function=metaread,
    input_names=['nifti'],
    output_names=['sliceno', 'slicetimes', 'tr', 'ta', 'mid_slice']),
                name='metadata')

#Outputs: tr, slicetimes, imdims


def voldrop(epi_list):
    """
    Drops volumes > nvols.
    """
    import numpy as np
    import os
    nvols = 140  #<--------See if there's a way to call a variable outside of a function as input for the function (globals)
    vols = len(epi_list)
    if vols > nvols:
def preprocessing_img(caps_directory,
                      output_dir,
                      diagnosis_tsv,
                      smooth_fwhm,
                      working_directory=None,
                      tissue='c1',
                      modality='T1'):
    """
        Run preprocessing steps to cut 3D T1 images after Clinica Spm with a specific size which can be fit into your CNN model.
    Including the steps below:
        1. Get the preprocessed data by Clinica SPM from Jorge, the GM concatenated images(4D) for all the subjects for three groups(AD, CN, MCInc)
        2. Smooth the result GM images with nilearn (nilearn.image.smooth_img) or FSL.smooth with different smoothing kernel to explore if the classification accuracy would improve.
        3. Convert 4D concatenated GM images for three groups to 2D png format with a package med2image(https://github.com/FNNDSC/med2image) to check out the 0-pixel images of the GM images.
        4. Resize the 2D images to specific size according to your specific architectures.

    Note: need to think of the different dimension for all the images, if not the same dimension, should do something
    :return:
    :param caps_directory: the caps directory of T1-spm pipeline of Clinica
    :param output_dir: the image_preprocessing_output directory contain the sliced image
    :param diagnosis_tsv: the tsv contains the subjects that you want to process
    :param smooth_fwhm: the smooth
    :param working_directory:
    :param tissue: by default, c1==gray matter
    :return:
    """

    if working_directory is None:
        working_directory = mkdtemp()
    else:
        working_directory = os.path.abspath(working_directory)

    # Use datagrabber and inputnode to take the input images
    # get the info of 'subject_dir', 'subject_id', 'subject_list', 'session_list' from the tsv file.
    inputnode = pe.Node(name='inputnode',
                        interface=Function(
                            input_names=['output_dir', 'diagnosis_tsv'],
                            output_names=[
                                'subject_dir', 'subject_id', 'subject_list',
                                'session_list'
                            ],
                            function=get_info_from_tsv))
    inputnode.inputs.output_dir = output_dir
    inputnode.inputs.diagnosis_tsv = diagnosis_tsv

    # Node to grab the GM images of ADNI_baseline_t1 of SPM pipeline.
    datagrabbernode = pe.Node(interface=nio.DataGrabber(
        infields=[
            'subject_list', 'session_list', 'subject_repeat', 'session_repeat'
        ],
        outfields=['spm_tissuee']),
                              name="datagrabbernode")
    datagrabbernode.inputs.template = '*'
    datagrabbernode.inputs.base_directory = caps_directory
    datagrabbernode.inputs.field_template = dict(
        spm_tissuee=
        'subjects/%s/%s/t1/spm/dartel/group-ADNIbl/%s_%s_T1w_segm-graymatter_space-Ixi549Space_modulated-on_fwhm-'
        + str(int(smooth_fwhm)) + 'mm_probability.nii.gz')
    datagrabbernode.inputs.template_args = dict(spm_tissuee=[[
        'subject_list', 'session_list', 'subject_repeat', 'session_repeat'
    ]])
    datagrabbernode.inputs.sort_filelist = False

    ## Merge all the GM images to one big 4D image, the contatenated nifti is under output_dir/merged_nifti
    mergenode = pe.Node(name='mergeimgnibebel',
                        interface=Function(
                            input_names=['tissue_img', 'output_dir'],
                            output_names=['out_img_path'],
                            function=merge_img))
    mergenode.inputs.output_dir = output_dir
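
    # Hedged sketch (assumption) of a `merge_img`-style helper: concatenate the
    # per-subject 3D GM maps into a single 4D image with nilearn.
    def merge_img_sketch(tissue_img, output_dir):
        import os
        from nilearn.image import concat_imgs

        out_img_path = os.path.join(output_dir, 'merged_nifti', 'gm_merged_4d.nii.gz')
        if not os.path.isdir(os.path.dirname(out_img_path)):
            os.makedirs(os.path.dirname(out_img_path))
        concat_imgs(tissue_img).to_filename(out_img_path)
        return out_img_path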

    # Slice 4D img to 2D jpg
    slicenode = pe.Node(name='nii2png_nibabel',
                        interface=Function(input_names=[
                            'smoothed_img', 'output_dir', 'output_file_stem',
                            'subject_list'
                        ],
                                           output_names=['output_dir'],
                                           function=nii_png_nibabel_scipy))
    slicenode.inputs.output_dir = join(output_dir, 'sliced_image')
    slicenode.inputs.output_file_stem = tissue

    ## Datasink node to grab every useful file that you need
    datasinknode = pe.Node(name='datasinker', interface=nio.DataSink())
    datasinknode.inputs.base_directory = output_dir
    # datasinknode.inputs.container = 'prep_out'

    wf_preprocessing = pe.Workflow(name='preprocessing_nifti2png_2DCNN',
                                   base_dir=working_directory)

    wf_preprocessing.connect(inputnode, 'subject_list', datagrabbernode,
                             'subject_list')
    wf_preprocessing.connect(inputnode, 'session_list', datagrabbernode,
                             'session_list')
    wf_preprocessing.connect(inputnode, 'subject_list', datagrabbernode,
                             'subject_repeat')
    wf_preprocessing.connect(inputnode, 'session_list', datagrabbernode,
                             'session_repeat')
    wf_preprocessing.connect(datagrabbernode, 'spm_tissuee', mergenode,
                             'tissue_img')
    wf_preprocessing.connect(mergenode, 'out_img_path', slicenode,
                             'smoothed_img')
    wf_preprocessing.connect(inputnode, 'subject_list', slicenode,
                             'subject_list')
    wf_preprocessing.connect(mergenode, 'out_img_path', datasinknode,
                             'prep_out')

    return wf_preprocessing
Example No. 12
def create_jim_workflow(config, fixed, warped):
    """
    Inputs::

        config: Dictionary with PBR configuration options. See config.py
        fixed: full path of t1 image from fixed image
        warped: full path of t1 image from warped image

    Outputs::

        nipype.pipeline.engine.Workflow object

    """

    import nipype.interfaces.ants as ants
    from nipype.pipeline.engine import Node, Workflow, MapNode
    from nipype.interfaces.io import DataSink, DataGrabber
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.fsl as fsl
    from nipype.utils.filemanip import load_json
    import os
    from nipype.caching import Memory

    mse_tp1 = get_warped_mseid(warped)
    mse_tp2 = get_fixed_mseid(fixed)
    msid = get_msid(fixed)
    Jim_node = "Jim_substract_{0}_{1}-{2}".format(msid, mse_tp2, mse_tp1)
    register = Workflow(name=Jim_node)
    register.base_dir = config["working_directory"]
    inputnode = Node(IdentityInterface(fields=["fixed_image", "moving_image"]),
                     name="inputspec")
    inputnode.inputs.moving_image = warped
    inputnode.inputs.fixed_image = fixed

    unicorr = Node(Function(input_names=['inputImage', 'outputImage'],
                            output_names=['uni_output'],
                            function=run_unicorr),
                   name='Unicorr')
    unicorr.inputs.outputImage = os.path.join(
        config["working_directory"], Jim_node, 'Unicorr',
        os.path.split(fixed)[1].split('.')[0] + '_corrected.nii.gz')
    register.connect(inputnode, 'fixed_image', unicorr, 'inputImage')

    substract = Node(Function(
        input_names=['var', 'formula', 'image1', 'image2', 'out_image'],
        output_names=['sub_output'],
        function=run_algebra),
                     name='Algebra')
    substract.inputs.out_image = os.path.join(config["working_directory"],
                                              Jim_node, 'Algebra',
                                              'fixed-warped.hdr')
    substract.inputs.var = "I1,I2"
    substract.inputs.formula = "I1-I2"
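    # For illustration only (assumption): `run_algebra` presumably wraps Jim's
    # command-line image algebra. The same I1 - I2 subtraction could also be
    # expressed through nipype's FSL wrappers, e.g.
    #     Node(fsl.BinaryMaths(operation='sub'), name='fsl_subtract')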
    register.connect(unicorr, 'uni_output', substract, 'image1')
    register.connect(inputnode, 'moving_image', substract, 'image2')

    sinker = Node(DataSink(), name="sinker")
    sinker.inputs.base_directory = os.path.join(config["james_output_dir"],
                                                msid, 'affine_substraction')
    sinker.inputs.container = mse_tp2 + '-' + mse_tp1

    register.connect(unicorr, 'uni_output', sinker, '@corrected')
    register.connect(substract, 'sub_output', sinker, '@substracted')

    register.write_graph(graph2use='orig')
    register.config["Execution"] = {
        "keep_inputs": True,
        "remove_unnecessary_outputs": False
    }

    #Memory.clear_previous_runs(register, warn=True)
    return register
Example No. 13
def spm_mrpet_preprocessing(wf_name="spm_mrpet_preproc"):
    """ Run the PET pre-processing workflow against the
    gunzip_pet.in_file files.
    It depends on the anat_preproc_workflow, so if this
    has not been run, this function will run it too.

    # TODO: organize the anat2pet hack/condition somehow:
    If anat2pet:
    - SPM12 Coregister T1 and tissues to PET
    - PVC the PET image in PET space
    - SPM12 Warp PET to MNI
    else:
    - SPM12 Coregister PET to T1
    - PVC the PET image in anatomical space
    - SPM12 Warp PET in anatomical space to MNI through the
    `anat_to_mni_warp`.

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pet_input.in_file: traits.File
        The raw NIFTI_GZ PET image file

    pet_input.anat: traits.File
        Path to the high-contrast anatomical image.
        Reference file of the warp_field, i.e., the
        anatomical image in its native space.

    pet_input.anat_to_mni_warp: traits.File
        The warp field from the transformation of the
        anatomical image to the standard MNI space.

    pet_input.atlas_anat: traits.File
        The atlas file in anatomical space.

    pet_input.tissues: list of traits.File
        List of tissues files from the New Segment process.
        At least the first 3 tissues must be present.

    Nipype outputs
    --------------
    pet_output.pvc_out: existing file
        The results of the PVC process

    pet_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pet_output.coreg_ref: existing file
        The coregistered reference image to PET space.

    pet_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files

    pet_output.pvc_warped: existing file
        Results from PETPVC normalized to MNI.
        The result of every internal pre-processing step
        is normalized to MNI here.

    pet_output.warp_field: existing files
        Spatial normalization parameters .mat files

    pet_output.gm_norm: existing file
        The output of the grey matter intensity
        normalization process.
        This is the last step in the PET signal correction,
        before registration.

    pet_output.atlas_pet: existing file
        Atlas image warped to PET space.
        If the `atlas_file` option is an existing file and
        `normalize_atlas` is True.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields = [
        "in_file",
        "anat",
        "anat_to_mni_warp",
        "tissues",
    ]

    out_fields = [
        "brain_mask",
        "coreg_others",
        "coreg_ref",
        "pvc_warped",
        "pet_warped",  # 'pet_warped' is a dummy entry to keep the fields pattern.
        "warp_field",
        "pvc_out",
        "pvc_mask",
        "gm_norm",
    ]

    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_anat"]
        out_fields += ["atlas_pet"]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="pet_input")

    # workflow to perform partial volume correction
    petpvc = petpvc_workflow(wf_name="petpvc")

    merge_list = setup_node(Merge(4), name='merge_for_unzip')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    warp_pet = setup_node(spm_normalize(), name="warp_pet")

    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # output
    pet_output = setup_node(IdentityInterface(fields=out_fields),
                            name="pet_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # check how to perform the registration, to decide how to build the pipeline
    anat2pet = get_config_setting('registration.anat2pet', False)
    if anat2pet:
        wf.connect([
            # inputs
            (pet_input, petpvc, [("in_file", "pvc_input.in_file"),
                                 ("anat", "pvc_input.reference_file"),
                                 ("tissues", "pvc_input.tissues")]),

            # gunzip some files for SPM Normalize
            (petpvc, merge_list, [("pvc_output.pvc_out", "in1"),
                                  ("pvc_output.brain_mask", "in2"),
                                  ("pvc_output.gm_norm", "in3")]),
            (pet_input, merge_list, [("in_file", "in4")]),
            (merge_list, gunzipper, [("out", "in_file")]),

            # warp the PET PVCed to MNI
            (petpvc, warp_pet, [("pvc_output.coreg_ref", "image_to_align")]),
            (gunzipper, warp_pet, [("out_file", "apply_to_files")]),
            (tpm_bbox, warp_pet, [("bbox", "write_bounding_box")]),

            # output
            (petpvc, pet_output, [("pvc_output.pvc_out", "pvc_out"),
                                  ("pvc_output.brain_mask", "brain_mask"),
                                  ("pvc_output.coreg_ref", "coreg_ref"),
                                  ("pvc_output.coreg_others", "coreg_others"),
                                  ("pvc_output.gm_norm", "gm_norm")]),

            # output
            (warp_pet, pet_output, [("normalized_files", "pvc_warped"),
                                    ("deformation_field", "warp_field")]),
        ])
    else:  # PET 2 ANAT
        collector = setup_node(Merge(2), name='merge_for_warp')
        apply_warp = setup_node(spm_apply_deformations(), name="warp_pet")

        wf.connect([
            # inputs
            (pet_input, petpvc, [("in_file", "pvc_input.in_file"),
                                 ("anat", "pvc_input.reference_file"),
                                 ("tissues", "pvc_input.tissues")]),

            # gunzip some files for SPM Normalize
            (petpvc, merge_list, [("pvc_output.pvc_out", "in1"),
                                  ("pvc_output.brain_mask", "in2"),
                                  ("pvc_output.gm_norm", "in3")]),
            (pet_input, merge_list, [("in_file", "in4")]),
            (merge_list, gunzipper, [("out", "in_file")]),

            # warp the PET PVCed to MNI
            (gunzipper, collector, [("out_file", "in1")]),
            (petpvc, collector, [("pvc_output.coreg_ref", "in2")]),
            (pet_input, apply_warp, [("anat_to_mni_warp", "deformation_file")
                                     ]),
            (collector, apply_warp, [("out", "apply_to_files")]),
            (tpm_bbox, apply_warp, [("bbox", "write_bounding_box")]),

            # output
            (petpvc, pet_output, [("pvc_output.pvc_out", "pvc_out"),
                                  ("pvc_output.brain_mask", "brain_mask"),
                                  ("pvc_output.petpvc_mask", "petpvc_mask"),
                                  ("pvc_output.coreg_ref", "coreg_ref"),
                                  ("pvc_output.coreg_others", "coreg_others"),
                                  ("pvc_output.gm_norm", "gm_norm")]),

            # output
            (apply_warp, pet_output, [("normalized_files", "pvc_warped"),
                                      ("deformation_field", "warp_field")]),
        ])

    if do_atlas:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"),
                                 name="coreg_atlas")

        # set the registration interpolation to nearest neighbour.
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
            (pet_input, coreg_atlas, [("anat", "source")]),
            (petpvc, coreg_atlas, [("pvc_output.coreg_ref", "target")]),
            (pet_input, coreg_atlas, [("atlas_anat", "apply_to_files")]),
            (coreg_atlas, pet_output, [("coregistered_files", "atlas_pet")]),
        ])

    return wf
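
# Hedged sketch (an assumption, not the project's actual helper): a minimal
# get_bounding_box function like the one wrapped in the tpm_bbox Function node
# above. It derives the world-coordinate bounding box of an image so that SPM
# Normalize writes its outputs on the same grid (write_bounding_box).
def get_bounding_box(in_file):
    import nibabel as nib
    import numpy as np

    img = nib.load(in_file)
    # voxel-space corners of the volume
    corners = np.array(
        np.meshgrid(*[[0, d - 1] for d in img.shape[:3]])
    ).reshape(3, -1).T
    # map the corners to world coordinates and take the extremes
    world = nib.affines.apply_affine(img.affine, corners)
    return [list(world.min(axis=0)), list(world.max(axis=0))]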
Exemplo n.º 14
0
def spm_mrpet_grouptemplate_preprocessing(
        wf_name="spm_mrpet_grouptemplate_preproc"):
    """ Run the PET pre-processing workflow against the gunzip_pet.in_file files.
    It depends on the anat_preproc_workflow, so if this has not been run, this function
    will run it too.

    This is identical to the workflow defined in `spm_mrpet_preprocessing`,
    with the only difference that we now normalize all subjects agains a custom
    template using the spm Old Normalize interface.

    It does:
    - SPM12 Coregister T1 and tissues to PET
    - PVC the PET image in PET space
    - SPM12 Warp PET to the given template

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pet_input.in_file: traits.File
        The raw NIFTI_GZ PET image file.

    pet_input.atlas_anat: traits.File
        The atlas file in anatomical space.

    pet_input.anat: traits.File
        Path to the high-contrast anatomical image.
        Reference file of the warp_field, i.e., the anatomical image in its native space.

    pet_input.tissues: list of traits.File
        List of tissues files from the New Segment process. At least the first
        3 tissues must be present.

    pet_input.pet_template: traits.File
        The template file for inter-subject registration reference.

    Nipype Outputs
    --------------
    pet_output.pvc_out: existing file
        The results of the PVC process.

    pet_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pet_output.coreg_ref: existing file
        The coregistered reference image to PET space.

    pet_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files.

    pet_output.pet_warped: existing file
        PET image normalized to the group template.

    pet_output.pvc_warped: existing file
        The outputs of the PETPVC workflow normalized to the group template.
        The result of every internal pre-processing step is normalized to the
        group template here.

    pet_output.warp_field: existing files
        Spatial normalization parameters .mat files.

    pet_output.gm_norm: existing file
        The output of the grey matter intensity normalization process.
        This is the last step in the PET signal correction, before registration.

    pet_output.atlas_pet: existing file
        Atlas image warped to PET space.
        If the `atlas_file` option is an existing file and `normalize_atlas` is True.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields = ["in_file", "anat", "tissues", "pet_template"]

    out_fields = [
        "brain_mask",
        "coreg_others",
        "coreg_ref",
        "pvc_warped",
        "pet_warped",
        "warp_field",
        "pvc_out",
        "pvc_mask",
        "gm_norm",
    ]

    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_anat"]
        out_fields += ["atlas_pet"]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="pet_input")

    # workflow to perform partial volume correction
    petpvc = petpvc_workflow(wf_name="petpvc")

    unzip_mrg = setup_node(Merge(4), name='merge_for_unzip')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    # warp each subject to the group template
    gunzip_template = setup_node(
        Gunzip(),
        name="gunzip_template",
    )
    gunzip_pet = setup_node(
        Gunzip(),
        name="gunzip_pet",
    )

    warp_mrg = setup_node(Merge(2), name='merge_for_warp')
    warp2template = setup_node(
        spm.Normalize(jobtype="estwrite", out_prefix="wgrptemplate_"),
        name="warp2template",
    )

    get_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="get_bbox")

    # output
    pet_output = setup_node(IdentityInterface(fields=out_fields),
                            name="pet_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    wf.connect([
        # inputs
        (pet_input, petpvc, [("in_file", "pvc_input.in_file"),
                             ("anat", "pvc_input.reference_file"),
                             ("tissues", "pvc_input.tissues")]),

        # get template bounding box to apply to results
        (pet_input, get_bbox, [("pet_template", "in_file")]),

        # gunzip some inputs
        (pet_input, gunzip_pet, [("in_file", "in_file")]),
        (pet_input, gunzip_template, [("pet_template", "in_file")]),

        # gunzip some files for SPM Normalize
        (petpvc, unzip_mrg, [("pvc_output.pvc_out", "in1"),
                             ("pvc_output.brain_mask", "in2"),
                             ("pvc_output.gm_norm", "in3")]),
        (pet_input, unzip_mrg, [("in_file", "in4")]),
        (unzip_mrg, gunzipper, [("out", "in_file")]),
        (gunzipper, warp_mrg, [("out_file", "in1")]),
        (warp_mrg, warp2template, [(("out", flatten_list), "apply_to_files")]),

        # prepare the target parameters of the warp to template
        (gunzip_pet, warp2template, [("out_file", "source")]),
        (gunzip_template, warp2template, [("out_file", "template")]),
        (get_bbox, warp2template, [("bbox", "write_bounding_box")]),

        # output
        (warp2template, pet_output, [
            ("normalization_parameters", "warp_field"),
            ("normalized_files", "pvc_warped"),
            ("normalized_source", "pet_warped"),
        ]),

        # output
        (petpvc, pet_output, [("pvc_output.pvc_out", "pvc_out"),
                              ("pvc_output.brain_mask", "brain_mask"),
                              ("pvc_output.coreg_ref", "coreg_ref"),
                              ("pvc_output.coreg_others", "coreg_others"),
                              ("pvc_output.gm_norm", "gm_norm")]),
    ])

    if do_atlas:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"),
                                 name="coreg_atlas")

        # set the registration interpolation to nearest neighbour.
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
            (pet_input, coreg_atlas, [("anat", "source")]),
            (petpvc, coreg_atlas, [("pvc_output.coreg_ref", "target")]),
            (pet_input, coreg_atlas, [("atlas_anat", "apply_to_files")]),
            (coreg_atlas, pet_output, [("coregistered_files", "atlas_pet")]),

            # warp the atlas to the template space as well
            (coreg_atlas, warp_mrg, [("coregistered_files", "in2")]),
        ])

    return wf
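
# Hedged sketch (an assumption): a flatten_list connection modifier like the
# one applied to the warp_mrg output above. Merge produces a nested list
# (the gunzipped PVC outputs plus, optionally, the coregistered atlas), which
# has to be flattened before it reaches SPM Normalize's apply_to_files input.
def flatten_list(in_list):
    """Flatten one level of nesting, keeping non-list items as they are."""
    flat = []
    for item in in_list:
        if isinstance(item, (list, tuple)):
            flat.extend(item)
        else:
            flat.append(item)
    return flat

# e.g. flatten_list([['pvc.nii', 'mask.nii', 'gm_norm.nii'], 'atlas.nii'])
#      -> ['pvc.nii', 'mask.nii', 'gm_norm.nii', 'atlas.nii']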
Exemplo n.º 15
0
def spm_warp_fmri_wf(wf_name="spm_warp_fmri", register_to_grptemplate=False):
    """ Run SPM to warp resting-state fMRI pre-processed data to MNI or a given template.

    Tasks:
    - Warping the inputs to MNI or to a given template, if `register_to_grptemplate` is True

    Parameters
    ----------
    wf_name: str

    register_to_grptemplate: bool
        If True, the workflow expects the `wfmri_input.epi_template` input and
        uses it as a group template for inter-subject registration.

    Nipype Inputs
    -------------
    wfmri_input.in_file: traits.File
        The slice time and motion corrected fMRI file.

    wfmri_input.reference_file: traits.File
        The anatomical image in its native space. For registration reference.

    wfmri_input.anat_fmri: traits.File
        The anatomical image in fMRI space.

    wfmri_input.anat_to_mni_warp: traits.File
        The warp field from the transformation of the anatomical image to the standard MNI space.

    wfmri_input.time_filtered: traits.File
        The bandpass time filtered fMRI file.

    wfmri_input.avg_epi: traits.File
        The average EPI from the fMRI file.

    wfmri_input.epi_template: traits.File
        Reference EPI template file for inter-subject registration.
        This input must be set if `register_to_grptemplate` is True.

    wfmri_input.brain_mask: traits.File
        Brain mask in fMRI space.

    Nipype Outputs
    --------------
    wfmri_output.warped_fmri: traits.File
        The slice time, motion, and nuisance corrected fMRI file registered to the template.

    wfmri_output.wtime_filtered: traits.File
        The bandpass time filtered fMRI file registered to the template.

    wfmri_output.smooth: traits.File
        The smooth bandpass time filtered fMRI file registered to the template.

    wfmri_output.wavg_epi: traits.File
        The average EPI from the fMRI file registered to the template.

    wfmri_output.warp_field: traits.File
        The fMRI to template warp field.

    wfmri_output.coreg_avg_epi: traits.File
        The average EPI image in anatomical space.
        Only if the `registration.anat2fmri` setting is False.

    wfmri_output.wbrain_mask: traits.File
        Brain mask in fMRI space warped to MNI.

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = [
        "in_file",
        "anat_fmri",
        "anat_to_mni_warp",
        "brain_mask",
        "reference_file",
        "time_filtered",
        "avg_epi",
    ]

    out_fields = [
        "warped_fmri", "wtime_filtered", "smooth", "wavg_epi", "wbrain_mask",
        "warp_field", "coreg_avg_epi"
    ]

    if register_to_grptemplate:
        in_fields += ['epi_template']

    # input identities
    wfmri_input = setup_node(IdentityInterface(fields=in_fields,
                                               mandatory_inputs=True),
                             name="wfmri_input")

    # in file unzipper
    in_gunzip = pe.Node(Gunzip(), name="in_gunzip")

    # merge list for normalization input
    merge_list = pe.Node(Merge(2), name='merge_for_warp')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    # the template bounding box
    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")

    # smooth
    # smooth = setup_node(Function(function=smooth_img,
    #                              input_names=["in_file", "fwhm"],
    #                              output_names=["out_file"],
    #                              imports=['from pypes.interfaces.nilearn import ni2file']),
    #                      name="smooth_fmri")
    # smooth.inputs.fwhm = get_config_setting('fmri_smooth.fwhm', default=8)
    # smooth.inputs.out_file = "smooth_{}.nii.gz".format(wf_name)
    smooth = setup_node(fsl.IsotropicSmooth(fwhm=8, output_type='NIFTI'),
                        name="smooth_fmri")

    # output identities
    rest_output = setup_node(IdentityInterface(fields=out_fields),
                             name="wfmri_output")

    # check how to perform the registration, to decide how to build the pipeline
    anat2fmri = get_config_setting('registration.anat2fmri', False)
    # register to group template
    if register_to_grptemplate:
        gunzip_template = pe.Node(
            Gunzip(),
            name="gunzip_template",
        )
        warp = setup_node(
            spm.Normalize(jobtype="estwrite", out_prefix="wgrptmpl_"),
            name="fmri_grptemplate_warp",
        )
        warp_source_arg = "source"
        warp_outsource_arg = "normalized_source"
        warp_field_arg = "normalization_parameters"

    elif anat2fmri:
        # register to standard template
        warp = setup_node(spm_normalize(), name="fmri_warp")
        tpm_bbox.inputs.in_file = spm_tpm_priors_path()
        warp_source_arg = "image_to_align"
        warp_outsource_arg = "normalized_image"
        warp_field_arg = "deformation_field"

        # wf.connect([
        #             # warp source file
        #             (wfmri_input, warp,   [("anat_fmri",  warp_source_arg)]),
        #            ])
    else:  # anat2fmri is False
        coreg = setup_node(spm_coregister(cost_function="mi"),
                           name="coreg_fmri")
        warp = setup_node(spm_apply_deformations(), name="apply_warp_fmri")
        coreg_files = pe.Node(Merge(3), name='merge_for_coreg')
        warp_files = pe.Node(Merge(2), name='merge_for_warp')
        tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    if register_to_grptemplate:
        wf.connect([
            # unzip and forward the template file
            (wfmri_input, gunzip_template, [("epi_template", "in_file")]),
            (gunzip_template, warp, [("out_file", "template")]),

            # get template bounding box to apply to results
            (wfmri_input, tpm_bbox, [("epi_template", "in_file")]),
        ])

    if anat2fmri or register_to_grptemplate:
        # prepare the inputs
        wf.connect([
            # unzip the in_file input file
            (wfmri_input, in_gunzip, [("avg_epi", "in_file")]),

            # warp source file
            (in_gunzip, warp, [("out_file", warp_source_arg)]),

            # bounding box
            (tpm_bbox, warp, [("bbox", "write_bounding_box")]),

            # merge the other input files into a list
            (wfmri_input, merge_list, [
                ("in_file", "in1"),
                ("time_filtered", "in2"),
            ]),

            # gunzip them for SPM
            (merge_list, gunzipper, [("out", "in_file")]),

            # apply to files
            (gunzipper, warp, [("out_file", "apply_to_files")]),

            # outputs
            (warp, rest_output, [
                (warp_field_arg, "warp_field"),
                (warp_outsource_arg, "wavg_epi"),
            ]),
        ])

    else:  # FMRI to ANAT
        wf.connect([
            (wfmri_input, coreg, [("reference_file", "target")]),

            # unzip the in_file input file
            (wfmri_input, in_gunzip, [("avg_epi", "in_file")]),
            (in_gunzip, coreg, [("out_file", "source")]),

            # merge the other input files into a list
            (wfmri_input, coreg_files, [
                ("in_file", "in1"),
                ("time_filtered", "in2"),
                ("brain_mask", "in3"),
            ]),

            # gunzip them for SPM
            (coreg_files, gunzipper, [("out", "in_file")]),

            # coregister fmri to anat
            (gunzipper, coreg, [("out_file", "apply_to_files")]),

            # anat to mni warp field
            (wfmri_input, warp, [("anat_to_mni_warp", "deformation_file")]),

            # bounding box
            (tpm_bbox, warp, [("bbox", "write_bounding_box")]),

            # apply to files
            (coreg, warp_files, [("coregistered_source", "in1")]),
            (coreg, warp_files, [("coregistered_files", "in2")]),
            (warp_files, warp, [("out", "apply_to_files")]),

            # outputs
            (warp, rest_output, [
                (("normalized_files", selectindex, [0]), "wavg_epi"),
            ]),
            (coreg, rest_output, [("coregistered_source", "coreg_avg_epi")]),
            #(coreg, rest_output, [("coregistered_files",  "coreg_others")]),
        ])

    # smooth and sink
    wf.connect([
        # smooth the final bandpassed image
        (warp, smooth, [(("normalized_files", selectindex, [1]), "in_file")]),

        # output
        (smooth, rest_output, [("out_file", "smooth")]),
        (warp, rest_output, [
            (("normalized_files", selectindex, [0]), "warped_fmri"),
            (("normalized_files", selectindex, [1]), "wtime_filtered"),
        ]),
    ])

    return wf
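
# Hedged sketch (an assumption): a selectindex connection modifier like the one
# used above to pick single entries out of SPM's normalized_files list (index 0
# for the warped average EPI / fMRI run, index 1 for the warped time-filtered
# run) before they reach the wfmri_output node.
def selectindex(files, idx):
    """Return the elements of `files` at the positions listed in `idx`."""
    if not isinstance(files, (list, tuple)):
        files = [files]
    picked = [files[i] for i in idx]
    return picked[0] if len(picked) == 1 else picked

# e.g. selectindex(['wavg_epi.nii', 'wfmri.nii', 'wfiltered.nii'], [1])
#      -> 'wfmri.nii'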
Exemplo n.º 16
0
def _template_runner(argv, environment, experiment, pipeline_options, cluster):
    print("Getting subjects from database...")
    # subjects = argv["--subjects"].split(',')
    subjects, subjects_sessions_dictionary = get_subjects_sessions_dictionary(
        argv['SUBJECTS'], experiment['cachedir'], experiment['resultdir'],
        environment['prefix'], experiment['dbfile'], argv['--use-sentinal'],
        argv['--use-shuffle'])  # Build database before parallel section
    useSentinal = argv['--use-sentinal']

    # Quick preliminary sanity check
    for thisSubject in subjects:
        if len(subjects_sessions_dictionary[thisSubject]) == 0:
            print(
                "ERROR: subject {0} has no sessions found.  Did you supply a valid subject id on the command line?"
                .format(thisSubject))
            sys.exit(-1)

    for thisSubject in subjects:
        print("Processing atlas generation for this subject: {0}".format(
            thisSubject))
        print("=" * 80)
        print(
            "Copying Atlas directory and determining appropriate Nipype options..."
        )
        subj_pipeline_options = nipype_options(
            argv, pipeline_options, cluster, experiment,
            environment)  # Generate Nipype options
        print("Dispatching jobs to the system...")
        ######
        ###### Now start workflow construction
        ######
        # Set universal pipeline options
        nipype_config.update_config(subj_pipeline_options)

        ready_for_template_building = True
        for thisSession in subjects_sessions_dictionary[thisSubject]:
            path_test = os.path.join(
                experiment['previousresult'],
                '*/{0}/{1}/TissueClassify/t1_average_BRAINSABC.nii.gz'.format(
                    thisSubject, thisSession))
            t1_file_result = glob.glob(path_test)
            if len(t1_file_result) != 1:
                print(
                    "Incorrect number of t1 images found for data grabber {0}".
                    format(t1_file_result))
                print("     at path {0}".format(path_test))
                ready_for_template_building = False
        if not ready_for_template_building:
            print("TEMPORARY SKIPPING:  Not ready to process {0}".format(
                thisSubject))
            continue

        base_output_directory = os.path.join(
            subj_pipeline_options['logging']['log_directory'], thisSubject)
        template = pe.Workflow(name='SubjectAtlas_Template_' + thisSubject)
        template.base_dir = base_output_directory

        subjectNode = pe.Node(interface=IdentityInterface(fields=['subject']),
                              run_without_submitting=True,
                              name='99_subjectIterator')
        subjectNode.inputs.subject = thisSubject

        sessionsExtractorNode = pe.Node(Function(
            function=getSessionsFromSubjectDictionary,
            input_names=['subject_session_dictionary', 'subject'],
            output_names=['sessions']),
                                        run_without_submitting=True,
                                        name="99_sessionsExtractor")
        sessionsExtractorNode.inputs.subject_session_dictionary = subjects_sessions_dictionary

        baselineOptionalDG = pe.MapNode(nio.DataGrabber(
            infields=['subject', 'session'],
            outfields=['t2_average', 'pd_average', 'fl_average'],
            run_without_submitting=True),
                                        run_without_submitting=True,
                                        iterfield=['session'],
                                        name='BaselineOptional_DG')

        baselineOptionalDG.inputs.base_directory = experiment['previousresult']
        baselineOptionalDG.inputs.sort_filelist = True
        baselineOptionalDG.inputs.raise_on_empty = False
        baselineOptionalDG.inputs.template = '*'

        baselineOptionalDG.inputs.field_template = {
            't2_average': '*/%s/%s/TissueClassify/t2_average_BRAINSABC.nii.gz',
            'pd_average': '*/%s/%s/TissueClassify/pd_average_BRAINSABC.nii.gz',
            'fl_average': '*/%s/%s/TissueClassify/fl_average_BRAINSABC.nii.gz'
        }
        baselineOptionalDG.inputs.template_args = {
            't2_average': [['subject', 'session']],
            'pd_average': [['subject', 'session']],
            'fl_average': [['subject', 'session']]
        }

        baselineRequiredDG = pe.MapNode(nio.DataGrabber(
            infields=['subject', 'session'],
            outfields=[
                't1_average', 'brainMaskLabels', 'posteriorImages',
                'passive_intensities', 'passive_masks',
                'BCD_ACPC_Landmarks_fcsv'
            ],
            run_without_submitting=True),
                                        run_without_submitting=True,
                                        iterfield=['session'],
                                        name='Baseline_DG')

        baselineRequiredDG.inputs.base_directory = experiment['previousresult']
        baselineRequiredDG.inputs.sort_filelist = True
        baselineRequiredDG.inputs.raise_on_empty = True
        baselineRequiredDG.inputs.template = '*'
        posterior_files = [
            'AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS',
            'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB',
            'WM'
        ]
        passive_intensities_files = [
            'rho.nii.gz', 'phi.nii.gz', 'theta.nii.gz',
            'l_thalamus_ProbabilityMap.nii.gz',
            'r_accumben_ProbabilityMap.nii.gz',
            'l_globus_ProbabilityMap.nii.gz',
            'l_accumben_ProbabilityMap.nii.gz',
            'l_caudate_ProbabilityMap.nii.gz',
            'l_putamen_ProbabilityMap.nii.gz',
            'r_thalamus_ProbabilityMap.nii.gz',
            'r_putamen_ProbabilityMap.nii.gz',
            'r_caudate_ProbabilityMap.nii.gz',
            'r_hippocampus_ProbabilityMap.nii.gz',
            'r_globus_ProbabilityMap.nii.gz',
            'l_hippocampus_ProbabilityMap.nii.gz'
        ]
        passive_mask_files = [
            'template_WMPM2_labels.nii.gz', 'hncma_atlas.nii.gz',
            'template_nac_labels.nii.gz', 'template_leftHemisphere.nii.gz',
            'template_rightHemisphere.nii.gz', 'template_ventricles.nii.gz',
            'template_headregion.nii.gz'
        ]

        baselineRequiredDG.inputs.field_template = {
            't1_average': '*/%s/%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
            'brainMaskLabels':
            '*/%s/%s/TissueClassify/complete_brainlabels_seg.nii.gz',
            'BCD_ACPC_Landmarks_fcsv':
            '*/%s/%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
            'posteriorImages': '*/%s/%s/TissueClassify/POSTERIOR_%s.nii.gz',
            'passive_intensities': '*/%s/%s/WarpedAtlas2Subject/%s',
            'passive_masks': '*/%s/%s/WarpedAtlas2Subject/%s',
        }
        baselineRequiredDG.inputs.template_args = {
            't1_average': [['subject', 'session']],
            'brainMaskLabels': [['subject', 'session']],
            'BCD_ACPC_Landmarks_fcsv': [['subject', 'session']],
            'posteriorImages': [['subject', 'session', posterior_files]],
            'passive_intensities':
            [['subject', 'session', passive_intensities_files]],
            'passive_masks': [['subject', 'session', passive_mask_files]]
        }

        MergeByExtendListElementsNode = pe.Node(
            Function(function=MergeByExtendListElements,
                     input_names=[
                         't1s', 't2s', 'pds', 'fls', 'labels', 'posteriors',
                         'passive_intensities', 'passive_masks'
                     ],
                     output_names=[
                         'ListOfImagesDictionaries', 'registrationImageTypes',
                         'interpolationMapping'
                     ]),
            run_without_submitting=True,
            name="99_MergeByExtendListElements")

        template.connect([
            (subjectNode, baselineRequiredDG, [('subject', 'subject')]),
            (subjectNode, baselineOptionalDG, [('subject', 'subject')]),
            (subjectNode, sessionsExtractorNode, [('subject', 'subject')]),
            (sessionsExtractorNode, baselineRequiredDG, [('sessions',
                                                          'session')]),
            (sessionsExtractorNode, baselineOptionalDG, [('sessions',
                                                          'session')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [('t1_average', 't1s'), ('brainMaskLabels', 'labels'),
              (('posteriorImages',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists),
               'posteriors')]),
            (baselineOptionalDG, MergeByExtendListElementsNode,
             [('t2_average', 't2s'), ('pd_average', 'pds'),
              ('fl_average', 'fls')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [(('passive_intensities',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists),
               'passive_intensities')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [(('passive_masks',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists),
               'passive_masks')])
        ])

        myInitAvgWF = pe.Node(
            interface=ants.AverageImages(),
            name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
        myInitAvgWF.inputs.dimension = 3
        myInitAvgWF.inputs.normalize = True
        myInitAvgWF.inputs.num_threads = -1
        template.connect(baselineRequiredDG, 't1_average', myInitAvgWF,
                         "images")
        ####################################################################################################
        # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
        # if numSessions == 1:
        #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
        ####################################################################################################
        CLUSTER_QUEUE = cluster['queue']
        CLUSTER_QUEUE_LONG = cluster['long_q']
        buildTemplateIteration1 = BAWantsRegistrationTemplateBuildSingleIterationWF(
            'iteration01', CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)
        # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
        buildTemplateIteration2 = BAWantsRegistrationTemplateBuildSingleIterationWF(
            'Iteration02', CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)

        CreateAtlasXMLAndCleanedDeformedAveragesNode = pe.Node(
            interface=Function(
                function=CreateAtlasXMLAndCleanedDeformedAverages,
                input_names=[
                    't1_image', 'deformed_list', 'AtlasTemplate',
                    'outDefinition'
                ],
                output_names=['outAtlasFullPath', 'clean_deformed_list']),
            # This is a lot of work to run locally (run_without_submitting=True).
            # HACK: this node really should run on the cluster!
            run_without_submitting=True,
            name='99_CreateAtlasXMLAndCleanedDeformedAverages')

        if subj_pipeline_options['plugin_name'].startswith(
                'SGE'
        ):  # for some nodes, the qsub call needs to be modified on the cluster

            CreateAtlasXMLAndCleanedDeformedAveragesNode.plugin_args = {
                'template': subj_pipeline_options['plugin_args']['template'],
                'qsub_args': modify_qsub_args(cluster['queue'], 1, 1, 1),
                'overwrite': True
            }
            for bt in [buildTemplateIteration1, buildTemplateIteration2]:
                BeginANTS = bt.get_node("BeginANTS")
                BeginANTS.plugin_args = {
                    'template':
                    subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 7, 4, 16)
                }
                wimtdeformed = bt.get_node("wimtdeformed")
                wimtdeformed.plugin_args = {
                    'template':
                    subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 2)
                }

                #AvgAffineTransform = bt.get_node("AvgAffineTransform")
                #AvgAffineTransform.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                #                                  'qsub_args': modify_qsub_args(cluster['queue'], 2, 1, 1)}

                wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
                wimtPassivedeformed.plugin_args = {
                    'template':
                    subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 4)
                }

        # Running off previous baseline experiment
        NACCommonAtlas = MakeAtlasNode(
            experiment['atlascache'], 'NACCommonAtlas_{0}'.format('subject'),
            ['S_BRAINSABCSupport']
        )  ## HACK : replace 'subject' with subject id once this is a loop rather than an iterable.
        template.connect([
            (myInitAvgWF, buildTemplateIteration1,
             [('output_average_image', 'inputspec.fixed_image')]),
            (MergeByExtendListElementsNode, buildTemplateIteration1,
             [('ListOfImagesDictionaries',
               'inputspec.ListOfImagesDictionaries'),
              ('registrationImageTypes', 'inputspec.registrationImageTypes'),
              ('interpolationMapping', 'inputspec.interpolationMapping')]),
            (buildTemplateIteration1, buildTemplateIteration2,
             [('outputspec.template', 'inputspec.fixed_image')]),
            (MergeByExtendListElementsNode, buildTemplateIteration2,
             [('ListOfImagesDictionaries',
               'inputspec.ListOfImagesDictionaries'),
              ('registrationImageTypes', 'inputspec.registrationImageTypes'),
              ('interpolationMapping', 'inputspec.interpolationMapping')]),
            (subjectNode, CreateAtlasXMLAndCleanedDeformedAveragesNode,
             [(('subject', xml_filename), 'outDefinition')]),
            (NACCommonAtlas, CreateAtlasXMLAndCleanedDeformedAveragesNode,
             [('ExtendedAtlasDefinition_xml_in', 'AtlasTemplate')]),
            (buildTemplateIteration2,
             CreateAtlasXMLAndCleanedDeformedAveragesNode, [
                 ('outputspec.template', 't1_image'),
                 ('outputspec.passive_deformed_templates', 'deformed_list')
             ]),
        ])

        ## Generate an average landmarks (lmks) file.
        myAverageLmk = pe.Node(interface=GenerateAverageLmkFile(),
                               name="myAverageLmk")
        myAverageLmk.inputs.outputLandmarkFile = "AVG_LMKS.fcsv"
        template.connect(baselineRequiredDG, 'BCD_ACPC_Landmarks_fcsv',
                         myAverageLmk, 'inputLandmarkFiles')

        # Create DataSinks
        SubjectAtlas_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
        SubjectAtlas_DataSink.overwrite = subj_pipeline_options['ds_overwrite']
        SubjectAtlas_DataSink.inputs.base_directory = experiment['resultdir']

        template.connect([
            (subjectNode, SubjectAtlas_DataSink, [('subject', 'container')]),
            (CreateAtlasXMLAndCleanedDeformedAveragesNode,
             SubjectAtlas_DataSink, [('outAtlasFullPath', 'Atlas.@definitions')
                                     ]),
            (CreateAtlasXMLAndCleanedDeformedAveragesNode,
             SubjectAtlas_DataSink, [('clean_deformed_list',
                                      'Atlas.@passive_deformed_templates')]),
            (subjectNode, SubjectAtlas_DataSink, [(('subject', outputPattern),
                                                   'regexp_substitutions')]),
            (buildTemplateIteration2, SubjectAtlas_DataSink,
             [('outputspec.template', 'Atlas.@template')]),
            (myAverageLmk, SubjectAtlas_DataSink,
             [('outputLandmarkFile', 'Atlas.@outputLandmarkFile')]),
        ])

        dotfilename = argv['--dotfilename']
        if dotfilename is not None:
            print("WARNING: Printing workflow, but not running pipeline")
            print_workflow(template,
                           plugin=subj_pipeline_options['plugin_name'],
                           dotfilename=dotfilename)
        else:
            run_workflow(template,
                         plugin=subj_pipeline_options['plugin_name'],
                         plugin_args=subj_pipeline_options['plugin_args'])
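
# Hedged sketch (illustration only, not from the source): how per-node
# plugin_args like the ones set in _template_runner above interact with a
# cluster plugin. With 'overwrite': True, the node-level qsub_args replace the
# workflow-level qsub_args instead of being appended to them.
# some_node.plugin_args = {
#     'qsub_args': '-q long.q -pe smp 4 -l h_vmem=16G',  # assumed resources
#     'overwrite': True,
# }
# template.run(plugin='SGE',
#              plugin_args={'template': 'cluster_submit_template.sh',  # assumed
#                           'qsub_args': '-q all.q'})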
Exemplo n.º 17
0
    def __init__(self, experiment_dir, output_dir, func_source, struct_source,
                 datasink):
        self.experiment_dir = experiment_dir
        self.output_dir = output_dir

        # specify input and output nodes
        self.func_source = func_source
        self.struct_source = struct_source
        self.datasink = datasink

        # specify nodes
        # structural process
        self.refit_struct = pe.Node(interface=afni.Refit(),
                                    name='de_oblique_struct')
        self.refit_struct.inputs.deoblique = True

        self.resample_struct = pe.Node(interface=afni.Resample(),
                                       name='re_orientation_struct')
        self.resample_struct.inputs.orientation = 'RPI'
        self.resample_struct.inputs.outputtype = "NIFTI"

        self.bet_struct = pe.Node(interface=fsl.BET(),
                                  name='non_brain_removal_BET_struct')
        self.bet_struct.inputs.output_type = "NIFTI"

        # functional process
        self.refit_func = pe.Node(interface=afni.Refit(),
                                  name='de_oblique_func')
        self.refit_func.inputs.deoblique = True

        self.resample_func = pe.Node(interface=afni.Resample(),
                                     name='re_orientation_func')
        self.resample_func.inputs.orientation = 'RPI'
        self.resample_func.inputs.outputtype = "NIFTI"

        self.slice_timer = pe.Node(interface=fsl.SliceTimer(),
                                   name='time_slice_correction')

        self.mcflirt = pe.Node(interface=fsl.MCFLIRT(),
                               name='motion_correction')
        self.mcflirt.inputs.output_type = "NIFTI"
        self.mcflirt.inputs.mean_vol = True

        self.fslsplit = pe.Node(interface=fsl.Split(), name='fslsplit')
        self.fslsplit.inputs.dimension = 't'
        self.fslsplit.inputs.output_type = "NIFTI"

        self.fslmerge = pe.Node(interface=fsl.Merge(), name='fslmerge')
        self.fslmerge.inputs.dimension = 't'
        self.fslmerge.inputs.output_type = "NIFTI"

        self.bet_mean = pe.Node(interface=fsl.BET(),
                                name='non_brain_removal_BET_mean')
        self.bet_mean.inputs.output_type = "NIFTI"

        # helper function(s)
        def bet_each(in_files):
            '''
            @param in_files: list of image files
            @return out_files: list of image files after applied fsl.BET on it
            '''
            from nipype.interfaces import fsl
            import nipype.pipeline.engine as pe

            out_files = list()
            step_no = 0
            for file_ in in_files:
                bet = pe.Node(interface=fsl.BET(),
                              name='BET_for_step_{}'.format(step_no))
                bet.inputs.in_file = file_
                bet.inputs.out_file = file_[:len(file_) - 4] + '_bet.nii'
                bet.inputs.output_type = "NIFTI"

                bet.run()
                out_files.append(bet.inputs.out_file)

                step_no += 1
            return out_files

        # bet_func returns a list of NIFTI files
        self.bet_func = pe.Node(interface=Function(input_names=['in_files'],
                                                   output_names=['out_files'],
                                                   function=bet_each),
                                name='non_brain_removal_BET_func')

        self.coregister = pe.Node(interface=spm.Coregister(),
                                  name="coregister")
        self.coregister.inputs.jobtype = 'estimate'

        self.segment = pe.Node(interface=spm.Segment(), name="segment")
        self.segment.inputs.affine_regularization = 'mni'

        self.normalize = pe.Node(interface=spm.Normalize(), name="normalize")
        self.normalize.inputs.jobtype = "write"

        # self.fourier = pe.Node(interface=afni.Fourier(), name='temporal_filtering')
        # self.fourier.inputs.highpass = 0.01
        # self.fourier.inputs.lowpass = 0.1

        self.smooth = pe.Node(interface=spm.Smooth(), name="smooth")
        self.smooth.inputs.fwhm = [8, 8, 8]

        # specify workflow instance
        self.workflow = pe.Workflow(name='FuNP_workflow')

        # connect nodes
        self.workflow.connect([
            (self.struct_source, self.refit_struct, [('outfiles', 'in_file')]),
            (self.refit_struct, self.resample_struct, [('out_file', 'in_file')
                                                       ]),
            (self.resample_struct, self.bet_struct, [('out_file', 'in_file')]),
            # (self.func_source, self.refit_func, [('outfiles', 'in_file')]),
            # (self.refit_func, self.resample_func, [('out_file', 'in_file')]),
            # (self.resample_func, self.slice_timer, [('out_file', 'in_file')]),
            (self.func_source, self.slice_timer, [('outfiles', 'in_file')]),
            (self.slice_timer, self.mcflirt, [('slice_time_corrected_file',
                                               'in_file')]),
            (self.mcflirt, self.bet_mean, [('mean_img', 'in_file')]),
            (self.mcflirt, self.fslsplit, [('out_file', 'in_file')]),
            (self.fslsplit, self.bet_func, [('out_files', 'in_files')]),
            (self.bet_func, self.fslmerge, [('out_files', 'in_files')
                                            ]),  # intersect
            (self.bet_struct, self.coregister, [('out_file', 'source')]),
            (self.bet_mean, self.coregister, [('out_file', 'target')]),
            (self.coregister, self.segment, [('coregistered_source', 'data')]),
            (self.segment, self.normalize, [('transformation_mat',
                                             'parameter_file')]),
            (self.fslmerge, self.normalize, [('merged_file', 'apply_to_files')
                                             ]),
            (self.normalize, self.smooth, [('normalized_files', 'in_files')]),
            (self.coregister, self.datasink, [('coregistered_source',
                                               'registered_file')]),
            (self.normalize, self.datasink, [('normalized_files',
                                              'before_smooth')]),
            (self.smooth, self.datasink, [('smoothed_files', 'final_out')])
        ])
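
    # Hedged sketch (an assumption, not from the source): a small driver method
    # such a preprocessing class might expose to execute the workflow assembled
    # in __init__.
    def run(self):
        self.workflow.base_dir = self.output_dir
        self.workflow.write_graph(graph2use='colored')  # save a graph for inspection
        self.workflow.run()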
Exemplo n.º 18
0
def create_bold_wholebrain_fir_workflow(analysis_info, name='wb_roi'):
    import nipype.pipeline as pe
    from nipype.interfaces.utility import Function, Merge, IdentityInterface
    from nipype.interfaces.io import SelectFiles, DataSink

    from utils.bold_wholebrain_fir import BOLD_FIR_files
    from spynoza.nodes.utils import pickfirst

    imports = [
        'from utils.behavior import behavior_timing',
        'from utils.plotting import plot_fir_results_unpredictable',
        'from utils.plotting import plot_fir_results_predictable',
        'from utils.plotting import plot_fir_results_variable',
    ]

    input_node = pe.Node(
        IdentityInterface(fields=['preprocessed_directory', 'sub_id']),
        name='inputspec')

    # i/o node
    datasource_templates = dict(
        all_roi_file='{sub_id}/h5/roi.h5',
        # predictable reward experiment needs behavior files and moco but no physio
        predictable_in_files='{sub_id}/psc/*-predictable_reward_*.nii.gz',
        predictable_behavior_tsv_files=
        '{sub_id}/events/tsv/*-predictable_reward_*.tsv',
        # unpredictable reward experiment needs behavior files, moco and physio
        unpredictable_in_files='{sub_id}/psc/*-unpredictable_reward_*.nii.gz',
        unpredictable_behavior_tsv_files=
        '{sub_id}/events/tsv/*-unpredictable_reward_*.tsv',
        # variable reward experiment needs behavior files, moco and physio
        variable_in_files='{sub_id}/psc/*-variable_*_reward_*.nii.gz',
        variable_behavior_tsv_files=
        '{sub_id}/events/tsv/*-variable_*_reward_*.tsv',
        # mapper_files
        unpredictable_glm_mapper_list=
        '{sub_id}/psc/*-unpredictable_mapper*.nii.gz',
        predictable_glm_mapper_list='{sub_id}/psc/*-predictable_mapper*.nii.gz',
    )
    datasource = pe.Node(SelectFiles(datasource_templates,
                                     sort_filelist=True,
                                     raise_on_empty=False,
                                     force_lists=True),
                         name='datasource')

    predictable_FIR = pe.Node(Function(input_names=[
        'analysis_info', 'experiment', 'fir_file_reward_list',
        'glm_file_mapper_list', 'behavior_file_list', 'mapper_contrast',
        'h5_file', 'roi_list'
    ],
                                       output_names=['out_figures'],
                                       function=BOLD_FIR_files,
                                       imports=imports),
                              name='predictable_FIR')
    predictable_FIR.inputs.analysis_info = analysis_info
    predictable_FIR.inputs.experiment = 'predictable'
    predictable_FIR.inputs.mapper_contrast = 'rewarded_stim'
    predictable_FIR.inputs.roi_list = [
        'V1', 'V2', 'V3', 'V4', 'LO', 'V3AB', 'O'
    ]

    unpredictable_FIR = pe.Node(Function(input_names=[
        'analysis_info', 'experiment', 'fir_file_reward_list',
        'glm_file_mapper_list', 'behavior_file_list', 'mapper_contrast',
        'h5_file', 'roi_list'
    ],
                                         output_names=['out_figures'],
                                         function=BOLD_FIR_files,
                                         imports=imports),
                                name='unpredictable_FIR')
    unpredictable_FIR.inputs.analysis_info = analysis_info
    unpredictable_FIR.inputs.experiment = 'unpredictable'
    unpredictable_FIR.inputs.mapper_contrast = 'stim'
    unpredictable_FIR.inputs.roi_list = [
        'V1', 'V2', 'V3', 'V4', 'LO', 'V3AB', 'O'
    ]

    variable_FIR = pe.Node(Function(input_names=[
        'analysis_info', 'experiment', 'fir_file_reward_list',
        'glm_file_mapper_list', 'behavior_file_list', 'mapper_contrast',
        'h5_file', 'roi_list'
    ],
                                    output_names=['out_figures'],
                                    function=BOLD_FIR_files,
                                    imports=imports),
                           name='variable_FIR')
    variable_FIR.inputs.analysis_info = analysis_info
    variable_FIR.inputs.experiment = 'variable'
    variable_FIR.inputs.mapper_contrast = 'stim'
    variable_FIR.inputs.roi_list = ['V1', 'V2', 'V3', 'V4', 'LO', 'V3AB', 'O']

    # the actual top-level workflow
    wb_fir_roi_workflow = pe.Workflow(name=name)

    wb_fir_roi_workflow.connect(input_node, 'preprocessed_directory',
                                datasource, 'base_directory')
    wb_fir_roi_workflow.connect(input_node, 'sub_id', datasource, 'sub_id')

    # variable reward pupil FIR
    wb_fir_roi_workflow.connect(datasource, 'variable_behavior_tsv_files',
                                variable_FIR, 'behavior_file_list')
    wb_fir_roi_workflow.connect(datasource, ('all_roi_file', pickfirst),
                                variable_FIR, 'h5_file')
    wb_fir_roi_workflow.connect(datasource, 'variable_in_files', variable_FIR,
                                'fir_file_reward_list')
    wb_fir_roi_workflow.connect(datasource, 'unpredictable_glm_mapper_list',
                                variable_FIR, 'glm_file_mapper_list')

    # predictable reward pupil FIR
    wb_fir_roi_workflow.connect(datasource, 'predictable_behavior_tsv_files',
                                predictable_FIR, 'behavior_file_list')
    wb_fir_roi_workflow.connect(datasource, ('all_roi_file', pickfirst),
                                predictable_FIR, 'h5_file')
    wb_fir_roi_workflow.connect(datasource, 'predictable_in_files',
                                predictable_FIR, 'fir_file_reward_list')
    wb_fir_roi_workflow.connect(datasource, 'predictable_glm_mapper_list',
                                predictable_FIR, 'glm_file_mapper_list')

    # unpredictable reward pupil FIR
    wb_fir_roi_workflow.connect(datasource, 'unpredictable_behavior_tsv_files',
                                unpredictable_FIR, 'behavior_file_list')
    wb_fir_roi_workflow.connect(datasource, ('all_roi_file', pickfirst),
                                unpredictable_FIR, 'h5_file')
    wb_fir_roi_workflow.connect(datasource, 'unpredictable_in_files',
                                unpredictable_FIR, 'fir_file_reward_list')
    wb_fir_roi_workflow.connect(datasource, 'unpredictable_glm_mapper_list',
                                unpredictable_FIR, 'glm_file_mapper_list')

    # datasink
    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    wb_fir_roi_workflow.connect(input_node, 'preprocessed_directory', datasink,
                                'base_directory')
    wb_fir_roi_workflow.connect(input_node, 'sub_id', datasink, 'container')

    wb_fir_roi_workflow.connect(unpredictable_FIR, 'out_figures', datasink,
                                'fir.@unpredictable_FIR')
    wb_fir_roi_workflow.connect(predictable_FIR, 'out_figures', datasink,
                                'fir.@predictable_FIR')
    wb_fir_roi_workflow.connect(variable_FIR, 'out_figures', datasink,
                                'fir.@variable_FIR')

    return wb_fir_roi_workflow
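
# Hedged usage sketch (an assumption): driving the FIR workflow above for a
# single subject. The analysis_info contents, directory layout and subject id
# are placeholders for illustration only.
# fir_wf = create_bold_wholebrain_fir_workflow(analysis_info={}, name='wb_roi')
# fir_wf.base_dir = '/tmp/wb_fir'
# fir_wf.inputs.inputspec.preprocessed_directory = '/data/preprocessed'
# fir_wf.inputs.inputspec.sub_id = 'sub-01'
# fir_wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})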
Exemplo n.º 19
0
def main(args):
    subjects, master_config = args

    import os
    import sys
    import traceback

    # Set universal pipeline options
    from nipype import config
    config.update_config(master_config)
    assert config.get('execution',
                      'plugin') == master_config['execution']['plugin']

    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.ants as ants

    from template import MergeByExtendListElements, xml_filename
    from PipeLineFunctionHelpers import mapPosteriorList
    from atlasNode import GetAtlasNode, MakeNewAtlasTemplate
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.distributed import modify_qsub_args

    template = pe.Workflow(name='SubjectAtlas_Template')
    template.base_dir = master_config['logging']['log_directory']

    if 'previouscache' in master_config:
        # Running off previous baseline experiment
        BAtlas = GetAtlasNode(master_config['previouscache'], 'BAtlas')
    else:
        # Running after previous baseline experiment
        BAtlas = GetAtlasNode(os.path.dirname(master_config['atlascache']),
                              'BAtlas')
    inputspec = pe.Node(interface=IdentityInterface(fields=['subject']),
                        name='inputspec')
    inputspec.iterables = ('subject', subjects)

    baselineDG = pe.Node(nio.DataGrabber(infields=['subject'],
                                         outfields=[
                                             't1_average', 't2_average',
                                             'pd_average', 'fl_average',
                                             'outputLabels', 'posteriorImages'
                                         ]),
                         name='Baseline_DG')
    if 'previousresult' in master_config:
        baselineDG.inputs.base_directory = master_config['previousresult']
    else:
        baselineDG.inputs.base_directory = master_config['resultdir']
    baselineDG.inputs.sort_filelist = True
    baselineDG.inputs.raise_on_empty = False
    baselineDG.inputs.template = '*/%s/*/Baseline/%s.nii.gz'
    baselineDG.inputs.template_args['t1_average'] = [[
        'subject', 't1_average_BRAINSABC'
    ]]
    baselineDG.inputs.template_args['t2_average'] = [[
        'subject', 't2_average_BRAINSABC'
    ]]
    baselineDG.inputs.template_args['pd_average'] = [[
        'subject', 'pd_average_BRAINSABC'
    ]]
    baselineDG.inputs.template_args['fl_average'] = [[
        'subject', 'fl_average_BRAINSABC'
    ]]
    baselineDG.inputs.template_args['outputLabels'] = [[
        'subject', 'brain_label_seg'
    ]]
    baselineDG.inputs.field_template = {
        'posteriorImages': '*/%s/*/TissueClassify/POSTERIOR_%s.nii.gz'
    }
    posterior_files = [
        'AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS',
        'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB', 'WM'
    ]
    baselineDG.inputs.template_args['posteriorImages'] = [[
        'subject', posterior_files
    ]]

    MergeByExtendListElementsNode = pe.Node(
        Function(
            function=MergeByExtendListElements,
            input_names=['t1s', 't2s', 'pds', 'fls', 'labels', 'posteriors'],
            output_names=[
                'ListOfImagesDictionaries', 'registrationImageTypes',
                'interpolationMapping'
            ]),
        run_without_submitting=True,
        name="99_MergeByExtendListElements")
    from PipeLineFunctionHelpers import WrapPosteriorImagesFromDictionaryFunction as wrapfunc
    template.connect([(inputspec, baselineDG, [('subject', 'subject')]),
                      (baselineDG, MergeByExtendListElementsNode,
                       [('t1_average', 't1s'), ('t2_average', 't2s'),
                        ('pd_average', 'pds'), ('fl_average', 'fls'),
                        ('outputLabels', 'labels'),
                        (('posteriorImages', wrapfunc), 'posteriors')])])

    myInitAvgWF = pe.Node(
        interface=ants.AverageImages(),
        name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
    myInitAvgWF.inputs.dimension = 3
    myInitAvgWF.inputs.normalize = True
    template.connect(baselineDG, 't1_average', myInitAvgWF, "images")
    ####################################################################################################
    # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
    # if numSessions == 1:
    #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
    ####################################################################################################
    from BAWantsRegistrationBuildTemplate import BAWantsRegistrationTemplateBuildSingleIterationWF as registrationWF
    buildTemplateIteration1 = registrationWF('iteration01')
    # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
    buildTemplateIteration2 = registrationWF('Iteration02')

    MakeNewAtlasTemplateNode = pe.Node(
        interface=Function(
            function=MakeNewAtlasTemplate,
            input_names=[
                't1_image', 'deformed_list', 'AtlasTemplate', 'outDefinition'
            ],
            output_names=['outAtlasFullPath', 'clean_deformed_list']),
        # This is a lot of work to run locally (run_without_submitting=True).
        # HACK: this node really should run on the cluster!
        run_without_submitting=True,
        name='99_MakeNewAtlasTemplate')

    if master_config['execution'][
            'plugin'] == 'SGE':  # for some nodes, the qsub call needs to be modified on the cluster

        MakeNewAtlasTemplateNode.plugin_args = {
            'template': master_config['plugin_args']['template'],
            'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1,
                                          1),
            'overwrite': True
        }
        for bt in [buildTemplateIteration1, buildTemplateIteration2]:
            ##################################################
            # *** Hans, is this TODO already addressed? ***  #
            # ---->  # TODO:  Change these parameters  <---- #
            ##################################################
            BeginANTS = bt.get_node("BeginANTS")
            BeginANTS.plugin_args = {
                'template':
                master_config['plugin_args']['template'],
                'overwrite':
                True,
                'qsub_args':
                modify_qsub_args(master_config['queue'],
                                 '9000M',
                                 4,
                                 hard=False)
            }
            wimtdeformed = bt.get_node("wimtdeformed")
            wimtdeformed.plugin_args = {
                'template':
                master_config['plugin_args']['template'],
                'overwrite':
                True,
                'qsub_args':
                modify_qsub_args(master_config['queue'], '2000M', 1, 2)
            }
            AvgAffineTransform = bt.get_node("AvgAffineTransform")
            AvgAffineTransform.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M',
                                              1)
            }
            wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
            wimtPassivedeformed.plugin_args = {
                'template':
                master_config['plugin_args']['template'],
                'overwrite':
                True,
                'qsub_args':
                modify_qsub_args(master_config['queue'], '2000M', 1, 2)
            }

    template.connect([
        (myInitAvgWF, buildTemplateIteration1, [('output_average_image',
                                                 'inputspec.fixed_image')]),
        (MergeByExtendListElementsNode, buildTemplateIteration1,
         [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
          ('registrationImageTypes', 'inputspec.registrationImageTypes'),
          ('interpolationMapping', 'inputspec.interpolationMapping')]),
        (buildTemplateIteration1, buildTemplateIteration2,
         [('outputspec.template', 'inputspec.fixed_image')]),
        (MergeByExtendListElementsNode, buildTemplateIteration2,
         [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
          ('registrationImageTypes', 'inputspec.registrationImageTypes'),
          ('interpolationMapping', 'inputspec.interpolationMapping')]),
        (inputspec, MakeNewAtlasTemplateNode, [(('subject', xml_filename),
                                                'outDefinition')]),
        (BAtlas, MakeNewAtlasTemplateNode, [('ExtendedAtlasDefinition_xml_in',
                                             'AtlasTemplate')]),
        (buildTemplateIteration2, MakeNewAtlasTemplateNode,
         [('outputspec.template', 't1_image'),
          ('outputspec.passive_deformed_templates', 'deformed_list')]),
    ])

    # Create DataSinks
    Atlas_DataSink = pe.Node(nio.DataSink(), name="Atlas_DS")
    Atlas_DataSink.overwrite = master_config['ds_overwrite']
    Atlas_DataSink.inputs.base_directory = master_config['resultdir']

    Subject_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
    Subject_DataSink.overwrite = master_config['ds_overwrite']
    Subject_DataSink.inputs.base_directory = master_config['resultdir']

    template.connect([
        (inputspec, Atlas_DataSink, [('subject', 'container')]),
        (buildTemplateIteration1, Atlas_DataSink,
         [('outputspec.template', 'Atlas.iteration1')]),  # Unnecessary
        (MakeNewAtlasTemplateNode, Atlas_DataSink, [('outAtlasFullPath',
                                                     'Atlas.definitions')]),
        (BAtlas, Atlas_DataSink,
         [('template_landmarks_50Lmks_fcsv', 'Atlas.20111119_BCD.@fcsv'),
          ('template_weights_50Lmks_wts', 'Atlas.20111119_BCD.@wts'),
          ('LLSModel_50Lmks_hdf5', 'Atlas.20111119_BCD.@hdf5'),
          ('T1_50Lmks_mdl', 'Atlas.20111119_BCD.@mdl')]),
        (inputspec, Subject_DataSink, [(('subject', outputPattern),
                                        'regexp_substitutions')]),
        (buildTemplateIteration2, Subject_DataSink,
         [('outputspec.template', 'ANTSTemplate.@template')]),
        (MakeNewAtlasTemplateNode, Subject_DataSink, [
            ('clean_deformed_list', 'ANTSTemplate.@passive_deformed_templates')
        ]),
    ])

    from utils import run_workflow, print_workflow
    if False:  # set to True to also write a dot-file graph of the workflow before running it
        print_workflow(template,
                       plugin=master_config['execution']['plugin'],
                       dotfilename='template')
    return run_workflow(template,
                        plugin=master_config['execution']['plugin'],
                        plugin_args=master_config['plugin_args'])
Exemplo n.º 20
    :param input_tracks:
    :param wm_fod:
    :param filtered_tracks:
    :return:
    """
    import subprocess
    import shutil
    # shutil.which replaces the deprecated distutils.spawn.find_executable.
    sift = shutil.which("tcksift")
    # Pass the arguments as a list so subprocess does not require shell=True.
    subprocess.run([sift, input_tracks, wm_fod, filtered_tracks], check=True)
    return filtered_tracks


rigid_transform_estimation = pe.Node(name='rigid_transform_estimation',
                                     interface=Function(
                                         input_names=['image', 'template'],
                                         output_names=['transform'],
                                         function=mrregister_rigid))
apply_linear_transform = pe.Node(name="apply_linear_transform",
                                 interface=Function(
                                     input_names=["input", "transform"],
                                     output_names=["output"],
                                     function=mrtransform_linear))
rigid_registration = pe.Workflow(name="rigid_registration")
rigid_registration.connect(rigid_transform_estimation, 'transform',
                           apply_linear_transform, 'transform')
sift_filtering = pe.Node(name="sift_filtering",
                         interface=Function(
                             input_names=["input_tracks", "wm_fod"],
                             output_names=["filtered_tracks"],
                             function=tcksift))
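# A minimal usage sketch (not part of the original snippet): the Function-wrapped
# nodes above can be exercised standalone by setting their inputs and calling
# .run(); the file names below are hypothetical.
#
#   sift_filtering.inputs.input_tracks = 'whole_brain.tck'
#   sift_filtering.inputs.wm_fod = 'wmfod.mif'
#   result = sift_filtering.run()
#   print(result.outputs.filtered_tracks)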
Exemplo n.º 21
    lookup = dcmstack.LookupMeta()
    lookup.inputs.meta_keys = {
        'RepetitionTime': 'TR',
        'CsaImage.MosaicRefAcqTimes': 'ST'
    }
    lookup.inputs.in_file = nifti
    lookup.run()
    slicetimes = [int(lookup.result['ST'][0][x])
                  for x in range(0, imdims[2])]  #Converts slice times to ints.
    tr = lookup.result['TR'] / 1000  #Converts tr to seconds.
    ta = tr - (tr / sliceno)
    return (sliceno, slicetimes, tr, ta, mid_slice)


metadata = Node(Function(
    function=metaread,
    input_names=['nifti'],
    output_names=['sliceno', 'slicetimes', 'tr', 'ta', 'mid_slice']),
                name='metadata')


#Custom functions
def make_epi_list(epi1, epi2):
    epis = [epi1, epi2]
    return (epis)


epi_list = Node(Function(function=make_epi_list,
                         input_names=['epi1', 'epi2'],
                         output_names=['epis']),
                name='epi_list')
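# A hedged wiring sketch (an assumption, not from the original snippet): the
# 'metadata' and 'epi_list' nodes above are typically fed into an SPM SliceTiming
# node, whose inputs line up with the values returned by metaread().
#
#   from nipype import Workflow
#   from nipype.interfaces.spm import SliceTiming
#   st = Node(SliceTiming(), name='slicetiming')
#   wf = Workflow(name='st_sketch')
#   wf.connect([(epi_list, st, [('epis', 'in_files')]),
#               (metadata, st, [('sliceno', 'num_slices'),
#                               ('slicetimes', 'slice_order'),
#                               ('tr', 'time_repetition'),
#                               ('ta', 'time_acquisition'),
#                               ('mid_slice', 'ref_slice')])])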
Exemplo n.º 22
def create_merge_network_results_by_group_workflow(group_list, group_id,
                                                   data_dir, subjects_dir,
                                                   output_dir):
    """Creates a second-level pipeline to merge the Connectome File Format (CFF) outputs from the group-level
    MRtrix structural connectivity processing pipeline into a single CFF file for each group.

    Example
    -------

    >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork
    >>> from nipype.testing import example_data
    >>> subjects_dir = '.'
    >>> data_dir = '.'
    >>> output_dir = '.'
    >>> group_list = {}
    >>> group_list['group1'] = ['subj1', 'subj2']
    >>> group_list['group2'] = ['subj3', 'subj4']
    >>> group_id = 'group1'
    >>> l2pipeline = groupwork.create_merge_network_results_by_group_workflow(group_list, group_id, data_dir, subjects_dir, output_dir)
    >>> l2pipeline.run()                 # doctest: +SKIP

    Inputs::

        group_list: Dictionary of subject lists, keyed by group name
        group_id: String containing the group name
        data_dir: Path to the data directory
        subjects_dir: Path to the Freesurfer 'subjects' directory
        output_dir: Path for the output files
    """
    group_infosource = pe.Node(
        interface=util.IdentityInterface(fields=['group_id']),
        name="group_infosource")
    group_infosource.inputs.group_id = group_id

    l2infosource = pe.Node(interface=util.IdentityInterface(fields=[
        'group_id',
        'merged',
    ]),
                           name='l2infosource')

    l2source = pe.Node(nio.DataGrabber(infields=['group_id'],
                                       outfields=[
                                           'CFFfiles', 'CSVmatrices',
                                           'CSVfibers', 'CSVnodal', 'CSVglobal'
                                       ]),
                       name='l2source')

    l2source.inputs.template_args = dict(CFFfiles=[['group_id']],
                                         CSVmatrices=[['group_id']],
                                         CSVnodal=[['group_id']],
                                         CSVglobal=[['group_id']],
                                         CSVfibers=[['group_id']])
    l2source.inputs.base_directory = data_dir
    l2source.inputs.template = '%s/%s'
    l2source.inputs.field_template = dict(
        CFFfiles=op.join(output_dir, '%s/cff/*/connectome.cff'),
        CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv'),
        CSVnodal=op.join(output_dir, '%s/nxcsv/*/*nodal*.csv'),
        CSVglobal=op.join(output_dir, '%s/nxcsv/*/*global*.csv'),
        CSVfibers=op.join(output_dir, '%s/fiber_csv/*/*fibers*.csv'))
    l2source.inputs.sort_filelist = True

    l2inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'CFFfiles', 'CSVfibers', 'CSVmatrices', 'CSVnodal', 'CSVglobal',
        'network_file'
    ]),
                          name='l2inputnode')

    MergeCNetworks = pe.Node(interface=cmtk.MergeCNetworks(),
                             name="MergeCNetworks")

    l2datasink = pe.Node(interface=nio.DataSink(), name="l2datasink")
    l2datasink.inputs.base_directory = output_dir
    l2datasink.inputs.container = group_id

    l2pipeline = pe.Workflow(name="l2output_" + group_id)
    l2pipeline.base_dir = op.join(output_dir, 'l2output')
    l2pipeline.connect([(group_infosource, l2infosource, [('group_id',
                                                           'group_id')])])

    l2pipeline.connect([
        (l2infosource, l2source, [('group_id', 'group_id')]),
        (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]),
        (l2source, l2inputnode, [('CSVmatrices', 'CSVmatrices')]),
        (l2source, l2inputnode, [('CSVnodal', 'CSVnodal')]),
        (l2source, l2inputnode, [('CSVglobal', 'CSVglobal')]),
        (l2source, l2inputnode, [('CSVfibers', 'CSVfibers')]),
    ])

    l2pipeline.connect([(l2inputnode, MergeCNetworks, [('CFFfiles', 'in_files')
                                                       ])])

    l2pipeline.connect([(group_infosource, MergeCNetworks, [('group_id',
                                                             'out_file')])])
    l2pipeline.connect([(MergeCNetworks, l2datasink, [('connectome_file',
                                                       '@l2output')])])

    AddCSVColumn_node = pe.Node(interface=misc.AddCSVColumn(),
                                name="AddCSVColumn_node")
    AddCSVColumn_node.inputs.extra_column_heading = 'group'
    AddCSVColumn_global = AddCSVColumn_node.clone(name="AddCSVColumn_global")
    AddCSVColumn_matrices = AddCSVColumn_node.clone(
        name="AddCSVColumn_matrices")
    AddCSVColumn_fibers = AddCSVColumn_node.clone(name="AddCSVColumn_fibers")

    concat_csv_interface = Function(input_names=["in_files"],
                                    output_names=["out_name"],
                                    function=concatcsv)

    concat_node_csvs = pe.Node(interface=concat_csv_interface,
                               name='concat_node_csvs')
    concat_global_csvs = pe.Node(interface=concat_csv_interface,
                                 name='concat_global_csvs')
    concat_matrix_csvs = pe.Node(interface=concat_csv_interface,
                                 name='concat_matrix_csvs')
    concat_fiber_csvs = pe.Node(interface=concat_csv_interface,
                                name='concat_fiber_csvs')

    l2pipeline.connect([(l2inputnode, concat_node_csvs, [('CSVnodal',
                                                          'in_files')])])
    l2pipeline.connect([(concat_node_csvs, AddCSVColumn_node, [('out_name',
                                                                'in_file')])])
    l2pipeline.connect([(group_infosource, AddCSVColumn_node,
                         [('group_id', 'extra_field')])])
    l2pipeline.connect([(AddCSVColumn_node, l2datasink,
                         [('csv_file', '@l2output.node_csv')])])
    l2pipeline.connect([(group_infosource, l2datasink, [('group_id',
                                                         '@group_id')])])

    l2pipeline.connect([(l2inputnode, concat_global_csvs, [('CSVglobal',
                                                            'in_files')])])
    l2pipeline.connect([(concat_global_csvs, AddCSVColumn_global,
                         [('out_name', 'in_file')])])
    l2pipeline.connect([(group_infosource, AddCSVColumn_global,
                         [('group_id', 'extra_field')])])
    l2pipeline.connect([(AddCSVColumn_global, l2datasink,
                         [('csv_file', '@l2output.global_csv')])])

    l2pipeline.connect([(l2inputnode, concat_matrix_csvs, [('CSVmatrices',
                                                            'in_files')])])
    l2pipeline.connect([(concat_matrix_csvs, AddCSVColumn_matrices,
                         [('out_name', 'in_file')])])
    l2pipeline.connect([(group_infosource, AddCSVColumn_matrices,
                         [('group_id', 'extra_field')])])
    l2pipeline.connect([(AddCSVColumn_matrices, l2datasink,
                         [('csv_file', '@l2output.cmatrices_csv')])])

    l2pipeline.connect([(l2inputnode, concat_fiber_csvs, [('CSVmatrices',
                                                           'in_files')])])
    l2pipeline.connect([(concat_fiber_csvs, AddCSVColumn_fibers,
                         [('out_name', 'in_file')])])
    l2pipeline.connect([(group_infosource, AddCSVColumn_fibers,
                         [('group_id', 'extra_field')])])
    l2pipeline.connect([(AddCSVColumn_fibers, l2datasink,
                         [('csv_file', '@l2output.fibers_csv')])])
    return l2pipeline
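# A hedged driver sketch (not from the original): the second-level pipeline above
# is usually built and run once per group in group_list, mirroring the doctest in
# the docstring.
#
#   for group_id in group_list.keys():
#       l2pipeline = create_merge_network_results_by_group_workflow(
#           group_list, group_id, data_dir, subjects_dir, output_dir)
#       l2pipeline.run()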
def CreateVolumeMeasureWorkflow(WFname, master_config):
    volumeMeasureWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(
        interface=IdentityInterface(fields=[
            'subj_t1_image',  #Input T1 image
            'subj_label_image'  #Input Label image
        ]),
        run_without_submitting=True,
        name='inputspec')

    outputsSpec = pe.Node(
        interface=IdentityInterface(fields=['csvFilename', 'jsonFilename']),
        run_without_submitting=True,
        name='outputspec')
    """
    Measure volumes according to
    1) label image
    2) label look up table (following format for 3D Slicer color lookup table)
    3) Reference image

    and produce measured volumes in
    1) CSV format
    2) JSON Format
    """

    makeDictND = pe.Node(Function(
        function=MakeLabelDictionary,
        input_names=['inputColorLookUpTableFilename'],
        output_names=['labelDictionary']),
                         run_without_submitting=True,
                         name='makeLabelDict')
    makeDictND.inputs.inputColorLookUpTableFilename = master_config[
        'labelmap_colorlookup_table']

    getVolumesND = pe.Node(Function(
        function=GetLabelVolumes,
        input_names=['labelVolume', 'RefVolume', 'labelDictionary'],
        output_names=['outputLabelVolumes']),
                           run_without_submitting=False,
                           name='getVolumes')
    volumeMeasureWF.connect(makeDictND, 'labelDictionary', getVolumesND,
                            'labelDictionary')
    volumeMeasureWF.connect(inputsSpec, 'subj_t1_image', getVolumesND,
                            'RefVolume')
    volumeMeasureWF.connect(inputsSpec, 'subj_label_image', getVolumesND,
                            'labelVolume')

    writeCSVND = pe.Node(Function(function=WriteDictionaryToCSV,
                                  input_names=['inputList', 'outputFilename'],
                                  output_names=['outputFilename']),
                         run_without_submitting=False,
                         name='writeCSV')
    volumeMeasureWF.connect(getVolumesND, 'outputLabelVolumes', writeCSVND,
                            'inputList')
    writeCSVND.inputs.outputFilename = 'labelVolume.csv'
    volumeMeasureWF.connect(writeCSVND, 'outputFilename', outputsSpec,
                            'csvFilename')

    writeJSONND = pe.Node(Function(function=WriteDictionaryToJson,
                                   input_names=['inputList', 'outputFilename'],
                                   output_names=['outputFilename']),
                          run_without_submitting=False,
                          name='writeJSON')
    volumeMeasureWF.connect(getVolumesND, 'outputLabelVolumes', writeJSONND,
                            'inputList')
    writeJSONND.inputs.outputFilename = 'labelVolume.json'
    volumeMeasureWF.connect(writeJSONND, 'outputFilename', outputsSpec,
                            'jsonFilename')

    return volumeMeasureWF
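# A minimal usage sketch for CreateVolumeMeasureWorkflow, assuming a master_config
# with a 'labelmap_colorlookup_table' entry; all paths below are hypothetical.
#
#   master_config = {'labelmap_colorlookup_table': 'Slicer_ColorLUT.txt'}
#   vmWF = CreateVolumeMeasureWorkflow('volMeasure', master_config)
#   vmWF.inputs.inputspec.subj_t1_image = 't1_average.nii.gz'
#   vmWF.inputs.inputspec.subj_label_image = 'neuro_labels.nii.gz'
#   vmWF.run()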
Exemplo n.º 24
def create_merge_group_network_results_workflow(group_list,
                                                data_dir,
                                                subjects_dir,
                                                output_dir,
                                                title='group'):
    """Creates a third-level pipeline to merge the Connectome File Format (CFF) outputs from each group
	and combines them into a single CFF file for each group. This version of the third-level pipeline also
	concatenates the comma-separated value files for the NetworkX metrics and the connectivity matrices
	into single files.

	Example
	-------

	>>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork
	>>> from nipype.testing import example_data
	>>> subjects_dir = '.'
	>>> data_dir = '.'
	>>> output_dir = '.'
	>>> group_list = {}
	>>> group_list['group1'] = ['subj1', 'subj2']
	>>> group_list['group2'] = ['subj3', 'subj4']
	>>> l3pipeline = groupwork.create_merge_group_network_results_workflow(group_list, data_dir, subjects_dir, output_dir)
	>>> l3pipeline.run()                 # doctest: +SKIP

	Inputs::

		group_list: Dictionary of subject lists, keyed by group name
		data_dir: Path to the data directory
		subjects_dir: Path to the Freesurfer 'subjects' directory
		output_dir: Path for the output files
		title: String to use as a title for the output merged CFF file (default 'group')
	"""
    l3infosource = pe.Node(
        interface=util.IdentityInterface(fields=['group_id']),
        name='l3infosource')
    l3infosource.inputs.group_id = list(group_list.keys())  # a concrete list, not a dict_keys view

    l3source = pe.Node(nio.DataGrabber(infields=['group_id'],
                                       outfields=[
                                           'CFFfiles', 'CSVnodemetrics',
                                           'CSVglobalmetrics', 'CSVmatrices'
                                       ]),
                       name='l3source')
    l3source.inputs.template_args = dict(CFFfiles=[['group_id']],
                                         CSVnodemetrics=[['group_id']],
                                         CSVglobalmetrics=[['group_id']],
                                         CSVmatrices=[['group_id']])
    l3source.inputs.template = op.join(output_dir, '%s/%s')
    l3source.inputs.sort_filelist = True

    l3source.inputs.field_template = dict(
        CFFfiles=op.join(output_dir, '%s/*.cff'),
        CSVnodemetrics=op.join(output_dir, '%s/node_csv/*.csv'),
        CSVglobalmetrics=op.join(output_dir, '%s/global_csv/*.csv'),
        CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv'))

    l3inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'Group_CFFs', 'Group_CSVnodemetrics', 'Group_CSVglobalmetrics',
        'Group_CSVmatrices'
    ]),
                          name='l3inputnode')

    MergeCNetworks_grp = pe.Node(interface=cmtk.MergeCNetworks(),
                                 name="MergeCNetworks_grp")
    MergeCNetworks_grp.inputs.out_file = title

    l3datasink = pe.Node(interface=nio.DataSink(), name="l3datasink")
    l3datasink.inputs.base_directory = output_dir

    l3pipeline = pe.Workflow(name="l3output")
    l3pipeline.base_dir = output_dir
    l3pipeline.connect([
        (l3infosource, l3source, [('group_id', 'group_id')]),
        (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]),
        (l3source, l3inputnode, [('CSVnodemetrics', 'Group_CSVnodemetrics')]),
        (l3source, l3inputnode, [('CSVglobalmetrics', 'Group_CSVglobalmetrics')
                                 ]),
        (l3source, l3inputnode, [('CSVmatrices', 'Group_CSVmatrices')]),
    ])

    l3pipeline.connect([(l3inputnode, MergeCNetworks_grp, [('Group_CFFs',
                                                            'in_files')])])
    l3pipeline.connect([(MergeCNetworks_grp, l3datasink, [('connectome_file',
                                                           '@l3output')])])

    concat_csv_interface = Function(input_names=["in_files"],
                                    output_names=["out_name"],
                                    function=concatcsv)

    concat_node_csvs = pe.Node(interface=concat_csv_interface,
                               name='concat_node_csvs')
    concat_global_csvs = pe.Node(interface=concat_csv_interface,
                                 name='concat_global_csvs')
    concat_matrix_csvs = pe.Node(interface=concat_csv_interface,
                                 name='concat_matrix_csvs')

    l3pipeline.connect([(l3inputnode, concat_node_csvs,
                         [('Group_CSVnodemetrics', 'in_files')])])
    l3pipeline.connect([(concat_node_csvs, l3datasink,
                         [('out_name', '@l3output.nodal_csv')])])

    l3pipeline.connect([(l3inputnode, concat_global_csvs,
                         [('Group_CSVglobalmetrics', 'in_files')])])
    l3pipeline.connect([(concat_global_csvs, l3datasink,
                         [('out_name', '@l3output.global_csv')])])

    l3pipeline.connect([(l3inputnode, concat_matrix_csvs,
                         [('Group_CSVmatrices', 'in_files')])])
    l3pipeline.connect([(concat_matrix_csvs, l3datasink,
                         [('out_name', '@l3output.csvmatrices')])])
    return l3pipeline
def BAWantsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
    """

    Inputs::

           inputspec.images :
           inputspec.fixed_image :
           inputspec.ListOfPassiveImagesDictionaries :
           inputspec.interpolationMapping :

    Outputs::

           outputspec.template :
           outputspec.transforms_list :
           outputspec.passive_deformed_templates :
    """
    TemplateBuildSingleIterationWF = pe.Workflow(name='antsRegistrationTemplateBuildSingleIterationWF_' + str(iterationPhasePrefix))

    inputSpec = pe.Node(interface=util.IdentityInterface(fields=[
        'ListOfImagesDictionaries', 'registrationImageTypes',
        #'maskRegistrationImageType',
        'interpolationMapping', 'fixed_image']),
        run_without_submitting=True,
        name='inputspec')
    ## HACK: TODO: We need to have the AVG_AIR.nii.gz be warped with a default voxel value of 1.0
    ## HACK: TODO: Need to move all local functions to a common utility file, or to the top of the file so that
    ##             they do not change due to re-indenting.  Otherwise re-indenting for flow control will trigger
    ##             their hash to change.
    ## HACK: TODO: REMOVE 'transforms_list' it is not used.  That will change all the hashes
    ## HACK: TODO: Need to run all python files through the code beautifiers.  It has gotten pretty ugly.
    outputSpec = pe.Node(interface=util.IdentityInterface(fields=['template', 'transforms_list',
                                                                  'passive_deformed_templates']),
                         run_without_submitting=True,
                         name='outputspec')

    ### NOTE MAP NODE! warp each of the original images to the provided fixed_image as the template
    BeginANTS = pe.MapNode(interface=Registration(), name='BeginANTS', iterfield=['moving_image'])
    BeginANTS.inputs.dimension = 3
    """ This is the recommended set of parameters from the ANTS developers """
    BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix) + '_tfm'
    BeginANTS.inputs.transforms = ["Rigid","Affine","SyN","SyN","SyN"]
    BeginANTS.inputs.transform_parameters = [[0.1],[0.1],[0.1,3.0,0.0],[0.1,3.0,0.0],[0.1,3.0,0.0]]
    BeginANTS.inputs.metric = ['MI','MI','CC','CC','CC']
    BeginANTS.inputs.sampling_strategy = ['Regular','Regular',None,None,None]
    BeginANTS.inputs.sampling_percentage = [0.27,0.27,1.0,1.0,1.0]
    BeginANTS.inputs.metric_weight = [1.0,1.0,1.0,1.0,1.0]
    BeginANTS.inputs.radius_or_number_of_bins = [32,32,4,4,4]
    BeginANTS.inputs.number_of_iterations = [[1000,1000,1000,1000],[1000,1000,1000,1000],[1000,250],[140],[25]]
    BeginANTS.inputs.convergence_threshold = [5e-8,5e-8,5e-7,5e-6,5e-5]
    BeginANTS.inputs.convergence_window_size = [10,10,10,10,10]
    BeginANTS.inputs.use_histogram_matching = [True,True,True,True,True]
    BeginANTS.inputs.shrink_factors =   [[8,4,2,1],[8,4,2,1],[8,4],[2],[1]]
    BeginANTS.inputs.smoothing_sigmas = [[3,2,1,0],[3,2,1,0],[3,2],[1],[0]]
    BeginANTS.inputs.sigma_units = ["vox","vox","vox","vox","vox"]
    BeginANTS.inputs.use_estimate_learning_rate_once = [False,False,False,False,False]
    BeginANTS.inputs.write_composite_transform = True
    BeginANTS.inputs.collapse_output_transforms = False
    BeginANTS.inputs.initialize_transforms_per_stage = True
    BeginANTS.inputs.winsorize_lower_quantile = 0.01
    BeginANTS.inputs.winsorize_upper_quantile = 0.99
    BeginANTS.inputs.output_warped_image = 'atlas2subject.nii.gz'
    BeginANTS.inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'
    BeginANTS.inputs.save_state = 'SavedBeginANTSSyNState.h5'

    GetMovingImagesNode = pe.Node(interface=util.Function(function=GetMovingImages,
                                                          input_names=['ListOfImagesDictionaries', 'registrationImageTypes', 'interpolationMapping'],
                                                          output_names=['moving_images', 'moving_interpolation_type']),
                                  run_without_submitting=True,
                                  name='99_GetMovingImagesNode')
    TemplateBuildSingleIterationWF.connect(inputSpec, 'ListOfImagesDictionaries', GetMovingImagesNode, 'ListOfImagesDictionaries')
    TemplateBuildSingleIterationWF.connect(inputSpec, 'registrationImageTypes', GetMovingImagesNode, 'registrationImageTypes')
    TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', GetMovingImagesNode, 'interpolationMapping')

    TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_images', BeginANTS, 'moving_image')
    TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_interpolation_type', BeginANTS, 'interpolation')
    TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', BeginANTS, 'fixed_image')

    ## Now warp all the input_images images
    wimtdeformed = pe.MapNode(interface=ApplyTransforms(),
                              iterfield=['transforms', 'input_image'],
                              #iterfield=['transforms', 'invert_transform_flags', 'input_image'],
                              name='wimtdeformed')
    wimtdeformed.inputs.interpolation = 'Linear'
    wimtdeformed.default_value = 0
    # HACK: Should try using forward_composite_transform
    ##PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transform', wimtdeformed, 'transforms')
    TemplateBuildSingleIterationWF.connect(BeginANTS, 'composite_transform', wimtdeformed, 'transforms')
    ##PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', wimtdeformed, 'invert_transform_flags')
    ## NOTE: forward_invert_flags:: List of flags corresponding to the forward transforms
    #wimtdeformed.inputs.invert_transform_flags = [False,False,False,False,False]
    TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_images', wimtdeformed, 'input_image')
    TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', wimtdeformed, 'reference_image')

    ##  Shape Update Next =====
    ## Now  Average All input_images deformed images together to create an updated template average
    AvgDeformedImages = pe.Node(interface=AverageImages(), name='AvgDeformedImages')
    AvgDeformedImages.inputs.dimension = 3
    AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix) + '.nii.gz'
    AvgDeformedImages.inputs.normalize = True
    TemplateBuildSingleIterationWF.connect(wimtdeformed, "output_image", AvgDeformedImages, 'images')

    ## Now average all affine transforms together
    AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name='AvgAffineTransform')
    AvgAffineTransform.inputs.dimension = 3
    AvgAffineTransform.inputs.output_affine_transform = 'Average_' + str(iterationPhasePrefix) + '_Affine.h5'

    SplitCompositeTransform = pe.MapNode(interface=util.Function(function=SplitCompositeToComponentTransforms,
                                      input_names=['composite_transform_as_list'],
                                      output_names=['affine_component_list', 'warp_component_list']),
                                      iterfield=['composite_transform_as_list'],
                                      run_without_submitting=True,
                                      name='99_SplitCompositeTransform')
    TemplateBuildSingleIterationWF.connect(BeginANTS, 'composite_transform', SplitCompositeTransform, 'composite_transform_as_list')
    ## PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', SplitCompositeTransform, 'composite_transform_as_list')
    TemplateBuildSingleIterationWF.connect(SplitCompositeTransform, 'affine_component_list', AvgAffineTransform, 'transforms')

    ## Now average the warp fields together
    AvgWarpImages = pe.Node(interface=AverageImages(), name='AvgWarpImages')
    AvgWarpImages.inputs.dimension = 3
    AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix) + 'warp.nii.gz'
    AvgWarpImages.inputs.normalize = True
    TemplateBuildSingleIterationWF.connect(SplitCompositeTransform, 'warp_component_list', AvgWarpImages, 'images')

    ## Scale the averaged warp field by the (negative) gradient step
    ## TODO:  For now GradientStep is set to 0.25 as a hard-coded default value.
    GradientStep = 0.25
    GradientStepWarpImage = pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage')
    GradientStepWarpImage.inputs.dimension = 3
    GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep
    GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_' + str(iterationPhasePrefix) + '_warp.nii.gz'
    TemplateBuildSingleIterationWF.connect(AvgWarpImages, 'output_average_image', GradientStepWarpImage, 'first_input')

    ## Now create the new template shape based on the average of all deformed images
    UpdateTemplateShape = pe.Node(interface=ApplyTransforms(), name='UpdateTemplateShape')
    UpdateTemplateShape.inputs.invert_transform_flags = [True]
    UpdateTemplateShape.inputs.interpolation = 'Linear'
    UpdateTemplateShape.default_value = 0

    TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', UpdateTemplateShape, 'reference_image')
    TemplateBuildSingleIterationWF.connect([(AvgAffineTransform, UpdateTemplateShape, [(('affine_transform', makeListOfOneElement), 'transforms')]), ])
    TemplateBuildSingleIterationWF.connect(GradientStepWarpImage, 'output_product_image', UpdateTemplateShape, 'input_image')

    ApplyInvAverageAndFourTimesGradientStepWarpImage = pe.Node(interface=util.Function(function=MakeTransformListWithGradientWarps,
                                                                                       input_names=['averageAffineTranform', 'gradientStepWarp'],
                                                                                       output_names=['TransformListWithGradientWarps']),
                                                               run_without_submitting=True,
                                                               name='99_MakeTransformListWithGradientWarps')
    ApplyInvAverageAndFourTimesGradientStepWarpImage.inputs.ignore_exception = True

    TemplateBuildSingleIterationWF.connect(AvgAffineTransform, 'affine_transform', ApplyInvAverageAndFourTimesGradientStepWarpImage, 'averageAffineTranform')
    TemplateBuildSingleIterationWF.connect(UpdateTemplateShape, 'output_image', ApplyInvAverageAndFourTimesGradientStepWarpImage, 'gradientStepWarp')

    ReshapeAverageImageWithShapeUpdate = pe.Node(interface=ApplyTransforms(), name='ReshapeAverageImageWithShapeUpdate')
    ReshapeAverageImageWithShapeUpdate.inputs.invert_transform_flags = [True, False, False, False, False]
    ReshapeAverageImageWithShapeUpdate.inputs.interpolation = 'Linear'
    ReshapeAverageImageWithShapeUpdate.default_value = 0
    ReshapeAverageImageWithShapeUpdate.inputs.output_image = 'ReshapeAverageImageWithShapeUpdate.nii.gz'
    TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', ReshapeAverageImageWithShapeUpdate, 'input_image')
    TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', ReshapeAverageImageWithShapeUpdate, 'reference_image')
    TemplateBuildSingleIterationWF.connect(ApplyInvAverageAndFourTimesGradientStepWarpImage, 'TransformListWithGradientWarps', ReshapeAverageImageWithShapeUpdate, 'transforms')
    TemplateBuildSingleIterationWF.connect(ReshapeAverageImageWithShapeUpdate, 'output_image', outputSpec, 'template')

    ######
    ######
    ######  Process all the passive deformed images in a way similar to the main image used for registration
    ######
    ######
    ######
    ##############################################
    ## Now warp all the ListOfPassiveImagesDictionaries images
    FlattenTransformAndImagesListNode = pe.Node(Function(function=FlattenTransformAndImagesList,
                                                         input_names=['ListOfPassiveImagesDictionaries', 'transforms',
                                                                      'interpolationMapping', 'invert_transform_flags'],
                                                         output_names=['flattened_images', 'flattened_transforms', 'flattened_invert_transform_flags',
                                                                       'flattened_image_nametypes', 'flattened_interpolation_type']),
                                                run_without_submitting=True, name="99_FlattenTransformAndImagesList")

    GetPassiveImagesNode = pe.Node(interface=util.Function(function=GetPassiveImages,
                                                           input_names=['ListOfImagesDictionaries', 'registrationImageTypes'],
                                                           output_names=['ListOfPassiveImagesDictionaries']),
                                   run_without_submitting=True,
                                   name='99_GetPassiveImagesNode')
    TemplateBuildSingleIterationWF.connect(inputSpec, 'ListOfImagesDictionaries', GetPassiveImagesNode, 'ListOfImagesDictionaries')
    TemplateBuildSingleIterationWF.connect(inputSpec, 'registrationImageTypes', GetPassiveImagesNode, 'registrationImageTypes')

    TemplateBuildSingleIterationWF.connect(GetPassiveImagesNode, 'ListOfPassiveImagesDictionaries', FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries')
    TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', FlattenTransformAndImagesListNode, 'interpolationMapping')
    TemplateBuildSingleIterationWF.connect(BeginANTS, 'composite_transform', FlattenTransformAndImagesListNode, 'transforms')
    ## FlattenTransformAndImagesListNode.inputs.invert_transform_flags = [False,False,False,False,False,False]
    ## TODO: Please check whether invert_transform_flags has a fixed number of entries.
    ## PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', FlattenTransformAndImagesListNode, 'invert_transform_flags')
    wimtPassivedeformed = pe.MapNode(interface=ApplyTransforms(),
                                     iterfield=['transforms', 'invert_transform_flags', 'input_image', 'interpolation'],
                                     name='wimtPassivedeformed')
    wimtPassivedeformed.default_value = 0
    TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', wimtPassivedeformed, 'reference_image')
    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_interpolation_type', wimtPassivedeformed, 'interpolation')
    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image')
    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_transforms', wimtPassivedeformed, 'transforms')
    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_invert_transform_flags', wimtPassivedeformed, 'invert_transform_flags')

    RenestDeformedPassiveImagesNode = pe.Node(Function(function=RenestDeformedPassiveImages,
                                                       input_names=['deformedPassiveImages', 'flattened_image_nametypes', 'interpolationMapping'],
                                                       output_names=['nested_imagetype_list', 'outputAverageImageName_list',
                                                                     'image_type_list', 'nested_interpolation_type']),
                                              run_without_submitting=True, name="99_RenestDeformedPassiveImages")
    TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', RenestDeformedPassiveImagesNode, 'interpolationMapping')
    TemplateBuildSingleIterationWF.connect(wimtPassivedeformed, 'output_image', RenestDeformedPassiveImagesNode, 'deformedPassiveImages')
    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_image_nametypes', RenestDeformedPassiveImagesNode, 'flattened_image_nametypes')
    ## Now  Average All passive input_images deformed images together to create an updated template average
    AvgDeformedPassiveImages = pe.MapNode(interface=AverageImages(),
                                          iterfield=['images', 'output_average_image'],
                                          name='AvgDeformedPassiveImages')
    AvgDeformedPassiveImages.inputs.dimension = 3
    AvgDeformedPassiveImages.inputs.normalize = False
    TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "nested_imagetype_list", AvgDeformedPassiveImages, 'images')
    TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "outputAverageImageName_list", AvgDeformedPassiveImages, 'output_average_image')

    ## -- TODO:  Now need to reshape all the passive images as well
    ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(interface=ApplyTransforms(),
                                                           iterfield=['input_image', 'reference_image', 'output_image', 'interpolation'],
                                                           name='ReshapeAveragePassiveImageWithShapeUpdate')
    ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_transform_flags = [True, False, False, False, False]
    ReshapeAveragePassiveImageWithShapeUpdate.default_value = 0
    TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, 'nested_interpolation_type', ReshapeAveragePassiveImageWithShapeUpdate, 'interpolation')
    TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, 'outputAverageImageName_list', ReshapeAveragePassiveImageWithShapeUpdate, 'output_image')
    TemplateBuildSingleIterationWF.connect(AvgDeformedPassiveImages, 'output_average_image', ReshapeAveragePassiveImageWithShapeUpdate, 'input_image')
    TemplateBuildSingleIterationWF.connect(AvgDeformedPassiveImages, 'output_average_image', ReshapeAveragePassiveImageWithShapeUpdate, 'reference_image')
    TemplateBuildSingleIterationWF.connect(ApplyInvAverageAndFourTimesGradientStepWarpImage, 'TransformListWithGradientWarps', ReshapeAveragePassiveImageWithShapeUpdate, 'transforms')
    TemplateBuildSingleIterationWF.connect(ReshapeAveragePassiveImageWithShapeUpdate, 'output_image', outputSpec, 'passive_deformed_templates')

    return TemplateBuildSingleIterationWF
Exemplo n.º 26
def create_AutoRecon1(name="AutoRecon1", longitudinal=False, distance=None,
                      custom_atlas=None, plugin_args=None, shrink=None, stop=None,
                      fsvernum=5.3):
    """Creates the AutoRecon1 workflow in nipype.

    Inputs::
           inputspec.T1_files : T1 files (mandatory)
           inputspec.T2_file : T2 file (optional)
           inputspec.FLAIR_file : FLAIR file (optional)
           inputspec.cw256 : Conform inputs to 256 FOV (optional)
           inputspec.num_threads: Number of threads to use with EM Register (default=1)
    Outputs::

    """
    ar1_wf = pe.Workflow(name=name)
    inputspec = pe.Node(interface=IdentityInterface(fields=['T1_files',
                                                            'T2_file',
                                                            'FLAIR_file',
                                                            'cw256',
                                                            'num_threads',
                                                            'reg_template_withskull',
                                                            'awk_file']),
                        run_without_submitting=True,
                        name='inputspec')

    if not longitudinal:
        # single session processing
        verify_inputs = pe.Node(Function(["T1_files", "cw256"],
                                         ["T1_files", "cw256", "resample_type", "origvol_names"],
                                         checkT1s),
                                name="Check_T1s")
        ar1_wf.connect([(inputspec, verify_inputs, [('T1_files', 'T1_files'),
                                                    ('cw256', 'cw256')])])


        # T1 image preparation
        # For all T1's mri_convert ${InputVol} ${out_file}
        T1_image_preparation = pe.MapNode(MRIConvert(),
                                          iterfield=['in_file', 'out_file'],
                                          name="T1_prep")

        ar1_wf.connect([(verify_inputs, T1_image_preparation, [('T1_files', 'in_file'),
                                                               ('origvol_names', 'out_file')]),
                        ])

        def convert_modalities(in_file=None, out_file=None):
            """Returns an undefined output if the in_file is not defined"""
            from nipype.interfaces.freesurfer import MRIConvert
            import os
            if in_file:
                convert = MRIConvert()
                convert.inputs.in_file = in_file
                convert.inputs.out_file = out_file
                convert.inputs.no_scale = True
                out = convert.run()
                out_file = os.path.abspath(out.outputs.out_file)
            return out_file

        T2_convert = pe.Node(Function(['in_file', 'out_file'],
                                      ['out_file'],
                                      convert_modalities),
                             name="T2_Convert")
        T2_convert.inputs.out_file = 'T2raw.mgz'
        ar1_wf.connect([(inputspec, T2_convert, [('T2_file', 'in_file')])])

        FLAIR_convert = pe.Node(Function(['in_file', 'out_file'],
                                         ['out_file'],
                                         convert_modalities),
                                name="FLAIR_Convert")
        FLAIR_convert.inputs.out_file = 'FLAIRraw.mgz'
        ar1_wf.connect([(inputspec, FLAIR_convert, [('FLAIR_file', 'in_file')])])
    else:
        # longitudinal inputs
        inputspec = pe.Node(interface=IdentityInterface(fields=['T1_files',
                                                                'iscales',
                                                                'ltas',
                                                                'subj_to_template_lta',
                                                                'template_talairach_xfm',
                                                                'template_brainmask']),
                            run_without_submitting=True,
                            name='inputspec')

        def output_names(T1_files):
            """Create file names that are dependent on the number of T1 inputs"""
            iscale_names = list()
            lta_names = list()
            for i, t1 in enumerate(T1_files):
                # assign an input number
                file_num = str(i + 1)
                while len(file_num) < 3:
                    file_num = '0' + file_num
                iscale_names.append("{0}-iscale.txt".format(file_num))
                lta_names.append("{0}.lta".format(file_num))
            return iscale_names, lta_names
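        # Worked example: output_names(['tp1.mgz', 'tp2.mgz']) returns
        # (['001-iscale.txt', '002-iscale.txt'], ['001.lta', '002.lta']);
        # file numbers are zero-padded to three digits.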

        filenames = pe.Node(Function(['T1_files'],
                                     ['iscale_names', 'lta_names'],
                                     output_names),
                            name="Longitudinal_Filenames")
        ar1_wf.connect([(inputspec, filenames, [('T1_files', 'T1_files')])])

        copy_ltas = pe.MapNode(Function(['in_file', 'out_file'],
                                        ['out_file'],
                                        copy_file),
                               iterfield=['in_file', 'out_file'],
                               name='Copy_ltas')
        ar1_wf.connect([(inputspec, copy_ltas, [('ltas', 'in_file')]),
                        (filenames, copy_ltas, [('lta_names', 'out_file')])])

        copy_iscales = pe.MapNode(Function(['in_file', 'out_file'],
                                           ['out_file'],
                                           copy_file),
                                  iterfield=['in_file', 'out_file'],
                                  name='Copy_iscales')
        ar1_wf.connect([(inputspec, copy_iscales, [('iscales', 'in_file')]),
                        (filenames, copy_iscales, [('iscale_names', 'out_file')])])

        concatenate_lta = pe.MapNode(ConcatenateLTA(), iterfield=['in_file'],
                                     name="Concatenate_ltas")
        ar1_wf.connect([(copy_ltas, concatenate_lta, [('out_file', 'in_file')]),
                        (inputspec, concatenate_lta, [('subj_to_template_lta', 'subj_to_base')])])


    # Motion Correction
    """
    When there are multiple source volumes, this step will correct for small
    motions between them and then average them together.  The output of the
    motion corrected average is mri/rawavg.mgz which is then conformed to
    255 cubed char images (1mm isotropic voxels) in mri/orig.mgz.
    """

    def createTemplate(in_files, out_file):
        import os
        import shutil
        if len(in_files) == 1:
            # if only 1 T1 scan given, no need to run RobustTemplate
            print("WARNING: only one run found. This is OK, but motion correction " +
                  "cannot be performed on one run, so I'll copy the run to rawavg " +
                  "and continue.")
            shutil.copyfile(in_files[0], out_file)
            intensity_scales = None
            transforms = None
        else:
            from nipype.interfaces.freesurfer import RobustTemplate
            # if multiple T1 scans are given run RobustTemplate
            intensity_scales = [os.path.basename(f.replace('.mgz', '-iscale.txt')) for f in in_files]
            transforms = [os.path.basename(f.replace('.mgz', '.lta')) for f in in_files]
            robtemp = RobustTemplate()
            robtemp.inputs.in_files = in_files
            robtemp.inputs.average_metric = 'median'
            robtemp.inputs.out_file = out_file
            robtemp.inputs.no_iteration = True
            robtemp.inputs.fixed_timepoint = True
            robtemp.inputs.auto_detect_sensitivity = True
            robtemp.inputs.initial_timepoint = 1
            robtemp.inputs.scaled_intensity_outputs = intensity_scales
            robtemp.inputs.transform_outputs = transforms
            robtemp.inputs.subsample_threshold = 200
            robtemp.inputs.intensity_scaling = True
            robtemp_result = robtemp.run()
            # collect the outputs from RobustTemplate
            out_file = robtemp_result.outputs.out_file
            intensity_scales = [os.path.abspath(f) for f in robtemp_result.outputs.scaled_intensity_outputs]
            transforms = [os.path.abspath(f) for f in robtemp_result.outputs.transform_outputs]
        out_file = os.path.abspath(out_file)
        return out_file, intensity_scales, transforms
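    # For orientation, a hedged sketch of the FreeSurfer command line that the
    # RobustTemplate branch above roughly corresponds to (exact flags depend on
    # the inputs; file names are illustrative):
    #
    #   mri_robust_template --mov 001.mgz 002.mgz --average 1 --template rawavg.mgz \
    #       --satit --inittp 1 --fixtp --noit --iscale \
    #       --iscaleout 001-iscale.txt 002-iscale.txt --lta 001.lta 002.lta \
    #       --subsample 200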

    if not longitudinal:
        create_template = pe.Node(Function(['in_files', 'out_file'],
                                           ['out_file', 'intensity_scales', 'transforms'],
                                           createTemplate),
                                  name="Robust_Template")
        create_template.inputs.out_file = 'rawavg.mgz'
        ar1_wf.connect([(T1_image_preparation, create_template, [('out_file', 'in_files')])])
    else:
        create_template = pe.Node(RobustTemplate(), name="Robust_Template")
        create_template.inputs.average_metric = 'median'
        create_template.inputs.out_file = 'rawavg.mgz'
        create_template.inputs.no_iteration = True
        ar1_wf.connect([(concatenate_lta, create_template, [('out_file', 'initial_transforms')]),
                        (inputspec, create_template, [('T1_files', 'in_files')]),
                        (copy_iscales, create_template, [('out_file', 'in_intensity_scales')])])

    # mri_convert
    conform_template = pe.Node(MRIConvert(), name='Conform_Template')
    conform_template.inputs.out_file = 'orig.mgz'
    if not longitudinal:
        conform_template.inputs.conform = True
        ar1_wf.connect([(verify_inputs, conform_template, [('cw256', 'cw256'),
                                                           ('resample_type', 'resample_type')])])
    else:
        conform_template.inputs.out_datatype = 'uchar'

    ar1_wf.connect([(create_template, conform_template, [('out_file', 'in_file')])])

    # Talairach
    """
    This computes the affine transform from the orig volume to the MNI305 atlas using Avi Snyders 4dfp
    suite of image registration tools, through a FreeSurfer script called talairach_avi.
    Several of the downstream programs use talairach coordinates as seed points.
    """

    bias_correction = pe.Node(MNIBiasCorrection(), name="Bias_correction")
    bias_correction.inputs.iterations = 1
    bias_correction.inputs.protocol_iterations = 1000
    bias_correction.inputs.distance = distance
    if stop:
        bias_correction.inputs.stop = stop
    if shrink:
        bias_correction.inputs.shrink = shrink
    bias_correction.inputs.no_rescale = True
    bias_correction.inputs.out_file = 'orig_nu.mgz'

    ar1_wf.connect([(conform_template, bias_correction, [('out_file', 'in_file')]),
                ])

    if not longitudinal:
        # single session processing
        talairach_avi = pe.Node(TalairachAVI(), name="Compute_Transform")
        if custom_atlas is not None:
            # allows to specify a custom atlas
            talairach_avi.inputs.atlas = custom_atlas
        talairach_avi.inputs.out_file = 'talairach.auto.xfm'
        ar1_wf.connect([(bias_correction, talairach_avi, [('out_file', 'in_file')])])
    else:
        # longitudinal processing
        # Just copy the template xfm
        talairach_avi = pe.Node(Function(['in_file', 'out_file'],
                                         ['out_file'],
                                         copy_file),
                                name='Copy_Template_Transform')
        talairach_avi.inputs.out_file = 'talairach.auto.xfm'

        ar1_wf.connect([(inputspec, talairach_avi, [('template_talairach_xfm', 'in_file')])])

    copy_transform = pe.Node(Function(['in_file', 'out_file'],
                                      ['out_file'],
                                      copy_file),
                             name='Copy_Transform')
    copy_transform.inputs.out_file = 'talairach.xfm'

    ar1_wf.connect([(talairach_avi, copy_transform, [('out_file', 'in_file')])])


    # In recon-all the talairach.xfm is added to orig.mgz, even though
    # it does not exist yet. This is a compromise to keep from
    # having to change the time stamp of the orig volume after talairaching.
    # Here we are going to add xfm to the header after the xfm has been created.
    # This may mess up the timestamp.

    add_xform_to_orig = pe.Node(AddXFormToHeader(), name="Add_Transform_to_Orig")
    add_xform_to_orig.inputs.copy_name = True
    add_xform_to_orig.inputs.out_file = conform_template.inputs.out_file

    ar1_wf.connect([(conform_template, add_xform_to_orig, [('out_file', 'in_file')]),
                    (copy_transform, add_xform_to_orig, [('out_file', 'transform')])])

    # This node adds the transform to the orig_nu.mgz file. This step does not
    # exist in the recon-all workflow, because that workflow adds the talairach
    # to the orig.mgz file header before the talairach actually exists.
    add_xform_to_orig_nu = pe.Node(AddXFormToHeader(), name="Add_Transform_to_Orig_Nu")
    add_xform_to_orig_nu.inputs.copy_name = True
    add_xform_to_orig_nu.inputs.out_file = bias_correction.inputs.out_file

    ar1_wf.connect([(bias_correction, add_xform_to_orig_nu, [('out_file', 'in_file')]),
                    (copy_transform, add_xform_to_orig_nu, [('out_file', 'transform')])])



    # check the alignment of the talairach
    # TODO: Figure out how to read output from this node.
    check_alignment = pe.Node(CheckTalairachAlignment(),
                              name="Check_Talairach_Alignment")
    check_alignment.inputs.threshold = 0.005
    ar1_wf.connect([(copy_transform, check_alignment, [('out_file', 'in_file')]),
                    ])

    if not longitudinal:
        def awkfile(in_file, log_file):
            """
            This method uses 'awk' which must be installed prior to running the workflow and is not a
            part of nipype or freesurfer.
            """
            import subprocess
            import os
            command = ['awk', '-f', in_file, log_file]
            print(' '.join(command))
            subprocess.call(command)
            log_file = os.path.abspath(log_file)
            return log_file

        awk_logfile = pe.Node(Function(['in_file', 'log_file'],
                                       ['log_file'],
                                       awkfile),
                              name='Awk')

        ar1_wf.connect([(talairach_avi, awk_logfile, [('out_log', 'log_file')]),
                        (inputspec, awk_logfile, [('awk_file', 'in_file')])])

        # TODO datasink the output from TalirachQC...not sure how to do this
        tal_qc = pe.Node(TalairachQC(), name="Detect_Aligment_Failures")
        ar1_wf.connect([(awk_logfile, tal_qc, [('log_file', 'log_file')])])


    if fsvernum < 6:
        # intensity correction is performed before normalization
        intensity_correction = pe.Node(
            MNIBiasCorrection(), name="Intensity_Correction")
        intensity_correction.inputs.out_file = 'nu.mgz'
        intensity_correction.inputs.iterations = 2
        ar1_wf.connect([(add_xform_to_orig, intensity_correction, [('out_file', 'in_file')]),
                        (copy_transform, intensity_correction, [('out_file', 'transform')])])


        add_to_header_nu = pe.Node(AddXFormToHeader(), name="Add_XForm_to_NU")
        add_to_header_nu.inputs.copy_name = True
        add_to_header_nu.inputs.out_file = 'nu.mgz'
        ar1_wf.connect([(intensity_correction, add_to_header_nu, [('out_file', 'in_file')]),
                        (copy_transform, add_to_header_nu, [('out_file', 'transform')])])


    # Intensity Normalization
    # Performs intensity normalization of the orig volume and places the result in mri/T1.mgz.
    # Attempts to correct for fluctuations in intensity that would otherwise make intensity-based
    # segmentation much more difficult. Intensities for all voxels are scaled so that the mean
    # intensity of the white matter is 110.

    mri_normalize = pe.Node(Normalize(), name="Normalize_T1")
    mri_normalize.inputs.gradient = 1
    mri_normalize.inputs.out_file = 'T1.mgz'

    if fsvernum < 6:
        ar1_wf.connect([(add_to_header_nu, mri_normalize, [('out_file', 'in_file')])])
    else:
        ar1_wf.connect([(add_xform_to_orig_nu, mri_normalize, [('out_file', 'in_file')])])

    ar1_wf.connect([(copy_transform, mri_normalize, [('out_file', 'transform')])])

    # Skull Strip
    """
    Removes the skull from mri/T1.mgz and stores the result in
    mri/brainmask.auto.mgz and mri/brainmask.mgz. Runs the mri_watershed program.
    """
    if not longitudinal:
        mri_em_register = pe.Node(EMRegister(), name="EM_Register")
        mri_em_register.inputs.out_file = 'talairach_with_skull.lta'
        mri_em_register.inputs.skull = True
        if plugin_args:
            mri_em_register.plugin_args = plugin_args

        if fsvernum < 6:
            ar1_wf.connect(add_to_header_nu, 'out_file', mri_em_register, 'in_file')
        else:
            ar1_wf.connect(add_xform_to_orig_nu, 'out_file', mri_em_register, 'in_file')

        ar1_wf.connect([(inputspec, mri_em_register, [('num_threads', 'num_threads'),
                                                      ('reg_template_withskull', 'template')])])

        brainmask = pe.Node(WatershedSkullStrip(),
                            name='Watershed_Skull_Strip')
        brainmask.inputs.t1 = True
        brainmask.inputs.out_file = 'brainmask.auto.mgz'
        ar1_wf.connect([(mri_normalize, brainmask, [('out_file', 'in_file')]),
                        (mri_em_register, brainmask, [('out_file', 'transform')]),
                        (inputspec, brainmask, [('reg_template_withskull', 'brain_atlas')])])
    else:
        copy_template_brainmask = pe.Node(Function(['in_file', 'out_file'],
                                                   ['out_file'],
                                                   copy_file),
                                          name='Copy_Template_Brainmask')
        copy_template_brainmask.inputs.out_file = 'brainmask_{0}.mgz'.format(config['long_template'])

        ar1_wf.connect([(inputspec, copy_template_brainmask, [('template_brainmask', 'in_file')])])

        mask1 = pe.Node(ApplyMask(), name="ApplyMask1")
        mask1.inputs.keep_mask_deletion_edits = True
        mask1.inputs.out_file = 'brainmask.auto.mgz'

        ar1_wf.connect([(mri_normalize, mask1, [('out_file', 'in_file')]),
                        (copy_template_brainmask, mask1, [('out_file', 'mask_file')])])

        brainmask = pe.Node(ApplyMask(), name="ApplyMask2")
        brainmask.inputs.keep_mask_deletion_edits = True
        brainmask.inputs.transfer = 255
        brainmask.inputs.out_file = mask1.inputs.out_file

        ar1_wf.connect([(mask1, brainmask, [('out_file', 'in_file')]),
                        (copy_template_brainmask, brainmask, [('out_file', 'mask_file')])])

    copy_brainmask = pe.Node(Function(['in_file', 'out_file'],
                                      ['out_file'],
                                      copy_file),
                             name='Copy_Brainmask')
    copy_brainmask.inputs.out_file = 'brainmask.mgz'

    ar1_wf.connect([(brainmask, copy_brainmask, [('out_file', 'in_file')])])

    outputs = ['origvols',
               't2_raw',
               'flair',
               'rawavg',
               'orig_nu',
               'orig',
               'talairach_auto',
               'talairach',
               't1',
               'talskull',
               'brainmask_auto',
               'brainmask',
               'braintemplate']

    if fsvernum < 6:
        outputspec = pe.Node(IdentityInterface(fields=outputs + ['nu']),
                             name="outputspec")
        ar1_wf.connect([(add_to_header_nu, outputspec, [('out_file', 'nu')])])
    else:
        outputspec = pe.Node(IdentityInterface(fields=outputs),
                             name="outputspec")

    ar1_wf.connect([(T1_image_preparation, outputspec, [('out_file', 'origvols')]),
                    (T2_convert, outputspec, [('out_file', 't2_raw')]),
                    (FLAIR_convert, outputspec, [('out_file', 'flair')]),
                    (create_template, outputspec, [('out_file', 'rawavg')]),
                    (add_xform_to_orig, outputspec, [('out_file', 'orig')]),
                    (add_xform_to_orig_nu, outputspec, [('out_file', 'orig_nu')]),
                    (talairach_avi, outputspec, [('out_file', 'talairach_auto')]),
                    (copy_transform, outputspec, [('out_file', 'talairach')]),
                    (mri_normalize, outputspec, [('out_file', 't1')]),
                    (brainmask, outputspec, [('out_file', 'brainmask_auto')]),
                    (copy_brainmask, outputspec, [('out_file', 'brainmask')]),
                    ])


    if not longitudinal:
        ar1_wf.connect([(mri_em_register, outputspec, [('out_file', 'talskull')]),
                        ])
    else:
        ar1_wf.connect([(copy_template_brainmask, outputspec, [('out_file', 'braintemplate')]),
                        ])

    return ar1_wf, outputs
Exemplo n.º 27
0
ValueError: BIDS root does not exist: /tmp/tmp9g7ddldw/bids-grabber/examples/examples/BIDS



#problem with base_dir  
bg.inputs.base_dir = "/mnt/Filbey/Evan/MJXProcessing/examples/examples/BIDS"

bg.inputs.subject = 'M7500516'
#bg.inputs.output_query = {'T1w': dict(type='anat')}
res = bg.run()
res.outputs
print("done")
"""


def printMe(paths):
    print("\n\nanalyzing " + str(paths) + "\n\n")
    
analyzeANAT = Node(Function(function=printMe, input_names=["paths"],
                            output_names=[]), name="analyzeANAT")

bg_all = Node(BIDSDataGrabber(), name='bids-grabber')
bg_all.inputs.base_dir = '/mnt/Filbey/Evan/MJXProcessing/examples/examples/BIDS'
bg_all.inputs.output_query = {'ses': dict(type='session')}
bg_all.iterables = ('subject', layout.get_subjects()[:1])
wf = Workflow(name="bids_demo")
wf.connect(bg_all, "ses", analyzeANAT, "paths")
wf.run()


    plt.title('Rotations in radians')
    plt.plot(movement[:, 3:])
    plt.legend(['x', 'y', 'z'])

    plt.subplot(313)
    plt.title('Displacement in mm')
    plt.plot(abs_disp)
    plt.plot(rel_disp)
    plt.legend(['abs', 'rel'])

    plt.savefig('Motion')


Plot_Motion = Node(name='Plot_Motion',
                   interface=Function(
                       input_names=['motion_par', 'abs_disp', 'rel_disp'],
                       function=Plot_Motion))

# In[12]:

#-----------------------------------------------------------------------------------------------------
# Use SPM smoothing because FSL does not support anisotropic smoothing
Spm_Smoothing = Node(spm.Smooth(), name='Smoothing')
# Of the kernels tried, this one is the most reasonable
Spm_Smoothing.inputs.fwhm = [5.75, 5.75, 8]
#Spm_Smoothing.iterables = ('fwhm', [[5,5,8],[5.75,5.75,8],[5.75,5.75,10], [5.75,5.75,16]])

#-----------------------------------------------------------------------------------------------------
#Getting median intensity
Median_Intensity = Node(fsl.ImageStats(), name='Median_Intensity')
# Put the mask option (-k) before -p 50 in op_string so the median is computed within the mask
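# A plausible completion (assumed, not from the original script): with nipype's fsl.ImageStats
# the mask file is substituted into op_string via '-k %s', which must precede '-p 50'.
Median_Intensity.inputs.op_string = '-k %s -p 50'
# 'in_file' and 'mask_file' would then be connected to this node in the workflow.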
Exemplo n.º 29
0
def create_tbss_non_FA(name='tbss_non_FA', output_file=None):
    """
    A pipeline that implements tbss_non_FA in FSL.

    Example
    -------

    >>> from nipype.workflows.dmri.fsl import tbss
    >>> tbss_MD = tbss.create_tbss_non_FA()
    >>> tbss_MD.inputs.inputnode.file_list = []
    >>> tbss_MD.inputs.inputnode.field_list = []
    >>> tbss_MD.inputs.inputnode.skeleton_thresh = 0.2
    >>> tbss_MD.inputs.inputnode.groupmask = './xxx'
    >>> tbss_MD.inputs.inputnode.meanfa_file = './xxx'
    >>> tbss_MD.inputs.inputnode.distance_map = []
    >>> tbss_MD.inputs.inputnode.all_FA_file = './xxx'

    Inputs::

        inputnode.file_list
        inputnode.field_list
        inputnode.skeleton_thresh
        inputnode.groupmask
        inputnode.meanfa_file
        inputnode.distance_map
        inputnode.all_FA_file

    Outputs::

        outputnode.projected_nonFA_file

    """

    # Define the inputnode
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'file_list', 'field_list', 'skeleton_thresh', 'groupmask',
        'meanfa_file', 'distance_map', 'all_FA_file'
    ]),
                        name='inputnode')

    # Apply the warpfield to the non FA image
    applywarp = pe.MapNode(interface=fsl.ApplyWarp(),
                           iterfield=['in_file', 'field_file'],
                           name="applywarp")
    if fsl.no_fsl():
        warn('NO FSL found')
    else:
        applywarp.inputs.ref_file = fsl.Info.standard_image(
            "FMRIB58_FA_1mm.nii.gz")
    # Merge the non FA files into a 4D file
    merge = pe.Node(  # fsl.Merge(dimension="t"),
        interface=Function(
            input_names=['in_files', 'dimension', 'tr', 'merged_file'],
            output_names=['merged_file'],
            function=merge_volumes_4d),
        name="merge")

    # merged_file="all_FA.nii.gz"
    maskgroup = pe.Node(fsl.ImageMaths(op_string="-mas", suffix="_masked"),
                        name="maskgroup")

    gunzip = pe.Node(Gunzip(), name="gunzip")

    projectfa = pe.Node(fsl.TractSkeleton(project_data=True,
                                          projected_data=output_file,
                                          use_cingulum_mask=True),
                        name="projectfa")

    tbss_non_FA = pe.Workflow(name=name)
    tbss_non_FA.connect([
        (inputnode, applywarp, [
            ('file_list', 'in_file'),
            ('field_list', 'field_file'),
        ]),
        (applywarp, merge, [("out_file", "in_files")]),
        (merge, maskgroup, [("merged_file", "in_file")]),
        (inputnode, maskgroup, [('groupmask', 'in_file2')]),
        (maskgroup, gunzip, [('out_file', 'in_file')]),
        (gunzip, projectfa, [('out_file', 'alt_data_file')]),
        (inputnode, projectfa, [('skeleton_thresh', 'threshold'),
                                ("meanfa_file", "in_file"),
                                ("distance_map", "distance_map"),
                                ("all_FA_file", 'data_file')]),
    ])

    # Define the outputnode
    outputnode = pe.Node(
        interface=util.IdentityInterface(fields=['projected_nonFA_file']),
        name='outputnode')
    tbss_non_FA.connect([
        (projectfa, outputnode, [
            ('projected_data', 'projected_nonFA_file'),
        ]),
    ])
    return tbss_non_FA
Exemplo n.º 30
0
datasource.inputs.template = '%s%s%s'
datasource.inputs.template_args = info
datasource.inputs.sort_filelist = True
datasource.iterables = [("atlas_id", atlassubjects)]


def repeat_elements(ref):
    import itertools
    ref_list = itertools.repeat(ref, 3)
    #ref_list=np.repeat(ref,3)
    return list(ref_list)
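
# Example of what repeat_elements returns (not in the original script):
#   repeat_elements('/path/ref.nii.gz') -> ['/path/ref.nii.gz', '/path/ref.nii.gz', '/path/ref.nii.gz']
# i.e. one copy of the reference per input collected by the Merge(3) node below (assumed pairing).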


collect_inputs = Node(interface=util.Merge(3), name='collect_inputs')
collect_ref = Node(Function(input_names=["ref"],
                            output_names=["ref_list"],
                            function=repeat_elements),
                   name='collect_ref')

# Do a quick registration between the target image and the atlas-transformed images
quickreg = create_quick_registration()

# Combine warps and propagate the labels from the atlas images to the target image.
# Label propagation uses the composition of each atlas image's stored forward transforms
# (Aaff_i and Adef_i) with the inverse of that atlas's correction transform (Cdef_i) and
# the inverses of the subject image's initial transforms (Saff and Sdef).
warp = create_warp_transform()

#sink the data
sink = Node(nio.DataSink(parameterization=True,
                         base_directory=out_dir,
                         substitutions=[('_pair_', ''),
Exemplo n.º 31
0
def create_dust_cleanup_workflow(workflowFileName, onlyT1, master_config):
    """
    Create a workflow that removes small 'dust' islands from the JointFusion label atlas,
    first cleaning the 'suspicious' label (999) and then the remaining labels.

    :param workflowFileName: name given to the returned workflow
    :param onlyT1: if True, only the T1 image is used (no T2 connection is made)
    :param master_config: pipeline configuration dictionary (queue settings, etc.)
    :return: the configured dust cleanup workflow
    """
    # if onlyT1:
    #    n_modality = 1
    # else:
    #    n_modality = 2
    # CLUSTER_QUEUE = master_config['queue']
    # CLUSTER_QUEUE_LONG = master_config['long_q']

    dustCleanupWF = pe.Workflow(name=workflowFileName)

    inputsSpec = pe.Node(
        interface=IdentityInterface(
            fields=[
                "subj_t1_image",  # Input T1 image
                "subj_t2_image",  # Input T2 image
                "subj_label_atlas",  # Input label atlas image
            ]
        ),
        run_without_submitting=True,
        name="inputspec",
    )

    outputsSpec = pe.Node(
        interface=IdentityInterface(
            fields=["JointFusion_HDAtlas20_2015_dustCleaned_label"]
        ),
        run_without_submitting=True,
        name="outputspec",
    )

    """
    Multimodal atlas dust cleanup (using T2 if it exists) to clean 'suspicious' dust. This stage builds
    islands using four-neighbor connectivity (useFullyConnected = False), uses a maximum island count of 6
    (instead of 5 as in the next stage), forces labels to change for these islands, does NOT dilate the
    label mask (so that all dust particles, including clusters, are cleaned), and cleans only the
    'suspicious' label (999).
    """
    sessionRunDustCleanupOnSuspicious = pe.Node(
        Function(
            function=run_automatic_cleanup_script,
            input_names=[
                "inFN1",
                "inFN2",
                "inAtlas",
                "outAtlas",
                "maxIslandCount",
                "useFullyConnected",
                "forceLabelChange",
                "noDilation",
                "includeList",
                "excludeList",
            ],
            output_names=["cleanedLabelImage"],
        ),
        run_without_submitting=True,
        name="sessionRunDustCleanupOnSuspicious",
    )
    dustCleanupWF.connect(
        inputsSpec, "subj_t1_image", sessionRunDustCleanupOnSuspicious, "inFN1"
    )
    if not onlyT1:
        dustCleanupWF.connect(
            inputsSpec, "subj_t2_image", sessionRunDustCleanupOnSuspicious, "inFN2"
        )

    dustCleanupWF.connect(
        inputsSpec, "subj_label_atlas", sessionRunDustCleanupOnSuspicious, "inAtlas"
    )
    sessionRunDustCleanupOnSuspicious.inputs.outAtlas = (
        "JointFusion_HDAtlas20_2015_dustCleaned_label_suspicious_maxIsland6.nii.gz"
    )
    sessionRunDustCleanupOnSuspicious.inputs.maxIslandCount = 6
    sessionRunDustCleanupOnSuspicious.inputs.useFullyConnected = False
    sessionRunDustCleanupOnSuspicious.inputs.forceLabelChange = True
    sessionRunDustCleanupOnSuspicious.inputs.noDilation = True
    sessionRunDustCleanupOnSuspicious.inputs.includeList = "999"
    sessionRunDustCleanupOnSuspicious.inputs.excludeList = None

    """
    Multimodal atlas dust cleanup (using T2 if it exists) to clean most labels after the suspicious
    label has been cleaned. Labels excluded from this stage may have viable isolated islands that
    should not be changed. This stage builds islands using eight-neighbor connectivity
    (useFullyConnected = True), uses a maximum island count of 5 (instead of 6 as in the previous
    stage), forces labels to change for these islands, dilates the label mask to avoid cleaning
    clustered dust, and excludes several labels.
    """
    sessionRunDustCleanup = pe.Node(
        Function(
            function=run_automatic_cleanup_script,
            input_names=[
                "inFN1",
                "inFN2",
                "inAtlas",
                "outAtlas",
                "maxIslandCount",
                "useFullyConnected",
                "forceLabelChange",
                "noDilation",
                "includeList",
                "excludeList",
            ],
            output_names=["cleanedLabelImage"],
        ),
        run_without_submitting=True,
        name="sessionRunDustCleanup",
    )
    dustCleanupWF.connect(inputsSpec, "subj_t1_image", sessionRunDustCleanup, "inFN1")
    if not onlyT1:
        dustCleanupWF.connect(
            inputsSpec, "subj_t2_image", sessionRunDustCleanup, "inFN2"
        )

    dustCleanupWF.connect(
        sessionRunDustCleanupOnSuspicious,
        "cleanedLabelImage",
        sessionRunDustCleanup,
        "inAtlas",
    )
    sessionRunDustCleanup.inputs.outAtlas = (
        "JointFusion_HDAtlas20_2015_dustCleaned_label.nii.gz"
    )
    sessionRunDustCleanup.inputs.maxIslandCount = 5
    sessionRunDustCleanup.inputs.useFullyConnected = True
    sessionRunDustCleanup.inputs.forceLabelChange = True
    sessionRunDustCleanup.inputs.noDilation = False
    sessionRunDustCleanup.inputs.includeList = None
    sessionRunDustCleanup.inputs.excludeList = (
        "4,5,14,15,21,24,31,43,44,63,72,85,98,128,219,15000"
    )

    dustCleanupWF.connect(
        sessionRunDustCleanup,
        "cleanedLabelImage",
        outputsSpec,
        "JointFusion_HDAtlas20_2015_dustCleaned_label",
    )

    return dustCleanupWF
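
# A minimal usage sketch (assumed, not from the original source), wiring the fields declared
# in the 'inputspec' node above. The file paths are hypothetical.
if __name__ == "__main__":
    wf = create_dust_cleanup_workflow("dustCleanup", onlyT1=False, master_config={})
    wf.inputs.inputspec.subj_t1_image = "t1.nii.gz"
    wf.inputs.inputspec.subj_t2_image = "t2.nii.gz"
    wf.inputs.inputspec.subj_label_atlas = "labels.nii.gz"
    wf.run()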
Exemplo n.º 32
0
def create_seg_preproc(use_ants, wf_name='seg_preproc'):
    """
    Segment the subject's anatomical brain into cerebrospinal fluid, white matter and gray matter,
    and binarize the resulting tissue maps.

    Parameters
    ----------

    use_ants : boolean
        whether ANTs-based initial/rigid transforms are used when registering
        the tissue priors into anatomical space

    wf_name : string
        name of the workflow

    Returns
    -------

    seg_preproc : workflow

        Workflow Object for Segmentation Workflow
    

    Notes
    -----

    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/seg_preproc/seg_preproc.py>`_ 

    Workflow Inputs: ::
        inputspec.brain : string (existing nifti file)
            Anatomical image(without skull)
    
        inputspec.standard2highres_mat : string (existing affine transformation .mat file)
            File for transformation from mni space to anatomical space
    
        inputspec.PRIOR_CSF : string (existing nifti file)
            FSL Standard CSF Tissue prior image , binarized with threshold of 0.4 
    
        inputspec.PRIOR_GRAY : string (existing nifti file)
            FSL Standard GRAY Matter Tissue prior image , binarized with threshold of 0.66
    
        inputspec.PRIOR_WHITE : string (existing nifti file)
            FSL Standard White Matter Tissue prior image , binarized with threshold of 0.2
        
    Workflow Outputs: ::

        outputspec.csf_mni2t1 : string (nifti file)
            outputs CSF prior template(in MNI space) registered to anatomical space
        
        outputspec.gm_mni2t1 : string (nifti file)
            outputs gray matter prior template registered to anatomical space
    
        outputspec.gm_mask : string (nifti file)
            outputs image after masking gm_combo with gm prior in t1 space
    
        outputspec.wm_mni2t1 : string (nifti file)
            outputs White Matter prior template(in MNI space) registered to anatomical space
    
        outputspec.wm_mask : string (nifti file)
            outputs image after masking wm_combo with white matter(wm) prior in t1 space
    
        outputspec.probability_maps : string (nifti file)
            outputs individual probability maps (output from brain segmentation using FAST)
    
        outputspec.mixeltype : string (nifti file)
            outputs mixeltype volume file _mixeltype (output from brain segmentation using FAST)
    
        outputspec.partial_volume_map : string (nifti file)
            outputs partial volume file _pveseg (output from brain segmentation using FAST)
    
        outputspec.partial_volume_files : string (nifti file)
            outputs partial volume estimate files _pve_ (output from brain segmentation using FAST)


    Order of commands:

    - Segment the Anatomical brain. For details see `fast <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FAST>`_::

        fast
        -t 1
        -g
        -p
        -o segment
        mprage_brain.nii.gz
    
    - Register CSF template in MNI space to t1 space. For details see `flirt <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FLIRT>`_::
    
        flirt
        -in PRIOR_CSF
        -ref mprage_brain.nii.gz
        -applyxfm
        -init standard2highres_inv.mat
        -out csf_mni2t1

    - Threshold and binarize CSF probability map ::

        fslmaths
        csf_combo.nii.gz
        -thr 0.4
        -bin csf_bin.nii.gz

    - Generate CSF csf_mask, by applying csf prior in t1 space to thresholded binarized csf probability map ::

        fslmaths
        csf_bin.nii.gz
        -mas csf_mni2t1
        csf_mask


    - Register WM template in MNI space to t1 space ::
        
        flirt
        -in PRIOR_WM
        -ref mprage_brain.nii.gz
        -applyxfm
        -init standard2highres.mat
        -out wm_mni2t1

    - Threshold and binarize WM probability map ::

        fslmaths
        wm_combo.nii.gz
        -thr 0.4
        -bin wm_bin.nii.gz

    - Generate WM csf_mask, by applying wm_prior in t1 space to thresholded binarized wm probability map ::

        fslmaths
        wm_bin.nii.gz
        -mas wm_mni2t1
        wm_mask
 
    - Register GM template in MNI space to t1 space ::
    
        flirt
        -in PRIOR_GM
        -ref mprage_brain.nii.gz
        -applyxfm
        -init standard2highres.mat
        -out gm_mni2t1

    - Threshold and binarize GM probability map ::

        fslmaths
        gm_combo.nii.gz
        -thr 0.4
        -bin gm_bin.nii.gz

    - Generate GM csf_mask, by applying gm prior in t1 space to thresholded binarized gm probability map ::

        fslmaths
        gm_bin.nii.gz
        -mas gm_mni2t1
        gm_mask
    
    
    Examples
    --------
    >>> import CPAC.seg_preproc as seg_wflow
    >>> seg = seg_wflow.create_seg_preproc(use_ants=False)
    >>> seg.inputs.inputspec.standard2highres_mat = '/home/data/Projects/C-PAC/working_directory/s1001/reg_preproc/standard2highres.mat'
    >>> seg.inputs.inputspec.PRIOR_CSF = '/home/data/Projects/C-PAC/tissuepriors/2mm/avg152T1_csf_bin.nii.gz'
    >>> seg.inputs.inputspec.PRIOR_WHITE = '/home/data/Projects/C-PAC/tissuepriors/2mm/avg152T1_white_bin.nii.gz'
    >>> seg.inputs.inputspec.PRIOR_GRAY = '/home/data/Projects/C-PAC/tissuepriors/2mm/avg152T1_gray_bin.nii.gz'
    >>> seg.inputs.inputspec.brain = '/home/data/Projects/C-PAC/working_directory/s1001/anat_preproc/mprage_brain.nii.gz'
    >>> seg.run() # doctest: +SKIP
    
    
    High Level Graph:
    
    .. image:: ../images/seg_preproc.dot.png
        :width: 1100
        :height: 480
        
    Detailed Graph:
    
    .. image:: ../images/seg_preproc_detailed.dot.png
        :width: 1100
        :height: 480
    """

    preproc = pe.Workflow(name=wf_name)
    inputNode = pe.Node(util.IdentityInterface(fields=[
        'brain', 'standard2highres_init', 'standard2highres_mat',
        'standard2highres_rig', 'PRIOR_CSF', 'PRIOR_GRAY', 'PRIOR_WHITE'
    ]),
                        name='inputspec')

    outputNode = pe.Node(util.IdentityInterface(fields=[
        'csf_mni2t1', 'csf_mask', 'gm_mni2t1', 'gm_mask', 'wm_mni2t1',
        'probability_maps', 'mixeltype', 'partial_volume_map',
        'partial_volume_files', 'wm_mask'
    ]),
                         name='outputspec')

    segment = pe.Node(interface=fsl.FAST(), name='segment', mem_gb=1.5)
    segment.inputs.img_type = 1
    segment.inputs.segments = True
    segment.inputs.probability_maps = True
    segment.inputs.out_basename = 'segment'

    check_wm = pe.Node(name='check_wm',
                       interface=Function(function=check_if_file_is_empty,
                                          input_names=['in_file'],
                                          output_names=['out_file']))
    check_gm = pe.Node(name='check_gm',
                       interface=Function(function=check_if_file_is_empty,
                                          input_names=['in_file'],
                                          output_names=['out_file']))
    check_csf = pe.Node(name='check_csf',
                        interface=Function(function=check_if_file_is_empty,
                                           input_names=['in_file'],
                                           output_names=['out_file']))
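
    # Note: check_wm, check_gm and check_csf are instantiated above but are not connected
    # into the workflow graph in the code shown here.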

    #connections
    preproc.connect(inputNode, 'brain', segment, 'in_files')

    preproc.connect(segment, 'probability_maps', outputNode,
                    'probability_maps')
    preproc.connect(segment, 'mixeltype', outputNode, 'mixeltype')
    preproc.connect(segment, 'partial_volume_files', outputNode,
                    'partial_volume_files')
    preproc.connect(segment, 'partial_volume_map', outputNode,
                    'partial_volume_map')

    # Get the binarized, thresholded CSF mask
    process_csf = process_segment_map('CSF', use_ants)

    if use_ants:
        preproc.connect(inputNode, 'standard2highres_init', process_csf,
                        'inputspec.standard2highres_init')
        preproc.connect(inputNode, 'standard2highres_rig', process_csf,
                        'inputspec.standard2highres_rig')

    preproc.connect(
        inputNode,
        'brain',
        process_csf,
        'inputspec.brain',
    )
    preproc.connect(inputNode, 'PRIOR_CSF', process_csf,
                    'inputspec.tissue_prior')

    # tissue_class_files: binary segmented volume files, one value per tissue class
    preproc.connect(segment, ('tissue_class_files', pick_wm_0), process_csf,
                    'inputspec.probability_map')

    preproc.connect(inputNode, 'standard2highres_mat', process_csf,
                    'inputspec.standard2highres_mat')

    preproc.connect(process_csf, 'outputspec.segment_mask', outputNode,
                    'csf_mask')

    # Get the binarized, thresholded WM mask
    process_wm = process_segment_map('WM', use_ants)

    if use_ants:
        preproc.connect(inputNode, 'standard2highres_init', process_wm,
                        'inputspec.standard2highres_init')
        preproc.connect(inputNode, 'standard2highres_rig', process_wm,
                        'inputspec.standard2highres_rig')

    preproc.connect(
        inputNode,
        'brain',
        process_wm,
        'inputspec.brain',
    )
    preproc.connect(inputNode, 'PRIOR_WHITE', process_wm,
                    'inputspec.tissue_prior')
    preproc.connect(segment, ('tissue_class_files', pick_wm_2), process_wm,
                    'inputspec.probability_map')

    preproc.connect(inputNode, 'standard2highres_mat', process_wm,
                    'inputspec.standard2highres_mat')

    preproc.connect(process_wm, 'outputspec.tissueprior_mni2t1', outputNode,
                    'wm_mni2t1')
    preproc.connect(process_wm, 'outputspec.segment_mask', outputNode,
                    'wm_mask')

    # Get the binarized, thresholded GM mask
    process_gm = process_segment_map('GM', use_ants)

    if use_ants:
        preproc.connect(inputNode, 'standard2highres_init', process_gm,
                        'inputspec.standard2highres_init')
        preproc.connect(inputNode, 'standard2highres_rig', process_gm,
                        'inputspec.standard2highres_rig')

    preproc.connect(
        inputNode,
        'brain',
        process_gm,
        'inputspec.brain',
    )
    preproc.connect(inputNode, 'PRIOR_GRAY', process_gm,
                    'inputspec.tissue_prior')
    preproc.connect(segment, ('tissue_class_files', pick_wm_1), process_gm,
                    'inputspec.probability_map')
    preproc.connect(inputNode, 'standard2highres_mat', process_gm,
                    'inputspec.standard2highres_mat')

    preproc.connect(process_gm, 'outputspec.tissueprior_mni2t1', outputNode,
                    'gm_mni2t1')
    preproc.connect(process_gm, 'outputspec.segment_mask', outputNode,
                    'gm_mask')

    return preproc