Example 1
def init_gifti_surface_wf(name='gifti_surface_wf'):
    r"""
    Prepare GIFTI surfaces from a FreeSurfer subjects directory.

    If midthickness (or graymid) surfaces do not exist, they are generated and
    saved to the subject directory as ``lh/rh.midthickness``.
    These, along with the gray/white matter boundary (``lh/rh.smoothwm``), pial
    surfaces (``lh/rh.pial``) and inflated surfaces (``lh/rh.inflated``) are
    converted to GIFTI files.
    Additionally, the vertex coordinates are :py:class:`recentered
    <smriprep.interfaces.NormalizeSurf>` to align with native T1w space.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from smriprep.workflows.surfaces import init_gifti_surface_wf
            wf = init_gifti_surface_wf()

    Inputs
    ------
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    fsnative2t1w_xfm
        LTA formatted affine transform file (inverse)

    Outputs
    -------
    surfaces
        GIFTI surfaces for gray/white matter boundary, pial surface,
        midthickness (or graymid) surface, and inflated surfaces

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        ['subjects_dir', 'subject_id', 'fsnative2t1w_xfm']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['surfaces']),
                         name='outputnode')

    get_surfaces = pe.Node(nio.FreeSurferSource(), name='get_surfaces')

    midthickness = pe.MapNode(MakeMidthickness(thickness=True,
                                               distance=0.5,
                                               out_name='midthickness'),
                              iterfield='in_file',
                              name='midthickness')

    save_midthickness = pe.Node(nio.DataSink(parameterization=False),
                                name='save_midthickness')

    surface_list = pe.Node(niu.Merge(4, ravel_inputs=True),
                           name='surface_list',
                           run_without_submitting=True)
    fs2gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'),
                        iterfield='in_file',
                        name='fs2gii')
    fix_surfs = pe.MapNode(NormalizeSurf(),
                           iterfield='in_file',
                           name='fix_surfs')

    workflow.connect([
        (inputnode, get_surfaces, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, save_midthickness, [('subjects_dir', 'base_directory'),
                                        ('subject_id', 'container')]),
        # Generate midthickness surfaces and save to FreeSurfer derivatives
        (get_surfaces, midthickness, [('smoothwm', 'in_file'),
                                      ('graymid', 'graymid')]),
        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
        # Produce valid GIFTI surface files (dense mesh)
        (get_surfaces, surface_list, [('smoothwm', 'in1'), ('pial', 'in2'),
                                      ('inflated', 'in3')]),
        (save_midthickness, surface_list, [('out_file', 'in4')]),
        (surface_list, fs2gii, [('out', 'in_file')]),
        (fs2gii, fix_surfs, [('converted', 'in_file')]),
        (inputnode, fix_surfs, [('fsnative2t1w_xfm', 'transform_file')]),
        (fix_surfs, outputnode, [('out_file', 'surfaces')]),
    ])
    return workflow
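
A minimal usage sketch (not part of the original source); the FreeSurfer
directory, subject ID, and LTA transform below are placeholders:

from smriprep.workflows.surfaces import init_gifti_surface_wf

gifti_wf = init_gifti_surface_wf()
gifti_wf.inputs.inputnode.subjects_dir = '/data/freesurfer'
gifti_wf.inputs.inputnode.subject_id = 'sub-01'
gifti_wf.inputs.inputnode.fsnative2t1w_xfm = 'from-fsnative_to-T1w_xfm.lta'
gifti_wf.run()  # executes with nipype's default Linear plugin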
Example 2
def create_tbss_1_preproc(name='tbss_1_preproc'):
    """Preprocess FA data for TBSS: erodes a little and zero end slicers and
    creates masks(for use in FLIRT & FNIRT from FSL).
    A pipeline that does the same as tbss_1_preproc script in FSL

    Example
    -------

    >>> from nipype.workflows.dmri.fsl import tbss
    >>> tbss1 = tbss.create_tbss_1_preproc()
    >>> tbss1.inputs.inputnode.fa_list = ['s1_FA.nii', 's2_FA.nii', 's3_FA.nii']

    Inputs::

        inputnode.fa_list

    Outputs::

        outputnode.fa_list
        outputnode.mask_list
        outputnode.slices

    """

    # Define the inputnode
    inputnode = pe.Node(interface=util.IdentityInterface(fields=["fa_list"]),
                        name="inputnode")

    # Prep the FA images
    prepfa = pe.MapNode(fsl.ImageMaths(suffix="_prep"),
                        name="prepfa",
                        iterfield=['in_file', 'op_string'])

    # Slicer
    slicer = pe.MapNode(fsl.Slicer(all_axial=True, image_width=1280),
                        name='slicer',
                        iterfield=['in_file'])

    # Create a mask
    getmask1 = pe.MapNode(fsl.ImageMaths(op_string="-bin", suffix="_mask"),
                          name="getmask1",
                          iterfield=['in_file'])
    getmask2 = pe.MapNode(fsl.MultiImageMaths(op_string="-dilD -dilD -sub 1 -abs -add %s"),
                          name="getmask2",
                          iterfield=['in_file', 'operand_files'])

    # Replicates: $FSLDIR/bin/fslmaths FA/${f}_FA_mask -dilD -dilD -sub 1 -abs -add FA/${f}_FA_mask FA/${f}_FA_mask -odt char
    # Define the tbss1 workflow
    tbss1 = pe.Workflow(name=name)
    tbss1.connect([
        (inputnode, prepfa, [("fa_list", "in_file")]),
        (inputnode, prepfa, [(("fa_list", tbss1_op_string), "op_string")]),
        (prepfa, getmask1, [("out_file", "in_file")]),
        (getmask1, getmask2, [("out_file", "in_file"),
                              ("out_file", "operand_files")]),
        (prepfa, slicer, [('out_file', 'in_file')]),
        ])

    # Define the outputnode
    outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa_list",
                                                                  "mask_list",
                                                                  "slices"]),
                         name="outputnode")
    tbss1.connect([
        (prepfa, outputnode, [("out_file", "fa_list")]),
        (getmask2, outputnode, [("out_file", "mask_list")]),
        (slicer, outputnode, [('out_file', 'slices')]),
    ])
    return tbss1
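
The connections above reference a helper, tbss1_op_string, defined elsewhere
in the module. A sketch of what such a helper plausibly computes, assuming it
mirrors FSL's tbss_1_preproc (cap FA at 1, erode once, and zero the end
slices via an fslmaths ROI):

def tbss1_op_string(in_files):
    import nibabel as nb
    op_strings = []
    for infile in in_files:
        # Build a per-image fslmaths op string from the image dimensions.
        d = nb.load(infile).shape
        cmd_string = '-min 1 -ero -roi 1 %d 1 %d 1 %d 0 1' % (d[0] - 2,
                                                              d[1] - 2,
                                                              d[2] - 2)
        op_strings.append(cmd_string)
    return op_strings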
Example 3
def create_tbss_2_reg(name="tbss_2_reg"):
    """TBSS nonlinear registration:
    A pipeline that does the same as 'tbss_2_reg -t' script in FSL. '-n' option
    is not supported at the moment.

    Example
    -------

    >>> from nipype.workflows.dmri.fsl import tbss
    >>> tbss2 = create_tbss_2_reg(name="tbss2")
    >>> tbss2.inputs.inputnode.target = fsl.Info.standard_image("FMRIB58_FA_1mm.nii.gz")  # doctest: +SKIP
    >>> tbss2.inputs.inputnode.fa_list = ['s1_FA.nii', 's2_FA.nii', 's3_FA.nii']
    >>> tbss2.inputs.inputnode.mask_list = ['s1_mask.nii', 's2_mask.nii', 's3_mask.nii']

    Inputs::

        inputnode.fa_list
        inputnode.mask_list
        inputnode.target

    Outputs::

        outputnode.field_list

    """

    # Define the inputnode
    inputnode = pe.Node(interface=util.IdentityInterface(fields=["fa_list",
                                                                 "mask_list",
                                                                 "target"]),
                        name="inputnode")

    # Flirt the FA image to the target
    flirt = pe.MapNode(interface=fsl.FLIRT(dof=12),
                       iterfield=['in_file', 'in_weight'],
                       name="flirt")

    # Fnirt the FA image to the target
    fnirt = pe.MapNode(interface=fsl.FNIRT(fieldcoeff_file=True),
                       iterfield=['in_file', 'inmask_file', 'affine_file'],
                       name="fnirt")
    if fsl.no_fsl():
        warn('NO FSL found')
    else:
        config_file = os.path.join(os.environ["FSLDIR"],
                                   "etc/flirtsch/FA_2_FMRIB58_1mm.cnf")
        fnirt.inputs.config_file = config_file

    # Define the registration workflow
    tbss2 = pe.Workflow(name=name)

    # Connect up the registration workflow
    tbss2.connect([
        (inputnode, flirt, [("fa_list", "in_file"),
                            ("target", "reference"),
                            ("mask_list", "in_weight")]),
        (inputnode, fnirt, [("fa_list", "in_file"),
                            ("mask_list", "inmask_file"),
                            ("target", "ref_file")]),
        (flirt, fnirt, [("out_matrix_file", "affine_file")]),
    ])

    # Define the outputnode
    outputnode = pe.Node(interface=util.IdentityInterface(fields=['field_list']),
                         name="outputnode")

    tbss2.connect([
        (fnirt, outputnode, [('fieldcoeff_file', 'field_list')]),
    ])
    return tbss2
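
A sketch of chaining the two TBSS stages, feeding the preprocessed FA images
and masks from stage 1 into the registration:

import nipype.pipeline.engine as pe
from nipype.workflows.dmri.fsl import tbss

tbss1 = tbss.create_tbss_1_preproc()
tbss2 = tbss.create_tbss_2_reg(name='tbss2')
tbss_wf = pe.Workflow(name='tbss_pipeline')
tbss_wf.connect([
    (tbss1, tbss2, [('outputnode.fa_list', 'inputnode.fa_list'),
                    ('outputnode.mask_list', 'inputnode.mask_list')]),
])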
Example 4
def generate_single_session_template_WF(projectid,
                                        subjectid,
                                        sessionid,
                                        onlyT1,
                                        master_config,
                                        phase,
                                        interpMode,
                                        pipeline_name,
                                        doDenoise=True):
    """
    Run autoworkup on a single sessionid

    This is the main function to call when processing a data set with T1 & T2
    data.  ExperimentBaseDirectoryPrefix is the base of the directory to place results, T1Images & T2Images
    are the lists of images to be used in the auto-workup. atlas_fname_wpath is
    the path and filename of the atlas to use.
    """

    #if  not 'landmark' in master_config['components'] or not 'auxlmk' in master_config['components'] or not 'tissue_classify' in master_config['components']:
    #    print "Baseline DataSink requires 'AUXLMK' and/or 'TISSUE_CLASSIFY'!!!"
    #    raise NotImplementedError
    # master_config['components'].append('auxlmk')
    # master_config['components'].append('tissue_classify')

    assert phase in [
        'atlas-based-reference', 'subject-based-reference'
    ], "Unknown phase! Valid entries: 'atlas-based-reference', 'subject-based-reference'"

    if 'tissue_classify' in master_config['components']:
        assert ('landmark' in master_config['components']
                ), "tissue_classify Requires landmark step!"
    if 'landmark' in master_config['components']:
        assert 'denoise' in master_config[
            'components'], "landmark Requires denoise step!"

    from workflows.atlasNode import MakeAtlasNode

    baw201 = pe.Workflow(name=pipeline_name)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=[
        'atlasLandmarkFilename', 'atlasWeightFilename', 'LLSModel',
        'inputTemplateModel', 'template_t1', 'atlasDefinition', 'T1s', 'T2s',
        'PDs', 'FLs', 'OTHERs', 'hncma_atlas', 'template_rightHemisphere',
        'template_leftHemisphere', 'template_WMPM2_labels',
        'template_nac_labels', 'template_ventricles'
    ]),
                         run_without_submitting=True,
                         name='inputspec')

    outputsSpec = pe.Node(
        interface=IdentityInterface(fields=[
            't1_average',
            't2_average',
            'pd_average',
            'fl_average',
            'posteriorImages',
            'outputLabels',
            'outputHeadLabels',
            'atlasToSubjectTransform',
            'atlasToSubjectInverseTransform',
            'atlasToSubjectRegistrationState',
            'BCD_ACPC_T1_CROPPED',
            'outputLandmarksInACPCAlignedSpace',
            'outputLandmarksInInputSpace',
            'output_tx',
            'LMIatlasToSubject_tx',
            'writeBranded2DImage',
            'brainStemMask',
            'UpdatedPosteriorsList'  # Longitudinal
        ]),
        run_without_submitting=True,
        name='outputspec')

    dsName = "{0}_ds_{1}".format(phase, sessionid)
    DataSink = pe.Node(name=dsName, interface=nio.DataSink())
    DataSink.overwrite = master_config['ds_overwrite']
    DataSink.inputs.container = '{0}/{1}/{2}'.format(projectid, subjectid,
                                                     sessionid)
    DataSink.inputs.base_directory = master_config['resultdir']

    atlas_static_directory = master_config['atlascache']
    if master_config['workflow_phase'] == 'atlas-based-reference':
        atlas_warped_directory = master_config['atlascache']
        atlasABCNode_XML = MakeAtlasNode(atlas_warped_directory,
                                         'BABCXMLAtlas_{0}'.format(sessionid),
                                         ['W_BRAINSABCSupport'])
        baw201.connect(atlasABCNode_XML, 'ExtendedAtlasDefinition_xml',
                       inputsSpec, 'atlasDefinition')

        atlasABCNode_W = MakeAtlasNode(
            atlas_warped_directory, 'BABCAtlas_W{0}'.format(sessionid),
            ['W_BRAINSABCSupport', 'W_LabelMapsSupport'])
        baw201.connect([(atlasABCNode_W, inputsSpec, [
            ('hncma_atlas', 'hncma_atlas'),
            ('template_leftHemisphere', 'template_leftHemisphere'),
            ('template_rightHemisphere', 'template_rightHemisphere'),
            ('template_WMPM2_labels', 'template_WMPM2_labels'),
            ('template_nac_labels', 'template_nac_labels'),
            ('template_ventricles', 'template_ventricles')
        ])])
        ## These landmarks are only relevant for the atlas-based-reference case
        atlasBCDNode_W = MakeAtlasNode(atlas_warped_directory,
                                       'BBCDAtlas_W{0}'.format(sessionid),
                                       ['W_BCDSupport'])
        baw201.connect([
            (atlasBCDNode_W, inputsSpec, [
                ('template_t1', 'template_t1'),
                ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'),
            ]),
        ])
        ## Needed for both segmentation and template building prep
        atlasBCUTNode_W = MakeAtlasNode(atlas_warped_directory,
                                        'BBCUTAtlas_W{0}'.format(sessionid),
                                        ['W_BRAINSCutSupport'])

    elif master_config['workflow_phase'] == 'subject-based-reference':
        print(master_config['previousresult'])
        atlas_warped_directory = os.path.join(master_config['previousresult'],
                                              subjectid, 'Atlas')

        template_DG = pe.Node(interface=nio.DataGrabber(
            infields=['subject'],
            outfields=[
                'outAtlasXMLFullPath', 'hncma_atlas',
                'template_leftHemisphere', 'template_rightHemisphere',
                'template_WMPM2_labels', 'template_nac_labels',
                'template_ventricles', 'template_t1',
                'template_landmarks_50Lmks_fcsv'
            ]),
                              name='Template_DG')
        template_DG.inputs.base_directory = master_config['previousresult']
        template_DG.inputs.subject = subjectid
        template_DG.inputs.field_template = {
            'outAtlasXMLFullPath': '%s/Atlas/AtlasDefinition_%s.xml',
            'hncma_atlas': '%s/Atlas/AVG_hncma_atlas.nii.gz',
            'template_leftHemisphere':
            '%s/Atlas/AVG_template_leftHemisphere.nii.gz',
            'template_rightHemisphere':
            '%s/Atlas/AVG_template_rightHemisphere.nii.gz',
            'template_WMPM2_labels':
            '%s/Atlas/AVG_template_WMPM2_labels.nii.gz',
            'template_nac_labels': '%s/Atlas/AVG_template_nac_labels.nii.gz',
            'template_ventricles': '%s/Atlas/AVG_template_ventricles.nii.gz',
            'template_t1': '%s/Atlas/AVG_T1.nii.gz',
            'template_landmarks_50Lmks_fcsv': '%s/Atlas/AVG_LMKS.fcsv',
        }
        template_DG.inputs.template_args = {
            'outAtlasXMLFullPath': [['subject', 'subject']],
            'hncma_atlas': [['subject']],
            'template_leftHemisphere': [['subject']],
            'template_rightHemisphere': [['subject']],
            'template_WMPM2_labels': [['subject']],
            'template_nac_labels': [['subject']],
            'template_ventricles': [['subject']],
            'template_t1': [['subject']],
            'template_landmarks_50Lmks_fcsv': [['subject']]
        }
        template_DG.inputs.template = '*'
        template_DG.inputs.sort_filelist = True
        template_DG.inputs.raise_on_empty = True

        baw201.connect(template_DG, 'outAtlasXMLFullPath', inputsSpec,
                       'atlasDefinition')
        baw201.connect([(
            template_DG,
            inputsSpec,
            [
                ## Already connected ('template_t1','template_t1'),
                ('hncma_atlas', 'hncma_atlas'),
                ('template_leftHemisphere', 'template_leftHemisphere'),
                ('template_rightHemisphere', 'template_rightHemisphere'),
                ('template_WMPM2_labels', 'template_WMPM2_labels'),
                ('template_nac_labels', 'template_nac_labels'),
                ('template_ventricles', 'template_ventricles')
            ])])
        ## Landmarks from the subject-specific template, used for landmark initialization
        baw201.connect([
            (template_DG, inputsSpec, [
                ('template_t1', 'template_t1'),
                ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'),
            ]),
        ])

    else:
        raise ValueError("Invalid workflow type specified for singleSession")

    atlasBCDNode_S = MakeAtlasNode(atlas_static_directory,
                                   'BBCDAtlas_S{0}'.format(sessionid),
                                   ['S_BCDSupport'])
    baw201.connect([
        (atlasBCDNode_S, inputsSpec,
         [('template_weights_50Lmks_wts', 'atlasWeightFilename'),
          ('LLSModel_50Lmks_h5', 'LLSModel'),
          ('T1_50Lmks_mdl', 'inputTemplateModel')]),
    ])

    if doDenoise:
        print("\ndenoise image filter\n")
        makeDenoiseInImageList = pe.Node(Function(
            function=MakeOutFileList,
            input_names=[
                'T1List', 'T2List', 'PDList', 'FLList', 'OtherList', 'postfix',
                'PrimaryT1'
            ],
            output_names=['inImageList', 'outImageList', 'imageTypeList']),
                                         run_without_submitting=True,
                                         name="99_makeDenoiseInImageList")
        baw201.connect(inputsSpec, 'T1s', makeDenoiseInImageList, 'T1List')
        baw201.connect(inputsSpec, 'T2s', makeDenoiseInImageList, 'T2List')
        baw201.connect(inputsSpec, 'PDs', makeDenoiseInImageList, 'PDList')
        makeDenoiseInImageList.inputs.FLList = []  # an emptyList HACK
        makeDenoiseInImageList.inputs.PrimaryT1 = None  # an emptyList HACK
        makeDenoiseInImageList.inputs.postfix = "_UNM_denoised.nii.gz"
        # HACK baw201.connect( inputsSpec, 'FLList', makeDenoiseInImageList, 'FLList' )
        baw201.connect(inputsSpec, 'OTHERs', makeDenoiseInImageList,
                       'OtherList')

        print("\nDenoise:\n")
        DenoiseInputImgs = pe.MapNode(
            interface=UnbiasedNonLocalMeans(),
            name='denoiseInputImgs',
            iterfield=['inputVolume', 'outputVolume'])
        DenoiseInputImgs.inputs.rc = [1, 1, 1]
        DenoiseInputImgs.inputs.rs = [4, 4, 4]
        DenoiseInputImgs.plugin_args = {
            'qsub_args': modify_qsub_args(master_config['queue'], .2, 1, 1),
            'overwrite': True
        }
        baw201.connect([(makeDenoiseInImageList, DenoiseInputImgs,
                         [('inImageList', 'inputVolume')]),
                        (makeDenoiseInImageList, DenoiseInputImgs,
                         [('outImageList', 'outputVolume')])])
        print("\nMerge all T1 and T2 List\n")
        makePreprocessingOutList = pe.Node(Function(
            function=GenerateSeparateImageTypeList,
            input_names=['inFileList', 'inTypeList'],
            output_names=['T1s', 'T2s', 'PDs', 'FLs', 'OtherList']),
                                           run_without_submitting=True,
                                           name="99_makePreprocessingOutList")
        baw201.connect(DenoiseInputImgs, 'outputVolume',
                       makePreprocessingOutList, 'inFileList')
        baw201.connect(makeDenoiseInImageList, 'imageTypeList',
                       makePreprocessingOutList, 'inTypeList')

    else:
        makePreprocessingOutList = inputsSpec

    if 'landmark' in master_config['components']:
        DoReverseMapping = False  # Set to true for debugging outputs
        if 'auxlmk' in master_config['components']:
            DoReverseMapping = True
        myLocalLMIWF = CreateLandmarkInitializeWorkflow(
            "LandmarkInitialize", interpMode, DoReverseMapping)

        baw201.connect([
            (makePreprocessingOutList, myLocalLMIWF,
             [(('T1s', get_list_element, 0), 'inputspec.inputVolume')]),
            (inputsSpec, myLocalLMIWF,
             [('atlasLandmarkFilename', 'inputspec.atlasLandmarkFilename'),
              ('atlasWeightFilename', 'inputspec.atlasWeightFilename'),
              ('LLSModel', 'inputspec.LLSModel'),
              ('inputTemplateModel', 'inputspec.inputTemplateModel'),
              ('template_t1', 'inputspec.atlasVolume')]),
            (myLocalLMIWF, outputsSpec,
             [('outputspec.outputResampledCroppedVolume',
               'BCD_ACPC_T1_CROPPED'),
              ('outputspec.outputLandmarksInACPCAlignedSpace',
               'outputLandmarksInACPCAlignedSpace'),
              ('outputspec.outputLandmarksInInputSpace',
               'outputLandmarksInInputSpace'),
              ('outputspec.outputTransform', 'output_tx'),
              ('outputspec.atlasToSubjectTransform', 'LMIatlasToSubject_tx'),
              ('outputspec.writeBranded2DImage', 'writeBranded2DImage')])
        ])
        baw201.connect([(
            outputsSpec,
            DataSink,  # TODO: change to myLocalLMIWF -> DataSink
            [
                ('outputLandmarksInACPCAlignedSpace',
                 'ACPCAlign.@outputLandmarks_ACPC'),
                ('writeBranded2DImage', 'ACPCAlign.@writeBranded2DImage'),
                ('BCD_ACPC_T1_CROPPED', 'ACPCAlign.@BCD_ACPC_T1_CROPPED'),
                ('outputLandmarksInInputSpace',
                 'ACPCAlign.@outputLandmarks_Input'),
                ('output_tx', 'ACPCAlign.@output_tx'),
                ('LMIatlasToSubject_tx', 'ACPCAlign.@LMIatlasToSubject_tx'),
            ])])

    if 'tissue_classify' in master_config['components']:
        useRegistrationMask = master_config['use_registration_masking']

        myLocalTCWF = CreateTissueClassifyWorkflow("TissueClassify",
                                                   master_config, interpMode,
                                                   useRegistrationMask)
        baw201.connect([
            (makePreprocessingOutList, myLocalTCWF, [('T1s',
                                                      'inputspec.T1List')]),
            (makePreprocessingOutList, myLocalTCWF, [('T2s',
                                                      'inputspec.T2List')]),
            (inputsSpec, myLocalTCWF,
             [('atlasDefinition', 'inputspec.atlasDefinition'),
              ('template_t1', 'inputspec.atlasVolume'),
              (('T1s', getAllT1sLength), 'inputspec.T1_count'),
              ('PDs', 'inputspec.PDList'), ('FLs', 'inputspec.FLList'),
              ('OTHERs', 'inputspec.OtherList')]),
            (myLocalLMIWF, myLocalTCWF,
             [('outputspec.outputResampledCroppedVolume',
               'inputspec.PrimaryT1'),
              ('outputspec.atlasToSubjectTransform',
               'inputspec.atlasToSubjectInitialTransform')]),
            (myLocalTCWF, outputsSpec,
             [('outputspec.t1_average', 't1_average'),
              ('outputspec.t2_average', 't2_average'),
              ('outputspec.pd_average', 'pd_average'),
              ('outputspec.fl_average', 'fl_average'),
              ('outputspec.posteriorImages', 'posteriorImages'),
              ('outputspec.outputLabels', 'outputLabels'),
              ('outputspec.outputHeadLabels', 'outputHeadLabels'),
              ('outputspec.atlasToSubjectTransform',
               'atlasToSubjectTransform'),
              ('outputspec.atlasToSubjectInverseTransform',
               'atlasToSubjectInverseTransform'),
              ('outputspec.atlasToSubjectRegistrationState',
               'atlasToSubjectRegistrationState')]),
        ])

        baw201.connect([(
            outputsSpec,
            DataSink,  # TODO: change to myLocalTCWF -> DataSink
            [(('t1_average', convertToList), 'TissueClassify.@t1'),
             (('t2_average', convertToList), 'TissueClassify.@t2'),
             (('pd_average', convertToList), 'TissueClassify.@pd'),
             (('fl_average', convertToList), 'TissueClassify.@fl')])])

        currentFixWMPartitioningName = "_".join(
            ['FixWMPartitioning',
             str(subjectid),
             str(sessionid)])
        FixWMNode = pe.Node(interface=Function(
            function=FixWMPartitioning,
            input_names=['brainMask', 'PosteriorsList'],
            output_names=[
                'UpdatedPosteriorsList', 'MatchingFGCodeList',
                'MatchingLabelList', 'nonAirRegionMask'
            ]),
                            name=currentFixWMPartitioningName)

        baw201.connect([
            (myLocalTCWF, FixWMNode, [('outputspec.outputLabels', 'brainMask'),
                                      (('outputspec.posteriorImages',
                                        flattenDict), 'PosteriorsList')]),
            (FixWMNode, outputsSpec, [('UpdatedPosteriorsList',
                                       'UpdatedPosteriorsList')]),
        ])

        currentBRAINSCreateLabelMapName = 'BRAINSCreateLabelMapFromProbabilityMaps_' + str(
            subjectid) + "_" + str(sessionid)
        BRAINSCreateLabelMapNode = pe.Node(
            interface=BRAINSCreateLabelMapFromProbabilityMaps(),
            name=currentBRAINSCreateLabelMapName)

        ## TODO:  Fix the file names
        BRAINSCreateLabelMapNode.inputs.dirtyLabelVolume = 'fixed_headlabels_seg.nii.gz'
        BRAINSCreateLabelMapNode.inputs.cleanLabelVolume = 'fixed_brainlabels_seg.nii.gz'

        baw201.connect([
            (FixWMNode, BRAINSCreateLabelMapNode,
             [('UpdatedPosteriorsList', 'inputProbabilityVolume'),
              ('MatchingFGCodeList', 'foregroundPriors'),
              ('MatchingLabelList', 'priorLabelCodes'),
              ('nonAirRegionMask', 'nonAirRegionMask')]),
            (
                BRAINSCreateLabelMapNode,
                DataSink,
                [  # brainstem code below replaces this ('cleanLabelVolume', 'TissueClassify.@outputLabels'),
                    ('dirtyLabelVolume', 'TissueClassify.@outputHeadLabels')
                ]),
            (myLocalTCWF, DataSink,
             [('outputspec.atlasToSubjectTransform',
               'TissueClassify.@atlas2session_tx'),
              ('outputspec.atlasToSubjectInverseTransform',
               'TissueClassify.@atlas2sessionInverse_tx')]),
            (FixWMNode, DataSink, [('UpdatedPosteriorsList',
                                    'TissueClassify.@posteriors')]),
        ])

        currentAccumulateLikeTissuePosteriorsName = 'AccumulateLikeTissuePosteriors_' + str(
            subjectid) + "_" + str(sessionid)
        AccumulateLikeTissuePosteriorsNode = pe.Node(
            interface=Function(
                function=AccumulateLikeTissuePosteriors,
                input_names=['posteriorImages'],
                output_names=['AccumulatePriorsList',
                              'AccumulatePriorsNames']),
            name=currentAccumulateLikeTissuePosteriorsName)

        baw201.connect([
            (FixWMNode, AccumulateLikeTissuePosteriorsNode,
             [('UpdatedPosteriorsList', 'posteriorImages')]),
            (AccumulateLikeTissuePosteriorsNode, DataSink,
             [('AccumulatePriorsList',
               'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir'
               )])
        ])
        """
        brain stem adds on feature
        inputs:
            - landmark (fcsv) file
            - fixed brainlabels seg.nii.gz
        output:
            - complete_brainlabels_seg.nii.gz Segmentation
        """
        myLocalBrainStemWF = CreateBrainstemWorkflow(
            "BrainStem", master_config['queue'],
            "complete_brainlabels_seg.nii.gz")

        baw201.connect([(myLocalLMIWF, myLocalBrainStemWF,
                         [('outputspec.outputLandmarksInACPCAlignedSpace',
                           'inputspec.inputLandmarkFilename')]),
                        (BRAINSCreateLabelMapNode, myLocalBrainStemWF,
                         [('cleanLabelVolume',
                           'inputspec.inputTissueLabelFilename')])])

        baw201.connect(myLocalBrainStemWF,
                       'outputspec.ouputTissuelLabelFilename', DataSink,
                       'TissueClassify.@complete_brainlabels_seg')

    ###########################
    do_BRAINSCut_Segmentation = DetermineIfSegmentationShouldBeDone(
        master_config)
    if do_BRAINSCut_Segmentation:
        from workflows.segmentation import segmentation
        from workflows.WorkupT1T2BRAINSCut import GenerateWFName

        sname = 'segmentation'
        segWF = segmentation(projectid,
                             subjectid,
                             sessionid,
                             master_config,
                             onlyT1,
                             pipeline_name=sname)

        baw201.connect([(inputsSpec, segWF, [('template_t1',
                                              'inputspec.template_t1')])])
        atlasBCUTNode_W = pe.Node(interface=nio.DataGrabber(
            infields=['subject'],
            outfields=[
                "l_accumben_ProbabilityMap", "r_accumben_ProbabilityMap",
                "l_caudate_ProbabilityMap", "r_caudate_ProbabilityMap",
                "l_globus_ProbabilityMap", "r_globus_ProbabilityMap",
                "l_hippocampus_ProbabilityMap", "r_hippocampus_ProbabilityMap",
                "l_putamen_ProbabilityMap", "r_putamen_ProbabilityMap",
                "l_thalamus_ProbabilityMap", "r_thalamus_ProbabilityMap",
                "phi", "rho", "theta"
            ]),
                                  name='PerSubject_atlasBCUTNode_W')
        atlasBCUTNode_W.inputs.base_directory = master_config['previousresult']
        atlasBCUTNode_W.inputs.subject = subjectid
        atlasBCUTNode_W.inputs.field_template = {
            'l_accumben_ProbabilityMap':
            '%s/Atlas/AVG_l_accumben_ProbabilityMap.nii.gz',
            'r_accumben_ProbabilityMap':
            '%s/Atlas/AVG_r_accumben_ProbabilityMap.nii.gz',
            'l_caudate_ProbabilityMap':
            '%s/Atlas/AVG_l_caudate_ProbabilityMap.nii.gz',
            'r_caudate_ProbabilityMap':
            '%s/Atlas/AVG_r_caudate_ProbabilityMap.nii.gz',
            'l_globus_ProbabilityMap':
            '%s/Atlas/AVG_l_globus_ProbabilityMap.nii.gz',
            'r_globus_ProbabilityMap':
            '%s/Atlas/AVG_r_globus_ProbabilityMap.nii.gz',
            'l_hippocampus_ProbabilityMap':
            '%s/Atlas/AVG_l_hippocampus_ProbabilityMap.nii.gz',
            'r_hippocampus_ProbabilityMap':
            '%s/Atlas/AVG_r_hippocampus_ProbabilityMap.nii.gz',
            'l_putamen_ProbabilityMap':
            '%s/Atlas/AVG_l_putamen_ProbabilityMap.nii.gz',
            'r_putamen_ProbabilityMap':
            '%s/Atlas/AVG_r_putamen_ProbabilityMap.nii.gz',
            'l_thalamus_ProbabilityMap':
            '%s/Atlas/AVG_l_thalamus_ProbabilityMap.nii.gz',
            'r_thalamus_ProbabilityMap':
            '%s/Atlas/AVG_r_thalamus_ProbabilityMap.nii.gz',
            'phi': '%s/Atlas/AVG_phi.nii.gz',
            'rho': '%s/Atlas/AVG_rho.nii.gz',
            'theta': '%s/Atlas/AVG_theta.nii.gz'
        }
        atlasBCUTNode_W.inputs.template_args = {
            'l_accumben_ProbabilityMap': [['subject']],
            'r_accumben_ProbabilityMap': [['subject']],
            'l_caudate_ProbabilityMap': [['subject']],
            'r_caudate_ProbabilityMap': [['subject']],
            'l_globus_ProbabilityMap': [['subject']],
            'r_globus_ProbabilityMap': [['subject']],
            'l_hippocampus_ProbabilityMap': [['subject']],
            'r_hippocampus_ProbabilityMap': [['subject']],
            'l_putamen_ProbabilityMap': [['subject']],
            'r_putamen_ProbabilityMap': [['subject']],
            'l_thalamus_ProbabilityMap': [['subject']],
            'r_thalamus_ProbabilityMap': [['subject']],
            'phi': [['subject']],
            'rho': [['subject']],
            'theta': [['subject']]
        }
        atlasBCUTNode_W.inputs.template = '*'
        atlasBCUTNode_W.inputs.sort_filelist = True
        atlasBCUTNode_W.inputs.raise_on_empty = True

        baw201.connect([(atlasBCUTNode_W, segWF, [
            ('rho', 'inputspec.rho'), ('phi', 'inputspec.phi'),
            ('theta', 'inputspec.theta'),
            ('l_caudate_ProbabilityMap', 'inputspec.l_caudate_ProbabilityMap'),
            ('r_caudate_ProbabilityMap', 'inputspec.r_caudate_ProbabilityMap'),
            ('l_hippocampus_ProbabilityMap',
             'inputspec.l_hippocampus_ProbabilityMap'),
            ('r_hippocampus_ProbabilityMap',
             'inputspec.r_hippocampus_ProbabilityMap'),
            ('l_putamen_ProbabilityMap', 'inputspec.l_putamen_ProbabilityMap'),
            ('r_putamen_ProbabilityMap', 'inputspec.r_putamen_ProbabilityMap'),
            ('l_thalamus_ProbabilityMap',
             'inputspec.l_thalamus_ProbabilityMap'),
            ('r_thalamus_ProbabilityMap',
             'inputspec.r_thalamus_ProbabilityMap'),
            ('l_accumben_ProbabilityMap',
             'inputspec.l_accumben_ProbabilityMap'),
            ('r_accumben_ProbabilityMap',
             'inputspec.r_accumben_ProbabilityMap'),
            ('l_globus_ProbabilityMap', 'inputspec.l_globus_ProbabilityMap'),
            ('r_globus_ProbabilityMap', 'inputspec.r_globus_ProbabilityMap')
        ])])

        atlasBCUTNode_S = MakeAtlasNode(atlas_static_directory,
                                        'BBCUTAtlas_S{0}'.format(sessionid),
                                        ['S_BRAINSCutSupport'])
        baw201.connect(atlasBCUTNode_S, 'trainModelFile_txtD0060NT0060_gz',
                       segWF, 'inputspec.trainModelFile_txtD0060NT0060_gz')

        ## baw201_outputspec = baw201.get_node('outputspec')
        baw201.connect([
            (myLocalTCWF, segWF,
             [('outputspec.t1_average', 'inputspec.t1_average'),
              ('outputspec.atlasToSubjectRegistrationState',
               'inputspec.atlasToSubjectRegistrationState'),
              ('outputspec.outputLabels', 'inputspec.inputLabels'),
              ('outputspec.posteriorImages', 'inputspec.posteriorImages'),
              ('outputspec.outputHeadLabels', 'inputspec.inputHeadLabels')]),
            (myLocalLMIWF, segWF, [('outputspec.atlasToSubjectTransform',
                                    'inputspec.LMIatlasToSubject_tx')]),
            (FixWMNode, segWF, [('UpdatedPosteriorsList',
                                 'inputspec.UpdatedPosteriorsList')]),
        ])
        if not onlyT1:
            baw201.connect([(myLocalTCWF, segWF, [('outputspec.t2_average',
                                                   'inputspec.t2_average')])])

    if 'warp_atlas_to_subject' in master_config['components']:
        ##
        ##~/src/NEP-build/bin/BRAINSResample
        # --warpTransform AtlasToSubjectPreBABC_Composite.h5
        #  --inputVolume  /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/Atlas/hncma-atlas.nii.gz
        #  --referenceVolume  /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/singleSession_KID1_KT1/LandmarkInitialize/BROIAuto_cropped/Cropped_BCD_ACPC_Aligned.nii.gz
        # !--outputVolume hncma.nii.gz
        # !--interpolationMode NearestNeighbor
        # !--pixelType short
        ##
        ##

        ## TODO : SHOULD USE BRAINSCut transform that was refined even further!

        BResample = dict()
        AtlasLabelMapsToResample = [
            'hncma_atlas',
            'template_WMPM2_labels',
            'template_nac_labels',
        ]

        for atlasImage in AtlasLabelMapsToResample:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(),
                                            name="BRAINSResample_" +
                                            atlasImage)
            BResample[atlasImage].plugin_args = {
                'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                'overwrite': True
            }
            BResample[atlasImage].inputs.pixelType = 'short'
            BResample[atlasImage].inputs.interpolationMode = 'NearestNeighbor'
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average',
                           BResample[atlasImage], 'referenceVolume')
            baw201.connect(inputsSpec, atlasImage, BResample[atlasImage],
                           'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink,
                           'WarpedAtlas2Subject.@' + atlasImage)

        AtlasBinaryMapsToResample = [
            'template_rightHemisphere', 'template_leftHemisphere',
            'template_ventricles'
        ]

        for atlasImage in AtlasBinaryMapsToResample:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(),
                                            name="BRAINSResample_" +
                                            atlasImage)
            BResample[atlasImage].plugin_args = {
                'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                'overwrite': True
            }
            BResample[atlasImage].inputs.pixelType = 'binary'
            ## Conversion to distance map, so use linear to resample distance map
            BResample[atlasImage].inputs.interpolationMode = 'Linear'
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average',
                           BResample[atlasImage], 'referenceVolume')
            baw201.connect(inputsSpec, atlasImage, BResample[atlasImage],
                           'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink,
                           'WarpedAtlas2Subject.@' + atlasImage)

        BRAINSCutAtlasImages = [
            'rho', 'phi', 'theta', 'l_caudate_ProbabilityMap',
            'r_caudate_ProbabilityMap', 'l_hippocampus_ProbabilityMap',
            'r_hippocampus_ProbabilityMap', 'l_putamen_ProbabilityMap',
            'r_putamen_ProbabilityMap', 'l_thalamus_ProbabilityMap',
            'r_thalamus_ProbabilityMap', 'l_accumben_ProbabilityMap',
            'r_accumben_ProbabilityMap', 'l_globus_ProbabilityMap',
            'r_globus_ProbabilityMap'
        ]
        for atlasImage in BRAINSCutAtlasImages:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(),
                                            name="BCUTBRAINSResample_" +
                                            atlasImage)
            BResample[atlasImage].plugin_args = {
                'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                'overwrite': True
            }
            BResample[atlasImage].inputs.pixelType = 'float'
            ## Conversion to distance map, so use linear to resample distance map
            BResample[atlasImage].inputs.interpolationMode = 'Linear'
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average',
                           BResample[atlasImage], 'referenceVolume')
            baw201.connect(atlasBCUTNode_W, atlasImage, BResample[atlasImage],
                           'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink,
                           'WarpedAtlas2Subject.@' + atlasImage)

        WhiteMatterHemisphereNode = pe.Node(interface=Function(
            function=CreateLeftRightWMHemispheres,
            input_names=[
                'BRAINLABELSFile', 'HDCMARegisteredVentricleMaskFN',
                'LeftHemisphereMaskName', 'RightHemisphereMaskName',
                'WM_LeftHemisphereFileName', 'WM_RightHemisphereFileName'
            ],
            output_names=[
                'WM_LeftHemisphereFileName', 'WM_RightHemisphereFileName'
            ]),
                                            name="WhiteMatterHemisphere")
        WhiteMatterHemisphereNode.inputs.WM_LeftHemisphereFileName = "left_hemisphere_wm.nii.gz"
        WhiteMatterHemisphereNode.inputs.WM_RightHemisphereFileName = "right_hemisphere_wm.nii.gz"

        baw201.connect(myLocalBrainStemWF,
                       'outputspec.ouputTissuelLabelFilename',
                       WhiteMatterHemisphereNode, 'BRAINLABELSFile')
        baw201.connect(BResample['hncma_atlas'], 'outputVolume',
                       WhiteMatterHemisphereNode,
                       'HDCMARegisteredVentricleMaskFN')
        baw201.connect(BResample['template_leftHemisphere'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'LeftHemisphereMaskName')
        baw201.connect(BResample['template_rightHemisphere'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'RightHemisphereMaskName')

        baw201.connect(WhiteMatterHemisphereNode, 'WM_LeftHemisphereFileName',
                       DataSink, 'WarpedAtlas2Subject.@LeftHemisphereWM')
        baw201.connect(WhiteMatterHemisphereNode, 'WM_RightHemisphereFileName',
                       DataSink, 'WarpedAtlas2Subject.@RightHemisphereWM')

    if 'malf_2012_neuro' in master_config['components']:  ## HACK Do MALF labeling
        good_subjects = [
            '1001', '1004', '1005', '1011', '1012', '1018', '1019', '1102',
            '1103', '1104', '1120', '1129', '1009', '1010', '1013', '1014',
            '1036', '1109', '1117', '1122'
        ]

        ## HACK FOR NOW SHOULD BE MORE ELEGANT FROM THE .config file
        BASE_DATA_GRABBER_DIR = '/Shared/johnsonhj/HDNI/Neuromorphometrics/20141116_Neuromorphometrics_base_Results/Neuromorphometrics/2012Subscription'

        myLocalMALF = CreateMALFWorkflow("MALF", master_config, good_subjects,
                                         BASE_DATA_GRABBER_DIR)
        baw201.connect(myLocalTCWF, 'outputspec.t1_average', myLocalMALF,
                       'inputspec.subj_t1_image')
        baw201.connect(myLocalLMIWF,
                       'outputspec.outputLandmarksInACPCAlignedSpace',
                       myLocalMALF, 'inputspec.subj_lmks')
        baw201.connect(atlasBCDNode_S, 'template_weights_50Lmks_wts',
                       myLocalMALF, 'inputspec.atlasWeightFilename')
        baw201.connect(myLocalMALF, 'outputspec.MALF_neuro2012_labelmap',
                       DataSink, 'TissueClassify.@MALF_neuro2012_labelmap')

    return baw201
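
A hypothetical invocation sketch: the master_config keys below are the ones
this function reads, and every path and value is a placeholder.

# Placeholder experiment configuration; only the keys read by this function.
master_config = {
    'components': ['denoise', 'landmark', 'auxlmk', 'tissue_classify'],
    'workflow_phase': 'atlas-based-reference',
    'ds_overwrite': True,
    'resultdir': '/output/results',
    'atlascache': '/output/atlas_cache',
    'previousresult': '/output/previous_experiment',
    'queue': 'all.q',
    'use_registration_masking': True,
}
baw_wf = generate_single_session_template_WF(
    projectid='PROJ01', subjectid='0001', sessionid='12345',
    onlyT1=True, master_config=master_config,
    phase='atlas-based-reference', interpMode='Linear',
    pipeline_name='singleSession_0001_12345')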
Example 5
def temporal_variance_mask(threshold, by_slice=False):
    """Build a workflow that thresholds the temporal variance of a functional
    image (optionally slice-by-slice) to produce a binary mask."""
    threshold_method = "VAR"
    threshold_value = threshold

    if isinstance(threshold, str):
        regex_match = {
            "SD": r"([0-9]+(\.[0-9]+)?)\s*SD",
            "PCT": r"([0-9]+(\.[0-9]+)?)\s*PCT",
        }

        for method, regex in regex_match.items():
            matched = re.match(regex, threshold)
            if matched:
                threshold_method = method
                threshold_value = matched.groups()[0]

    try:
        threshold_value = float(threshold_value)
    except (TypeError, ValueError):
        raise ValueError(
            "Error converting threshold value {0} from {1} to a "
            "floating point number. The threshold value can "
            "contain SD or PCT for selecting a threshold based on "
            "the variance distribution, otherwise it should be a "
            "floating point number.".format(threshold_value, threshold))

    if threshold_value < 0:
        raise ValueError(
            "Threshold value should be positive, instead of {0}.".format(
                threshold_value))

    if threshold_method == "PCT" and threshold_value >= 100.0:
        raise ValueError(
            "Percentile should be less than 100, received {0}.".format(
                threshold_value))

    threshold = threshold_value

    wf = pe.Workflow(name='tcompcor')

    input_node = pe.Node(util.IdentityInterface(
        fields=['functional_file_path', 'mask_file_path']),
                         name='inputspec')
    output_node = pe.Node(util.IdentityInterface(fields=['mask']),
                          name='outputspec')

    detrend = pe.Node(afni.Detrend(args='-polort 1', outputtype='NIFTI'),
                      name='detrend')
    wf.connect(input_node, 'functional_file_path', detrend, 'in_file')

    std = pe.Node(afni.TStat(args='-nzstdev', outputtype='NIFTI'), name='std')
    wf.connect(input_node, 'mask_file_path', std, 'mask')
    wf.connect(detrend, 'out_file', std, 'in_file')

    var = pe.Node(afni.Calc(expr='a*a', outputtype='NIFTI'), name='var')
    wf.connect(std, 'out_file', var, 'in_file_a')

    if by_slice:
        slices = pe.Node(fsl.Slice(), name='slicer')
        wf.connect(var, 'out_file', slices, 'in_file')

        mask_slices = pe.Node(fsl.Slice(), name='mask_slicer')
        wf.connect(input_node, 'mask_file_path', mask_slices, 'in_file')

        mapper = pe.MapNode(
            util.IdentityInterface(fields=['out_file', 'mask_file']),
            name='slice_mapper',
            iterfield=['out_file', 'mask_file'])
        wf.connect(slices, 'out_files', mapper, 'out_file')
        wf.connect(mask_slices, 'out_files', mapper, 'mask_file')

    else:
        mapper_list = pe.Node(util.Merge(1), name='slice_mapper_list')
        wf.connect(var, 'out_file', mapper_list, 'in1')

        mask_mapper_list = pe.Node(util.Merge(1),
                                   name='slice_mask_mapper_list')
        wf.connect(input_node, 'mask_file_path', mask_mapper_list, 'in1')

        mapper = pe.Node(
            util.IdentityInterface(fields=['out_file', 'mask_file']),
            name='slice_mapper')
        wf.connect(mapper_list, 'out', mapper, 'out_file')
        wf.connect(mask_mapper_list, 'out', mapper, 'mask_file')

    if threshold_method == "PCT":
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold_pct'],
            output_names=['threshold'],
            function=compute_pct_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold_pct = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    elif threshold_method == "SD":
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold_sd'],
            output_names=['threshold'],
            function=compute_sd_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold_sd = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    else:
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold'],
            output_names=['threshold'],
            function=compute_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    threshold_mask = pe.MapNode(interface=fsl.maths.Threshold(),
                                name='threshold',
                                iterfield=['in_file', 'thresh'])
    threshold_mask.inputs.args = '-bin'
    wf.connect(mapper, 'out_file', threshold_mask, 'in_file')
    wf.connect(threshold_node, 'threshold', threshold_mask, 'thresh')

    merge_slice_masks = pe.Node(interface=fsl.Merge(),
                                name='merge_slice_masks')
    merge_slice_masks.inputs.dimension = 'z'
    wf.connect(threshold_mask, 'out_file', merge_slice_masks, 'in_files')

    wf.connect(merge_slice_masks, 'merged_file', output_node, 'mask')

    return wf
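
A minimal sketch of building and wiring the mask workflow (file names are
placeholders); '98PCT' selects the PCT branch above, thresholding at the
98th percentile of the temporal variance:

tvar_wf = temporal_variance_mask('98PCT', by_slice=False)
tvar_wf.inputs.inputspec.functional_file_path = 'func_preproc.nii.gz'
tvar_wf.inputs.inputspec.mask_file_path = 'func_brain_mask.nii.gz'
# The binary mask is exposed on the workflow's outputspec.mask output.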
Example 6
def create_get_stats_flow(name='getstats', withreg=False):
    """Retrieves stats from labels

    Parameters
    ----------

    name : string
        name of workflow
    withreg : boolean
        indicates whether to register source to label

    Example
    -------
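
    A minimal invocation (file names are placeholders):

    >>> stats = create_get_stats_flow()
    >>> stats.inputs.inputspec.source_file = 'mean.nii'
    >>> stats.inputs.inputspec.label_file = 'aparc+aseg.nii'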


    Inputs::

           inputspec.source_file : reference image for mask generation
           inputspec.label_file : label file from which to get ROIs

           (optionally with registration)
           inputspec.reg_file : bbregister file (assumes registration from source to label)
           inputspec.inverse : boolean whether to invert the registration
           inputspec.subjects_dir : freesurfer subjects directory

    Outputs::

           outputspec.stats_file : stats file
    """
    """
    Initialize the workflow
    """

    getstats = pe.Workflow(name=name)
    """
    Define the inputs to the workflow.
    """

    if withreg:
        inputnode = pe.Node(niu.IdentityInterface(
            fields=['source_file', 'label_file', 'reg_file', 'subjects_dir']),
                            name='inputspec')
    else:
        inputnode = pe.Node(
            niu.IdentityInterface(fields=['source_file', 'label_file']),
            name='inputspec')

    statnode = pe.MapNode(fs.SegStats(),
                          iterfield=['segmentation_file', 'in_file'],
                          name='segstats')
    """
    Convert between source and label spaces if registration info is provided

    """
    if withreg:
        voltransform = pe.MapNode(fs.ApplyVolTransform(inverse=True),
                                  iterfield=['source_file', 'reg_file'],
                                  name='transform')
        getstats.connect(inputnode, 'reg_file', voltransform, 'reg_file')
        getstats.connect(inputnode, 'source_file', voltransform, 'source_file')
        getstats.connect(inputnode, 'label_file', voltransform, 'target_file')
        getstats.connect(inputnode, 'subjects_dir', voltransform,
                         'subjects_dir')

        def switch_labels(inverse, transform_output, source_file, label_file):
            if inverse:
                return transform_output, source_file
            else:
                return label_file, transform_output

        chooser = pe.MapNode(niu.Function(
            input_names=[
                'inverse', 'transform_output', 'source_file', 'label_file'
            ],
            output_names=['label_file', 'source_file'],
            function=switch_labels),
                             iterfield=['transform_output', 'source_file'],
                             name='chooser')
        getstats.connect(inputnode, 'source_file', chooser, 'source_file')
        getstats.connect(inputnode, 'label_file', chooser, 'label_file')
        getstats.connect(inputnode, 'inverse', chooser, 'inverse')
        getstats.connect(voltransform, 'transformed_file', chooser,
                         'transform_output')
        getstats.connect(chooser, 'label_file', statnode, 'segmentation_file')
        getstats.connect(chooser, 'source_file', statnode, 'in_file')
    else:
        getstats.connect(inputnode, 'label_file', statnode,
                         'segmentation_file')
        getstats.connect(inputnode, 'source_file', statnode, 'in_file')
    """
    Set up an outputnode that exposes the relevant outputs of the workflow.
    """

    outputnode = pe.Node(niu.IdentityInterface(fields=["stats_file"]),
                         name="outputspec")
    getstats.connect([
        (statnode, outputnode, [("summary_file", "stats_file")]),
    ])
    return getstats
Esempio n. 7
0
def create_getmask_flow(name='getmask', dilate_mask=True):
    """Registers a source file to freesurfer space and create a brain mask in
    source space

    Requires fsl tools for initializing registration

    Parameters
    ----------

    name : string
        name of workflow
    dilate_mask : boolean
        indicates whether to dilate mask or not

    Example
    -------

    >>> getmask = create_getmask_flow()
    >>> getmask.inputs.inputspec.source_file = 'mean.nii'
    >>> getmask.inputs.inputspec.subject_id = 's1'
    >>> getmask.inputs.inputspec.subjects_dir = '.'
    >>> getmask.inputs.inputspec.contrast_type = 't2'


    Inputs::

           inputspec.source_file : reference image for mask generation
           inputspec.subject_id : freesurfer subject id
           inputspec.subjects_dir : freesurfer subjects directory
           inputspec.contrast_type : MR contrast of reference image

    Outputs::

           outputspec.mask_file : binary mask file in reference image space
           outputspec.reg_file : registration file that maps reference image to
                                 freesurfer space
           outputspec.reg_cost : cost of registration (useful for detecting misalignment)
    """
    """
    Initialize the workflow
    """

    getmask = pe.Workflow(name=name)
    """
    Define the inputs to the workflow.
    """

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['source_file', 'subject_id', 'subjects_dir', 'contrast_type']),
                        name='inputspec')
    """
    Define all the nodes of the workflow:

    fssource: used to retrieve aseg.mgz
    threshold : binarize aseg
    register : coregister source file to freesurfer space
    voltransform: convert binarized aseg to source file space
    """

    fssource = pe.Node(nio.FreeSurferSource(), name='fssource')
    threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'), name='threshold')
    register = pe.MapNode(fs.BBRegister(init='fsl'),
                          iterfield=['source_file'],
                          name='register')
    voltransform = pe.MapNode(fs.ApplyVolTransform(inverse=True),
                              iterfield=['source_file', 'reg_file'],
                              name='transform')
    """
    Connect the nodes
    """

    getmask.connect([
        (inputnode, fssource, [('subject_id', 'subject_id'),
                               ('subjects_dir', 'subjects_dir')]),
        (inputnode, register, [('source_file', 'source_file'),
                               ('subject_id', 'subject_id'),
                               ('subjects_dir', 'subjects_dir'),
                               ('contrast_type', 'contrast_type')]),
        (inputnode, voltransform, [('subjects_dir', 'subjects_dir'),
                                   ('source_file', 'source_file')]),
        (fssource, threshold, [(('aparc_aseg', get_aparc_aseg), 'in_file')]),
        (register, voltransform, [('out_reg_file', 'reg_file')]),
        (threshold, voltransform, [('binary_file', 'target_file')])
    ])
    """
    Add remaining nodes and connections

    dilate : dilate the transformed file in source space
    threshold2 : binarize transformed file
    """

    threshold2 = pe.MapNode(fs.Binarize(min=0.5, out_type='nii'),
                            iterfield=['in_file'],
                            name='threshold2')
    if dilate_mask:
        threshold2.inputs.dilate = 1
    getmask.connect([(voltransform, threshold2, [('transformed_file',
                                                  'in_file')])])
    """
    Set up an outputnode that exposes the relevant outputs of the workflow.
    """

    outputnode = pe.Node(
        niu.IdentityInterface(fields=["mask_file", "reg_file", "reg_cost"]),
        name="outputspec")
    getmask.connect([
        (register, outputnode, [("out_reg_file", "reg_file")]),
        (register, outputnode, [("min_cost_file", "reg_cost")]),
        (threshold2, outputnode, [("binary_file", "mask_file")]),
    ])
    return getmask
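A minimal usage sketch for this workflow (file names, subject ID and
SUBJECTS_DIR are hypothetical; a populated FreeSurfer subjects directory is
assumed):

    getmask = create_getmask_flow(name='getmask', dilate_mask=True)
    getmask.inputs.inputspec.source_file = 'mean_func.nii.gz'  # hypothetical reference image
    getmask.inputs.inputspec.subject_id = 's1'                 # hypothetical FreeSurfer subject
    getmask.inputs.inputspec.subjects_dir = '/data/subjects'   # hypothetical SUBJECTS_DIR
    getmask.inputs.inputspec.contrast_type = 't2'              # BOLD EPI is roughly T2-weighted
    getmask.run()  # outputs: outputspec.mask_file, reg_file, reg_cost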
def baw_ants_registration_template_build_single_iteration_wf(
    iterationPhasePrefix, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG
):
    """

    Inputs::

           inputspec.images :
           inputspec.fixed_image :
           inputspec.ListOfPassiveImagesDictionaries :
           inputspec.interpolationMapping :

    Outputs::

           outputspec.template :
           outputspec.transforms_list :
           outputspec.passive_deformed_templates :
    """
    TemplateBuildSingleIterationWF = pe.Workflow(
        name="antsRegistrationTemplateBuildSingleIterationWF_"
        + str(iterationPhasePrefix)
    )

    inputSpec = pe.Node(
        interface=util.IdentityInterface(
            fields=[
                "ListOfImagesDictionaries",
                "registrationImageTypes",
                # 'maskRegistrationImageType',
                "interpolationMapping",
                "fixed_image",
            ]
        ),
        run_without_submitting=True,
        name="inputspec",
    )
    ## HACK: INFO: We need to have the AVG_AIR.nii.gz be warped with a default voxel value of 1.0
    ## HACK: INFO: Need to move all local functions to a common utility file, or to the top of this file, so that
    ##             they do not change due to re-indenting.  Otherwise re-indenting for flow control will trigger
    ##             their hash to change.
    ## HACK: INFO: REMOVE 'transforms_list'; it is not used.  That will change all the hashes.
    ## HACK: INFO: Need to run all python files through the code beautifiers.  It has gotten pretty ugly.
    outputSpec = pe.Node(
        interface=util.IdentityInterface(
            fields=["template", "transforms_list", "passive_deformed_templates"]
        ),
        run_without_submitting=True,
        name="outputspec",
    )

    ### NOTE: MapNode! Warp each of the original images to the provided fixed_image (the current template)
    BeginANTS = pe.MapNode(
        interface=Registration(), name="BeginANTS", iterfield=["moving_image"]
    )
    # SEE template.py many_cpu_BeginANTS_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,4,2,8), 'overwrite': True}
    ## This is set in the template.py file BeginANTS.plugin_args = BeginANTS_cpu_sge_options_dictionary
    common_ants_registration_settings(
        antsRegistrationNode=BeginANTS,
        registrationTypeDescription="SixStageAntsRegistrationT1Only",
        output_transform_prefix=str(iterationPhasePrefix) + "_tfm",
        output_warped_image="atlas2subject.nii.gz",
        output_inverse_warped_image="subject2atlas.nii.gz",
        save_state="SavedantsRegistrationNodeSyNState.h5",
        invert_initial_moving_transform=False,
        initial_moving_transform=None,
    )

    GetMovingImagesNode = pe.Node(
        interface=util.Function(
            function=get_moving_images,
            input_names=[
                "ListOfImagesDictionaries",
                "registrationImageTypes",
                "interpolationMapping",
            ],
            output_names=["moving_images", "moving_interpolation_type"],
        ),
        run_without_submitting=True,
        name="99_GetMovingImagesNode",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "ListOfImagesDictionaries",
        GetMovingImagesNode,
        "ListOfImagesDictionaries",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "registrationImageTypes",
        GetMovingImagesNode,
        "registrationImageTypes",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec, "interpolationMapping", GetMovingImagesNode, "interpolationMapping"
    )

    TemplateBuildSingleIterationWF.connect(
        GetMovingImagesNode, "moving_images", BeginANTS, "moving_image"
    )
    TemplateBuildSingleIterationWF.connect(
        GetMovingImagesNode, "moving_interpolation_type", BeginANTS, "interpolation"
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec, "fixed_image", BeginANTS, "fixed_image"
    )

    ## Now warp all the input images
    wimtdeformed = pe.MapNode(
        interface=ApplyTransforms(),
        iterfield=["transforms", "input_image"],
        # iterfield=['transforms', 'invert_transform_flags', 'input_image'],
        name="wimtdeformed",
    )
    wimtdeformed.inputs.interpolation = "Linear"
    wimtdeformed.inputs.default_value = 0
    # HACK: Should try using forward_composite_transform
    ##PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transform', wimtdeformed, 'transforms')
    TemplateBuildSingleIterationWF.connect(
        BeginANTS, "composite_transform", wimtdeformed, "transforms"
    )
    ##PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', wimtdeformed, 'invert_transform_flags')
    ## NOTE: forward_invert_flags:: List of flags corresponding to the forward transforms
    # wimtdeformed.inputs.invert_transform_flags = [False,False,False,False,False]
    TemplateBuildSingleIterationWF.connect(
        GetMovingImagesNode, "moving_images", wimtdeformed, "input_image"
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec, "fixed_image", wimtdeformed, "reference_image"
    )

    ## Shape Update Next =====
    ## Now average all deformed input images together to create an updated template average
    AvgDeformedImages = pe.Node(interface=AverageImages(), name="AvgDeformedImages")
    AvgDeformedImages.inputs.dimension = 3
    AvgDeformedImages.inputs.output_average_image = (
        str(iterationPhasePrefix) + ".nii.gz"
    )
    AvgDeformedImages.inputs.normalize = True
    TemplateBuildSingleIterationWF.connect(
        wimtdeformed, "output_image", AvgDeformedImages, "images"
    )

    ## Now average all affine transforms together
    AvgAffineTransform = pe.Node(
        interface=AverageAffineTransform(), name="AvgAffineTransform"
    )
    AvgAffineTransform.inputs.dimension = 3
    AvgAffineTransform.inputs.output_affine_transform = (
        "Average_" + str(iterationPhasePrefix) + "_Affine.h5"
    )

    SplitCompositeTransform = pe.MapNode(
        interface=util.Function(
            function=split_composite_to_component_transform,
            input_names=["transformFilename"],
            output_names=["affine_component_list", "warp_component_list"],
        ),
        iterfield=["transformFilename"],
        run_without_submitting=True,
        name="99_SplitCompositeTransform",
    )
    TemplateBuildSingleIterationWF.connect(
        BeginANTS, "composite_transform", SplitCompositeTransform, "transformFilename"
    )
    ## PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', SplitCompositeTransform, 'transformFilename')
    TemplateBuildSingleIterationWF.connect(
        SplitCompositeTransform,
        "affine_component_list",
        AvgAffineTransform,
        "transforms",
    )

    ## Now average the warp fields together
    AvgWarpImages = pe.Node(interface=AverageImages(), name="AvgWarpImages")
    AvgWarpImages.inputs.dimension = 3
    AvgWarpImages.inputs.output_average_image = (
        str(iterationPhasePrefix) + "warp.nii.gz"
    )
    AvgWarpImages.inputs.normalize = True
    TemplateBuildSingleIterationWF.connect(
        SplitCompositeTransform, "warp_component_list", AvgWarpImages, "images"
    )

    ## Now scale the average warp field by the negative gradient step
    ## INFO:  For now GradientStep is set to 0.25 as a hard-coded default value.
    GradientStep = 0.25
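    ## Shape-update rationale (assumed to follow the classic ANTs
    ## buildtemplateparallel rule): the average warp is scaled by
    ## -GradientStep below and, composed with the inverse of the average
    ## affine, applied to the averaged image.  The scaled warp then appears
    ## four times in the final transform list (4 * 0.25 = 1.0), i.e. one
    ## full step along the negative mean deformation, pulling the template
    ## toward the group mean shape.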
    GradientStepWarpImage = pe.Node(
        interface=MultiplyImages(), name="GradientStepWarpImage"
    )
    GradientStepWarpImage.inputs.dimension = 3
    GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep
    GradientStepWarpImage.inputs.output_product_image = (
        "GradientStep0.25_" + str(iterationPhasePrefix) + "_warp.nii.gz"
    )
    TemplateBuildSingleIterationWF.connect(
        AvgWarpImages, "output_average_image", GradientStepWarpImage, "first_input"
    )

    ## Now create the new template shape based on the average of all deformed images
    UpdateTemplateShape = pe.Node(
        interface=ApplyTransforms(), name="UpdateTemplateShape"
    )
    UpdateTemplateShape.inputs.invert_transform_flags = [True]
    UpdateTemplateShape.inputs.interpolation = "Linear"
    UpdateTemplateShape.inputs.default_value = 0

    TemplateBuildSingleIterationWF.connect(
        AvgDeformedImages,
        "output_average_image",
        UpdateTemplateShape,
        "reference_image",
    )
    TemplateBuildSingleIterationWF.connect(
        [
            (
                AvgAffineTransform,
                UpdateTemplateShape,
                [(("affine_transform", make_list_of_one_element), "transforms")],
            )
        ]
    )
    TemplateBuildSingleIterationWF.connect(
        GradientStepWarpImage,
        "output_product_image",
        UpdateTemplateShape,
        "input_image",
    )

    ApplyInvAverageAndFourTimesGradientStepWarpImage = pe.Node(
        interface=util.Function(
            function=make_transform_list_with_gradient_warps,
            input_names=["averageAffineTranform", "gradientStepWarp"],
            output_names=["TransformListWithGradientWarps"],
        ),
        run_without_submitting=True,
        name="99_MakeTransformListWithGradientWarps",
    )
    # ApplyInvAverageAndFourTimesGradientStepWarpImage.inputs.ignore_exception = True

    TemplateBuildSingleIterationWF.connect(
        AvgAffineTransform,
        "affine_transform",
        ApplyInvAverageAndFourTimesGradientStepWarpImage,
        "averageAffineTranform",
    )
    TemplateBuildSingleIterationWF.connect(
        UpdateTemplateShape,
        "output_image",
        ApplyInvAverageAndFourTimesGradientStepWarpImage,
        "gradientStepWarp",
    )

    ReshapeAverageImageWithShapeUpdate = pe.Node(
        interface=ApplyTransforms(), name="ReshapeAverageImageWithShapeUpdate"
    )
    ReshapeAverageImageWithShapeUpdate.inputs.invert_transform_flags = [
        True,
        False,
        False,
        False,
        False,
    ]
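    # One flag per transform produced by
    # make_transform_list_with_gradient_warps: invert the average affine,
    # then apply the gradient-step warp four times without inversion.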
    ReshapeAverageImageWithShapeUpdate.inputs.interpolation = "Linear"
    ReshapeAverageImageWithShapeUpdate.inputs.default_value = 0
    ReshapeAverageImageWithShapeUpdate.inputs.output_image = (
        "ReshapeAverageImageWithShapeUpdate.nii.gz"
    )
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedImages,
        "output_average_image",
        ReshapeAverageImageWithShapeUpdate,
        "input_image",
    )
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedImages,
        "output_average_image",
        ReshapeAverageImageWithShapeUpdate,
        "reference_image",
    )
    TemplateBuildSingleIterationWF.connect(
        ApplyInvAverageAndFourTimesGradientStepWarpImage,
        "TransformListWithGradientWarps",
        ReshapeAverageImageWithShapeUpdate,
        "transforms",
    )
    TemplateBuildSingleIterationWF.connect(
        ReshapeAverageImageWithShapeUpdate, "output_image", outputSpec, "template"
    )

    ######
    ######
    ######  Process all the passive deformed images in a way similar to the main image used for registration
    ######
    ######
    ######
    ##############################################
    ## Now warp all the ListOfPassiveImagesDictionaries images
    FlattenTransformAndImagesListNode = pe.Node(
        Function(
            function=flatten_transform_and_images_list,
            input_names=[
                "ListOfPassiveImagesDictionaries",
                "transforms",
                "interpolationMapping",
                "invert_transform_flags",
            ],
            output_names=[
                "flattened_images",
                "flattened_transforms",
                "flattened_invert_transform_flags",
                "flattened_image_nametypes",
                "flattened_interpolation_type",
            ],
        ),
        run_without_submitting=True,
        name="99_FlattenTransformAndImagesList",
    )

    GetPassiveImagesNode = pe.Node(
        interface=util.Function(
            function=get_passive_images,
            input_names=["ListOfImagesDictionaries", "registrationImageTypes"],
            output_names=["ListOfPassiveImagesDictionaries"],
        ),
        run_without_submitting=True,
        name="99_GetPassiveImagesNode",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "ListOfImagesDictionaries",
        GetPassiveImagesNode,
        "ListOfImagesDictionaries",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "registrationImageTypes",
        GetPassiveImagesNode,
        "registrationImageTypes",
    )

    TemplateBuildSingleIterationWF.connect(
        GetPassiveImagesNode,
        "ListOfPassiveImagesDictionaries",
        FlattenTransformAndImagesListNode,
        "ListOfPassiveImagesDictionaries",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "interpolationMapping",
        FlattenTransformAndImagesListNode,
        "interpolationMapping",
    )
    TemplateBuildSingleIterationWF.connect(
        BeginANTS,
        "composite_transform",
        FlattenTransformAndImagesListNode,
        "transforms",
    )
    ## FlattenTransformAndImagesListNode.inputs.invert_transform_flags = [False,False,False,False,False,False]
    ## INFO: Please check whether invert_transform_flags has a fixed length.
    ## PREVIOUS TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', FlattenTransformAndImagesListNode, 'invert_transform_flags')
    wimtPassivedeformed = pe.MapNode(
        interface=ApplyTransforms(),
        iterfield=[
            "transforms",
            "invert_transform_flags",
            "input_image",
            "interpolation",
        ],
        name="wimtPassivedeformed",
    )
    wimtPassivedeformed.inputs.default_value = 0
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedImages,
        "output_average_image",
        wimtPassivedeformed,
        "reference_image",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_interpolation_type",
        wimtPassivedeformed,
        "interpolation",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_images",
        wimtPassivedeformed,
        "input_image",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_transforms",
        wimtPassivedeformed,
        "transforms",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_invert_transform_flags",
        wimtPassivedeformed,
        "invert_transform_flags",
    )

    RenestDeformedPassiveImagesNode = pe.Node(
        Function(
            function=renest_deformed_passive_images,
            input_names=[
                "deformedPassiveImages",
                "flattened_image_nametypes",
                "interpolationMapping",
            ],
            output_names=[
                "nested_imagetype_list",
                "outputAverageImageName_list",
                "image_type_list",
                "nested_interpolation_type",
            ],
        ),
        run_without_submitting=True,
        name="99_RenestDeformedPassiveImages",
    )
    TemplateBuildSingleIterationWF.connect(
        inputSpec,
        "interpolationMapping",
        RenestDeformedPassiveImagesNode,
        "interpolationMapping",
    )
    TemplateBuildSingleIterationWF.connect(
        wimtPassivedeformed,
        "output_image",
        RenestDeformedPassiveImagesNode,
        "deformedPassiveImages",
    )
    TemplateBuildSingleIterationWF.connect(
        FlattenTransformAndImagesListNode,
        "flattened_image_nametypes",
        RenestDeformedPassiveImagesNode,
        "flattened_image_nametypes",
    )
    ## Now average all deformed passive input images together to create an updated template average
    AvgDeformedPassiveImages = pe.MapNode(
        interface=AverageImages(),
        iterfield=["images", "output_average_image"],
        name="AvgDeformedPassiveImages",
    )
    AvgDeformedPassiveImages.inputs.dimension = 3
    AvgDeformedPassiveImages.inputs.normalize = False
    TemplateBuildSingleIterationWF.connect(
        RenestDeformedPassiveImagesNode,
        "nested_imagetype_list",
        AvgDeformedPassiveImages,
        "images",
    )
    TemplateBuildSingleIterationWF.connect(
        RenestDeformedPassiveImagesNode,
        "outputAverageImageName_list",
        AvgDeformedPassiveImages,
        "output_average_image",
    )

    ## -- INFO:  Now need to reshape all the passive images as well
    ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(
        interface=ApplyTransforms(),
        iterfield=["input_image", "reference_image", "output_image", "interpolation"],
        name="ReshapeAveragePassiveImageWithShapeUpdate",
    )
    ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_transform_flags = [
        True,
        False,
        False,
        False,
        False,
    ]
    ReshapeAveragePassiveImageWithShapeUpdate.inputs.default_value = 0
    TemplateBuildSingleIterationWF.connect(
        RenestDeformedPassiveImagesNode,
        "nested_interpolation_type",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "interpolation",
    )
    TemplateBuildSingleIterationWF.connect(
        RenestDeformedPassiveImagesNode,
        "outputAverageImageName_list",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "output_image",
    )
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedPassiveImages,
        "output_average_image",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "input_image",
    )
    TemplateBuildSingleIterationWF.connect(
        AvgDeformedPassiveImages,
        "output_average_image",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "reference_image",
    )
    TemplateBuildSingleIterationWF.connect(
        ApplyInvAverageAndFourTimesGradientStepWarpImage,
        "TransformListWithGradientWarps",
        ReshapeAveragePassiveImageWithShapeUpdate,
        "transforms",
    )
    TemplateBuildSingleIterationWF.connect(
        ReshapeAveragePassiveImageWithShapeUpdate,
        "output_image",
        outputSpec,
        "passive_deformed_templates",
    )

    return TemplateBuildSingleIterationWF
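A minimal sketch of driving a single iteration (all file names are
hypothetical; the CLUSTER_QUEUE arguments are accepted but not used
directly in the body shown above):

    wf = baw_ants_registration_template_build_single_iteration_wf('iter01', '', '')
    wf.inputs.inputspec.fixed_image = 'initial_template.nii.gz'  # hypothetical current template
    wf.inputs.inputspec.registrationImageTypes = ['T1']
    wf.inputs.inputspec.interpolationMapping = {'T1': 'Linear'}
    wf.inputs.inputspec.ListOfImagesDictionaries = [
        {'T1': 'subj01_T1.nii.gz'},  # hypothetical subject images
        {'T1': 'subj02_T1.nii.gz'},
    ]
    wf.run()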
Example no. 9
def create_workflow():
    featpreproc = pe.Workflow(name="featpreproc")

    featpreproc.base_dir = os.path.join(ds_root, 'workingdirs')

    # ===================================================================
    #                  _____                   _
    #                 |_   _|                 | |
    #                   | |  _ __  _ __  _   _| |_
    #                   | | | '_ \| '_ \| | | | __|
    #                  _| |_| | | | |_) | |_| | |_
    #                 |_____|_| |_| .__/ \__,_|\__|
    #                             | |
    #                             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'funcs',
        'subject_id',
        'session_id',
        'fwhm',  # smoothing
        'highpass'
    ]), name="inputspec")

    # SelectFiles
    templates = {
        'ref_manual_fmapmask':  # was: manual_fmapmask
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_manualmask.nii.gz',

        'ref_fmap_magnitude':
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_reference.nii.gz',

        'ref_fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        # 'manualweights':
        # 'manual-masks/sub-eddy/ses-20170511/func/'
        #     'sub-eddy_ses-20170511_task-curvetracing_run-01_frame-50_bold'
        #     '_res-1x1x1_manualweights.nii.gz',

        'ref_func':  # was: manualmask_func_ref
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_reference.nii.gz',

        'ref_funcmask':  # was: manualmask
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_mask.nii.gz',

        'ref_t1':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_reference.nii.gz',

        'ref_t1mask':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_manualmask.nii.gz',

        # 'funcs':
        # 'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/func/'
        #     # 'sub-{subject_id}_ses-{session_id}*_bold_res-1x1x1_preproc'
        #     'sub-{subject_id}_ses-{session_id}*run-01_bold_res-1x1x1_preproc'
        #     # '.nii.gz',
        #     '_nvol10.nii.gz',

        'fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        'fmap_magnitude':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_magnitude1_res-1x1x1_preproc'
            '.nii.gz',

        # 'fmap_mask':
        # 'transformed-manual-fmap-mask/sub-{subject_id}/ses-{session_id}/fmap/'
        #     'sub-{subject_id}_ses-{session_id}_'
        #     'magnitude1_res-1x1x1_preproc.nii.gz',
    }

    inputfiles = pe.Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir), name="input_files")

    featpreproc.connect(
        [(inputnode, inputfiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ])])

    # ===================================================================
    #                   ____        _               _
    #                  / __ \      | |             | |
    #                 | |  | |_   _| |_ _ __  _   _| |_
    #                 | |  | | | | | __| '_ \| | | | __|
    #                 | |__| | |_| | |_| |_) | |_| | |_
    #                  \____/ \__,_|\__| .__/ \__,_|\__|
    #                                  | |
    #                                  |_|
    # ===================================================================

    # ------------------ Output Files
    # Datasink
    outputfiles = pe.Node(nio.DataSink(
        base_directory=ds_root,
        container='derivatives/featpreproc',
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    # each tuple is only matched once per file
    outputfiles.inputs.substitutions = [
        ('/_mc_method_afni3dAllinSlices/', '/'),
        ('/_mc_method_afni3dAllinSlices/', '/'),  # needs to appear twice
        ('/oned_file/', '/'),
        ('/out_file/', '/'),
        ('/oned_matrix_save/', '/'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'/_addmean[0-9]+/', r'/func/'),
        (r'/_funcbrains[0-9]+/', r'/func/'),
        (r'/_maskfunc[0-9]+/', r'/func/'),
        (r'/_mc[0-9]+/', r'/func/'),
        (r'/_meanfunc[0-9]+/', r'/func/'),
        (r'/_outliers[0-9]+/', r'/func/'),
        (r'_run_id_[0-9][0-9]', r''),
    ]
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['motion_parameters',
                'motion_corrected',
                'motion_plots',
                'motion_outlier_files',
                'mask',
                'smoothed_files',
                'highpassed_files',
                'mean',
                'func_unwarp',
                'ref_func',
                'ref_funcmask',
                'ref_t1',
                'ref_t1mask',
                ]),
        name='outputspec')

    # ===================================================================
    #                  _____ _            _ _
    #                 |  __ (_)          | (_)
    #                 | |__) | _ __   ___| |_ _ __   ___
    #                 |  ___/ | '_ \ / _ \ | | '_ \ / _ \
    #                 | |   | | |_) |  __/ | | | | |  __/
    #                 |_|   |_| .__/ \___|_|_|_| |_|\___|
    #                         | |
    #                         |_|
    # ===================================================================

    #  ~|~ _ _  _  _ |` _  _ _ _    _ _  _  _|  _
    #   | | (_|| |_\~|~(_)| | | |  | | |(_|_\|<_\
    #
    # Transform manual skull-stripped masks to multiple images
    # --------------------------------------------------------
    # should just be used as input to motion correction,
    # after mc, all functionals should be aligned to reference
    transmanmask_mc = transform_manualmask.create_workflow()

    # - - - - - - Connections - - - - - - -
    featpreproc.connect(
        [(inputfiles, transmanmask_mc,
         [('subject_id', 'in.subject_id'),
          ('session_id', 'in.session_id'),
          ])])

    featpreproc.connect(inputfiles, 'ref_funcmask',
                        transmanmask_mc, 'in.manualmask')
    featpreproc.connect(inputnode, 'funcs',
                        transmanmask_mc, 'in.funcs')
    featpreproc.connect(inputfiles, 'ref_func',
                        transmanmask_mc, 'in.manualmask_func_ref')

    # fieldmaps not being used
    if False:
        trans_fmapmask = transmanmask_mc.clone('trans_fmapmask')
        featpreproc.connect(inputfiles, 'ref_manual_fmapmask',
                            trans_fmapmask, 'in.manualmask')
        featpreproc.connect(inputfiles, 'fmap_magnitude',
                            trans_fmapmask, 'in.funcs')
        featpreproc.connect(inputfiles, 'ref_func',
                            trans_fmapmask, 'in.manualmask_func_ref')

    #  |\/| _ _|_. _  _    _ _  _ _ _  __|_. _  _
    #  |  |(_) | |(_)| |  (_(_)| | (/_(_ | |(_)| |
    #
    # Perform motion correction, using some pipeline
    # --------------------------------------------------------
    # mc = motioncorrection_workflow.create_workflow_afni()

    # Register an image from the functionals to the reference image
    median_func = pe.MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=('in_file'),
    )
    pre_mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='premotioncorrection')

    featpreproc.connect(
        [
         (inputnode, median_func,
          [
           ('funcs', 'in_file'),
           ]),
         (median_func, pre_mc,
          [
           ('out_file', 'in.funcs'),
           ]),
         (inputfiles, pre_mc,
          [
           # the median func image will be used as a reference / base
           ('ref_func', 'in.ref_func'),
           ('ref_funcmask', 'in.ref_func_weights'),
          ]),
         (transmanmask_mc, pre_mc,
          [
           ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
         ]),
         (pre_mc, outputnode,
          [
           ('mc.out_file', 'pre_motion_corrected'),
           ('mc.oned_file', 'pre_motion_parameters.oned_file'),
           ('mc.oned_matrix_save', 'pre_motion_parameters.oned_matrix_save'),
         ]),
         (outputnode, outputfiles,
          [
           ('pre_motion_corrected', 'pre_motion_corrected.out_file'),
           ('pre_motion_parameters.oned_file', 'pre_motion_corrected.oned_file'), # warp parameters in ASCII (.1D)
           ('pre_motion_parameters.oned_matrix_save', 'pre_motion_corrected.oned_matrix_save'), # transformation matrices for each sub-brick
         ]),
    ])

    mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='motioncorrection',
        iterfield=('in_file', 'ref_file', 'in_weight_file'))
    # - - - - - - Connections - - - - - - -
    featpreproc.connect(
        [(inputnode, mc,
          [
           ('funcs', 'in.funcs'),
           ]),
         (pre_mc, mc, [
             # the median image realigned to the reference functional will serve as the reference;
             #  this way motion correction is done against an image more similar to the functionals
             ('mc.out_file', 'in.ref_func'),
           ]),
         (inputfiles, mc, [
             # Check and make sure the ref func mask is close enough to the registered median
             # image.
             ('ref_funcmask', 'in.ref_func_weights'),
           ]),
         (transmanmask_mc, mc, [
             ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
         ]),
         (mc, outputnode, [
             ('mc.out_file', 'motion_corrected'),
             ('mc.oned_file', 'motion_parameters.oned_file'),
             ('mc.oned_matrix_save', 'motion_parameters.oned_matrix_save'),
         ]),
         (outputnode, outputfiles, [
             ('motion_corrected', 'motion_corrected.out_file'),
             ('motion_parameters.oned_file', 'motion_corrected.oned_file'), # warp parameters in ASCII (.1D)
             ('motion_parameters.oned_matrix_save', 'motion_corrected.oned_matrix_save'), # transformation matrices for each sub-brick
         ]),
    ])

    #  |~. _ | _| _ _  _  _    _ _  _ _ _  __|_. _  _
    #  |~|(/_|(_|| | |(_||_)  (_(_)| | (/_(_ | |(_)| |
    #                    |
    # Unwarp EPI distortions
    # --------------------------------------------------------

    # Performing motion correction to a reference that is undistorted,
    # so b0_unwarp is currently not needed
    if False:
        b0_unwarp = undistort_workflow.create_workflow()

        featpreproc.connect(
            [(inputfiles, b0_unwarp,
              [  # ('subject_id', 'in.subject_id'),
               # ('session_id', 'in.session_id'),
               ('fmap_phasediff', 'in.fmap_phasediff'),
               ('fmap_magnitude', 'in.fmap_magnitude'),
               ]),
             (mc, b0_unwarp,
              [('mc.out_file', 'in.funcs'),
               ]),
             (transmanmask_mc, b0_unwarp,
              [('funcreg.out_file', 'in.funcmasks'),
               ]),
             (trans_fmapmask, b0_unwarp,
              [('funcreg.out_file', 'in.fmap_mask')]),
             (b0_unwarp, outputfiles,
              [('out.funcs', 'func_unwarp.funcs'),
               ('out.funcmasks', 'func_unwarp.funcmasks'),
               ]),
             (b0_unwarp, outputnode,
              [('out.funcs', 'func_unwarp.funcs'),
               ('out.funcmasks', 'mask'),
               ]),
             ])

    # undistort the reference images
    if False:
        b0_unwarp_ref = b0_unwarp.clone('b0_unwarp_ref')
        featpreproc.connect(
            [(inputfiles, b0_unwarp_ref,
              [  # ('subject_id', 'in.subject_id'),
               # ('session_id', 'in.session_id'),
               ('ref_fmap_phasediff', 'in.fmap_phasediff'),
               ('ref_fmap_magnitude', 'in.fmap_magnitude'),
               ('ref_manual_fmapmask', 'in.fmap_mask'),
               ('ref_func', 'in.funcs'),
               ('ref_funcmask', 'in.funcmasks'),
               ]),
             (b0_unwarp_ref, outputfiles,
              [('out.funcs', 'func_unwarp_ref.func'),
               ('out.funcmasks', 'func_unwarp_ref.funcmask'),
               ]),
             (b0_unwarp_ref, outputnode,
              [('out.funcs', 'ref_func'),
               ('out.funcmasks', 'ref_mask'),
               ]),
             ])
    else:
        featpreproc.connect(
            [(inputfiles, outputfiles,
              [('ref_func', 'reference/func'),
               ('ref_funcmask', 'reference/func_mask'),
               ]),
             (inputfiles, outputnode,
              [('ref_func', 'ref_func'),
               ('ref_funcmask', 'ref_funcmask'),
               ]),
             ])

    # |~) _  _ . __|_ _  _  _|_ _   |~) _  |` _  _ _  _  _ _ 
    # |~\(/_(_||_\ | (/_|    | (_)  |~\(/_~|~(/_| (/_| |(_(/_
    #        _|
    # Register all functionals to common reference
    # --------------------------------------------------------
    if False:  # this is now done during motion correction
        # FLIRT cost: intermodal: corratio, intramodal: least squares and normcorr
        reg_to_ref = pe.MapNode(  # intra-modal
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='normcorr'),
            name='reg_to_ref',
            iterfield=('in_file', 'in_weight'),
        )
        refEPI_to_refT1 = pe.Node(
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='corratio'),
            name='refEPI_to_refT1',
        )
        # combine func -> ref_func and ref_func -> ref_T1
        reg_to_refT1 = pe.MapNode(
            interface=fsl.ConvertXFM(concat_xfm=True),
            name='reg_to_refT1',
            iterfield=('in_file'),
        )

        reg_funcs = pe.MapNode(
            interface=fsl.preprocess.ApplyXFM(),
            name='reg_funcs',
            iterfield=('in_file', 'in_matrix_file'),
        )
        reg_funcmasks = pe.MapNode(
            interface=fsl.preprocess.ApplyXFM(),
            name='reg_funcmasks',
            iterfield=('in_file', 'in_matrix_file')
        )

        def deref_list(x):
            assert len(x)==1
            return x[0]

        featpreproc.connect(
            [
             (b0_unwarp, reg_to_ref,  # --> reg_to_ref, (A)
              [
               ('out.funcs', 'in_file'),
               ('out.funcmasks', 'in_weight'),
              ]),
             (b0_unwarp_ref, reg_to_ref,
              [
               (('out.funcs', deref_list), 'reference'),
               (('out.funcmasks', deref_list), 'ref_weight'),
              ]),

             (b0_unwarp_ref, refEPI_to_refT1,  # --> refEPI_to_refT1 (B)
              [
               (('out.funcs', deref_list), 'in_file'),
               (('out.funcmasks', deref_list), 'in_weight'),
              ]),
             (inputfiles, refEPI_to_refT1,
              [
               ('ref_t1', 'reference'),
               ('ref_t1mask', 'ref_weight'),
              ]),

             (reg_to_ref, reg_to_refT1,  # --> reg_to_refT1 (A*B)
              [
               ('out_matrix_file', 'in_file'),
              ]),
             (refEPI_to_refT1, reg_to_refT1,
              [
               ('out_matrix_file', 'in_file2'),
              ]),

             (reg_to_refT1, reg_funcs,  # --> reg_funcs
              [
               # ('out_matrix_file', 'in_matrix_file'),
               ('out_file', 'in_matrix_file'),
              ]),
             (b0_unwarp, reg_funcs,
              [
               ('out.funcs', 'in_file'),
              ]),
             (b0_unwarp_ref, reg_funcs,
              [
               (('out.funcs', deref_list), 'reference'),
              ]),

             (reg_to_refT1, reg_funcmasks,  # --> reg_funcmasks
              [
               # ('out_matrix_file', 'in_matrix_file'),
               ('out_file', 'in_matrix_file'),
              ]),
             (b0_unwarp, reg_funcmasks,
              [
               ('out.funcmasks', 'in_file'),
              ]),
             (b0_unwarp_ref, reg_funcmasks,
              [
               (('out.funcs', deref_list), 'reference'),
              ]),

             (reg_funcs, outputfiles,
              [
               ('out_file', 'common_ref.func'),
              ]),
             (reg_funcmasks, outputfiles,
              [
               ('out_file', 'common_ref.funcmask'),
              ]),
        ])


    #  |\/| _ _|_. _  _    _   _|_|. _  _ _
    #  |  |(_) | |(_)| |  (_)|_|| ||(/_| _\
    #
    # --------------------------------------------------------

    # Apply brain masks to functionals
    # --------------------------------------------------------

    """
    Dilate the mask
    """
    if False:
        dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                         op_string='-dilF'),
                                iterfield=['in_file'],
                                name='dilatemask')
        featpreproc.connect(reg_funcmasks, 'out_file', dilatemask, 'in_file')
    else:
        dilatemask = pe.Node(
            interface=fsl.ImageMaths(suffix='_dil', op_string='-dilF'),
            name='dilatemask')
        featpreproc.connect(inputfiles, 'ref_funcmask', dilatemask, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', outputfiles, 'dilate_mask')

    funcbrains = pe.MapNode(
        fsl.BinaryMaths(operation='mul'),
        iterfield=('in_file', 'operand_file'),
        name='funcbrains'
    )

    featpreproc.connect(
        [(mc, funcbrains,
          [('mc.out_file', 'in_file'),
          ]),
         (dilatemask, funcbrains,
          [('out_file', 'operand_file'),
          ]),
         (funcbrains, outputfiles,
          [('out_file', 'funcbrains'),
           ]),
         ])
    # Detect motion outliers
    # --------------------------------------------------------

    import nipype.algorithms.rapidart as ra
    outliers = pe.MapNode(
        ra.ArtifactDetect(
            mask_type='file',
            # trying to "disable" `norm_threshold`:
            use_norm=True,
            norm_threshold=10.0,  # combines translations in mm and rotations
            # use_norm=Undefined,
            # translation_threshold=1.0,  # translation in mm
            # rotation_threshold=0.02,  # rotation in radians
            zintensity_threshold=3.0,  # z-score
            parameter_source='AFNI',
            save_plot=True),
        iterfield=('realigned_files', 'realignment_parameters', 'mask_file'),
        name='outliers')

    featpreproc.connect([
        (mc, outliers,
         [  # ('mc.par_file', 'realignment_parameters'),
             ('mc.oned_file', 'realignment_parameters'),
         ]),
        (funcbrains, outliers,
         [('out_file', 'realigned_files'),
          ]),
        (dilatemask, outliers,
         [('out_file', 'mask_file'),
          ]),
        (outliers, outputfiles,
         [('outlier_files', 'motion_outliers.@outlier_files'),
          ('plot_files', 'motion_outliers.@plot_files'),
          ('displacement_files', 'motion_outliers.@displacement_files'),
          ('intensity_files', 'motion_outliers.@intensity_files'),
          ('mask_files', 'motion_outliers.@mask_files'),
          ('statistic_files', 'motion_outliers.@statistic_files'),
          # ('norm_files', 'outliers.@norm_files'),
          ]),
        (mc, outputnode,
         [('mc.oned_file', 'motion_parameters'),
          ]),
        (outliers, outputnode,
         [('outlier_files', 'motion_outlier_files'),
          ('plot_files', 'motion_plots.@plot_files'),
          ('displacement_files', 'motion_outliers.@displacement_files'),
          ('intensity_files', 'motion_outliers.@intensity_files'),
          ('mask_files', 'motion_outliers.@mask_files'),
          ('statistic_files', 'motion_outliers.@statistic_files'),
          # ('norm_files', 'outliers.@norm_files'),
          ])
    ])

    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', getthresh, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', getthresh, 'in_file')

    """
    Threshold the first run of functional data at 10% of the 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', threshold, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', threshold, 'in_file')

    """
    Define a function to get 10% of the intensity
    """
    def getthreshop(thresh):
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]
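    # out_stat yields [p2, p98] for each run, so val[1] is the 98th
    # percentile: each run is thresholded at 10% of that value, reduced to
    # its minimum over time (-Tmin) and binarized (-bin), yielding a rough
    # per-run brain mask.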

    featpreproc.connect(
        getthresh, ('out_stat', getthreshop),
        threshold, 'op_string')

    """
    Determine the median value of the functional runs using the mask
    """
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', medianval, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', medianval, 'in_file')

    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')

    # (~ _  _ _|_. _ |  (~ _ _  _  _ _|_|_ . _  _
    # _)|_)(_| | |(_||  _)| | |(_)(_) | | ||| |(_|
    #   |                                       _|
    # Spatial smoothing (SUSAN)
    # --------------------------------------------------------

    # create_susan_smooth takes care of calculating the mean and median
    #   functional, applying mask to functional, and running the smoothing
    smooth = create_susan_smooth(separate_masks=False)
    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')

    # featpreproc.connect(b0_unwarp, 'out.funcs', smooth, 'inputnode.in_files')
    if False:
        featpreproc.connect(reg_funcs, 'out_file', smooth, 'inputnode.in_files')
    else:
        featpreproc.connect(mc, 'mc.out_file', smooth, 'inputnode.in_files')

    featpreproc.connect(dilatemask, 'out_file',
                        smooth, 'inputnode.mask_file')

    # -------------------------------------------------------
    # The below is from workflows/fmri/fsl/preprocess.py

    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    featpreproc.connect(
        smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2),
                         name='concat')

    tolist = lambda x: [x]

    def chooseindex(fwhm):
        if fwhm < 1:
            return [0]
        else:
            return [1]

    # in1 receives the unsmoothed functional data (before SUSAN)
    if False:
        featpreproc.connect(b0_unwarp, ('out.funcs', tolist), concatnode, 'in1')
    else:
        featpreproc.connect(mc, ('mc.out_file', tolist), concatnode, 'in1')
    # maskfunc3 is the functional data after SUSAN
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')

    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputfiles, 'smoothed_files')

    """
    Scale the median value of the run is set to 10000.
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')

    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(
        medianval, ('out_stat', getmeanscale),
        meanscale, 'op_string')

    # |_|. _ |_  _  _  _ _
    # | ||(_|| ||_)(_|_\_\
    #      _|   |
    # Temporal filtering
    # --------------------------------------------------------

    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                          iterfield=['in_file'],
                          name='highpass')
    highpass_operand = lambda x: '-bptf %.10f -1' % x
    featpreproc.connect(
        inputnode, ('highpass', highpass_operand),
        highpass, 'op_string')
    featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')

    version = 0
    if fsl.Info.version() and \
            LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    if version < 507:
        featpreproc.connect(
            highpass, 'out_file', outputnode, 'highpassed_files')
    else:
        """
        Add back the mean removed by the highpass filter operation as
            of FSL 5.0.7
        """
        meanfunc4 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                        suffix='_mean'),
                               iterfield=['in_file'],
                               name='meanfunc4')

        featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file')
        addmean = pe.MapNode(interface=fsl.BinaryMaths(operation='add'),
                             iterfield=['in_file', 'operand_file'],
                             name='addmean')
        featpreproc.connect(highpass, 'out_file', addmean, 'in_file')
        featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file')
        featpreproc.connect(
            addmean, 'out_file', outputnode, 'highpassed_files')

    """
    Generate a mean functional image from the first run
    """
    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc3')

    featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')
    featpreproc.connect(meanfunc3, 'out_file', outputfiles, 'mean')

    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean_highpassed')
    featpreproc.connect(outputnode, 'highpassed_files', outputfiles, 'highpassed_files')

    return featpreproc
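A minimal sketch of running this preprocessing workflow (the functional
file name, fwhm and highpass values are hypothetical; ``ds_root`` and
``data_dir`` must already be defined in the enclosing module):

    featpreproc = create_workflow()
    featpreproc.inputs.inputspec.funcs = ['sub-eddy_ses-20170607_task-RestingPRF_run-02_bold.nii.gz']  # hypothetical
    featpreproc.inputs.inputspec.subject_id = 'eddy'
    featpreproc.inputs.inputspec.session_id = '20170607'
    featpreproc.inputs.inputspec.fwhm = 2.0     # mm, hypothetical
    featpreproc.inputs.inputspec.highpass = 50  # hypothetical -bptf operand (in volumes)
    featpreproc.run()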
Example no. 10
def analyze_openfmri_dataset(data_dir,
                             subject=None,
                             model_id=None,
                             task_id=None,
                             output_dir=None,
                             subj_prefix='*',
                             hpcutoff=120.,
                             use_derivatives=True,
                             fwhm=6.0,
                             subjects_dir=None,
                             target=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    subject : list of str or None
        Subject identifiers to analyze; if None or empty, all subjects
        matching ``subj_prefix`` are analyzed

    model_id : int
        Model index (selects ``models/model%03d``)

    task_id : list of int
        Task indices to analyze

    output_dir : str
        Directory in which to store the results

    subj_prefix : str
        Glob pattern used to find subject directories

    hpcutoff : float
        Highpass filter cutoff in seconds

    use_derivatives : bool
        Whether to model temporal derivatives of the regressors

    fwhm : float
        Smoothing kernel width (mm)

    subjects_dir : str or None
        FreeSurfer SUBJECTS_DIR; when given, FreeSurfer-based registration
        is used instead of the FSL-based workflow

    target : str or None
        Optional target image that overrides the default MNI template
    """
    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()
    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')
    """
    Set up openfmri data specific components
    """

    subjects = sorted([
        path.split(os.path.sep)[-1]
        for path in glob(os.path.join(data_dir, subj_prefix))
    ])

    infosource = pe.Node(
        niu.IdentityInterface(fields=['subject_id', 'model_id', 'task_id']),
        name='infosource')
    if subject is None or len(subject) == 0:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]), ('task_id', task_id)]
    else:
        infosource.iterables = [
            ('subject_id',
             [subjects[subjects.index(subj)] for subj in subject]),
            ('model_id', [model_id]), ('task_id', task_id)
        ]

    subjinfo = pe.Node(niu.Function(
        input_names=['subject_id', 'base_dir', 'task_id', 'model_id'],
        output_names=['run_id', 'conds', 'TR'],
        function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir
    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(nio.DataGrabber(
            infields=['subject_id', 'run_id', 'task_id', 'model_id'],
            outfields=['anat', 'bold', 'behav', 'contrasts']),
                             name='datasource')
    else:
        datasource = pe.Node(nio.DataGrabber(
            infields=['subject_id', 'run_id', 'task_id', 'model_id'],
            outfields=['anat', 'bold', 'behav']),
                             name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'

    if has_contrast:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt'),
            'contrasts': ('models/model%03d/'
                          'task_contrasts.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']],
            'contrasts': [['model_id']]
        }
    else:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']]
        }

    datasource.inputs.sort_filelist = True
    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([
        (datasource, preproc, [('bold', 'inputspec.func')]),
    ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2. * TR)
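    # hpcutoff is given in seconds, while fslmaths -bptf expects the cutoff
    # expressed in volumes (half the period), hence the division by 2 * TR.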

    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')
    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([
                    np.array(row.split()) for row in fp.readlines()
                    if row.strip()
                ])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [
                row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                row[2:].astype(float).tolist()
            ]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts
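    # Expected task_contrasts.txt row format, inferred from the parsing
    # above: "task001 <contrast_name> <w1> <w2> ..." where the weights map
    # onto cond001..condNNN of that task.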

    contrastgen = pe.Node(niu.Function(
        input_names=['contrast_file', 'task_id', 'conds'],
        output_names=['contrasts'],
        function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(
        interface=ra.ArtifactDetect(use_differences=[True, False],
                                    use_norm=True,
                                    norm_threshold=1,
                                    zintensity_threshold=3,
                                    parameter_source='FSL',
                                    mask_type='file'),
        iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
        name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        import numpy as np
        num_conds = len(conds)
        if isinstance(behav, (str, bytes)):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        return behav_array.reshape(int(num_elements / num_conds),
                                   num_conds).tolist()

    reshape_behav = pe.Node(niu.Function(
        input_names=['behav', 'run_id', 'conds'],
        output_names=['behav'],
        function=check_behav_list),
                            name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')

    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art,
                 [('outputspec.motion_parameters', 'realignment_parameters'),
                  ('outputspec.realigned_files', 'realigned_files'),
                  ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec,
                 [('outputspec.highpassed_files', 'functional_runs'),
                  ('outputspec.motion_parameters', 'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])])

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(CalculateMedian(), name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        outcopes = all_copes.reshape(int(len(all_copes) / num_copes),
                                     num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(int(len(all_varcopes) / num_copes),
                                           num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs
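
    # Illustrative shapes (hypothetical): with 2 contrasts and 3 runs, copes
    # arrives as [[c1_r1, c2_r1], [c1_r2, c2_r2], [c1_r3, c2_r3]]; the
    # flatten/reshape/transpose returns
    # [[c1_r1, c1_r2, c1_r3], [c2_r1, c2_r2, c2_r3]], i.e. one list per
    # contrast across runs, ready for the fixed-effects merge.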

    cope_sorter = pe.Node(niu.Function(
        input_names=['copes', 'varcopes', 'contrasts'],
        output_names=['copes', 'varcopes', 'n_runs'],
        function=sort_copes),
                          name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes', 'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [
                    ('outputspec.dof_file', 'inputspec.dof_files'),
                ])])

    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    if subjects_dir:
        wf.connect(infosource, 'subject_id', registration,
                   'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat', registration,
                   'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(
        input_names=['copes', 'varcopes', 'zstats'],
        output_names=['out_files', 'splits'],
        function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc, [
        ('copes', 'copes'),
        ('varcopes', 'varcopes'),
        ('zstats', 'zstats'),
    ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats
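
    # merge_files and split_files are inverses: `splits` records the length of
    # each sublist so that the warped images coming back from registration can
    # be regrouped into copes, varcopes and zstats.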

    splitfunc = pe.Node(niu.Function(
        input_names=['in_files', 'splits'],
        output_names=['copes', 'varcopes', 'zstats'],
        function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files', splitfunc,
               'in_files')

    if subjects_dir:
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node('outputspec'), 'copes', get_roi_mean,
                   'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_mean,
                   'segmentation_file')

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_tsnr,
                   'segmentation_file')
    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(
            ('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp', 'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i, '/cope%02d_' % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs
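
    # Example substitution (hypothetical path): with model_id=1 and task_id=1,
    # the DataSink rewrites '.../_flameo0/cope1.nii.gz' to '.../cope01.nii.gz'
    # and collapses '/model001/task001/' into '/'.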

    subsgen = pe.Node(niu.Function(
        input_names=['subject_id', 'conds', 'run_id', 'model_id', 'task_id'],
        output_names=['substitutions'],
        function=get_subs),
                      name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'), ('copes', 'copes'),
                  ('varcopes', 'varcopes'), ('zstats', 'zstats'),
                  ('tstats', 'tstats')])])
    wf.connect([(modelfit.get_node('modelgen'), datasink, [
        ('design_cov', 'qa.model'),
        ('design_image', 'qa.model.@matrix_image'),
        ('design_file', 'qa.model.@matrix'),
    ])])
    wf.connect([(preproc, datasink,
                 [('outputspec.motion_parameters', 'qa.motion'),
                  ('outputspec.motion_plots', 'qa.motion.plots'),
                  ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask', datasink,
               'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file', datasink,
                   'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'qa.tsnr'),
                                              ('summary_file',
                                               'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink, [('avgwf_txt_file', 'copes.roi'),
                                              ('summary_file',
                                               'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink, [
        ('copes', 'copes.mni'),
        ('varcopes', 'varcopes.mni'),
        ('zstats', 'zstats.mni'),
    ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean', datasink,
               'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink,
               'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink,
               'xfm.anat2target')
    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
Example n. 11
def create_reg_workflow(name='registration'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------
        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.anatomical_image : anatomical image to coregister to
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT transform
        outputspec.anat2target_transform : FLIRT+FNIRT transform
        outputspec.transformed_files : transformed files in target space
        outputspec.transformed_mean : mean image in target space
    """

    register = pe.Workflow(name=name)

    inputnode = pe.Node(interface=niu.IdentityInterface(fields=[
        'source_files', 'mean_image', 'anatomical_image', 'target_image',
        'target_image_brain', 'config_file'
    ]),
                        name='inputspec')
    outputnode = pe.Node(interface=niu.IdentityInterface(fields=[
        'func2anat_transform', 'anat2target_transform', 'transformed_files',
        'transformed_mean', 'anat2target', 'mean2anat_mask'
    ]),
                         name='outputspec')
    """
    Estimate the tissue classes from the anatomical image. But use spm's segment
    as FSL appears to be breaking.
    """

    stripper = pe.Node(fsl.BET(), name='stripper')
    register.connect(inputnode, 'anatomical_image', stripper, 'in_file')
    fast = pe.Node(fsl.FAST(), name='fast')
    register.connect(stripper, 'out_file', fast, 'in_files')
    """
    Binarize the segmentation
    """

    binarize = pe.Node(fsl.ImageMaths(op_string='-nan -thr 0.5 -bin'),
                       name='binarize')
    pickindex = lambda x, i: x[i]
    register.connect(fast, ('partial_volume_files', pickindex, 2), binarize,
                     'in_file')
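    # Nipype's (source_output, function, *args) connect tuples apply the helper
    # on the fly: here pickindex selects partial_volume_files[2], the white
    # matter probability map from FAST, before binarization.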
    """
    Calculate rigid transform from mean image to anatomical image
    """

    mean2anat = pe.Node(fsl.FLIRT(), name='mean2anat')
    mean2anat.inputs.dof = 6
    register.connect(inputnode, 'mean_image', mean2anat, 'in_file')
    register.connect(stripper, 'out_file', mean2anat, 'reference')
    """
    Now use bbr cost function to improve the transform
    """

    mean2anatbbr = pe.Node(fsl.FLIRT(), name='mean2anatbbr')
    mean2anatbbr.inputs.dof = 6
    mean2anatbbr.inputs.cost = 'bbr'
    mean2anatbbr.inputs.schedule = os.path.join(os.getenv('FSLDIR'),
                                                'etc/flirtsch/bbr.sch')
    register.connect(inputnode, 'mean_image', mean2anatbbr, 'in_file')
    register.connect(binarize, 'out_file', mean2anatbbr, 'wm_seg')
    register.connect(inputnode, 'anatomical_image', mean2anatbbr, 'reference')
    register.connect(mean2anat, 'out_matrix_file', mean2anatbbr,
                     'in_matrix_file')
    """
    Create a mask of the median image coregistered to the anatomical image
    """

    mean2anat_mask = pe.Node(fsl.BET(mask=True), name='mean2anat_mask')
    register.connect(mean2anatbbr, 'out_file', mean2anat_mask, 'in_file')
    """
    Convert the BBRegister transformation to ANTS ITK format
    """

    convert2itk = pe.Node(C3dAffineTool(), name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    register.connect(mean2anatbbr, 'out_matrix_file', convert2itk,
                     'transform_file')
    register.connect(inputnode, 'mean_image', convert2itk, 'source_file')
    register.connect(stripper, 'out_file', convert2itk, 'reference_file')
    """
    Compute registration between the subject's structural and MNI template
    This is currently set to perform a very quick registration. However, the
    registration can be made significantly more accurate for cortical
    structures by increasing the number of iterations
    All parameters are set using the example from:
    https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    """

    reg = pe.Node(ants.Registration(), name='antsRegister')
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1, ), (0.1, ), (0.2, 3.0, 0.0)]
    reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[
        100, 30, 20
    ]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = True
    reg.inputs.initial_moving_transform_com = True
    reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 2 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 3
    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 3
    reg.inputs.use_histogram_matching = [False] * 2 + [True]
    reg.inputs.winsorize_lower_quantile = 0.005
    reg.inputs.winsorize_upper_quantile = 0.995
    reg.inputs.args = '--float'
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.num_threads = 4
    reg.plugin_args = {
        'qsub_args': '-pe orte 4',
        'sbatch_args': '--mem=6G -c 4'
    }
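    # Stage-wise parameters: each list entry corresponds to one transform stage
    # (Rigid, Affine, SyN); the final SyN stage uses two image metrics
    # (Mattes + CC), hence the nested lists in metric, metric_weight and
    # radius_or_number_of_bins.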
    register.connect(stripper, 'out_file', reg, 'moving_image')
    register.connect(inputnode, 'target_image_brain', reg, 'fixed_image')
    """
    Concatenate the affine and ants transforms into a list
    """

    pickfirst = lambda x: x[0]

    merge = pe.Node(niu.Merge(2), iterfield=['in2'], name='mergexfm')
    register.connect(convert2itk, 'itk_transform', merge, 'in2')
    register.connect(reg, 'composite_transform', merge, 'in1')
    """
    Transform the mean image. First to anatomical and then to target
    """

    warpmean = pe.Node(ants.ApplyTransforms(), name='warpmean')
    warpmean.inputs.input_image_type = 0
    warpmean.inputs.interpolation = 'Linear'
    warpmean.inputs.invert_transform_flags = [False, False]
    warpmean.terminal_output = 'file'

    register.connect(inputnode, 'target_image_brain', warpmean,
                     'reference_image')
    register.connect(inputnode, 'mean_image', warpmean, 'input_image')
    register.connect(merge, 'out', warpmean, 'transforms')
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = pe.MapNode(ants.ApplyTransforms(),
                         iterfield=['input_image'],
                         name='warpall')
    warpall.inputs.input_image_type = 0
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.terminal_output = 'file'

    register.connect(inputnode, 'target_image_brain', warpall,
                     'reference_image')
    register.connect(inputnode, 'source_files', warpall, 'input_image')
    register.connect(merge, 'out', warpall, 'transforms')
    """
    Assign all the output files
    """

    register.connect(reg, 'warped_image', outputnode, 'anat2target')
    register.connect(warpmean, 'output_image', outputnode, 'transformed_mean')
    register.connect(warpall, 'output_image', outputnode, 'transformed_files')
    register.connect(mean2anatbbr, 'out_matrix_file', outputnode,
                     'func2anat_transform')
    register.connect(mean2anat_mask, 'mask_file', outputnode, 'mean2anat_mask')
    register.connect(reg, 'composite_transform', outputnode,
                     'anat2target_transform')

    return register
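
A minimal usage sketch for the workflow above (hypothetical file names; assumes FSL, ANTs and the imports above are available):

    reg_wf = create_reg_workflow(name='registration')
    reg_wf.inputs.inputspec.source_files = ['cope01.nii.gz']
    reg_wf.inputs.inputspec.mean_image = 'mean_func.nii.gz'
    reg_wf.inputs.inputspec.anatomical_image = 'T1w.nii.gz'
    reg_wf.inputs.inputspec.target_image_brain = fsl.Info.standard_image(
        'MNI152_T1_2mm_brain.nii.gz')
    reg_wf.run()  # doctest: +SKIP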
Example n. 12
def create_fs_reg_workflow(name='registration'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT transform
        outputspec.anat2target_transform : FLIRT+FNIRT transform
        outputspec.transformed_files : transformed files in target space
        outputspec.transformed_mean : mean image in target space
    """

    register = Workflow(name=name)

    inputnode = Node(interface=IdentityInterface(fields=[
        'source_files', 'mean_image', 'subject_id', 'subjects_dir',
        'target_image'
    ]),
                     name='inputspec')

    outputnode = Node(interface=IdentityInterface(fields=[
        'func2anat_transform', 'out_reg_file', 'anat2target_transform',
        'transforms', 'transformed_mean', 'transformed_files', 'min_cost_file',
        'anat2target', 'aparc', 'mean2anat_mask'
    ]),
                      name='outputspec')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(), name='fssource')
    fssource.run_without_submitting = True
    register.connect(inputnode, 'subject_id', fssource, 'subject_id')
    register.connect(inputnode, 'subjects_dir', fssource, 'subjects_dir')

    convert = Node(freesurfer.MRIConvert(out_type='nii'), name="convert")
    register.connect(fssource, 'T1', convert, 'in_file')

    # Coregister the median to the surface
    bbregister = Node(freesurfer.BBRegister(registered_file=True),
                      name='bbregister')
    bbregister.inputs.init = 'fsl'
    bbregister.inputs.contrast_type = 't2'
    bbregister.inputs.out_fsl_file = True
    bbregister.inputs.epi_mask = True
    register.connect(inputnode, 'subject_id', bbregister, 'subject_id')
    register.connect(inputnode, 'mean_image', bbregister, 'source_file')
    register.connect(inputnode, 'subjects_dir', bbregister, 'subjects_dir')

    # Create a mask of the median coregistered to the anatomical image
    mean2anat_mask = Node(fsl.BET(mask=True), name='mean2anat_mask')
    register.connect(bbregister, 'registered_file', mean2anat_mask, 'in_file')
    """
    use aparc+aseg's brain mask
    """

    binarize = Node(fs.Binarize(min=0.5, out_type="nii.gz", dilate=1),
                    name="binarize_aparc")
    register.connect(fssource, ("aparc_aseg", get_aparc_aseg), binarize,
                     "in_file")

    stripper = Node(fsl.ApplyMask(), name='stripper')
    register.connect(binarize, "binary_file", stripper, "mask_file")
    register.connect(convert, 'out_file', stripper, 'in_file')
    """
    Apply inverse transform to aparc file
    """

    aparcxfm = Node(freesurfer.ApplyVolTransform(inverse=True,
                                                 interp='nearest'),
                    name='aparc_inverse_transform')
    register.connect(inputnode, 'subjects_dir', aparcxfm, 'subjects_dir')
    register.connect(bbregister, 'out_reg_file', aparcxfm, 'reg_file')
    register.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparcxfm,
                     'target_file')
    register.connect(inputnode, 'mean_image', aparcxfm, 'source_file')
    """
    Convert the BBRegister transformation to ANTS ITK format
    """

    convert2itk = Node(C3dAffineTool(), name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file')
    register.connect(inputnode, 'mean_image', convert2itk, 'source_file')
    register.connect(stripper, 'out_file', convert2itk, 'reference_file')
    """
    Compute registration between the subject's structural and MNI template
    This is currently set to perform a very quick registration. However, the
    registration can be made significantly more accurate for cortical
    structures by increasing the number of iterations
    All parameters are set using the example from:
    https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    """

    reg = Node(ants.Registration(), name='antsRegister')
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1, ), (0.1, ), (0.2, 3.0, 0.0)]
    reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[
        100, 30, 20
    ]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = True
    reg.inputs.initial_moving_transform_com = True
    reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 2 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 3
    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 3
    reg.inputs.use_histogram_matching = [False] * 2 + [True]
    reg.inputs.winsorize_lower_quantile = 0.005
    reg.inputs.winsorize_upper_quantile = 0.995
    reg.inputs.float = True
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.num_threads = 4
    reg.plugin_args = {
        'qsub_args': '-pe orte 4',
        'sbatch_args': '--mem=6G -c 4'
    }
    register.connect(stripper, 'out_file', reg, 'moving_image')
    register.connect(inputnode, 'target_image', reg, 'fixed_image')
    """
    Concatenate the affine and ants transforms into a list
    """

    pickfirst = lambda x: x[0]

    merge = Node(Merge(2), iterfield=['in2'], name='mergexfm')
    register.connect(convert2itk, 'itk_transform', merge, 'in2')
    register.connect(reg, 'composite_transform', merge, 'in1')
    """
    Transform the mean image. First to anatomical and then to target
    """

    warpmean = Node(ants.ApplyTransforms(), name='warpmean')
    warpmean.inputs.input_image_type = 0
    warpmean.inputs.interpolation = 'Linear'
    warpmean.inputs.invert_transform_flags = [False, False]
    warpmean.terminal_output = 'file'
    warpmean.inputs.args = '--float'
    # warpmean.inputs.num_threads = 4
    # warpmean.plugin_args = {'sbatch_args': '--mem=4G -c 4'}
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = pe.MapNode(ants.ApplyTransforms(),
                         iterfield=['input_image'],
                         name='warpall')
    warpall.inputs.input_image_type = 0
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.terminal_output = 'file'
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 2
    warpall.plugin_args = {'sbatch_args': '--mem=6G -c 2'}
    """
    Assign all the output files
    """

    register.connect(warpmean, 'output_image', outputnode, 'transformed_mean')
    register.connect(warpall, 'output_image', outputnode, 'transformed_files')

    register.connect(inputnode, 'target_image', warpmean, 'reference_image')
    register.connect(inputnode, 'mean_image', warpmean, 'input_image')
    register.connect(merge, 'out', warpmean, 'transforms')
    register.connect(inputnode, 'target_image', warpall, 'reference_image')
    register.connect(inputnode, 'source_files', warpall, 'input_image')
    register.connect(merge, 'out', warpall, 'transforms')
    """
    Assign all the output files
    """

    register.connect(reg, 'warped_image', outputnode, 'anat2target')
    register.connect(aparcxfm, 'transformed_file', outputnode, 'aparc')
    register.connect(bbregister, 'out_fsl_file', outputnode,
                     'func2anat_transform')
    register.connect(bbregister, 'out_reg_file', outputnode, 'out_reg_file')
    register.connect(bbregister, 'min_cost_file', outputnode, 'min_cost_file')
    register.connect(mean2anat_mask, 'mask_file', outputnode, 'mean2anat_mask')
    register.connect(reg, 'composite_transform', outputnode,
                     'anat2target_transform')
    register.connect(merge, 'out', outputnode, 'transforms')

    return register
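
A minimal usage sketch for the FreeSurfer variant (hypothetical paths; requires a valid FreeSurfer SUBJECTS_DIR):

    fs_reg_wf = create_fs_reg_workflow(name='registration')
    fs_reg_wf.inputs.inputspec.subject_id = 'sub-01'
    fs_reg_wf.inputs.inputspec.subjects_dir = '/data/freesurfer'
    fs_reg_wf.inputs.inputspec.mean_image = 'mean_func.nii.gz'
    fs_reg_wf.inputs.inputspec.source_files = ['cope01.nii.gz']
    fs_reg_wf.inputs.inputspec.target_image = fsl.Info.standard_image(
        'MNI152_T1_2mm_brain.nii.gz')
    fs_reg_wf.run()  # doctest: +SKIP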
Example n. 13
def create_wf_c3d_fsl_to_itk(map_node,
                             input_image_type=0,
                             name='create_wf_c3d_fsl_to_itk'):
    """
    Converts an FSL-format output matrix to an ITK-format (ANTS) matrix
    for use with ANTS registration tools.

    Parameters
    ----------
    name : string, optional
        Name of the workflow.

    Returns
    -------
    fsl_to_itk_conversion : nipype.pipeline.engine.Workflow

    Notes
    -----
    
    Workflow Inputs::
    
        inputspec.affine_file : string (FSL .mat file)
            Output matrix of FSL-based functional-to-anatomical registration
        inputspec.reference_file : string (nifti file)
            File of skull-stripped anatomical brain to be used in affine
            conversion
        inputspec.source_file : string (nifti file)
            Should match the input of the apply warp (in_file) unless you are
            applying the warp to a 4-d file, in which case this file should
            be a mean_functional file

    Workflow Outputs::
    
        outputspec.itk_transform : string (text file)
            Converted affine transform in ITK format usable with ANTS
    
    """

    import nipype.interfaces.c3 as c3
    from nipype.interfaces.utility import Function
    from CPAC.registration.utils import change_itk_transform_type
    from nipype.interfaces.afni import preprocess

    fsl_to_itk_conversion = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(
        fields=['affine_file', 'reference_file', 'source_file']),
                        name='inputspec')

    # converts FSL-format .mat affine xfm into ANTS-format .txt
    # .mat affine comes from Func->Anat registration

    if map_node == 0:
        fsl_reg_2_itk = pe.Node(c3.C3dAffineTool(), name='fsl_reg_2_itk')

    elif map_node == 1:
        fsl_reg_2_itk = pe.MapNode(c3.C3dAffineTool(),
                                   name='fsl_reg_2_itk_mapnode',
                                   iterfield=['source_file'])

    fsl_reg_2_itk.inputs.itk_transform = True
    fsl_reg_2_itk.inputs.fsl2ras = True

    itk_imports = ['import os']

    if map_node == 0:
        change_transform = pe.Node(util.Function(
            input_names=['input_affine_file'],
            output_names=['updated_affine_file'],
            function=change_itk_transform_type,
            imports=itk_imports),
                                   name='change_transform_type')

    elif map_node == 1:
        change_transform = pe.MapNode(util.Function(
            input_names=['input_affine_file'],
            output_names=['updated_affine_file'],
            function=change_itk_transform_type,
            imports=itk_imports),
                                      name='change_transform_type',
                                      iterfield=['input_affine_file'])

    outputspec = pe.Node(util.IdentityInterface(fields=['itk_transform']),
                         name='outputspec')

    fsl_to_itk_conversion.connect(inputspec, 'affine_file', fsl_reg_2_itk,
                                  'transform_file')

    fsl_to_itk_conversion.connect(inputspec, 'reference_file', fsl_reg_2_itk,
                                  'reference_file')

    # source_file input of the conversion must be a 3D file, so if the source
    # file is 4D (input_image_type=3), average it into a 3D file first
    if input_image_type == 0:

        fsl_to_itk_conversion.connect(inputspec, 'source_file', fsl_reg_2_itk,
                                      'source_file')

    elif input_image_type == 3:

        try:
            tstat_source = pe.Node(interface=preprocess.TStat(),
                                   name='fsl_to_itk_tcat_source')
        except AttributeError:
            from nipype.interfaces.afni import utils as afni_utils
            tstat_source = pe.Node(interface=afni_utils.TStat(),
                                   name='fsl_to_itk_tcat_source')

        tstat_source.inputs.outputtype = 'NIFTI_GZ'
        tstat_source.inputs.options = '-mean'

        fsl_to_itk_conversion.connect(inputspec, 'source_file', tstat_source,
                                      'in_file')

        fsl_to_itk_conversion.connect(tstat_source, 'out_file', fsl_reg_2_itk,
                                      'source_file')

    fsl_to_itk_conversion.connect(fsl_reg_2_itk, 'itk_transform',
                                  change_transform, 'input_affine_file')

    fsl_to_itk_conversion.connect(change_transform, 'updated_affine_file',
                                  outputspec, 'itk_transform')

    return fsl_to_itk_conversion
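
A minimal usage sketch (hypothetical file names; map_node=0 builds plain nodes rather than MapNodes):

    fsl2itk = create_wf_c3d_fsl_to_itk(map_node=0)
    fsl2itk.inputs.inputspec.affine_file = 'func_to_anat.mat'
    fsl2itk.inputs.inputspec.reference_file = 'anat_brain.nii.gz'
    fsl2itk.inputs.inputspec.source_file = 'mean_func.nii.gz'
    fsl2itk.run()  # doctest: +SKIP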
Example n. 14
def create_wf_apply_ants_warp(map_node,
                              name='create_wf_apply_ants_warp',
                              ants_threads=1):
    """
    Applies previously calculated ANTS registration transforms to input
    images. This workflow employs the antsApplyTransforms tool:

    http://stnava.github.io/ANTs/

    Parameters
    ----------
    name : string, optional
        Name of the workflow.

    Returns
    -------
    apply_ants_warp_wf : nipype.pipeline.engine.Workflow

    Notes
    -----

    Workflow Inputs::

        inputspec.input_image : string (nifti file)
            Image file of brain to be registered to reference
        inputspec.reference_image : string (nifti file)
            Image file of brain or template being used as a reference
        inputspec.transforms : list of filepaths (nifti, .mat, .txt)
            List of transforms and warps to be applied to the input image
        inputspec.dimension : integer
            Dimension value of image being registered (2, 3, or 4)
        inputspec.interpolation : string
            Type of interpolation to be used. See antsApplyTransforms
            documentation or Nipype interface documentation for options

            
    Workflow Outputs::
    
        outputspec.output_image : string (nifti file)
            Normalized output file

                 
    Workflow Graph:

        (workflow graph image omitted in this listing)

    Detailed Workflow Graph:

        (detailed workflow graph image omitted in this listing)

    """

    import nipype.interfaces.ants as ants

    apply_ants_warp_wf = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'input_image', 'reference_image', 'transforms', 'dimension',
        'input_image_type', 'interpolation'
    ]),
                        name='inputspec')

    if map_node == 0:
        apply_ants_warp = pe.Node(interface=ants.ApplyTransforms(),
                                  name='apply_ants_warp')

    elif map_node == 1:
        apply_ants_warp = pe.MapNode(interface=ants.ApplyTransforms(),
                                     name='apply_ants_warp_mapnode',
                                     iterfield=['input_image', 'transforms'])

    apply_ants_warp.inputs.out_postfix = '_antswarp'
    apply_ants_warp.interface.num_threads = ants_threads
    apply_ants_warp.interface.estimated_memory_gb = 1.5

    outputspec = pe.Node(util.IdentityInterface(fields=['output_image']),
                         name='outputspec')

    # connections from inputspec

    apply_ants_warp_wf.connect(inputspec, 'input_image', apply_ants_warp,
                               'input_image')

    apply_ants_warp_wf.connect(inputspec, 'reference_image', apply_ants_warp,
                               'reference_image')

    apply_ants_warp_wf.connect(inputspec, 'transforms', apply_ants_warp,
                               'transforms')

    apply_ants_warp_wf.connect(inputspec, 'dimension', apply_ants_warp,
                               'dimension')

    apply_ants_warp_wf.connect(inputspec, 'input_image_type', apply_ants_warp,
                               'input_image_type')

    apply_ants_warp_wf.connect(inputspec, 'interpolation', apply_ants_warp,
                               'interpolation')

    # connections to outputspec

    apply_ants_warp_wf.connect(apply_ants_warp, 'output_image', outputspec,
                               'output_image')

    return apply_ants_warp_wf
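
A minimal usage sketch (hypothetical file names; note that antsApplyTransforms treats the transform list as a stack, applying the last-listed transform first):

    warp_wf = create_wf_apply_ants_warp(map_node=0, ants_threads=2)
    warp_wf.inputs.inputspec.input_image = 'mean_func.nii.gz'
    warp_wf.inputs.inputspec.reference_image = 'template_brain.nii.gz'
    warp_wf.inputs.inputspec.transforms = ['composite_warp.h5', 'affine.txt']
    warp_wf.inputs.inputspec.dimension = 3
    warp_wf.inputs.inputspec.input_image_type = 0
    warp_wf.inputs.inputspec.interpolation = 'Linear'
    warp_wf.run()  # doctest: +SKIP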
Example n. 15
def epi_pipeline(name='susceptibility_distortion_correction_using_t1'):
    """
    This workflow allows to correct for echo-planareinduced susceptibility artifacts without fieldmap
    (e.g. ADNI Database) by elastically register DWIs to their respective baseline T1-weighted
    structural scans using an inverse consistent registration algorithm with a mutual information cost
    function (SyN algorithm). This workflow allows also a coregistration of DWIs with their respective
    baseline T1-weighted structural scans in order to latter combine tracks and cortex parcelation.
    ..  warning:: This workflow rotates the `b`-vectors'
    .. References
      .. Nir et al. (Neurobiology of Aging 2015)- Connectivity network measures predict volumetric atrophy in mild cognitive impairment

        Leow et al. (IEEE Trans Med Imaging 2007)- Statistical Properties of Jacobian Maps and the Realization of Unbiased Large Deformation Nonlinear Image Registration
    Example
    -------
    >>> epi = epi_pipeline()
    >>> epi.inputs.inputnode.DWI = 'DWI.nii'
    >>> epi.inputs.inputnode.bvec = 'bvec.txt'
    >>> epi.inputs.inputnode.T1 = 'T1.nii'
    >>> epi.run() # doctest: +SKIP
    """

    from clinica.pipelines.dwi_preprocessing_using_t1.dwi_preprocessing_using_t1_utils import (
        create_jacobian_determinant_image, change_itk_transform_type,
        expend_matrix_list, rotate_bvecs, ants_registration_syn_quick,
        ants_warp_image_multi_transform, ants_combin_transform)
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.c3 as c3

    inputnode = pe.Node(niu.IdentityInterface(fields=['T1', 'DWI', 'bvec']), name='inputnode')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    pick_ref = pe.Node(niu.Select(), name='Pick_b0')
    pick_ref.inputs.index = [0]

    flirt_b0_2_T1 = pe.Node(interface=fsl.FLIRT(dof=6), name='flirt_B0_2_T1')
    flirt_b0_2_T1.inputs.interp = "spline"
    flirt_b0_2_T1.inputs.cost = 'normmi'
    flirt_b0_2_T1.inputs.cost_func = 'normmi'

    apply_xfm = pe.Node(interface=fsl.preprocess.ApplyXFM(), name='apply_xfm')
    apply_xfm.inputs.apply_xfm = True

    expend_matrix = pe.Node(
            interface=niu.Function(
                input_names=['in_matrix', 'in_bvec'],
                output_names=['out_matrix_list'],
                function=expend_matrix_list),
            name='expend_matrix')

    rot_bvec = pe.Node(
            niu.Function(
                input_names=['in_matrix', 'in_bvec'],
                output_names=['out_file'],
                function=rotate_bvecs),
            name='Rotate_Bvec')

    antsRegistrationSyNQuick = pe.Node(
            interface=niu.Function(
                input_names=['fix_image', 'moving_image'],
                output_names=['image_warped',
                              'affine_matrix',
                              'warp',
                              'inverse_warped',
                              'inverse_warp'],
                function=ants_registration_syn_quick),
            name='antsRegistrationSyNQuick')

    c3d_flirt2ants = pe.Node(c3.C3dAffineTool(), name='fsl_reg_2_itk')
    c3d_flirt2ants.inputs.itk_transform = True
    c3d_flirt2ants.inputs.fsl2ras = True

    change_transform = pe.Node(niu.Function(
            input_names=['input_affine_file'],
            output_names=['updated_affine_file'],
            function=change_itk_transform_type),
            name='change_transform_type')

    merge_transform = pe.Node(niu.Merge(3), name='MergeTransforms')

    apply_transform = pe.MapNode(
            interface=niu.Function(
                input_names=['fix_image', 'moving_image', 'ants_warp_affine'],
                output_names=['out_warp_field', 'out_warped'],
                function=ants_combin_transform),
            iterfield=['moving_image'],
            name='warp_field')

    jacobian = pe.MapNode(interface=niu.Function(input_names=['imageDimension', 'deformationField', 'outputImage'],
                                                 output_names=['outputImage'],
                                                 function=create_jacobian_determinant_image),
                          iterfield=['deformationField'],
                          name='jacobian')

    jacobian.inputs.imageDimension = 3
    jacobian.inputs.outputImage = 'Jacobian_image.nii.gz'

    jacmult = pe.MapNode(fsl.MultiImageMaths(op_string='-mul %s'),
                         iterfield=['in_file', 'operand_files'],
                         name='ModulateDWIs')

    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')

    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')

    outputnode = pe.Node(niu.IdentityInterface(fields=['DWI_2_T1_Coregistration_matrix',
                                                       'epi_correction_deformation_field',
                                                       'epi_correction_affine_transform',
                                                       'epi_correction_image_warped',
                                                       'DWIs_epicorrected',
                                                       'warp_epi',
                                                       'out_bvec'
                                                       ]), name='outputnode')

    wf = pe.Workflow(name=name)

    wf.connect([(inputnode, split, [('DWI', 'in_file')])])
    wf.connect([(split, pick_ref, [('out_files', 'inlist')])])
    wf.connect([(pick_ref, flirt_b0_2_T1, [('out', 'in_file')])])
    wf.connect([(inputnode, flirt_b0_2_T1, [('T1', 'reference')])])
    wf.connect([(inputnode, rot_bvec, [('bvec', 'in_bvec')])])
    wf.connect([(flirt_b0_2_T1, expend_matrix, [('out_matrix_file', 'in_matrix')])])
    wf.connect([(inputnode, expend_matrix, [('bvec', 'in_bvec')])])
    wf.connect([(expend_matrix, rot_bvec, [('out_matrix_list', 'in_matrix')])])
    wf.connect([(inputnode, antsRegistrationSyNQuick, [('T1', 'fix_image')])])
    wf.connect([(flirt_b0_2_T1, antsRegistrationSyNQuick, [('out_file', 'moving_image')])])

    wf.connect([(inputnode, c3d_flirt2ants, [('T1', 'reference_file')])])
    wf.connect([(pick_ref, c3d_flirt2ants, [('out', 'source_file')])])
    wf.connect([(flirt_b0_2_T1, c3d_flirt2ants, [('out_matrix_file', 'transform_file')])])
    wf.connect([(c3d_flirt2ants, change_transform, [('itk_transform', 'input_affine_file')])])

    wf.connect([(antsRegistrationSyNQuick, merge_transform, [('warp', 'in1')])])
    wf.connect([(antsRegistrationSyNQuick, merge_transform, [('affine_matrix', 'in2')])])
    wf.connect([(change_transform, merge_transform, [('updated_affine_file', 'in3')])])
    wf.connect([(inputnode, apply_transform, [('T1', 'fix_image')])])
    wf.connect([(split, apply_transform, [('out_files', 'moving_image')])])

    wf.connect([(merge_transform, apply_transform, [('out', 'ants_warp_affine')])])
    wf.connect([(apply_transform, jacobian, [('out_warp_field', 'deformationField')])])
    wf.connect([(apply_transform, jacmult, [('out_warped', 'operand_files')])])
    wf.connect([(jacobian, jacmult, [('outputImage', 'in_file')])])
    wf.connect([(jacmult, thres, [('out_file', 'in_file')])])
    wf.connect([(thres, merge, [('out_file', 'in_files')])])

    wf.connect([(merge, outputnode, [('merged_file', 'DWIs_epicorrected')])])
    wf.connect([(flirt_b0_2_T1, outputnode, [('out_matrix_file', 'DWI_2_T1_Coregistration_matrix')])])
    wf.connect([(antsRegistrationSyNQuick, outputnode, [('warp', 'epi_correction_deformation_field'),
                                                        ('affine_matrix', 'epi_correction_affine_transform'),
                                                        ('image_warped', 'epi_correction_image_warped')])])
    wf.connect([(merge_transform, outputnode, [('out', 'warp_epi')])])
    wf.connect([(rot_bvec, outputnode, [('out_file', 'out_bvec')])])

    return wf
Example n. 16
def create_reg_workflow(name='registration'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

    ::

        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.anatomical_image : anatomical image to coregister to
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT transform
        outputspec.anat2target_transform : FLIRT+FNIRT transform
        outputspec.transformed_files : transformed files in target space
        outputspec.transformed_mean : mean image in target space

    """

    register = pe.Workflow(name=name)

    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'source_files', 'mean_image', 'anatomical_image', 'target_image',
        'target_image_brain', 'config_file'
    ]),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'func2anat_transform',
        'anat2target_transform',
        'transformed_files',
        'transformed_mean',
    ]),
                         name='outputspec')
    """
    Estimate the tissue classes from the anatomical image. But use spm's segment
    as FSL appears to be breaking.
    """

    stripper = pe.Node(fsl.BET(), name='stripper')
    register.connect(inputnode, 'anatomical_image', stripper, 'in_file')
    fast = pe.Node(fsl.FAST(), name='fast')
    register.connect(stripper, 'out_file', fast, 'in_files')
    """
    Binarize the segmentation
    """

    binarize = pe.Node(fsl.ImageMaths(op_string='-nan -thr 0.5 -bin'),
                       name='binarize')
    pickindex = lambda x, i: x[i]
    register.connect(fast, ('partial_volume_files', pickindex, 2), binarize,
                     'in_file')
    """
    Calculate rigid transform from mean image to anatomical image
    """

    mean2anat = pe.Node(fsl.FLIRT(), name='mean2anat')
    mean2anat.inputs.dof = 6
    register.connect(inputnode, 'mean_image', mean2anat, 'in_file')
    register.connect(stripper, 'out_file', mean2anat, 'reference')
    """
    Now use bbr cost function to improve the transform
    """

    mean2anatbbr = pe.Node(fsl.FLIRT(), name='mean2anatbbr')
    mean2anatbbr.inputs.dof = 6
    mean2anatbbr.inputs.cost = 'bbr'
    mean2anatbbr.inputs.schedule = os.path.join(os.getenv('FSLDIR'),
                                                'etc/flirtsch/bbr.sch')
    register.connect(inputnode, 'mean_image', mean2anatbbr, 'in_file')
    register.connect(binarize, 'out_file', mean2anatbbr, 'wm_seg')
    register.connect(inputnode, 'anatomical_image', mean2anatbbr, 'reference')
    register.connect(mean2anat, 'out_matrix_file', mean2anatbbr,
                     'in_matrix_file')
    """
    Calculate affine transform from anatomical to target
    """

    anat2target_affine = pe.Node(fsl.FLIRT(), name='anat2target_linear')
    anat2target_affine.inputs.searchr_x = [-180, 180]
    anat2target_affine.inputs.searchr_y = [-180, 180]
    anat2target_affine.inputs.searchr_z = [-180, 180]
    register.connect(stripper, 'out_file', anat2target_affine, 'in_file')
    register.connect(inputnode, 'target_image_brain', anat2target_affine,
                     'reference')
    """
    Calculate nonlinear transform from anatomical to target
    """

    anat2target_nonlinear = pe.Node(fsl.FNIRT(), name='anat2target_nonlinear')
    anat2target_nonlinear.inputs.fieldcoeff_file = True
    register.connect(anat2target_affine, 'out_matrix_file',
                     anat2target_nonlinear, 'affine_file')
    register.connect(inputnode, 'anatomical_image', anat2target_nonlinear,
                     'in_file')
    register.connect(inputnode, 'config_file', anat2target_nonlinear,
                     'config_file')
    register.connect(inputnode, 'target_image', anat2target_nonlinear,
                     'ref_file')
    """
    Transform the mean image. First to anatomical and then to target
    """

    warpmean = pe.Node(fsl.ApplyWarp(interp='spline'), name='warpmean')
    register.connect(inputnode, 'mean_image', warpmean, 'in_file')
    register.connect(mean2anatbbr, 'out_matrix_file', warpmean, 'premat')
    register.connect(inputnode, 'target_image', warpmean, 'ref_file')
    register.connect(anat2target_nonlinear, 'fieldcoeff_file', warpmean,
                     'field_file')
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = pe.MapNode(fsl.ApplyWarp(interp='spline'),
                         iterfield=['in_file'],
                         nested=True,
                         name='warpall')
    register.connect(inputnode, 'source_files', warpall, 'in_file')
    register.connect(mean2anatbbr, 'out_matrix_file', warpall, 'premat')
    register.connect(inputnode, 'target_image', warpall, 'ref_file')
    register.connect(anat2target_nonlinear, 'fieldcoeff_file', warpall,
                     'field_file')
    """
    Assign all the output files
    """

    register.connect(warpmean, 'out_file', outputnode, 'transformed_mean')
    register.connect(warpall, 'out_file', outputnode, 'transformed_files')
    register.connect(mean2anatbbr, 'out_matrix_file', outputnode,
                     'func2anat_transform')
    register.connect(anat2target_nonlinear, 'fieldcoeff_file', outputnode,
                     'anat2target_transform')

    return register
Example n. 17
def extract_dl_t1w(caps_directory,
                   tsv,
                   working_directory=None,
                   extract_method='image',
                   patch_size=50,
                   stride_size=50,
                   slice_direction=0,
                   slice_mode='single'):
    """ This is a preprocessing pipeline to convert the MRIs in nii.gz format
    into tensor versions (using pytorch format). It also prepares the
    slice-level and patch-level data from the entire MRI and save them on disk.
    This enables the training process:
        - For slice-level CNN, all slices were extracted from the entire
          MRI from three different axis. The first and last 15 slice were
          discarded due to the lack of information.
        - For patch-level CNN, the 3D patch (with specific patch size)
          were extracted by a 3D window.

    Parameters
    ----------

    caps_directory: str
      CAPS directory where the outputs of preprocessing are stored.
    tsv: str
      TSV file with the subject list (participant_id and session_id).
    extract_method: str
      Select which extraction method will be applied to the outputs:
      - 'image' to convert the complete 3D image to a PyTorch tensor,
      - 'patch' to extract 3D volumetric patches and
      - 'slice' to extract 2D slices from the image
    patch_size: int
      Size of the extracted 3D patches (only 'patch' method).
    stride_size: int
      Stride of the sliding window when extracting patches (only 'patch' method).
    slice_direction: int
      Direction along which the slices are extracted (only 'slice' method):
      - 0: Sagittal plane
      - 1: Coronal plane
      - 2: Axial plane
    slice_mode: str
      How the slices are stored (only 'slice' method):
      - single: saves the slice in a single channel,
      - rgb: saves the slice in three identical channels (red, green, blue)
    working_directory: str
      Folder containing a temporary space to save intermediate results.

    Returns
    -------
    wf: nipype.pipeline.engine.workflows.Workflow
      A nipype Workflow to control, set up, and execute the process as a
      nipype pipeline.

    """

    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    from nipype.interfaces.io import DataSink
    from nipype import config
    import tempfile
    from clinica.utils.inputs import check_caps_folder
    from clinica.utils.filemanip import get_subject_id
    from clinica.utils.participant import get_subject_session_list
    from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.nipype import fix_join
    from .T1_preparedl_utils import (extract_slices, extract_patches,
                                     save_as_pt, container_from_filename,
                                     get_data_datasink)

    T1W_LINEAR = {
        'pattern': '*space-MNI152NLin2009cSym_res-1x1x1_T1w.nii.gz',
        'description': 'T1W Image registered using T1_Linear'
    }
    T1W_LINEAR_CROPPED = {
        'pattern':
        '*space-MNI152NLin2009cSym_desc-Crop_res-1x1x1_T1w.nii.gz',
        'description':
        'T1W Image registered using T1_Linear and cropped '
        '(matrix size 169×208×179, 1 mm isotropic voxels)'
    }

    if working_directory is None:
        working_directory = tempfile.mkdtemp()

    check_caps_folder(caps_directory)
    is_bids_dir = False
    use_session_tsv = False

    sessions, subjects = get_subject_session_list(caps_directory, tsv,
                                                  is_bids_dir, use_session_tsv,
                                                  working_directory)

    # Use hash instead of parameters for iterables folder names
    # Otherwise path will be too long and generate OSError
    cfg = dict(execution={'parameterize_dirs': False})
    config.update_config(cfg)

    # Inputs from t1_linear folder
    # ========================
    # T1w file:
    try:
        t1w_files = clinica_file_reader(subjects, sessions, caps_directory,
                                        T1W_LINEAR_CROPPED)
    except ClinicaException as e:
        err = 'Clinica faced error(s) while trying to read files in your CAPS directory.\n' + str(
            e)
        raise ClinicaBIDSError(err)

    def get_input_fields():
        """"Specify the list of possible inputs of this pipelines.
        Returns:
        A list of (string) input fields name.
        """
        return ['t1w']

    # Read node
    # ----------------------
    read_node = npe.Node(
        name="ReadingFiles",
        iterables=[
            ('t1w', t1w_files),
        ],
        synchronize=True,
        interface=nutil.IdentityInterface(fields=get_input_fields()))

    # Get subject ID node
    # ----------------------
    image_id_node = npe.Node(interface=nutil.Function(
        input_names=['bids_or_caps_file'],
        output_names=['image_id'],
        function=get_subject_id),
                             name='ImageID')

    # The processing nodes

    # Node to save MRI in nii.gz format into pytorch .pt format
    # ----------------------
    save_as_pt = npe.MapNode(name='save_as_pt',
                             iterfield=['input_img'],
                             interface=nutil.Function(
                                 function=save_as_pt,
                                 input_names=['input_img'],
                                 output_names=['output_file']))

    # Extract slices node (options: 3 directions, mode)
    # ----------------------
    extract_slices = npe.MapNode(
        name='extract_slices',
        iterfield=['input_tensor'],
        interface=nutil.Function(
            function=extract_slices,
            input_names=['input_tensor', 'slice_direction', 'slice_mode'],
            output_names=['output_file_rgb', 'output_file_original']))

    extract_slices.inputs.slice_direction = slice_direction
    extract_slices.inputs.slice_mode = slice_mode

    # Extract patches node (options: patch size and stride size)
    # ----------------------
    extract_patches = npe.MapNode(
        name='extract_patches',
        iterfield=['input_tensor'],
        interface=nutil.Function(
            function=extract_patches,
            input_names=['input_tensor', 'patch_size', 'stride_size'],
            output_names=['output_patch']))

    extract_patches.inputs.patch_size = patch_size
    extract_patches.inputs.stride_size = stride_size

    # Output node
    # ----------------------
    outputnode = npe.Node(nutil.IdentityInterface(fields=['preprocessed_T1']),
                          name='outputnode')

    # Find container path from t1w filename
    # ----------------------
    container_path = npe.Node(nutil.Function(
        input_names=['bids_or_caps_filename'],
        output_names=['container'],
        function=container_from_filename),
                              name='ContainerPath')

    # Write node
    # ----------------------
    write_node = npe.Node(name="WriteCaps", interface=DataSink())
    write_node.inputs.base_directory = caps_directory
    write_node.inputs.parameterization = False

    subfolder = 'image_based'
    wf = npe.Workflow(name='dl_prepare_data', base_dir=working_directory)

    # Connections
    # ----------------------
    wf.connect([
        (read_node, image_id_node, [('t1w', 'bids_or_caps_file')]),
        (read_node, container_path, [('t1w', 'bids_or_caps_filename')]),
        (read_node, save_as_pt, [('t1w', 'input_img')]),
        (image_id_node, write_node, [('image_id', '@image_id')]),
        # Connect to DataSink
    ])

    if extract_method == 'slice':
        subfolder = 'slice_based'
        wf.connect([(save_as_pt, extract_slices, [('output_file',
                                                   'input_tensor')]),
                    (extract_slices, write_node, [('output_file_rgb',
                                                   '@slices_rgb_T1')]),
                    (extract_slices, write_node, [('output_file_original',
                                                   '@slices_original_T1')])])
    elif extract_method == 'patch':
        subfolder = 'patch_based'
        wf.connect([
            (save_as_pt, extract_patches, [('output_file', 'input_tensor')]),
            (extract_patches, write_node, [('output_patch', '@patches_T1')])
        ])
    else:
        wf.connect([(save_as_pt, write_node, [('output_file',
                                               '@output_pt_file')])])

    wf.connect([(container_path, write_node,
                 [(('container', fix_join, 'deeplearning_prepare_data',
                    subfolder, 't1_linear'), 'container')])])

    return wf
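The Function nodes above wrap helpers (save_as_pt, extract_slices, extract_patches, get_subject_id, container_from_filename, fix_join) that are imported from Clinica's utilities and not shown in this snippet. Purely as an illustration, here is a minimal sketch of what a save_as_pt helper could look like, assuming nibabel and PyTorch are available; this is not Clinica's exact implementation. The imports live inside the function because nipype Function interfaces execute in a fresh namespace.

def save_as_pt(input_img):
    """Convert a nii.gz image into a PyTorch tensor saved as a .pt file (sketch)."""
    import os
    import nibabel as nib
    import torch

    # Load the NIfTI data and add a channel dimension for downstream CNNs.
    data = nib.load(input_img).get_fdata()
    tensor = torch.from_numpy(data).unsqueeze(0).float()

    output_file = os.path.join(
        os.getcwd(), os.path.basename(input_img).split('.nii')[0] + '.pt')
    torch.save(tensor, output_file)
    return output_file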
Example n. 18
def create_susan_smooth(name="susan_smooth", separate_masks=True):
    """Create a SUSAN smoothing workflow

    Parameters
    ----------

    ::

        name : name of workflow (default: susan_smooth)
        separate_masks : separate masks for each run

    Inputs::

        inputnode.in_files : functional runs (filename or list of filenames)
        inputnode.fwhm : fwhm for smoothing with SUSAN
        inputnode.mask_file : mask used for estimating SUSAN thresholds (but not for smoothing)

    Outputs::

        outputnode.smoothed_files : functional runs (filename or list of filenames)

    Example
    -------

    >>> smooth = create_susan_smooth()
    >>> smooth.inputs.inputnode.in_files = 'f3.nii'
    >>> smooth.inputs.inputnode.fwhm = 5
    >>> smooth.inputs.inputnode.mask_file = 'mask.nii'
    >>> smooth.run() # doctest: +SKIP

    """

    susan_smooth = pe.Workflow(name=name)
    """
    Set up a node to define all inputs required for the preprocessing workflow

    """

    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=['in_files', 'fwhm', 'mask_file']),
                        name='inputnode')
    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    smooth = pe.MapNode(interface=fsl.SUSAN(),
                        iterfield=['in_file', 'brightness_threshold', 'usans'],
                        name='smooth')
    """
    Determine the median value of the functional runs using the mask
    """

    if separate_masks:
        median = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                            iterfield=['in_file', 'mask_file'],
                            name='median')
    else:
        median = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                            iterfield=['in_file'],
                            name='median')
    susan_smooth.connect(inputnode, 'in_files', median, 'in_file')
    susan_smooth.connect(inputnode, 'mask_file', median, 'mask_file')
    """
    Mask the motion corrected functional runs with the dilated mask
    """

    if separate_masks:
        mask = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                   op_string='-mas'),
                          iterfield=['in_file', 'in_file2'],
                          name='mask')
    else:
        mask = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                   op_string='-mas'),
                          iterfield=['in_file'],
                          name='mask')
    susan_smooth.connect(inputnode, 'in_files', mask, 'in_file')
    susan_smooth.connect(inputnode, 'mask_file', mask, 'in_file2')
    """
    Determine the mean image from each functional run
    """

    meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                   suffix='_mean'),
                          iterfield=['in_file'],
                          name='meanfunc2')
    susan_smooth.connect(mask, 'out_file', meanfunc, 'in_file')
    """
    Merge the median values with the mean functional images into a coupled list
    """

    merge = pe.Node(interface=util.Merge(2, axis='hstack'), name='merge')
    susan_smooth.connect(meanfunc, 'out_file', merge, 'in1')
    susan_smooth.connect(median, 'out_stat', merge, 'in2')
    """
    Define a function to get the brightness threshold for SUSAN
    """
    susan_smooth.connect(inputnode, 'fwhm', smooth, 'fwhm')
    susan_smooth.connect(inputnode, 'in_files', smooth, 'in_file')
    susan_smooth.connect(median, ('out_stat', getbtthresh), smooth,
                         'brightness_threshold')
    susan_smooth.connect(merge, ('out', getusans), smooth, 'usans')

    outputnode = pe.Node(
        interface=util.IdentityInterface(fields=['smoothed_files']),
        name='outputnode')

    susan_smooth.connect(smooth, 'smoothed_file', outputnode, 'smoothed_files')

    return susan_smooth
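The connections above rely on two helpers, getbtthresh and getusans, that are defined elsewhere in the module. A sketch consistent with how they are used here; these are assumptions matching the docstring's 75%-of-median rule, not verbatim source:

def getbtthresh(medianvals):
    # SUSAN brightness threshold: 75% of the median value of each run.
    return [0.75 * val for val in medianvals]


def getusans(inlist):
    # Each element of inlist is a [mean_image, median] pair (from the hstack
    # merge); SUSAN expects a list of (filename, threshold) tuples per run.
    return [[tuple([val[0], 0.75 * val[1]])] for val in inlist]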
Example n. 19
def create_tessellation_flow(name='tessellate', out_format='stl'):
    """Tessellates the input subject's aseg.mgz volume and returns
    the surfaces for each region in stereolithographic (.stl) format

    Example
    -------
    >>> from nipype.workflows.smri.freesurfer import create_tessellation_flow
    >>> tessflow = create_tessellation_flow()
    >>> tessflow.inputs.inputspec.subject_id = 'subj1'
    >>> tessflow.inputs.inputspec.subjects_dir = '.'
    >>> tessflow.inputs.inputspec.lookup_file = 'FreeSurferColorLUT.txt' # doctest: +SKIP
    >>> tessflow.run()  # doctest: +SKIP


    Inputs::

           inputspec.subject_id : freesurfer subject id
           inputspec.subjects_dir : freesurfer subjects directory
           inputspec.lookup_file : lookup file from freesurfer directory

    Outputs::

           outputspec.meshes : output region meshes in (by default) stereolithographic (.stl) format
    """
    """
    Initialize the workflow
    """

    tessflow = pe.Workflow(name=name)
    """
    Define the inputs to the workflow.
    """

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['subject_id', 'subjects_dir', 'lookup_file']),
                        name='inputspec')
    """
    Define all the nodes of the workflow:

      fssource: used to retrieve aseg.mgz
      mri_convert : converts aseg.mgz to aseg.nii
      tessellate : tessellates regions in aseg.mgz
      surfconvert : converts regions to stereolithographic (.stl) format
      smoother: smooths the tessellated regions

    """

    fssource = pe.Node(nio.FreeSurferSource(), name='fssource')
    volconvert = pe.Node(fs.MRIConvert(out_type='nii'), name='volconvert')
    tessellate = pe.MapNode(fs.MRIMarchingCubes(),
                            iterfield=['label_value', 'out_file'],
                            name='tessellate')
    surfconvert = pe.MapNode(fs.MRIsConvert(out_datatype='stl'),
                             iterfield=['in_file'],
                             name='surfconvert')
    smoother = pe.MapNode(mf.MeshFix(),
                          iterfield=['in_file1'],
                          name='smoother')
    if out_format == 'gii':
        stl_to_gifti = pe.MapNode(fs.MRIsConvert(out_datatype=out_format),
                                  iterfield=['in_file'],
                                  name='stl_to_gifti')
    smoother.inputs.save_as_stl = True
    smoother.inputs.laplacian_smoothing_steps = 1

    region_list_from_volume_interface = Function(
        input_names=["in_file"],
        output_names=["region_list"],
        function=region_list_from_volume)

    id_list_from_lookup_table_interface = Function(
        input_names=["lookup_file", "region_list"],
        output_names=["id_list"],
        function=id_list_from_lookup_table)

    region_list_from_volume_node = pe.Node(
        interface=region_list_from_volume_interface,
        name='region_list_from_volume_node')
    id_list_from_lookup_table_node = pe.Node(
        interface=id_list_from_lookup_table_interface,
        name='id_list_from_lookup_table_node')
    """
    Connect the nodes
    """

    tessflow.connect([
        (inputnode, fssource, [('subject_id', 'subject_id'),
                               ('subjects_dir', 'subjects_dir')]),
        (fssource, volconvert, [('aseg', 'in_file')]),
        (volconvert, region_list_from_volume_node, [('out_file', 'in_file')]),
        (region_list_from_volume_node, tessellate, [('region_list',
                                                     'label_value')]),
        (region_list_from_volume_node, id_list_from_lookup_table_node,
         [('region_list', 'region_list')]),
        (inputnode, id_list_from_lookup_table_node, [('lookup_file',
                                                      'lookup_file')]),
        (id_list_from_lookup_table_node, tessellate, [('id_list', 'out_file')
                                                      ]),
        (fssource, tessellate, [('aseg', 'in_file')]),
        (tessellate, surfconvert, [('surface', 'in_file')]),
        (surfconvert, smoother, [('converted', 'in_file1')]),
    ])
    """
    Set up an outputnode that exposes the relevant outputs of the workflow.
    """

    outputnode = pe.Node(niu.IdentityInterface(fields=["meshes"]),
                         name="outputspec")

    if out_format == 'gii':
        tessflow.connect([
            (smoother, stl_to_gifti, [("mesh_file", "in_file")]),
        ])
        tessflow.connect([
            (stl_to_gifti, outputnode, [("converted", "meshes")]),
        ])
    else:
        tessflow.connect([
            (smoother, outputnode, [("mesh_file", "meshes")]),
        ])
    return tessflow
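The tessellation flow depends on region_list_from_volume and id_list_from_lookup_table, which are not shown in the snippet. A plausible sketch of both, under the assumption that the lookup file follows the FreeSurferColorLUT.txt layout ("<index> <name> <R> <G> <B> <A>"); the real implementations may differ:

def region_list_from_volume(in_file):
    # Return the sorted non-zero integer labels present in the segmentation.
    import nibabel as nb
    import numpy as np
    data = nb.load(in_file).get_fdata().astype(int)
    return [int(label) for label in np.unique(data) if label != 0]


def id_list_from_lookup_table(lookup_file, region_list):
    # Map each label value to a per-region output name using the lookup table.
    lut = {}
    with open(lookup_file) as f:
        for line in f:
            if line.strip() and not line.startswith('#'):
                fields = line.split()
                lut[int(fields[0])] = fields[1]
    return ['%d_%s' % (label, lut.get(label, 'unknown')) for label in region_list]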
Example n. 20
def create_parallelfeat_preproc(name='featpreproc', highpass=True):
    """Preprocess each run with FSL independently of the others

    Parameters
    ----------

    ::

      name : name of workflow (default: featpreproc)
      highpass : boolean (default: True)

    Inputs::

        inputspec.func : functional runs (filename or list of filenames)
        inputspec.fwhm : fwhm for smoothing with SUSAN
        inputspec.highpass : HWHM in TRs (if created with highpass=True)

    Outputs::

        outputspec.reference : volume to which runs are realigned
        outputspec.motion_parameters : motion correction parameters
        outputspec.realigned_files : motion corrected files
        outputspec.motion_plots : plots of motion correction parameters
        outputspec.mask : mask file used to mask the brain
        outputspec.smoothed_files : smoothed functional data
        outputspec.highpassed_files : highpassed functional data (if highpass=True)
        outputspec.mean : mean file

    Example
    -------

    >>> preproc = create_parallelfeat_preproc()
    >>> preproc.inputs.inputspec.func = ['f3.nii', 'f5.nii']
    >>> preproc.inputs.inputspec.fwhm = 5
    >>> preproc.inputs.inputspec.highpass = 128./(2*2.5)
    >>> preproc.base_dir = '/tmp'
    >>> preproc.run() # doctest: +SKIP

    >>> preproc = create_parallelfeat_preproc(highpass=False)
    >>> preproc.inputs.inputspec.func = 'f3.nii'
    >>> preproc.inputs.inputspec.fwhm = 5
    >>> preproc.base_dir = '/tmp'
    >>> preproc.run() # doctest: +SKIP
    """

    featpreproc = pe.Workflow(name=name)
    """
    Set up a node to define all inputs required for the preprocessing workflow

    """

    if highpass:
        inputnode = pe.Node(interface=util.IdentityInterface(
            fields=['func', 'fwhm', 'highpass']),
                            name='inputspec')
        outputnode = pe.Node(interface=util.IdentityInterface(fields=[
            'reference', 'motion_parameters', 'realigned_files',
            'motion_plots', 'mask', 'smoothed_files', 'highpassed_files',
            'mean'
        ]),
                             name='outputspec')
    else:
        inputnode = pe.Node(
            interface=util.IdentityInterface(fields=['func', 'fwhm']),
            name='inputspec')
        outputnode = pe.Node(interface=util.IdentityInterface(fields=[
            'reference', 'motion_parameters', 'realigned_files',
            'motion_plots', 'mask', 'smoothed_files', 'mean'
        ]),
                             name='outputspec')
    """
    Set up a node to define outputs for the preprocessing workflow

    """
    """
    Convert functional images to float representation. Since there can
    be more than one functional run we use a MapNode to convert each
    run.
    """

    img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float',
                                                    op_string='',
                                                    suffix='_dtype'),
                           iterfield=['in_file'],
                           name='img2float')
    featpreproc.connect(inputnode, 'func', img2float, 'in_file')
    """
    Extract the middle volume of each run as the reference
    """

    extract_ref = pe.MapNode(interface=fsl.ExtractROI(t_size=1),
                             iterfield=['in_file', 't_min'],
                             name='extractref')

    featpreproc.connect(img2float, 'out_file', extract_ref, 'in_file')
    featpreproc.connect(img2float, ('out_file', pickmiddle), extract_ref,
                        't_min')
    featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference')
    """
    Realign each functional run to its extracted reference volume
    """

    motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats=True,
                                                      save_plots=True),
                                name='realign',
                                iterfield=['in_file', 'ref_file'])
    featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file')
    featpreproc.connect(extract_ref, 'roi_file', motion_correct, 'ref_file')
    featpreproc.connect(motion_correct, 'par_file', outputnode,
                        'motion_parameters')
    featpreproc.connect(motion_correct, 'out_file', outputnode,
                        'realigned_files')
    """
    Plot the estimated motion parameters
    """

    plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'),
                             name='plot_motion',
                             iterfield=['in_file'])
    plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
    featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file')
    featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots')
    """
    Extract the mean volume of each functional run
    """

    meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                   suffix='_mean'),
                          iterfield=['in_file'],
                          name='meanfunc')
    featpreproc.connect(motion_correct, 'out_file', meanfunc, 'in_file')
    """
    Strip the skull from the mean functional to generate a mask
    """

    meanfuncmask = pe.MapNode(interface=fsl.BET(mask=True,
                                                no_output=True,
                                                frac=0.3),
                              iterfield=['in_file'],
                              name='meanfuncmask')
    featpreproc.connect(meanfunc, 'out_file', meanfuncmask, 'in_file')
    """
    Mask the functional runs with the extracted mask
    """

    maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet',
                                                   op_string='-mas'),
                          iterfield=['in_file', 'in_file2'],
                          name='maskfunc')
    featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file')
    featpreproc.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2')
    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """

    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    featpreproc.connect(maskfunc, 'out_file', getthresh, 'in_file')
    """
    Threshold each run of the functional data at 10% of the 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')
    featpreproc.connect(maskfunc, 'out_file', threshold, 'in_file')
    """
    Define a function to get 10% of the intensity
    """

    featpreproc.connect(getthresh, ('out_stat', getthreshop), threshold,
                        'op_string')
    """
    Determine the median value of the functional runs using the mask
    """

    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file')
    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')
    """
    Dilate the mask
    """

    dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                     op_string='-dilF'),
                            iterfield=['in_file'],
                            name='dilatemask')
    featpreproc.connect(threshold, 'out_file', dilatemask, 'in_file')
    featpreproc.connect(dilatemask, 'out_file', outputnode, 'mask')
    """
    Mask the motion corrected functional runs with the dilated mask
    """

    maskfunc2 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc2')
    featpreproc.connect(motion_correct, 'out_file', maskfunc2, 'in_file')
    featpreproc.connect(dilatemask, 'out_file', maskfunc2, 'in_file2')
    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    smooth = create_susan_smooth()

    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')
    featpreproc.connect(maskfunc2, 'out_file', smooth, 'inputnode.in_files')
    featpreproc.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file')
    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3,
                        'in_file')

    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2), name='concat')
    featpreproc.connect(maskfunc2, ('out_file', tolist), concatnode, 'in1')
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')
    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files')
    """
    Scale each run so that its median value is set to 10000
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')
    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale,
                        'op_string')
    """
    Perform temporal highpass filtering on the data
    """

    if highpass:
        highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                              iterfield=['in_file'],
                              name='highpass')
        featpreproc.connect(inputnode, ('highpass', highpass_operand),
                            highpass, 'op_string')
        featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')
        featpreproc.connect(highpass, 'out_file', outputnode,
                            'highpassed_files')
    """
    Generate a mean functional image from each run
    """

    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc3')
    if highpass:
        featpreproc.connect(highpass, 'out_file', meanfunc3, 'in_file')
    else:
        featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')

    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean')

    return featpreproc
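create_parallelfeat_preproc references several small helpers (pickmiddle, getthreshop, getmeanscale, chooseindex, tolist, highpass_operand) defined elsewhere in nipype's FSL preprocessing module. Sketches consistent with how they are used above; treat them as assumptions rather than verbatim source:

def pickmiddle(files):
    # Index of the middle volume of each run, used as the realignment target.
    from nibabel import load
    import numpy as np
    return [int(np.ceil(load(f).shape[3] / 2)) for f in files]


def getthreshop(thresh):
    # fslmaths op_string thresholding at 10% of the 98th percentile per run.
    return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]


def getmeanscale(medianvals):
    # Scaling factor that maps each run's median intensity to 10000.
    return ['-mul %.10f' % (10000. / val) for val in medianvals]


def chooseindex(fwhm):
    # Pick unsmoothed (index 0) or smoothed (index 1) data from the merge.
    return [0] if fwhm < 1 else [1]


def tolist(x):
    return [x]


def highpass_operand(x):
    # fslmaths temporal filter: highpass sigma in volumes, lowpass disabled.
    return '-bptf %.10f -1' % x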
Example n. 21
preproc = create_featreg_preproc(whichvol='first')

modelfit = create_modelfit_workflow()

fixed_fx = create_fixed_effects_flow()
"""
Add artifact detection and model specification nodes between the preprocessing
and modelfitting workflows.
"""

art = pe.MapNode(
    interface=ra.ArtifactDetect(use_differences=[True, False],
                                use_norm=True,
                                norm_threshold=1,
                                zintensity_threshold=3,
                                parameter_source='FSL',
                                mask_type='file'),
    iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
    name="art")

modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")

level1_workflow.connect([
    (preproc, art, [('outputspec.motion_parameters', 'realignment_parameters'),
                    ('outputspec.realigned_files', 'realigned_files'),
                    ('outputspec.mask', 'mask_file')]),
    (preproc, modelspec, [('outputspec.highpassed_files', 'functional_runs'),
                          ('outputspec.motion_parameters',
                           'realignment_parameters')]),
    (art, modelspec, [('outlier_files', 'outlier_files')]),
])
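The connections above assume a surrounding level1_workflow that is created earlier in the original script; a minimal stand-in would be:

import nipype.pipeline.engine as pe

level1_workflow = pe.Workflow(name='level1flow')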
Example n. 22
def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

    ::

        name : name of workflow (default: preproc)
        highpass : boolean (default: True)
        whichvol : which volume of the first run to register to ('first', 'middle', 'mean')

    Inputs::

        inputspec.func : functional runs (filename or list of filenames)
        inputspec.fwhm : fwhm for smoothing with SUSAN
        inputspec.highpass : HWHM in TRs (if created with highpass=True)
        inputspec.subject_id : freesurfer subject id
        inputspec.subjects_dir : freesurfer subjects dir

    Outputs::

        outputspec.reference : volume to which runs are realigned
        outputspec.motion_parameters : motion correction parameters
        outputspec.realigned_files : motion corrected files
        outputspec.motion_plots : plots of motion correction parameters
        outputspec.mask_file : mask file used to mask the brain
        outputspec.smoothed_files : smoothed functional data
        outputspec.highpassed_files : highpassed functional data (if highpass=True)
        outputspec.reg_file : bbregister registration files
        outputspec.reg_cost : bbregister registration cost files

    Example
    -------

    >>> preproc = create_fsl_fs_preproc(whichvol='first')
    >>> preproc.inputs.inputspec.highpass = 128./(2*2.5)
    >>> preproc.inputs.inputspec.func = ['f3.nii', 'f5.nii']
    >>> preproc.inputs.inputspec.subjects_dir = '.'
    >>> preproc.inputs.inputspec.subject_id = 's1'
    >>> preproc.inputs.inputspec.fwhm = 6
    >>> preproc.run() # doctest: +SKIP
    """

    featpreproc = pe.Workflow(name=name)
    """
    Set up a node to define all inputs required for the preprocessing workflow

    """

    if highpass:
        inputnode = pe.Node(interface=util.IdentityInterface(
            fields=['func', 'fwhm', 'subject_id', 'subjects_dir', 'highpass']),
                            name='inputspec')
        outputnode = pe.Node(interface=util.IdentityInterface(fields=[
            'reference', 'motion_parameters', 'realigned_files',
            'motion_plots', 'mask_file', 'smoothed_files', 'highpassed_files',
            'reg_file', 'reg_cost'
        ]),
                             name='outputspec')
    else:
        inputnode = pe.Node(interface=util.IdentityInterface(
            fields=['func', 'fwhm', 'subject_id', 'subjects_dir']),
                            name='inputspec')
        outputnode = pe.Node(interface=util.IdentityInterface(fields=[
            'reference', 'motion_parameters', 'realigned_files',
            'motion_plots', 'mask_file', 'smoothed_files', 'reg_file',
            'reg_cost'
        ]),
                             name='outputspec')
    """
    Set up a node to define outputs for the preprocessing workflow

    """
    """
    Convert functional images to float representation. Since there can
    be more than one functional run we use a MapNode to convert each
    run.
    """

    img2float = pe.MapNode(interface=fsl.ImageMaths(out_data_type='float',
                                                    op_string='',
                                                    suffix='_dtype'),
                           iterfield=['in_file'],
                           name='img2float')
    featpreproc.connect(inputnode, 'func', img2float, 'in_file')
    """
    Extract the chosen reference volume (whichvol) from the first run
    """

    if whichvol != 'mean':
        extract_ref = pe.Node(interface=fsl.ExtractROI(t_size=1),
                              iterfield=['in_file'],
                              name='extractref')
        featpreproc.connect(img2float, ('out_file', pickfirst), extract_ref,
                            'in_file')
        featpreproc.connect(img2float, ('out_file', pickvol, 0, whichvol),
                            extract_ref, 't_min')
        featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference')
    """
    Realign the functional runs to the reference volume
    """

    motion_correct = pe.MapNode(interface=fsl.MCFLIRT(save_mats=True,
                                                      save_plots=True,
                                                      interpolation='sinc'),
                                name='realign',
                                iterfield=['in_file'])
    featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file')
    if whichvol != 'mean':
        featpreproc.connect(extract_ref, 'roi_file', motion_correct,
                            'ref_file')
    else:
        motion_correct.inputs.mean_vol = True
        featpreproc.connect(motion_correct, 'mean_img', outputnode,
                            'reference')

    featpreproc.connect(motion_correct, 'par_file', outputnode,
                        'motion_parameters')
    featpreproc.connect(motion_correct, 'out_file', outputnode,
                        'realigned_files')
    """
    Plot the estimated motion parameters
    """

    plot_motion = pe.MapNode(interface=fsl.PlotMotionParams(in_source='fsl'),
                             name='plot_motion',
                             iterfield=['in_file'])
    plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
    featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file')
    featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots')
    """Get the mask from subject for each run
    """

    maskflow = create_getmask_flow()
    featpreproc.connect([(inputnode, maskflow,
                          [('subject_id', 'inputspec.subject_id'),
                           ('subjects_dir', 'inputspec.subjects_dir')])])
    maskflow.inputs.inputspec.contrast_type = 't2'
    if whichvol != 'mean':
        featpreproc.connect(extract_ref, 'roi_file', maskflow,
                            'inputspec.source_file')
    else:
        featpreproc.connect(motion_correct, ('mean_img', pickfirst), maskflow,
                            'inputspec.source_file')
    """
    Mask the functional runs with the extracted mask
    """

    maskfunc = pe.MapNode(interface=fsl.ImageMaths(suffix='_bet',
                                                   op_string='-mas'),
                          iterfield=['in_file'],
                          name='maskfunc')
    featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file')
    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst),
                        maskfunc, 'in_file2')
    """
    Smooth each run using SUSAN with the brightness threshold set to 75%
    of the median value for each run and a mask constituting the mean
    functional
    """

    smooth = create_susan_smooth(separate_masks=False)

    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')
    featpreproc.connect(maskfunc, 'out_file', smooth, 'inputnode.in_files')
    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), smooth,
                        'inputnode.mask_file')
    """
    Mask the smoothed data with the brain mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file'],
                           name='maskfunc3')
    featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3,
                        'in_file')
    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst),
                        maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2), name='concat')
    featpreproc.connect(maskfunc, ('out_file', tolist), concatnode, 'in1')
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')
    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files')
    """
    Scale each run so that its median value is set to 10000
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')
    """
    Determine the median value of the functional runs using the mask
    """

    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file'],
                           name='medianval')
    featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file')
    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst),
                        medianval, 'mask_file')
    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale,
                        'op_string')
    """
    Perform temporal highpass filtering on the data
    """

    if highpass:
        highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                              iterfield=['in_file'],
                              name='highpass')
        featpreproc.connect(inputnode, ('highpass', highpass_operand),
                            highpass, 'op_string')
        featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')
        featpreproc.connect(highpass, 'out_file', outputnode,
                            'highpassed_files')

    featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst),
                        outputnode, 'mask_file')
    featpreproc.connect(maskflow, 'outputspec.reg_file', outputnode,
                        'reg_file')
    featpreproc.connect(maskflow, 'outputspec.reg_cost', outputnode,
                        'reg_cost')

    return featpreproc
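This workflow uses pickfirst and pickvol, which are defined elsewhere in the module. A sketch consistent with their use here (an assumption, not verbatim source):

def pickfirst(files):
    # Return the first file when given a list, otherwise pass through.
    if isinstance(files, list):
        return files[0]
    return files


def pickvol(filenames, fileidx, which):
    # Index of the reference volume ('first' or 'middle') within one run.
    from nibabel import load
    import numpy as np
    if which.lower() == 'first':
        return 0
    if which.lower() == 'middle':
        return int(np.ceil(load(filenames[fileidx]).shape[3] / 2))
    raise ValueError('unknown value for volume selection: %s' % which)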
Example n. 23
def CreateCorrectionWorkflow(WFname):

    ###### UTILITY FUNCTIONS #######
    #\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\
    # remove the skull from the T2 volume
    def ExtractBRAINFromHead(RawScan, BrainLabels):
        import os
        import SimpleITK as sitk
        # Remove skull from the head scan
        assert os.path.exists(RawScan), "File not found: %s" % RawScan
        assert os.path.exists(BrainLabels), "File not found: %s" % BrainLabels
        headImage = sitk.ReadImage(RawScan)
        labelsMap = sitk.ReadImage(BrainLabels)
        label_mask = labelsMap>0
        brainImage = sitk.Cast(headImage,sitk.sitkInt16) * sitk.Cast(label_mask,sitk.sitkInt16)
        outputVolume = os.path.realpath('T2Stripped.nrrd')
        sitk.WriteImage(brainImage, outputVolume)
        return outputVolume

    def MakeResamplerInFileList(inputT2, inputLabelMap):
        imagesList = [inputT2, inputLabelMap]
        return imagesList

    # This function helps to pick the desired output from the output list
    def pickFromList(inlist,item):
        return inlist[item]

    # Create registration mask for ANTs from resampled label map image
    def CreateAntsRegistrationMask(brainMask):
        import os
        import SimpleITK as sitk
        assert os.path.exists(brainMask), "File not found: %s" % brainMask
        labelsMap = sitk.ReadImage(brainMask)
        label_mask = labelsMap>0
        # dilate the label mask
        dilateFilter = sitk.BinaryDilateImageFilter()
        dilateFilter.SetKernelRadius(12)
        dilated_mask = dilateFilter.Execute( label_mask )
        regMask = dilated_mask
        registrationMask = os.path.realpath('registrationMask.nrrd')
        sitk.WriteImage(regMask, registrationMask)
        return registrationMask

    # Save direction cosine for the input volume
    def SaveDirectionCosineToMatrix(inputVolume):
        import os
        import SimpleITK as sitk
        assert os.path.exists(inputVolume), "File not found: %s" % inputVolume
        t2 = sitk.ReadImage(inputVolume)
        directionCosine = t2.GetDirection()
        return directionCosine

    def MakeForceDCFilesList(inputB0, inputT2, inputLabelMap):
        import os
        assert os.path.exists(inputB0), "File not found: %s" % inputB0
        assert os.path.exists(inputT2), "File not found: %s" % inputT2
        assert os.path.exists(inputLabelMap), "File not found: %s" % inputLabelMap
        imagesList = [inputB0, inputT2, inputLabelMap]
        return imagesList

    # Force DC to ID
    def ForceDCtoID(inputVolume):
        import os
        import SimpleITK as sitk
        inImage = sitk.ReadImage(inputVolume)
        inImage.SetDirection((1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
        outputVolume = os.path.realpath('IDDC_'+ os.path.basename(inputVolume))
        sitk.WriteImage(inImage, outputVolume)
        return outputVolume

    def RestoreDCFromSavedMatrix(inputVolume, inputDirectionCosine):
        import os
        import SimpleITK as sitk
        inImage = sitk.ReadImage(inputVolume)
        inImage.SetDirection(inputDirectionCosine)
        outputVolume = os.path.realpath('CorrectedDWI.nrrd')
        sitk.WriteImage(inImage, outputVolume)
        return outputVolume

    def GetRigidTransformInverse(inputTransform):
        import os
        import SimpleITK as sitk
        inputTx = sitk.ReadTransform(inputTransform)
        versorRigidTx = sitk.VersorRigid3DTransform()
        versorRigidTx.SetFixedParameters(inputTx.GetFixedParameters())
        versorRigidTx.SetParameters(inputTx.GetParameters())
        invTx = versorRigidTx.GetInverse()
        inverseTransform = os.path.realpath('Inverse_'+ os.path.basename(inputTransform))
        sitk.WriteTransform(invTx, inverseTransform)
        return inverseTransform
    #################################
    #\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/
    CorrectionWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['T2Volume', 'DWIVolume','LabelMapVolume']),
                         name='inputsSpec')

    outputsSpec = pe.Node(interface=IdentityInterface(fields=['CorrectedDWI','CorrectedDWI_in_T2Space','DWIBrainMask']),
                          name='outputsSpec')

    # Step0: remove the skull from the T2 volume
    ExtractBRAINFromHeadNode = pe.Node(interface=Function(function = ExtractBRAINFromHead,
                                                          input_names=['RawScan','BrainLabels'],
                                                          output_names=['outputVolume']),
                                       name="ExtractBRAINFromHead")

    CorrectionWF.connect(inputsSpec, 'T2Volume', ExtractBRAINFromHeadNode, 'RawScan')
    CorrectionWF.connect(inputsSpec, 'LabelMapVolume', ExtractBRAINFromHeadNode, 'BrainLabels')

    # Step1: extract B0 from DWI volume
    EXTRACT_B0 = pe.Node(interface=extractNrrdVectorIndex(),name="EXTRACT_B0")
    EXTRACT_B0.inputs.vectorIndex = 0
    EXTRACT_B0.inputs.outputVolume = 'B0_Image.nrrd'
    CorrectionWF.connect(inputsSpec,'DWIVolume',EXTRACT_B0,'inputVolume')

    # Step2: Register T2 to B0 space using BRAINSFit
    BFit_T2toB0 = pe.Node(interface=BRAINSFit(), name="BFit_T2toB0")
    BFit_T2toB0.inputs.costMetric = "MMI"
    BFit_T2toB0.inputs.numberOfSamples = 100000
    BFit_T2toB0.inputs.numberOfIterations = [1500]
    BFit_T2toB0.inputs.numberOfHistogramBins = 50
    BFit_T2toB0.inputs.maximumStepLength = 0.2
    BFit_T2toB0.inputs.minimumStepLength = [0.00005]
    BFit_T2toB0.inputs.useRigid = True
    BFit_T2toB0.inputs.useAffine = True
    BFit_T2toB0.inputs.maskInferiorCutOffFromCenter = 65
    BFit_T2toB0.inputs.maskProcessingMode = "ROIAUTO"
    BFit_T2toB0.inputs.ROIAutoDilateSize = 13
    BFit_T2toB0.inputs.backgroundFillValue = 0.0
    BFit_T2toB0.inputs.initializeTransformMode = 'useCenterOfHeadAlign'
    BFit_T2toB0.inputs.strippedOutputTransform = "T2ToB0_RigidTransform.h5"
    BFit_T2toB0.inputs.writeOutputTransformInFloat = True
    CorrectionWF.connect(EXTRACT_B0, 'outputVolume', BFit_T2toB0, 'fixedVolume')
    CorrectionWF.connect(ExtractBRAINFromHeadNode, 'outputVolume', BFit_T2toB0, 'movingVolume')

    # Step3: Use T_rigid to "resample" T2 and label map images to B0 image space
    MakeResamplerInFilesListNode = pe.Node(Function(function=MakeResamplerInFileList,
                                                    input_names=['inputT2','inputLabelMap'],
                                                    output_names=['imagesList']),
                                           name="MakeResamplerInFilesListNode")
    CorrectionWF.connect([(ExtractBRAINFromHeadNode,MakeResamplerInFilesListNode,[('outputVolume','inputT2')]),
                          (inputsSpec,MakeResamplerInFilesListNode,[('LabelMapVolume','inputLabelMap')])])

    ResampleToB0Space = pe.MapNode(interface=BRAINSResample(), name="ResampleToB0Space",
                                   iterfield=['inputVolume', 'pixelType', 'outputVolume'])
    ResampleToB0Space.inputs.interpolationMode = 'Linear'
    ResampleToB0Space.inputs.outputVolume = ['T2toB0.nrrd','BRAINMaskToB0.nrrd']
    ResampleToB0Space.inputs.pixelType = ['ushort','binary']
    CorrectionWF.connect(BFit_T2toB0,'strippedOutputTransform',ResampleToB0Space,'warpTransform')
    CorrectionWF.connect(EXTRACT_B0,'outputVolume',ResampleToB0Space,'referenceVolume')
    CorrectionWF.connect(MakeResamplerInFilesListNode,'imagesList',ResampleToB0Space,'inputVolume')

    # Step4: Create registration mask from resampled label map image
    CreateRegistrationMask = pe.Node(interface=Function(function = CreateAntsRegistrationMask,
                                                        input_names=['brainMask'],
                                                        output_names=['registrationMask']),
                                     name="CreateAntsRegistrationMask")
    CorrectionWF.connect(ResampleToB0Space, ('outputVolume', pickFromList, 1),
                        CreateRegistrationMask, 'brainMask')

    # Step5: Save direction cosine for the resampled T2 image
    SaveDirectionCosineToMatrixNode = pe.Node(interface=Function(function = SaveDirectionCosineToMatrix,
                                                                 input_names=['inputVolume'],
                                                                 output_names=['directionCosine']),
                                              name="SaveDirectionCosineToMatrix")
    CorrectionWF.connect(ResampleToB0Space, ('outputVolume', pickFromList, 0),
                         SaveDirectionCosineToMatrixNode, 'inputVolume')


    # Step6: Force DC to ID
    MakeForceDCFilesListNode = pe.Node(Function(function=MakeForceDCFilesList,
                                                input_names=['inputB0','inputT2','inputLabelMap'],
                                                output_names=['imagesList']),
                                       name="MakeForceDCFilesListNode")
    CorrectionWF.connect([(EXTRACT_B0,MakeForceDCFilesListNode,[('outputVolume','inputB0')]),
                          (ResampleToB0Space,MakeForceDCFilesListNode,[(('outputVolume', pickFromList, 0),'inputT2')]),
                          (CreateRegistrationMask,MakeForceDCFilesListNode,[('registrationMask','inputLabelMap')])])

    ForceDCtoIDNode = pe.MapNode(interface=Function(function = ForceDCtoID,
                                                    input_names=['inputVolume'],
                                                    output_names=['outputVolume']),
                                 name="ForceDCtoID",
                                 iterfield=['inputVolume'])
    CorrectionWF.connect(MakeForceDCFilesListNode, 'imagesList', ForceDCtoIDNode, 'inputVolume')

    # Step7: Run antsRegistration in one direction
    antsReg_B0ToTransformedT2 = pe.Node(interface=ants.Registration(), name="antsReg_B0ToTransformedT2")
    antsReg_B0ToTransformedT2.inputs.interpolation = 'Linear'
    antsReg_B0ToTransformedT2.inputs.dimension = 3
    antsReg_B0ToTransformedT2.inputs.transforms = ["SyN"]
    antsReg_B0ToTransformedT2.inputs.transform_parameters = [(0.25, 3.0, 0.0)]
    antsReg_B0ToTransformedT2.inputs.metric = ['MI']
    antsReg_B0ToTransformedT2.inputs.sampling_strategy = [None]
    antsReg_B0ToTransformedT2.inputs.sampling_percentage = [1.0]
    antsReg_B0ToTransformedT2.inputs.metric_weight = [1.0]
    antsReg_B0ToTransformedT2.inputs.radius_or_number_of_bins = [32]
    antsReg_B0ToTransformedT2.inputs.number_of_iterations = [[70, 50, 40]]
    antsReg_B0ToTransformedT2.inputs.convergence_threshold = [1e-6]
    antsReg_B0ToTransformedT2.inputs.convergence_window_size = [10]
    antsReg_B0ToTransformedT2.inputs.use_histogram_matching = [True]
    antsReg_B0ToTransformedT2.inputs.shrink_factors = [[3, 2, 1]]
    antsReg_B0ToTransformedT2.inputs.smoothing_sigmas = [[2, 1, 0]]
    antsReg_B0ToTransformedT2.inputs.sigma_units = ["vox"]
    antsReg_B0ToTransformedT2.inputs.use_estimate_learning_rate_once = [False]
    antsReg_B0ToTransformedT2.inputs.write_composite_transform = True
    antsReg_B0ToTransformedT2.inputs.collapse_output_transforms = False
    antsReg_B0ToTransformedT2.inputs.initialize_transforms_per_stage = False
    antsReg_B0ToTransformedT2.inputs.output_transform_prefix = 'Tsyn'
    antsReg_B0ToTransformedT2.inputs.winsorize_lower_quantile = 0.01
    antsReg_B0ToTransformedT2.inputs.winsorize_upper_quantile = 0.99
    antsReg_B0ToTransformedT2.inputs.float = True
    antsReg_B0ToTransformedT2.inputs.num_threads = -1
    antsReg_B0ToTransformedT2.inputs.args = '--restrict-deformation 0x1x0'
    CorrectionWF.connect(ForceDCtoIDNode, ('outputVolume', pickFromList, 1), antsReg_B0ToTransformedT2, 'fixed_image')
    CorrectionWF.connect(ForceDCtoIDNode, ('outputVolume', pickFromList, 2), antsReg_B0ToTransformedT2, 'fixed_image_mask')
    CorrectionWF.connect(ForceDCtoIDNode, ('outputVolume', pickFromList, 0), antsReg_B0ToTransformedT2, 'moving_image')

    # Step8: Now, all necessary transforms are acquired. It is time to
    #        transform the input DWI image into T2 image space
    # {DWI} --> ForceDCtoID --> gtractResampleDWIInPlace (using SyN transform)
    # --> Restore DirectionCosine From Saved Matrix --> gtractResampleDWIInPlace (inverse of T_rigid from BFit)
    # --> {CorrectedDWI_in_T2Space}
    DWI_ForceDCtoIDNode = pe.Node(interface=Function(function = ForceDCtoID,
                                                     input_names=['inputVolume'],
                                                     output_names=['outputVolume']),
                                  name='DWI_ForceDCtoIDNode')
    CorrectionWF.connect(inputsSpec,'DWIVolume',DWI_ForceDCtoIDNode,'inputVolume')

    gtractResampleDWI_SyN = pe.Node(interface=gtractResampleDWIInPlace(),
                                    name="gtractResampleDWI_SyN")
    CorrectionWF.connect(DWI_ForceDCtoIDNode,'outputVolume',
                         gtractResampleDWI_SyN,'inputVolume')
    CorrectionWF.connect(antsReg_B0ToTransformedT2,'composite_transform',
                         gtractResampleDWI_SyN,'warpDWITransform')
    CorrectionWF.connect(ForceDCtoIDNode,('outputVolume', pickFromList, 1),
                         gtractResampleDWI_SyN,'referenceVolume') # fixed image of antsRegistration
    gtractResampleDWI_SyN.inputs.outputVolume = 'IDDC_correctedDWI.nrrd'

    RestoreDCFromSavedMatrixNode = pe.Node(interface=Function(function = RestoreDCFromSavedMatrix,
                                                              input_names=['inputVolume','inputDirectionCosine'],
                                                              output_names=['outputVolume']),
                                           name='RestoreDCFromSavedMatrix')
    CorrectionWF.connect(gtractResampleDWI_SyN,'outputVolume',RestoreDCFromSavedMatrixNode,'inputVolume')
    CorrectionWF.connect(SaveDirectionCosineToMatrixNode,'directionCosine',RestoreDCFromSavedMatrixNode,'inputDirectionCosine')
    CorrectionWF.connect(RestoreDCFromSavedMatrixNode,'outputVolume', outputsSpec, 'CorrectedDWI')

    GetRigidTransformInverseNode = pe.Node(interface=Function(function = GetRigidTransformInverse,
                                                              input_names=['inputTransform'],
                                                              output_names=['inverseTransform']),
                                           name='GetRigidTransformInverse')
    CorrectionWF.connect(BFit_T2toB0,'strippedOutputTransform',GetRigidTransformInverseNode,'inputTransform')


    gtractResampleDWIInPlace_Trigid = pe.Node(interface=gtractResampleDWIInPlace(),
                                              name="gtractResampleDWIInPlace_Trigid")
    CorrectionWF.connect(RestoreDCFromSavedMatrixNode,'outputVolume',
                         gtractResampleDWIInPlace_Trigid,'inputVolume')
    CorrectionWF.connect(GetRigidTransformInverseNode,'inverseTransform',
                         gtractResampleDWIInPlace_Trigid,'inputTransform') #Inverse of rigid transform from BFit
    gtractResampleDWIInPlace_Trigid.inputs.outputVolume = 'CorrectedDWI_in_T2Space_estimate.nrrd'
    gtractResampleDWIInPlace_Trigid.inputs.outputResampledB0 = 'CorrectedDWI_in_T2Space_estimate_B0.nrrd'

    # Step9: An extra registration step to tune the alignment between the CorrectedDWI_in_T2Space image and the T2 image.
    BFit_TuneRegistration = pe.Node(interface=BRAINSFit(), name="BFit_TuneRegistration")
    BFit_TuneRegistration.inputs.costMetric = "MMI"
    BFit_TuneRegistration.inputs.numberOfSamples = 100000
    BFit_TuneRegistration.inputs.numberOfIterations = [1500]
    BFit_TuneRegistration.inputs.numberOfHistogramBins = 50
    BFit_TuneRegistration.inputs.maximumStepLength = 0.2
    BFit_TuneRegistration.inputs.minimumStepLength = [0.00005]
    BFit_TuneRegistration.inputs.useRigid = True
    BFit_TuneRegistration.inputs.useAffine = True
    BFit_TuneRegistration.inputs.maskInferiorCutOffFromCenter = 65
    BFit_TuneRegistration.inputs.maskProcessingMode = "ROIAUTO"
    BFit_TuneRegistration.inputs.ROIAutoDilateSize = 13
    BFit_TuneRegistration.inputs.backgroundFillValue = 0.0
    BFit_TuneRegistration.inputs.initializeTransformMode = 'useCenterOfHeadAlign'
    BFit_TuneRegistration.inputs.strippedOutputTransform = "CorrectedB0inT2Space_to_T2_RigidTransform.h5"
    BFit_TuneRegistration.inputs.writeOutputTransformInFloat = True
    CorrectionWF.connect(ExtractBRAINFromHeadNode, 'outputVolume', BFit_TuneRegistration, 'fixedVolume') #T2 brain volume
    CorrectionWF.connect(gtractResampleDWIInPlace_Trigid, 'outputResampledB0', BFit_TuneRegistration, 'movingVolume') # CorrectedB0_in_T2Space

    gtractResampleDWIInPlace_TuneRigidTx = pe.Node(interface=gtractResampleDWIInPlace(),
                                                   name="gtractResampleDWIInPlace_TuneRigidTx")
    CorrectionWF.connect(gtractResampleDWIInPlace_Trigid,'outputVolume',gtractResampleDWIInPlace_TuneRigidTx,'inputVolume')
    CorrectionWF.connect(BFit_TuneRegistration,'strippedOutputTransform',gtractResampleDWIInPlace_TuneRigidTx,'inputTransform')
    gtractResampleDWIInPlace_TuneRigidTx.inputs.outputVolume = 'CorrectedDWI_in_T2Space.nrrd'
    gtractResampleDWIInPlace_TuneRigidTx.inputs.outputResampledB0 = 'CorrectedDWI_in_T2Space_B0.nrrd'

    # Finally we pass the outputs of the gtractResampleDWIInPlace_TuneRigidTx to the outputsSpec
    CorrectionWF.connect(gtractResampleDWIInPlace_TuneRigidTx, 'outputVolume', outputsSpec, 'CorrectedDWI_in_T2Space')

    # Step10: Create brain mask from the input labelmap
    DWIBRAINMASK = pe.Node(interface=BRAINSResample(), name='DWIBRAINMASK')
    DWIBRAINMASK.inputs.interpolationMode = 'Linear'
    DWIBRAINMASK.inputs.outputVolume = 'BrainMaskForDWI.nrrd'
    DWIBRAINMASK.inputs.pixelType = 'binary'
    CorrectionWF.connect(gtractResampleDWIInPlace_TuneRigidTx,'outputResampledB0',DWIBRAINMASK,'referenceVolume')
    CorrectionWF.connect(inputsSpec,'LabelMapVolume',DWIBRAINMASK,'inputVolume')
    CorrectionWF.connect(DWIBRAINMASK, 'outputVolume', outputsSpec, 'DWIBrainMask')

    return CorrectionWF
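A minimal usage sketch, with hypothetical file names, showing how the workflow's inputsSpec would be populated:

correction_wf = CreateCorrectionWorkflow('DWICorrection')
correction_wf.base_dir = '/tmp/correction_work'  # hypothetical working directory
correction_wf.inputs.inputsSpec.T2Volume = 't2.nrrd'  # hypothetical inputs
correction_wf.inputs.inputsSpec.DWIVolume = 'dwi.nrrd'
correction_wf.inputs.inputsSpec.LabelMapVolume = 'brain_labels.nrrd'
correction_wf.run()  # doctest: +SKIP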
Example n. 24
def b0_flirt_pipeline(num_b0s, name='b0_coregistration'):
    """
    Rigid registration of the B0 dataset onto the first volume. Rigid
    registration is achieved using FLIRT with the correlation ratio
    cost function.

    Args:
        num_b0s (int): Number of the B0 volumes in the dataset.
        name (str): Name of the workflow.

    Inputnode:
        in_file(str): B0 dataset.

    Outputnode:
        out_b0_reg(str): The set of B0 volumes registered to the first volume.

    Returns:
        The workflow
    """
    import nipype.pipeline.engine as pe
    from nipype.interfaces import fsl
    import nipype.interfaces.utility as niu

    from clinica.utils.dwi import merge_volumes_tdim

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    fslroi_ref = pe.Node(fsl.ExtractROI(args='0 1'), name='b0_reference')
    tsize = num_b0s - 1
    fslroi_moving = pe.Node(fsl.ExtractROI(args='1 '+str(tsize)),
                            name='b0_moving')
    split_moving = pe.Node(fsl.Split(dimension='t'), name='split_b0_moving')

    bet_ref = pe.Node(fsl.BET(frac=0.3, mask=True, robust=True),
                      name='bet_ref')

    dilate = pe.Node(
            fsl.maths.MathsCommand(
                nan2zeros=True,
                args='-kernel sphere 5 -dilM'),
            name='mask_dilate')

    flirt = pe.MapNode(fsl.FLIRT(
        interp='spline', dof=6, bins=50, save_log=True,
        cost='corratio', cost_func='corratio', padding_size=10,
        searchr_x=[-4, 4], searchr_y=[-4, 4], searchr_z=[-4, 4],
        fine_search=1, coarse_search=10),
        name='b0_co_registration', iterfield=['in_file'])

    merge = pe.Node(fsl.Merge(dimension='t'), name='merge_registered_b0s')
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='remove_negative')
    insert_ref = pe.Node(niu.Function(input_names=['in_file1', 'in_file2'],
                                      output_names=['out_file'],
                                      function=merge_volumes_tdim),
                         name='concat_ref_moving')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_xfms']),
        name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode,  fslroi_ref,   [('in_file', 'in_file')]),
        (inputnode,  fslroi_moving,   [('in_file', 'in_file')]),
        (fslroi_moving, split_moving,   [('roi_file', 'in_file')]),
        (fslroi_ref, bet_ref, [('roi_file', 'in_file')]),
        (bet_ref, dilate, [('mask_file', 'in_file')]),
        (dilate, flirt, [('out_file', 'ref_weight'),
                         ('out_file', 'in_weight')]),
        (fslroi_ref, flirt, [('roi_file', 'reference')]),
        (split_moving, flirt, [('out_files', 'in_file')]),
        (flirt, thres, [('out_file', 'in_file')]),
        (thres, merge, [('out_file', 'in_files')]),
        (merge, insert_ref, [('merged_file', 'in_file2')]),
        (fslroi_ref, insert_ref, [('roi_file', 'in_file1')]),
        (insert_ref, outputnode, [('out_file', 'out_file')]),
        (flirt, outputnode, [('out_matrix_file', 'out_xfms')])
    ])
    return wf
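A minimal usage sketch with a hypothetical 4D B0 dataset; num_b0s must match the number of volumes in the file:

b0_wf = b0_flirt_pipeline(num_b0s=6, name='b0_coregistration')
b0_wf.base_dir = '/tmp/b0_work'  # hypothetical working directory
b0_wf.inputs.inputnode.in_file = 'b0_volumes.nii.gz'  # hypothetical input
b0_wf.run()  # doctest: +SKIP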
Example n. 25
def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True):
    """Post-registration processing: derive mean_FA and mean_FA_skeleton from
    mean of all subjects in study. Target is assumed to be FMRIB58_FA_1mm.
    A pipeline that does the same as the 'tbss_3_postreg -S' script from FSL.
    Setting 'estimate_skeleton' to False will use the precomputed
    FMRIB58_FA-skeleton_1mm skeleton (same as 'tbss_3_postreg -T').

    Example
    -------

    >>> from nipype.workflows.dmri.fsl import tbss
    >>> tbss3 = tbss.create_tbss_3_postreg()
    >>> tbss3.inputs.inputnode.fa_list = ['s1_wrapped_FA.nii', 's2_wrapped_FA.nii', 's3_wrapped_FA.nii']

    Inputs::

        inputnode.field_list
        inputnode.fa_list

    Outputs::

        outputnode.groupmask
        outputnode.skeleton_file
        outputnode.meanfa_file
        outputnode.mergefa_file

    """

    # Create the inputnode
    inputnode = pe.Node(interface=util.IdentityInterface(fields=['field_list',
                                                                'fa_list']),
                        name='inputnode')

    # Apply the warpfield to the masked FA image
    applywarp = pe.MapNode(interface=fsl.ApplyWarp(),
                           iterfield=['in_file', 'field_file'],
                           name="applywarp")
    if fsl.no_fsl():
        warn('NO FSL found')
    else:
        applywarp.inputs.ref_file = fsl.Info.standard_image("FMRIB58_FA_1mm.nii.gz")

    # Merge the FA files into a 4D file
    mergefa = pe.Node(fsl.Merge(dimension="t"),
                      name="mergefa")

    # Get a group mask
    groupmask = pe.Node(fsl.ImageMaths(op_string="-max 0 -Tmin -bin",
                                       out_data_type="char",
                                       suffix="_mask"),
                        name="groupmask")

    maskgroup = pe.Node(fsl.ImageMaths(op_string="-mas",
                                       suffix="_masked"),
                        name="maskgroup")

    tbss3 = pe.Workflow(name=name)
    tbss3.connect([
        (inputnode, applywarp, [("fa_list", "in_file"),
                               ("field_list", "field_file")]),
        (applywarp, mergefa, [("out_file", "in_files")]),
        (mergefa, groupmask, [("merged_file", "in_file")]),
        (mergefa, maskgroup, [("merged_file", "in_file")]),
        (groupmask, maskgroup, [("out_file", "in_file2")]),
        ])

    # Create outputnode
    outputnode = pe.Node(interface=util.IdentityInterface(fields=['groupmask',
                                                                  'skeleton_file',
                                                                  'meanfa_file',
                                                                  'mergefa_file']),
                         name='outputnode')

    if estimate_skeleton:
        # Take the mean over the fourth dimension
        meanfa = pe.Node(fsl.ImageMaths(op_string="-Tmean",
                                         suffix="_mean"),
                          name="meanfa")

        # Use the mean FA volume to generate a tract skeleton
        makeskeleton = pe.Node(fsl.TractSkeleton(skeleton_file=True),
                               name="makeskeleton")
        tbss3.connect([
            (maskgroup, meanfa, [("out_file", "in_file")]),
            (meanfa, makeskeleton, [("out_file", "in_file")]),
            (groupmask, outputnode, [('out_file', 'groupmask')]),
            (makeskeleton, outputnode, [('skeleton_file', 'skeleton_file')]),
            (meanfa, outputnode, [('out_file', 'meanfa_file')]),
            (maskgroup, outputnode, [('out_file', 'mergefa_file')])
        ])
    else:
        # $FSLDIR/bin/fslmaths $FSLDIR/data/standard/FMRIB58_FA_1mm -mas mean_FA_mask mean_FA
        maskstd = pe.Node(fsl.ImageMaths(op_string="-mas",
                                         suffix="_masked"),
                          name="maskstd")
        maskstd.inputs.in_file = fsl.Info.standard_image("FMRIB58_FA_1mm.nii.gz")

        # $FSLDIR/bin/fslmaths mean_FA -bin mean_FA_mask
        binmaskstd = pe.Node(fsl.ImageMaths(op_string="-bin"),
                             name="binmaskstd")

        # $FSLDIR/bin/fslmaths all_FA -mas mean_FA_mask all_FA
        maskgroup2 = pe.Node(fsl.ImageMaths(op_string="-mas",
                                            suffix="_masked"),
                             name="maskgroup2")

        tbss3.connect([
            (groupmask, maskstd, [("out_file", "in_file2")]),
            (maskstd, binmaskstd, [("out_file", "in_file")]),
            (maskgroup, maskgroup2, [("out_file", "in_file")]),
            (binmaskstd, maskgroup2, [("out_file", "in_file2")])
        ])

        outputnode.inputs.skeleton_file = fsl.Info.standard_image("FMRIB58_FA-skeleton_1mm.nii.gz")
        tbss3.connect([
            (binmaskstd, outputnode, [('out_file', 'groupmask')]),
            (maskstd, outputnode, [('out_file', 'meanfa_file')]),
            (maskgroup2, outputnode, [('out_file', 'mergefa_file')])
        ])
    return tbss3
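A usage sketch for the precomputed-skeleton variant (the 'tbss_3_postreg -T' behaviour); the file names below are placeholders for the FA maps and warp fields produced by the earlier TBSS stages.

from nipype.workflows.dmri.fsl import tbss

tbss3 = tbss.create_tbss_3_postreg(estimate_skeleton=False)
# Placeholder inputs: one warp field per registered FA map
tbss3.inputs.inputnode.fa_list = ['s1_FA.nii', 's2_FA.nii']
tbss3.inputs.inputnode.field_list = ['s1_field.nii', 's2_field.nii']
tbss3.run()  # doctest: +SKIP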
Example no. 26
def dwi_flirt(name='DWICoregistration', excl_nodiff=False, flirt_param={}):
    """
    Generates a workflow for linear registration of dwi volumes using flirt.

    Inputnode
    ---------
    reference : FILE
      Mandatory input. Reference data set.
    in_file : FILE
      Mandatory input. Moving data set.
    ref_mask : FILE
      Mandatory input. Binary mask of the reference volume.
    in_xfms : FILE
      Mandatory input. Initialisation matrices for flirt.
    in_bval : FILE
      Mandatory input. B values file.

    """
    import nipype.interfaces.ants as ants
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe

    from nipype.workflows.dmri.fsl.utils import _checkinitxfm, enhance

    inputnode = pe.Node(
            niu.IdentityInterface(
                fields=['reference',
                        'in_file',
                        'ref_mask',
                        'in_xfms',
                        'in_bval']),
            name='inputnode')

    initmat = pe.Node(
            niu.Function(
                input_names=['in_bval',
                             'in_xfms',
                             'excl_nodiff'],
                output_names=['init_xfms'],
                function=_checkinitxfm),
            name='InitXforms')
    initmat.inputs.excl_nodiff = excl_nodiff
    dilate = pe.Node(
            fsl.maths.MathsCommand(
                nan2zeros=True,
                args='-kernel sphere 5 -dilM'),
            name='MskDilate')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias')
    flirt = pe.MapNode(fsl.FLIRT(**flirt_param), name='CoRegistration',
                       iterfield=['in_file', 'in_matrix_file'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')
    outputnode = pe.Node(
            niu.IdentityInterface(
                fields=['out_file',
                        'out_xfms',
                        'out_ref']),
            name='outputnode')
    enhb0 = pe.Node(niu.Function(
        input_names=['in_file', 'in_mask', 'clip_limit'],
        output_names=['out_file'], function=enhance), name='B0Equalize')
    enhb0.inputs.clip_limit = 0.015
    enhdw = pe.MapNode(niu.Function(
        input_names=['in_file', 'in_mask'], output_names=['out_file'],
        function=enhance), name='DWEqualize', iterfield=['in_file'])

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, split, [('in_file', 'in_file')]),
        (inputnode, dilate, [('ref_mask', 'in_file')]),
        (inputnode, n4, [('reference', 'input_image'),
                         ('ref_mask', 'mask_image')]),
        (n4, enhb0, [('output_image', 'in_file')]),
        (enhb0, flirt, [('out_file', 'reference')]),
        (inputnode, initmat, [('in_xfms', 'in_xfms'),
                              ('in_bval', 'in_bval')]),
        (split, enhdw, [('out_files', 'in_file')]),
        (dilate, enhdw, [('out_file', 'in_mask')]),
        (dilate, flirt, [('out_file', 'ref_weight'),
                         ('out_file', 'in_weight')]),
        (enhdw, flirt, [('out_file', 'in_file')]),
        (initmat, flirt, [('init_xfms', 'in_matrix_file')]),
        (flirt, thres, [('out_file', 'in_file')]),
        (thres, merge, [('out_file', 'in_files')]),
        (merge, outputnode, [('merged_file', 'out_file')]),
        (enhb0, outputnode, [('out_file', 'out_ref')]),
        (flirt, outputnode, [('out_matrix_file', 'out_xfms')])
    ])
    return wf
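A usage sketch, assuming the ``dwi_flirt`` factory defined above is in scope. The FLIRT parameters mirror the ones that ``ecc_pipeline`` (Example no. 28 below) passes in, and the input paths are placeholders.

from nipype.workflows.data import get_flirt_schedule

# Keyword arguments in flirt_param are forwarded verbatim to fsl.FLIRT
params = dict(dof=12, no_search=True, interp='spline', bgvalue=0,
              schedule=get_flirt_schedule('ecc'))
coreg = dwi_flirt(name='DWICoregistration', excl_nodiff=True,
                  flirt_param=params)
coreg.inputs.inputnode.reference = 'b0.nii.gz'          # placeholder paths
coreg.inputs.inputnode.in_file = 'dwi.nii.gz'
coreg.inputs.inputnode.ref_mask = 'b0_brain_mask.nii.gz'
coreg.inputs.inputnode.in_bval = 'dwi.bval'
coreg.inputs.inputnode.in_xfms = ['vol0000.mat', 'vol0001.mat']  # e.g. from HMC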
Example no. 27
def create_tbss_non_FA(name='tbss_non_FA'):
    """
    A pipeline that implements tbss_non_FA from FSL.

    Example
    -------

    >>> from nipype.workflows.dmri.fsl import tbss
    >>> tbss_MD = tbss.create_tbss_non_FA()
    >>> tbss_MD.inputs.inputnode.file_list = []
    >>> tbss_MD.inputs.inputnode.field_list = []
    >>> tbss_MD.inputs.inputnode.skeleton_thresh = 0.2
    >>> tbss_MD.inputs.inputnode.groupmask = './xxx'
    >>> tbss_MD.inputs.inputnode.meanfa_file = './xxx'
    >>> tbss_MD.inputs.inputnode.distance_map = []
    >>> tbss_MD.inputs.inputnode.all_FA_file = './xxx'

    Inputs::

        inputnode.file_list
        inputnode.field_list
        inputnode.skeleton_thresh
        inputnode.groupmask
        inputnode.meanfa_file
        inputnode.distance_map
        inputnode.all_FA_file

    Outputs::

        outputnode.projected_nonFA_file

    """

    # Define the inputnode
    inputnode = pe.Node(interface=util.IdentityInterface(fields=['file_list',
                                                                 'field_list',
                                                                 'skeleton_thresh',
                                                                 'groupmask',
                                                                 'meanfa_file',
                                                                 'distance_map',
                                                                 'all_FA_file']),
                        name='inputnode')

    # Apply the warpfield to the non FA image
    applywarp = pe.MapNode(interface=fsl.ApplyWarp(),
                           iterfield=['in_file', 'field_file'],
                           name="applywarp")
    if fsl.no_fsl():
        warn('FSL not found')
    else:
        applywarp.inputs.ref_file = fsl.Info.standard_image("FMRIB58_FA_1mm.nii.gz")
    # Merge the non-FA files into a 4D file (the equivalent of FSL's all_FA)
    merge = pe.Node(fsl.Merge(dimension="t"), name="merge")
    maskgroup = pe.Node(fsl.ImageMaths(op_string="-mas",
                                       suffix="_masked"),
                        name="maskgroup")
    projectfa = pe.Node(fsl.TractSkeleton(project_data=True,
                                          use_cingulum_mask=True),
                        name="projectfa")

    tbss_non_FA = pe.Workflow(name=name)
    tbss_non_FA.connect([
        (inputnode, applywarp, [('file_list', 'in_file'),
                                ('field_list', 'field_file')]),
        (applywarp, merge, [("out_file", "in_files")]),
        (merge, maskgroup, [("merged_file", "in_file")]),
        (inputnode, maskgroup, [('groupmask', 'in_file2')]),
        (maskgroup, projectfa, [('out_file', 'alt_data_file')]),
        (inputnode, projectfa, [('skeleton_thresh', 'threshold'),
                                ("meanfa_file", "in_file"),
                                ("distance_map", "distance_map"),
                                ("all_FA_file", 'data_file')]),
    ])

    # Define the outputnode
    outputnode = pe.Node(interface=util.IdentityInterface(
                                            fields=['projected_nonFA_file']),
                         name='outputnode')
    tbss_non_FA.connect([
        (projectfa, outputnode, [('projected_data', 'projected_nonFA_file')]),
    ])
    return tbss_non_FA
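A sketch of how this workflow is usually fed from ``create_tbss_3_postreg`` (Example no. 25 above): ``mergefa_file`` corresponds to FSL's ``all_FA`` image, while ``distance_map`` would normally come from the prestats stage (``tbss_4_prestats``), which is omitted here.

import nipype.pipeline.engine as pe
from nipype.workflows.dmri.fsl import tbss

tbss3 = tbss.create_tbss_3_postreg()
tbss_md = tbss.create_tbss_non_FA(name='tbss_MD')

study = pe.Workflow(name='tbss_study')
study.connect([
    # mergefa_file is the merged, masked 4D FA image (FSL's all_FA)
    (tbss3, tbss_md, [('outputnode.groupmask', 'inputnode.groupmask'),
                      ('outputnode.meanfa_file', 'inputnode.meanfa_file'),
                      ('outputnode.mergefa_file', 'inputnode.all_FA_file')]),
])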
Example no. 28
def ecc_pipeline(name='eddy_correct'):
    """
    ECC stands for Eddy currents correction.
    Creates a pipelines that corrects for artifacts induced by Eddy currents in
    dMRI sequences.
    It takes a series of diffusion weighted images and linearly co-registers
    them to one reference image (the average of all b0s in the dataset).
    DWIs are also modulated by the determinant of the Jacobian as indicated by
    [Jones10]_ and [Rohde04]_.
    A list of rigid transformation matrices can be provided, sourcing from a
    :func:`.hmc_pipeline` workflow, to initialize registrations in a *motion
    free* framework.
    A list of affine transformation matrices is available as output, so that
    transforms can be chained (discussion
    `here <https://github.com/nipy/nipype/pull/530#issuecomment-14505042>`_).
    .. admonition:: References
      .. [Jones10] Jones DK, `The signal intensity must be modulated by the
        determinant of the Jacobian when correcting for eddy currents in
        diffusion MRI
        <http://cds.ismrm.org/protected/10MProceedings/files/1644_129.pdf>`_,
        Proc. ISMRM 18th Annual Meeting, (2010).
      .. [Rohde04] Rohde et al., `Comprehensive Approach for Correction of
        Motion and Distortion in Diffusion-Weighted MRI
        <http://stbb.nichd.nih.gov/pdf/com_app_cor_mri04.pdf>`_, MRM
        51:103-114 (2004).
    Example
    -------
    from nipype.workflows.dmri.fsl.artifacts import ecc_pipeline
    ecc = ecc_pipeline()
    ecc.inputs.inputnode.in_file = 'diffusion.nii'
    ecc.inputs.inputnode.in_bval = 'diffusion.bval'
    ecc.inputs.inputnode.in_mask = 'mask.nii'
    ecc.run() # doctest: +SKIP
    Inputs::
        inputnode.in_file - input dwi file
        inputnode.in_mask - weights mask of reference image (a file with data \
range sin [0.0, 1.0], indicating the weight of each voxel when computing the \
metric.
        inputnode.in_bval - b-values table
        inputnode.in_xfms - list of matrices to initialize registration (from \
head-motion correction)
    Outputs::
        outputnode.out_file - corrected dwi file
        outputnode.out_xfms - list of transformation matrices
    """

    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe
    import nipype.interfaces.fsl as fsl

    from nipype.workflows.data import get_flirt_schedule
    from nipype.workflows.dmri.fsl.artifacts import _xfm_jacobian
    from nipype.workflows.dmri.fsl.utils import (extract_bval, recompose_dwi,
                                                 recompose_xfm)

    from clinica.workflows.dwi_preprocessing import dwi_flirt
    from clinica.utils.dwi import merge_volumes_tdim

    params = dict(dof=12, no_search=True, interp='spline', bgvalue=0,
                  schedule=get_flirt_schedule('ecc'))

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'in_bval', 'in_mask', 'in_xfms']), name='inputnode')

    getb0 = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='get_b0')

    pick_dws = pe.Node(niu.Function(
        input_names=['in_dwi', 'in_bval', 'b'], output_names=['out_file'],
        function=extract_bval), name='extract_dwi')
    pick_dws.inputs.b = 'diff'

    flirt = dwi_flirt(flirt_param=params, excl_nodiff=True)

    mult = pe.MapNode(fsl.BinaryMaths(operation='mul'), name='ModulateDWIs',
                      iterfield=['in_file', 'operand_value'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    get_mat = pe.Node(niu.Function(
        input_names=['in_bval', 'in_xfms'], output_names=['out_files'],
        function=recompose_xfm), name='GatherMatrices')
    merge = pe.Node(niu.Function(
        input_names=['in_dwi', 'in_bval', 'in_corrected'],
        output_names=['out_file'], function=recompose_dwi), name='MergeDWIs')

    merged_volumes = pe.Node(niu.Function(
        input_names=['in_file1', 'in_file2'],
        output_names=['out_file'],
        function=merge_volumes_tdim), name='merge_enhanced_ref_dwis')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_xfms']), name='outputnode')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, getb0, [('in_file', 'in_file')]),
        (inputnode, pick_dws, [('in_file', 'in_dwi'),
                               ('in_bval', 'in_bval')]),
        (flirt, merged_volumes, [('outputnode.out_ref', 'in_file1'),
                                 ('outputnode.out_file', 'in_file2')]),
        (merged_volumes, merge, [('out_file', 'in_dwi')]),
        (inputnode, merge, [('in_bval', 'in_bval')]),
        (inputnode, flirt, [('in_mask', 'inputnode.ref_mask'),
                            ('in_xfms', 'inputnode.in_xfms'),
                            ('in_bval', 'inputnode.in_bval')]),
        (inputnode, get_mat, [('in_bval', 'in_bval')]),
        (getb0, flirt, [('roi_file', 'inputnode.reference')]),
        (pick_dws, flirt, [('out_file', 'inputnode.in_file')]),
        (flirt, get_mat, [('outputnode.out_xfms', 'in_xfms')]),
        (flirt, mult, [(('outputnode.out_xfms', _xfm_jacobian),
                        'operand_value')]),
        (flirt, split, [('outputnode.out_file', 'in_file')]),
        (split, mult, [('out_files', 'in_file')]),
        (mult, thres, [('out_file', 'in_file')]),
        (thres, merge, [('out_file', 'in_corrected')]),
        (get_mat, outputnode, [('out_files', 'out_xfms')]),
        (merge, outputnode, [('out_file', 'out_file')])
    ])
    return wf
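A sketch of the chaining described in the docstring: head-motion correction supplies both the corrected series and the rigid matrices that seed the eddy-current registration. The port names follow nipype's ``hmc_pipeline`` factory; ``in_bval`` and ``in_mask`` still have to be fed from upstream nodes.

import nipype.pipeline.engine as pe
from nipype.workflows.dmri.fsl.artifacts import hmc_pipeline

hmc = hmc_pipeline()
ecc = ecc_pipeline()  # the factory defined above

dwi_prep = pe.Workflow(name='dwi_artifacts')
dwi_prep.connect([
    # rigid (HMC) matrices initialize the affine (ECC) registrations
    (hmc, ecc, [('outputnode.out_file', 'inputnode.in_file'),
                ('outputnode.out_xfms', 'inputnode.in_xfms')]),
])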
Example no. 29
def init_func_derivatives_wf(
    bids_root,
    cifti_output,
    freesurfer,
    metadata,
    output_dir,
    spaces,
    name='func_derivatives_wf',
):
    """
    Set up a battery of datasinks to store derivatives in the right location.

    Parameters
    ----------
    bids_root : :obj:`str`
        Original BIDS dataset path.
    cifti_output : :obj:`bool`
        Whether the ``--cifti-output`` flag was set.
    freesurfer : :obj:`bool`
        Whether FreeSurfer anatomical processing was run.
    metadata : :obj:`dict`
        Metadata dictionary associated to the BOLD run.
    output_dir : :obj:`str`
        Where derivatives should be written out to.
    spaces : :py:class:`~niworkflows.utils.spaces.SpatialReferences`
        A container for storing, organizing, and parsing spatial normalizations. Composed of
        :py:class:`~niworkflows.utils.spaces.Reference` objects representing spatial references.
        Each ``Reference`` contains a space, which is a string of either TemplateFlow template IDs
        (e.g., ``MNI152Lin``, ``MNI152NLin6Asym``, ``MNIPediatricAsym``), nonstandard references
        (e.g., ``T1w`` or ``anat``, ``sbref``, ``run``, etc.), or a custom template located in
        the TemplateFlow root directory. Each ``Reference`` may also contain a spec, which is a
        dictionary with template specifications (e.g., a specification of ``{'resolution': 2}``
        would lead to resampling on a 2mm resolution of the space).
    name : :obj:`str`
        This workflow's identifier (default: ``func_derivatives_wf``).
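
    Example
    -------
    A minimal invocation sketch; the paths are placeholders and the spaces
    container comes from :py:mod:`niworkflows.utils.spaces`.

    >>> from niworkflows.utils.spaces import SpatialReferences
    >>> wf = init_func_derivatives_wf(
    ...     bids_root='/data/bids',
    ...     cifti_output=False,
    ...     freesurfer=False,
    ...     metadata={'RepetitionTime': 4.0},
    ...     output_dir='/out',
    ...     spaces=SpatialReferences(['MNI152NLin2009cAsym']),
    ... )  # doctest: +SKIP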

    """
    from ...niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from ...niworkflows.interfaces.utility import KeySelect
    from ...smriprep.workflows.outputs import _bids_relative

    nonstd_spaces = set(spaces.get_nonstandard())
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_aparc_std', 'bold_aparc_t1', 'bold_aseg_std', 'bold_aseg_t1',
        'bold_cifti', 'bold_mask_std', 'bold_mask_t1', 'bold_std',
        'bold_std_ref', 'bold_t1', 'bold_t1_ref', 'bold_native',
        'bold_native_ref', 'bold_mask_native', 'cifti_variant',
        'cifti_metadata', 'cifti_density', 'confounds', 'confounds_metadata',
        'source_file', 'surf_files', 'surf_refs', 'template',
        'spatial_reference', 'cbf', 'meancbf', 'score', 'avgscore', 'scrub',
        'basil', 'pv', 'cbf_t1', 'meancbf_t1', 'att_t1', 'score_t1',
        'avgscore_t1', 'scrub_t1', 'basil_t1', 'pv_t1', 'cbf_std',
        'meancbf_std', 'score_std', 'avgscore_std', 'scrub_std', 'basil_std',
        'pv_std', 'qc_file', 'cbf_hvoxf', 'score_hvoxf', 'scrub_hvoxf',
        'basil_hvoxf', 'pvc_hvoxf', 'cbf_sc207', 'score_sc207', 'scrub_sc207',
        'basil_sc207', 'pvc_sc207', 'cbf_sc217', 'score_sc217', 'scrub_sc217',
        'basil_sc217', 'pvc_sc217', 'cbf_sc407', 'score_sc407', 'scrub_sc407',
        'basil_sc407', 'pvc_sc407', 'cbf_sc417', 'score_sc417', 'scrub_sc417',
        'basil_sc417', 'pvc_sc417'
    ]),
                        name='inputnode')

    raw_sources = pe.Node(niu.Function(function=_bids_relative),
                          name='raw_sources')
    raw_sources.inputs.bids_root = bids_root

    ds_confounds = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                               desc='confounds',
                                               suffix='regressors'),
                           name="ds_confounds",
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([
        (inputnode, raw_sources, [('source_file', 'in_files')]),
        (inputnode, ds_confounds, [('source_file', 'source_file'),
                                   ('confounds', 'in_file'),
                                   ('confounds_metadata', 'meta_dict')]),
    ])

    qcfile = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                         desc='quality_control',
                                         suffix='cbf',
                                         compress=False),
                     name='qcfile',
                     run_without_submitting=True,
                     mem_gb=DEFAULT_MEMORY_MIN_GB)
    workflow.connect([
        (inputnode, qcfile, [('source_file', 'source_file'),
                             ('qc_file', 'in_file')]),
    ])
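
    # Parcellated CBF summaries: one quintet of sinks (mean CBF plus the
    # SCORE, SCRUB, BASIL and PVC variants) per atlas, starting with
    # Harvard-Oxford ('HavardOxford' keeps the spelling used upstream).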

    cbf_hvoxf = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='HavardOxford',
                                            suffix='mean_cbf',
                                            compress=False),
                        name='cbf_hvoxf',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)
    score_hvoxf = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='HavardOxford',
                                              suffix='mean_score',
                                              compress=False),
                          name='score_hvoxf',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    scrub_hvoxf = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='HavardOxford',
                                              suffix='mean_scrub',
                                              compress=False),
                          name='scrub_hvoxf',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    basil_hvoxf = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='HavardOxford',
                                              suffix='mean_basil',
                                              compress=False),
                          name='basil_hvoxf',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    pvc_hvoxf = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='HavardOxford',
                                            suffix='mean_pvc',
                                            compress=False),
                        name='pvc_hvoxf',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, cbf_hvoxf, [('source_file', 'source_file'),
                                ('cbf_hvoxf', 'in_file')]),
        (inputnode, score_hvoxf, [('source_file', 'source_file'),
                                  ('score_hvoxf', 'in_file')]),
        (inputnode, scrub_hvoxf, [('source_file', 'source_file'),
                                  ('scrub_hvoxf', 'in_file')]),
        (inputnode, basil_hvoxf, [('source_file', 'source_file'),
                                  ('basil_hvoxf', 'in_file')]),
        (inputnode, pvc_hvoxf, [('source_file', 'source_file'),
                                ('pvc_hvoxf', 'in_file')]),
    ])

    cbf_sc207 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='schaefer200x7',
                                            suffix='mean_cbf',
                                            compress=False),
                        name='cbf_sc207',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)
    score_sc207 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer200x7',
                                              suffix='mean_score',
                                              compress=False),
                          name='score_sc207',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    scrub_sc207 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer200x7',
                                              suffix='mean_scrub',
                                              compress=False),
                          name='scrub_sc207',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    basil_sc207 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer200x7',
                                              suffix='mean_basil',
                                              compress=False),
                          name='basil_sc207',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    pvc_sc207 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='schaefer200x7',
                                            suffix='mean_pvc',
                                            compress=False),
                        name='pvc_sc207',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, cbf_sc207, [('source_file', 'source_file'),
                                ('cbf_sc207', 'in_file')]),
        (inputnode, score_sc207, [('source_file', 'source_file'),
                                  ('score_sc207', 'in_file')]),
        (inputnode, scrub_sc207, [('source_file', 'source_file'),
                                  ('scrub_sc207', 'in_file')]),
        (inputnode, basil_sc207, [('source_file', 'source_file'),
                                  ('basil_sc207', 'in_file')]),
        (inputnode, pvc_sc207, [('source_file', 'source_file'),
                                ('pvc_sc207', 'in_file')]),
    ])

    cbf_sc217 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='schaefer200x17',
                                            suffix='mean_cbf',
                                            compress=False),
                        name='cbf_sc217',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)
    score_sc217 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer200x17',
                                              suffix='mean_score',
                                              compress=False),
                          name='score_sc217',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    scrub_sc217 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer200x17',
                                              suffix='mean_scrub',
                                              compress=False),
                          name='scrub_sc217',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    basil_sc217 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer200x17',
                                              suffix='mean_basil',
                                              compress=False),
                          name='basil_sc217',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    pvc_sc217 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='schaefer200x17',
                                            suffix='mean_pvc',
                                            compress=False),
                        name='pvc_sc217',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, cbf_sc217, [('source_file', 'source_file'),
                                ('cbf_sc217', 'in_file')]),
        (inputnode, score_sc217, [('source_file', 'source_file'),
                                  ('score_sc217', 'in_file')]),
        (inputnode, scrub_sc217, [('source_file', 'source_file'),
                                  ('scrub_sc217', 'in_file')]),
        (inputnode, basil_sc217, [('source_file', 'source_file'),
                                  ('basil_sc217', 'in_file')]),
        (inputnode, pvc_sc217, [('source_file', 'source_file'),
                                ('pvc_sc217', 'in_file')]),
    ])

    cbf_sc407 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='schaefer400x7',
                                            suffix='mean_cbf',
                                            compress=False),
                        name='cbf_sc407',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)
    score_sc407 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer400x7',
                                              suffix='mean_score',
                                              compress=False),
                          name='score_sc407',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    scrub_sc407 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer400x7',
                                              suffix='mean_scrub',
                                              compress=False),
                          name='scrub_sc407',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    basil_sc407 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer400x7',
                                              suffix='mean_basil',
                                              compress=False),
                          name='basil_sc407',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    pvc_sc407 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='schaefer400x7',
                                            suffix='mean_pvc',
                                            compress=False),
                        name='pvc_sc407',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, cbf_sc407, [('source_file', 'source_file'),
                                ('cbf_sc407', 'in_file')]),
        (inputnode, score_sc407, [('source_file', 'source_file'),
                                  ('score_sc407', 'in_file')]),
        (inputnode, scrub_sc407, [('source_file', 'source_file'),
                                  ('scrub_sc407', 'in_file')]),
        (inputnode, basil_sc407, [('source_file', 'source_file'),
                                  ('basil_sc407', 'in_file')]),
        (inputnode, pvc_sc407, [('source_file', 'source_file'),
                                ('pvc_sc407', 'in_file')]),
    ])

    cbf_sc417 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='schaefer400x17',
                                            suffix='mean_cbf',
                                            compress=False),
                        name='cbf_sc417',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)
    score_sc417 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer400x17',
                                              suffix='mean_score',
                                              compress=False),
                          name='score_sc417',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    scrub_sc417 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer400x17',
                                              suffix='mean_scrub',
                                              compress=False),
                          name='scrub_sc417',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    basil_sc417 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                              desc='schaefer400x17',
                                              suffix='mean_basil',
                                              compress=False),
                          name='basil_sc417',
                          run_without_submitting=True,
                          mem_gb=DEFAULT_MEMORY_MIN_GB)
    pvc_sc417 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='schaefer400x17',
                                            suffix='mean_pvc',
                                            compress=False),
                        name='pvc_sc417',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, cbf_sc417, [('source_file', 'source_file'),
                                ('cbf_sc417', 'in_file')]),
        (inputnode, score_sc417, [('source_file', 'source_file'),
                                  ('score_sc417', 'in_file')]),
        (inputnode, scrub_sc417, [('source_file', 'source_file'),
                                  ('scrub_sc417', 'in_file')]),
        (inputnode, basil_sc417, [('source_file', 'source_file'),
                                  ('basil_sc417', 'in_file')]),
        (inputnode, pvc_sc417, [('source_file', 'source_file'),
                                ('pvc_sc417', 'in_file')]),
    ])
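
    # Datasinks for derivatives kept in the native (ASL) space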

    if nonstd_spaces.intersection(('func', 'run', 'bold', 'boldref', 'sbref')):
        ds_bold_native = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='preproc',
            compress=True,
            SkullStripped=False,
            RepetitionTime=metadata.get('RepetitionTime'),
            TaskName=metadata.get('TaskName')),
                                 name='ds_bold_native',
                                 run_without_submitting=True,
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_native_ref = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='aslref',
            compress=True,
            dismiss_entities=("echo", )),
                                     name='ds_bold_native_ref',
                                     run_without_submitting=True,
                                     mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_mask_native = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='brain',
            suffix='mask',
            compress=True,
            dismiss_entities=("echo", )),
                                      name='ds_bold_mask_native',
                                      run_without_submitting=True,
                                      mem_gb=DEFAULT_MEMORY_MIN_GB)
        cbfnative = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                suffix='cbf',
                                                compress=True),
                            name='cbfnative',
                            run_without_submitting=True,
                            mem_gb=DEFAULT_MEMORY_MIN_GB)
        meancbfnative = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                    suffix='mean_cbf',
                                                    compress=True),
                                name='meancbfnative',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
        scorenative = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                  desc='score',
                                                  suffix='cbf',
                                                  compress=True),
                              name='scorenative',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)
        meanscorenative = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='score',
            suffix='mean_cbf',
            compress=True),
                                  name='meanscorenative',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)
        scrubnative = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                  desc='scrub',
                                                  suffix='cbf',
                                                  compress=True),
                              name='scrubnative',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)
        basilnative = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                  desc='basil',
                                                  suffix='cbf',
                                                  compress=True),
                              name='basilnative',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)
        pvnative = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                               desc='pvc',
                                               suffix='cbf',
                                               compress=True),
                           name='pvcnative',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, ds_bold_native, [('source_file', 'source_file'),
                                         ('bold_native', 'in_file')]),
            (inputnode, ds_bold_native_ref, [('source_file', 'source_file'),
                                             ('bold_native_ref', 'in_file')]),
            (inputnode, ds_bold_mask_native, [('source_file', 'source_file'),
                                              ('bold_mask_native', 'in_file')
                                              ]),
            (inputnode, cbfnative, [('source_file', 'source_file'),
                                    ('cbf', 'in_file')]),
            (inputnode, meancbfnative, [('source_file', 'source_file'),
                                        ('meancbf', 'in_file')]),
            (inputnode, scorenative, [('source_file', 'source_file'),
                                      ('score', 'in_file')]),
            (inputnode, meanscorenative, [('source_file', 'source_file'),
                                          ('avgscore', 'in_file')]),
            (inputnode, scrubnative, [('source_file', 'source_file'),
                                      ('scrub', 'in_file')]),
            (inputnode, basilnative, [('source_file', 'source_file'),
                                      ('basil', 'in_file')]),
            (inputnode, pvnative, [('source_file', 'source_file'),
                                   ('pv', 'in_file')]),
            (raw_sources, ds_bold_mask_native, [('out', 'RawSources')]),
        ])

    # Resample to T1w space
    if nonstd_spaces.intersection(('T1w', 'anat')):
        ds_bold_t1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            space='T1w',
            desc='preproc',
            compress=True,
            SkullStripped=False,
            RepetitionTime=metadata.get('RepetitionTime'),
            TaskName=metadata.get('TaskName'),
            dismiss_entities=("echo", )),
                             name='ds_bold_t1',
                             run_without_submitting=True,
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_t1_ref = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            space='T1w',
            suffix='aslref',
            compress=True,
            dismiss_entities=("echo", )),
                                 name='ds_bold_t1_ref',
                                 run_without_submitting=True,
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

        ds_bold_mask_t1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            space='T1w',
            desc='brain',
            suffix='mask',
            compress=True,
            dismiss_entities=("echo", )),
                                  name='ds_bold_mask_t1',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)

        cbfnativet1 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                  suffix='cbf',
                                                  space='T1w',
                                                  compress=True),
                              name='cbfnativet1',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)
        meancbfnativet1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='mean_cbf',
            space='T1w',
            compress=True),
                                  name='meancbfnativet1',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)
        scorenativet1 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                    desc='score',
                                                    suffix='cbf',
                                                    space='T1w',
                                                    compress=True),
                                name='scorenativet1',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
        meanscorenativet1 = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='mean_cbf',
            desc='score',
            space='T1w',
            compress=True),
                                    name='meanscorenativet1',
                                    run_without_submitting=True,
                                    mem_gb=DEFAULT_MEMORY_MIN_GB)
        scrubnativet1 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                    desc='scrub',
                                                    suffix='cbf',
                                                    space='T1w',
                                                    compress=True),
                                name='scrubnativet1',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
        basilnativet1 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                    desc='basil',
                                                    suffix='cbf',
                                                    space='T1w',
                                                    compress=True),
                                name='basilnativet1',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
        pvnativet1 = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                 desc='pvc',
                                                 suffix='cbf',
                                                 space='T1w',
                                                 compress=True),
                             name='pvcnativet1',
                             run_without_submitting=True,
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, ds_bold_t1, [('source_file', 'source_file'),
                                     ('bold_t1', 'in_file')]),
            (inputnode, ds_bold_t1_ref, [('source_file', 'source_file'),
                                         ('bold_t1_ref', 'in_file')]),
            (inputnode, ds_bold_mask_t1, [('source_file', 'source_file'),
                                          ('bold_mask_t1', 'in_file')]),
            (inputnode, cbfnativet1, [('source_file', 'source_file'),
                                      ('cbf_t1', 'in_file')]),
            (inputnode, meancbfnativet1, [('source_file', 'source_file'),
                                          ('meancbf_t1', 'in_file')]),
            (inputnode, scorenativet1, [('source_file', 'source_file'),
                                        ('score_t1', 'in_file')]),
            (inputnode, meanscorenativet1, [('source_file', 'source_file'),
                                            ('avgscore_t1', 'in_file')]),
            (inputnode, scrubnativet1, [('source_file', 'source_file'),
                                        ('scrub_t1', 'in_file')]),
            (inputnode, basilnativet1, [('source_file', 'source_file'),
                                        ('basil_t1', 'in_file')]),
            (inputnode, pvnativet1, [('source_file', 'source_file'),
                                     ('pv_t1', 'in_file')]),
            (raw_sources, ds_bold_mask_t1, [('out', 'RawSources')]),
        ])
        if freesurfer:
            ds_bold_aseg_t1 = pe.Node(DerivativesDataSink(
                base_directory=output_dir,
                space='T1w',
                desc='aseg',
                suffix='dseg',
                compress=True,
                dismiss_entities=("echo", )),
                                      name='ds_bold_aseg_t1',
                                      run_without_submitting=True,
                                      mem_gb=DEFAULT_MEMORY_MIN_GB)
            ds_bold_aparc_t1 = pe.Node(DerivativesDataSink(
                base_directory=output_dir,
                space='T1w',
                desc='aparcaseg',
                suffix='dseg',
                compress=True,
                dismiss_entities=("echo", )),
                                       name='ds_bold_aparc_t1',
                                       run_without_submitting=True,
                                       mem_gb=DEFAULT_MEMORY_MIN_GB)
            workflow.connect([
                (inputnode, ds_bold_aseg_t1, [('source_file', 'source_file'),
                                              ('bold_aseg_t1', 'in_file')]),
                (inputnode, ds_bold_aparc_t1, [('source_file', 'source_file'),
                                               ('bold_aparc_t1', 'in_file')]),
            ])

    if getattr(spaces, '_cached', None) is None:
        return workflow

    # Store resamplings in standard spaces when listed in --output-spaces
    if spaces.cached.references:
        from ...niworkflows.interfaces.space import SpaceDataSource

        spacesource = pe.Node(SpaceDataSource(),
                              name='spacesource',
                              run_without_submitting=True)
        spacesource.iterables = ('in_tuple', [
            (s.fullname, s.spec) for s in spaces.cached.get_standard(dim=(3, ))
        ])

        select_std = pe.Node(KeySelect(fields=[
            'template', 'bold_std', 'bold_std_ref', 'bold_mask_std', 'cbf_std',
            'meancbf_std', 'score_std', 'avgscore_std', 'scrub_std',
            'basil_std', 'pv_std'
        ]),
                             name='select_std',
                             run_without_submitting=True,
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_std = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='preproc',
            compress=True,
            SkullStripped=False,
            RepetitionTime=metadata.get('RepetitionTime'),
            TaskName=metadata.get('TaskName'),
            dismiss_entities=("echo", )),
                              name='ds_bold_std',
                              run_without_submitting=True,
                              mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_std_ref = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='aslref',
            compress=True,
            dismiss_entities=("echo", )),
                                  name='ds_bold_std_ref',
                                  run_without_submitting=True,
                                  mem_gb=DEFAULT_MEMORY_MIN_GB)
        ds_bold_mask_std = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            desc='brain',
            suffix='mask',
            compress=True,
            dismiss_entities=("echo", )),
                                   name='ds_bold_mask_std',
                                   run_without_submitting=True,
                                   mem_gb=DEFAULT_MEMORY_MIN_GB)
        cbfstd = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                             suffix='cbf',
                                             compress=True),
                         name='cbfstd',
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
        meancbfstd = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                 suffix='mean_cbf',
                                                 compress=True),
                             name='meancbfstd',
                             run_without_submitting=True,
                             mem_gb=DEFAULT_MEMORY_MIN_GB)
        scorestd = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                               desc='score',
                                               suffix='cbf',
                                               compress=True),
                           name='scorestd',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
        meanscorestd = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                                   desc='score',
                                                   suffix='mean_cbf',
                                                   compress=True),
                               name='meanscorestd',
                               run_without_submitting=True,
                               mem_gb=DEFAULT_MEMORY_MIN_GB)
        scrubstd = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                               desc='scrub',
                                               suffix='cbf',
                                               compress=True),
                           name='scrubstd',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
        basilstd = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                               desc='basil',
                                               suffix='cbf',
                                               compress=True),
                           name='basilstd',
                           run_without_submitting=True,
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
        pvstd = pe.Node(DerivativesDataSink(base_directory=output_dir,
                                            desc='pvc',
                                            suffix='cbf',
                                            compress=True),
                        name='pvcstd',
                        run_without_submitting=True,
                        mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, ds_bold_std, [('source_file', 'source_file')]),
            (inputnode, ds_bold_std_ref, [('source_file', 'source_file')]),
            (inputnode, ds_bold_mask_std, [('source_file', 'source_file')]),
            (inputnode, cbfstd, [('source_file', 'source_file')]),
            (inputnode, meancbfstd, [('source_file', 'source_file')]),
            (inputnode, scorestd, [('source_file', 'source_file')]),
            (inputnode, meanscorestd, [('source_file', 'source_file')]),
            (inputnode, scrubstd, [('source_file', 'source_file')]),
            (inputnode, basilstd, [('source_file', 'source_file')]),
            (inputnode, pvstd, [('source_file', 'source_file')]),
            (inputnode, select_std, [('bold_std', 'bold_std'),
                                     ('bold_std_ref', 'bold_std_ref'),
                                     ('bold_mask_std', 'bold_mask_std'),
                                     ('cbf_std', 'cbf_std'),
                                     ('meancbf_std', 'meancbf_std'),
                                     ('score_std', 'score_std'),
                                     ('avgscore_std', 'avgscore_std'),
                                     ('scrub_std', 'scrub_std'),
                                     ('basil_std', 'basil_std'),
                                     ('pv_std', 'pv_std'),
                                     ('template', 'template'),
                                     ('spatial_reference', 'keys')]),
            (spacesource, select_std, [('uid', 'key')]),
            (select_std, ds_bold_std, [('bold_std', 'in_file')]),
            (spacesource, ds_bold_std, [('space', 'space'),
                                        ('cohort', 'cohort'),
                                        ('resolution', 'resolution'),
                                        ('density', 'density')]),
            (select_std, ds_bold_std_ref, [('bold_std_ref', 'in_file')]),
            (spacesource, ds_bold_std_ref, [('space', 'space'),
                                            ('cohort', 'cohort'),
                                            ('resolution', 'resolution'),
                                            ('density', 'density')]),
            (select_std, ds_bold_mask_std, [('bold_mask_std', 'in_file')]),
            (spacesource, ds_bold_mask_std, [('space', 'space'),
                                             ('cohort', 'cohort'),
                                             ('resolution', 'resolution'),
                                             ('density', 'density')]),
            (select_std, cbfstd, [('cbf_std', 'in_file')]),
            (spacesource, cbfstd, [('space', 'space'), ('cohort', 'cohort'),
                                   ('resolution', 'resolution'),
                                   ('density', 'density')]),
            (select_std, meancbfstd, [('meancbf_std', 'in_file')]),
            (spacesource, meancbfstd, [('space', 'space'),
                                       ('cohort', 'cohort'),
                                       ('resolution', 'resolution'),
                                       ('density', 'density')]),
            (select_std, scorestd, [('score_std', 'in_file')]),
            (spacesource, scorestd, [('space', 'space'), ('cohort', 'cohort'),
                                     ('resolution', 'resolution'),
                                     ('density', 'density')]),
            (select_std, meanscorestd, [('avgscore_std', 'in_file')]),
            (spacesource, meanscorestd, [('space', 'space'),
                                         ('cohort', 'cohort'),
                                         ('resolution', 'resolution'),
                                         ('density', 'density')]),
            (select_std, scrubstd, [('scrub_std', 'in_file')]),
            (spacesource, scrubstd, [('space', 'space'), ('cohort', 'cohort'),
                                     ('resolution', 'resolution'),
                                     ('density', 'density')]),
            (select_std, basilstd, [('basil_std', 'in_file')]),
            (spacesource, basilstd, [('space', 'space'), ('cohort', 'cohort'),
                                     ('resolution', 'resolution'),
                                     ('density', 'density')]),
            (select_std, pvstd, [('pv_std', 'in_file')]),
            (spacesource, pvstd, [('space', 'space'), ('cohort', 'cohort'),
                                  ('resolution', 'resolution'),
                                  ('density', 'density')]),
            (raw_sources, ds_bold_mask_std, [('out', 'RawSources')]),
        ])

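        # FreeSurfer segmentations (aseg/aparcaseg) resampled to each standard
        # space get their own datasinks.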
        if freesurfer:
            select_fs_std = pe.Node(KeySelect(
                fields=['bold_aseg_std', 'bold_aparc_std', 'template']),
                                    name='select_fs_std',
                                    run_without_submitting=True,
                                    mem_gb=DEFAULT_MEMORY_MIN_GB)
            ds_bold_aseg_std = pe.Node(DerivativesDataSink(
                base_directory=output_dir,
                desc='aseg',
                suffix='dseg',
                compress=True,
                dismiss_entities=("echo", )),
                                       name='ds_bold_aseg_std',
                                       run_without_submitting=True,
                                       mem_gb=DEFAULT_MEMORY_MIN_GB)
            ds_bold_aparc_std = pe.Node(DerivativesDataSink(
                base_directory=output_dir,
                desc='aparcaseg',
                suffix='dseg',
                compress=True,
                dismiss_entities=("echo", )),
                                        name='ds_bold_aparc_std',
                                        run_without_submitting=True,
                                        mem_gb=DEFAULT_MEMORY_MIN_GB)
            workflow.connect([
                (spacesource, select_fs_std, [('uid', 'key')]),
                (inputnode, select_fs_std, [('bold_aseg_std', 'bold_aseg_std'),
                                            ('bold_aparc_std',
                                             'bold_aparc_std'),
                                            ('template', 'template'),
                                            ('spatial_reference', 'keys')]),
                (select_fs_std, ds_bold_aseg_std, [('bold_aseg_std', 'in_file')
                                                   ]),
                (spacesource, ds_bold_aseg_std, [('space', 'space'),
                                                 ('cohort', 'cohort'),
                                                 ('resolution', 'resolution'),
                                                 ('density', 'density')]),
                (select_fs_std, ds_bold_aparc_std, [('bold_aparc_std',
                                                     'in_file')]),
                (spacesource, ds_bold_aparc_std, [('space', 'space'),
                                                  ('cohort', 'cohort'),
                                                  ('resolution', 'resolution'),
                                                  ('density', 'density')]),
                (inputnode, ds_bold_aseg_std, [('source_file', 'source_file')
                                               ]),
                (inputnode, ds_bold_aparc_std, [('source_file', 'source_file')
                                                ])
            ])

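    # Surface (GIFTI) outputs in FreeSurfer spaces, if any were requested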
    fs_outputs = spaces.cached.get_fs_spaces()
    if freesurfer and fs_outputs:
        from ...niworkflows.interfaces.surf import Path2BIDS

        select_fs_surf = pe.Node(KeySelect(fields=['surfaces', 'surf_kwargs']),
                                 name='select_fs_surf',
                                 run_without_submitting=True,
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)
        select_fs_surf.iterables = [('key', fs_outputs)]
        select_fs_surf.inputs.surf_kwargs = [{'space': s} for s in fs_outputs]

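        # Path2BIDS parses each FreeSurfer-style file name (e.g. ``lh.pial``)
        # and extracts the hemisphere for the BIDS ``hemi-`` entity.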
        name_surfs = pe.MapNode(Path2BIDS(pattern=r'(?P<hemi>[lr])h.\w+'),
                                iterfield='in_file',
                                name='name_surfs',
                                run_without_submitting=True)

        ds_bold_surfs = pe.MapNode(DerivativesDataSink(
            base_directory=output_dir,
            extension="func.gii",
            dismiss_entities=("echo", )),
                                   iterfield=['in_file', 'hemi'],
                                   name='ds_bold_surfs',
                                   run_without_submitting=True,
                                   mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, select_fs_surf, [('surf_files', 'surfaces'),
                                         ('surf_refs', 'keys')]),
            (select_fs_surf, name_surfs, [('surfaces', 'in_file')]),
            (inputnode, ds_bold_surfs, [('source_file', 'source_file')]),
            (select_fs_surf, ds_bold_surfs, [('surfaces', 'in_file'),
                                             ('key', 'space')]),
            (name_surfs, ds_bold_surfs, [('hemi', 'hemi')]),
        ])

    # CIFTI output
    if cifti_output:
        ds_bold_cifti = pe.Node(DerivativesDataSink(
            base_directory=output_dir,
            suffix='bold',
            compress=False,
            dismiss_entities=("echo", )),
                                name='ds_bold_cifti',
                                run_without_submitting=True,
                                mem_gb=DEFAULT_MEMORY_MIN_GB)
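        # ``_unlist``, ``_get_surface`` and ``_read_json`` are small helper
        # functions assumed to be defined elsewhere in this module (they are
        # not shown in this snippet).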
        workflow.connect([(inputnode, ds_bold_cifti,
                           [(('bold_cifti', _unlist), 'in_file'),
                            ('source_file', 'source_file'),
                            (('cifti_metadata', _get_surface), 'space'),
                            ('cifti_density', 'density'),
                            (('cifti_metadata', _read_json), 'meta_dict')])])

    return workflow
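
# The connections above rely on niworkflows' KeySelect node to pair each
# iterated output space (``uid``) with the matching resampled image. A rough
# pure-Python analogue of that selection logic (an illustrative sketch only,
# not KeySelect's actual implementation; the function name is made up):
def _key_select_sketch(keys, values, key):
    # ``keys`` and ``values`` are aligned lists; return the value whose
    # position matches ``key`` in ``keys``.
    if len(keys) != len(values):
        raise ValueError('keys and values must be aligned')
    return values[keys.index(key)]

# e.g. _key_select_sketch(spatial_reference, cbf_std, uid) would return the
# CBF map resampled into the space identified by ``uid``.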
Example no. 30
0
def init_autorecon_resume_wf(omp_nthreads, name='autorecon_resume_wf'):
    r"""
    Resume recon-all execution, assuming the `-autorecon1` stage has been completed.

    In order to utilize resources efficiently, this is broken down into seven
    sub-stages; after the first stage, the second and third stages may be run
    simultaneously, and the fifth and sixth stages may be run simultaneously,
    if resources permit; the fourth stage must be run prior to the fifth and
    sixth, and the seventh must be run after::

        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon2-volonly
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon-hemi lh -T2pial \
            -noparcstats -noparcstats2 -noparcstats3 -nohyporelabel -nobalabels
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon-hemi rh -T2pial \
            -noparcstats -noparcstats2 -noparcstats3 -nohyporelabel -nobalabels
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -cortribbon
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon-hemi lh -nohyporelabel
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon-hemi rh -nohyporelabel
        $ recon-all -sd <output dir>/freesurfer -subjid sub-<subject_label> \
            -autorecon3

    The parcellation statistics steps are excluded from the second and third
    stages, because they require calculation of the cortical ribbon volume
    (the fourth stage).
    Hypointensity relabeling is excluded from hemisphere-specific steps to avoid
    race conditions, as it is a volumetric operation.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from smriprep.workflows.surfaces import init_autorecon_resume_wf
            wf = init_autorecon_resume_wf(omp_nthreads=1)

    Inputs
    ------
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    use_T2
        Refine pial surface using T2w image
    use_FLAIR
        Refine pial surface using FLAIR image

    Outputs
    -------
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['subjects_dir', 'subject_id', 'use_T2', 'use_FLAIR']),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['subjects_dir', 'subject_id']),
        name='outputnode')

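    # All ReconAll nodes below set ``interface._always_run`` so that Nipype
    # re-executes them on a resumed run; recon-all itself then detects and
    # skips steps whose outputs already exist.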
    autorecon2_vol = pe.Node(ReconAll(directive='autorecon2-volonly',
                                      openmp=omp_nthreads),
                             n_procs=omp_nthreads,
                             mem_gb=5,
                             name='autorecon2_vol')
    autorecon2_vol.interface._always_run = True

    autorecon_surfs = pe.MapNode(ReconAll(directive='autorecon-hemi',
                                          flags=[
                                              '-noparcstats', '-noparcstats2',
                                              '-noparcstats3',
                                              '-nohyporelabel', '-nobalabels'
                                          ],
                                          openmp=omp_nthreads),
                                 iterfield='hemi',
                                 n_procs=omp_nthreads,
                                 mem_gb=5,
                                 name='autorecon_surfs')
    autorecon_surfs.inputs.hemi = ['lh', 'rh']
    autorecon_surfs.interface._always_run = True

    # -cortribbon is a prerequisite for -parcstats, -parcstats2, -parcstats3
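    # ``Undefined`` (from nipype/traits) is assumed here to blank the usual
    # ``-autorecon*`` directive so that only the listed ``cortribbon`` step runs.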
    cortribbon = pe.Node(ReconAll(directive=Undefined, steps=['cortribbon']),
                         name='cortribbon')
    cortribbon.interface._always_run = True

    # -parcstats* can be run per-hemisphere
    # -hyporelabel is volumetric, even though it's part of -autorecon-hemi
    parcstats = pe.MapNode(ReconAll(directive='autorecon-hemi',
                                    flags=['-nohyporelabel'],
                                    openmp=omp_nthreads),
                           iterfield='hemi',
                           n_procs=omp_nthreads,
                           mem_gb=5,
                           name='parcstats')
    parcstats.inputs.hemi = ['lh', 'rh']
    parcstats.interface._always_run = True

    # Runs: -hyporelabel -aparc2aseg -apas2aseg -segstats -wmparc
    # All volumetric, so don't parallelize across hemispheres.
    autorecon3 = pe.Node(ReconAll(directive='autorecon3', openmp=omp_nthreads),
                         n_procs=omp_nthreads,
                         mem_gb=5,
                         name='autorecon3')
    autorecon3.interface._always_run = True

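    # The hemisphere MapNodes report one (identical) subjects_dir/subject_id
    # per hemisphere; collapse those lists back to a single value.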
    def _dedup(in_list):
        vals = set(in_list)
        if len(vals) > 1:
            raise ValueError(
                "Non-identical values can't be deduplicated:\n{!r}".format(
                    in_list))
        return vals.pop()

    workflow.connect([
        (inputnode, autorecon_surfs, [('use_T2', 'use_T2'),
                                      ('use_FLAIR', 'use_FLAIR')]),
        (inputnode, autorecon2_vol, [('subjects_dir', 'subjects_dir'),
                                     ('subject_id', 'subject_id')]),
        (autorecon2_vol, autorecon_surfs, [('subjects_dir', 'subjects_dir'),
                                           ('subject_id', 'subject_id')]),
        (autorecon_surfs, cortribbon,
         [(('subjects_dir', _dedup), 'subjects_dir'),
          (('subject_id', _dedup), 'subject_id')]),
        (cortribbon, parcstats, [('subjects_dir', 'subjects_dir'),
                                 ('subject_id', 'subject_id')]),
        (parcstats, autorecon3, [(('subjects_dir', _dedup), 'subjects_dir'),
                                 (('subject_id', _dedup), 'subject_id')]),
        (autorecon3, outputnode, [('subjects_dir', 'subjects_dir'),
                                  ('subject_id', 'subject_id')]),
    ])

    return workflow
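
# Hedged usage sketch: resume recon-all for one subject, assuming the
# ``-autorecon1`` stage already ran into <output dir>/freesurfer. The paths,
# subject label, and thread count below are illustrative assumptions, and
# running this requires a working FreeSurfer + nipype environment.
if __name__ == '__main__':
    wf = init_autorecon_resume_wf(omp_nthreads=4)
    wf.inputs.inputnode.subjects_dir = '/out/freesurfer'  # assumed path
    wf.inputs.inputnode.subject_id = 'sub-01'             # assumed label
    wf.base_dir = '/tmp/work'  # nipype working directory (assumption)
    wf.run()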