Example #1
contrastestimate.overwrite = True
contrastestimate.config = {'execution': {'remove_unnecessary_outputs': False}}
contrastestimate.inputs.contrasts = contrasts

wfSPM.connect([
    (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
    (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'),
                                        ('beta_images', 'beta_images'),
                                        ('residual_image', 'residual_image')]),
])

#%% Adding data sink
########################################################################
# Datasink
datasink = Node(
    nio.DataSink(base_directory=os.path.join(output_dir, 'Sink_resp_sv')),
    name="datasink")

wfSPM.connect([(level1estimate, datasink, [
    ('beta_images', '1stLevel.@betas.@beta_images'),
    ('residual_image', '1stLevel.@betas.@residual_image'),
    ('residual_images', '1stLevel.@betas.@residual_images'),
    ('SDerror', '1stLevel.@betas.@SDerror'),
    ('SDbetas', '1stLevel.@betas.@SDbetas'),
])])
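# Note on the DataSink attribute strings above: nipype splits each string on
# '.', and components that start with '@' do not create an extra subfolder, so
# all of these estimates are written directly under <base_directory>/1stLevel/.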

wfSPM.connect([
    # here we take only the contrast and SPM.mat files of each subject and put them in a separate folder; it is more convenient that way.
    (contrastestimate, datasink, [
        ('spm_mat_file', '1stLevel.@spm_mat'),
        ('spmT_images', '1stLevel.@T'),
Example #2
 def set_datasink(self, opts):
     self.datasink = pe.Node(interface=nio.DataSink(), name="output")
     self.datasink.inputs.base_directory = opts.targetDir + '/'
     self.datasink.inputs.substitutions = [('_cid_', ''), ('sid_', '')]
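The substitution pairs above strip nipype's parameterization prefixes out of the sink's destination paths. A minimal, self-contained sketch of the same mechanism (the output root and the example path are assumptions, not taken from the snippet):

import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio

datasink = pe.Node(interface=nio.DataSink(), name="output")
datasink.inputs.base_directory = '/tmp/derivatives/'  # assumed output root
datasink.inputs.substitutions = [('_cid_', ''), ('sid_', '')]
# Each (search, replace) pair is applied in order to the generated destination
# path, so a parameterized path such as
#   /tmp/derivatives/preproc/_cid_01/sid_002_scan.nii.gz
# would be written out as
#   /tmp/derivatives/preproc/01/002_scan.nii.gz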
Example #3
 def data_sink_dti(self):
     if not getattr(self, '_data_sink_dti', None):
         self._data_sink_dti = pe.Node(name="data_sink_dti",
                                       interface=nio.DataSink())
     return self._data_sink_dti
Example #4
def crossValidationWorkUp(
    crossValidationConfigurationFilename,
    baseDir,
    runOption,
    PythonBinDir,
    BRAINSToolsSrcDir,
    BRAINSToolsBuildDir,
):
    print(
        """****************************
          crossValidationWorkUp
          """
    )
    from collections import (
        OrderedDict,
    )  # Need OrderedDict internally to ensure consistent ordering
    from nipype import config

    config.enable_debug_mode()

    import crossValidation as this
    import ConfigurationParser

    myConfigurationMap = ConfigurationParser.ConfigurationSectionMap(
        crossValidationConfigurationFilename
    )

    import nipype.pipeline.engine as pe
    from nipype.interfaces.utility import Function
    import ast

    print(
        """ before
           createeachvalidationunitnd
           """
    )
    createConfigurationFiles = pe.Node(
        name="createConfigurationFiles",
        interface=Function(
            input_names=[
                "inputConfigurationFilename",
                "outputConfigurationFilenamePrefix",
            ],
            output_names=["outputConfigFilenameDict"],
            function=this.createConfigurationFileForCrossValidationUnitTest,
        ),
    )

    preprocessing = pe.Workflow(name="Preprocessing")
    preprocessing.base_dir = baseDir + "/PreprocessingDir"

    createConfigurationFiles.inputs.inputConfigurationFilename = (
        crossValidationConfigurationFilename
    )
    createConfigurationFiles.inputs.outputConfigurationFilenamePrefix = (
        "createConfigurationFiles"
    )

    extractConfigurationFileListND = pe.Node(
        name="extractConfigurationFileListND",
        interface=Function(
            input_names=["configurationFiledict"],
            output_names=["configurationFileList"],
            function=this.extractConfigFile,
        ),
    )
    preprocessing.connect(
        createConfigurationFiles,
        "outputConfigFilenameDict",
        extractConfigurationFileListND,
        "configurationFiledict",
    )

    preprocessing.run()

    # ------------------------------------------------------------------------------------
    # Data graber for outputs
    #
    import nipype.interfaces.io as nio

    dg = nio.DataGrabber()
    dg.inputs.base_directory = (
        baseDir + "/PreprocessingDir/Preprocessing/createConfigurationFiles/"
    )
    dg.inputs.template = "*config"
    mainConfigFiles = dg.run()

    print(mainConfigFiles.outputs.outfiles)

    # ------------------------------------------------------------------------------------
    workflow = pe.Workflow(name="crossValidationWF")
    workflow.base_dir = baseDir

    # ------------------------------------------------------------------------------------
    # Generate Probability Map
    #
    Options = myConfigurationMap["Options"]
    roiDict = Options["roiBooleanCreator".lower()]

    # -------------------------------- probMapFilenameGenerator is dummy node
    # to create proper probability file location for nipype
    #
    print(
        """************************
          probMapFilenameGenerator
          """
    )

    probMapFilenameGenerator = pe.Node(
        name="probMapFilenameGenerator",
        interface=Function(
            input_names=["roiList"],
            output_names=["probabilityMapFilename"],
            function=this.getProbabilityMapFilename,
        ),
    )
    print(roiDict)
    probMapFilenameGenerator.inputs.roiList = list(roiDict.keys())
    print(
        """************************
          probabilityMapGeneratorND
          """
    )

    #
    # --------------------------------  start from generate probability
    #
    probabilityMapGeneratorND = pe.Node(
        name="probabilityMapGeneratorND",
        interface=Function(
            input_names=[
                "configurationFilename",
                "probabilityMapDict",
                "gaussianSigma",
                "outputXmlFilename",
            ],
            output_names=[
                "probabilityMapDict",
                "outputXmlFilename",
                "outputConfigurationFilename",
            ],
            function=ConfigurationParser.BRAINSCutGenerateProbabilityMap,
        ),
    )

    probabilityMapGeneratorND.inputs.outputXmlFilename = "netConfiguration.xml"

    gaussianSigmaParam = ast.literal_eval(Options["gaussianSigma".lower()])
    print(gaussianSigmaParam)
    probabilityMapGeneratorND.iterables = (
        "configurationFilename",
        mainConfigFiles.outputs.outfiles,
    )
    probabilityMapGeneratorND.inputs.gaussianSigma = gaussianSigmaParam

    workflow.connect(
        probMapFilenameGenerator,
        "probabilityMapFilename",
        probabilityMapGeneratorND,
        "probabilityMapDict",
    )

    #
    # --------------------------------  create vectors for each ROI
    #
    print(
        """************************
          configFileND
          """
    )
    configFileND = pe.Node(
        name="configFileND",
        interface=Function(
            input_names=["originalFilename", "editedFilenamePrefix"],
            output_names=["editedFilenames"],
            function=ConfigurationParser.ConfigurationFileEditor,
        ),
    )

    configFileND.inputs.editedFilenamePrefix = "ROI"
    workflow.connect(
        probabilityMapGeneratorND,
        "outputConfigurationFilename",
        configFileND,
        "originalFilename",
    )

    vectorCreatorND = pe.MapNode(
        name="vectorCreatorND",
        interface=Function(
            input_names=[
                "configurationFilename",
                "probabilityMapDict",
                "normalization",
                "outputXmlFilename",
                "outputVectorFilename",
            ],
            output_names=[
                "outputVectorFilename",
                "outputVectorHdrFilename",
                "outputNormalization",
                "outputXmlFilename",
            ],
            function=ConfigurationParser.BRAINSCutCreateVector,
        ),
        iterfield=["configurationFilename"],
    )
    vectorCreatorND.inputs.outputVectorFilename = "oneROIVectorFile.txt"
    vectorCreatorND.inputs.outputXmlFilename = "oneROICreateVectorNetConfiguration.xml"
    normalizationOption = Options["normalization".lower()]
    print(
        (
            """Normalization Option: {str}
           """.format(
                str=normalizationOption
            )
        )
    )
    vectorCreatorND.iterables = ("normalization", normalizationOption)
    #
    # --------------------------------  workflow connections
    #
    workflow.connect(
        configFileND, "editedFilenames", vectorCreatorND, "configurationFilename"
    )
    workflow.connect(
        probabilityMapGeneratorND,
        "probabilityMapDict",
        vectorCreatorND,
        "probabilityMapDict",
    )

    #
    # --------------------------------  balance and combine each ROI vectors
    #
    print(
        """************************
          balanceND
          """
    )
    balanceND = pe.Node(
        name="balanceND",
        interface=Function(
            input_names=["inputVectorFilenames"],
            output_names=["outputVectorFilenames", "outputVectorHdrFilenames"],
            function=ConfigurationParser.BalanceInputVectors,
        ),
    )
    workflow.connect(
        vectorCreatorND, "outputVectorFilename", balaceND, "inputVectorFilenames"
    )

    combineND = pe.Node(
        name="combineND",
        interface=Function(
            input_names=["inputVectorFilenames", "outputVectorFilename"],
            output_names=["outputVectorFilename", "outputVectorHdrFilename"],
            function=ConfigurationParser.CombineInputVectors,
        ),
    )
    workflow.connect(
        balaceND, "outputVectorFilenames", combineND, "inputVectorFilenames"
    )

    combineND.inputs.outputVectorFilename = "allCombinedVector.txtANN"
    #
    # --------------------------------  train
    #
    print(
        """************************
          trainND
          """
    )
    trainND = pe.Node(
        name="trainND",
        interface=Function(
            input_names=[
                "configurationFilename",
                "inputVectorFilename",
                "outputModelFilenamePrefix",
                "outputXmlFilename",
                "methodParameter",
            ],
            output_names=["outputTrainedModelFilename", "outputMethodParameter"],
            function=ConfigurationParser.BRAINSCutTrainModel,
        ),
    )
    # methodParameter = { '--method': 'RandomForest',
    #                    '--numberOfTrees': 60,
    #                    '--randomTreeDepth ': 60 }
    methodFromConfigFile = Options["modelParameter".lower()]
    trainND.iterables = ("methodParameter", methodFromConfigFile)

    trainND.inputs.outputXmlFilename = "trainNetConfiguration.xml"
    trainND.inputs.outputModelFilenamePrefix = "trainModelFile.txt"

    workflow.connect(
        probabilityMapGeneratorND,
        "outputConfigurationFilename",
        trainND,
        "configurationFilename",
    )
    workflow.connect(combineND, "outputVectorFilename", trainND, "inputVectorFilename")
    #
    # --------------------------------  apply
    #
    applyND = pe.Node(
        name="applyND",
        interface=Function(
            input_names=[
                "configurationFilename",
                "probabilityMapDict",
                "normalization",
                "inputModelFilename",
                "methodParameter",
                "outputXmlFilename",
            ],
            output_names=["outputLabelDict"],
            function=ConfigurationParser.BRAINSCutApplyModel,
        ),
    )
    # methodParameter = { '--method': 'RandomForest',
    #                    '--numberOfTrees': 60,
    #                    '--randomTreeDepth ': 60 }
    applyND.inputs.outputXmlFilename = "applyConfiguration.xml"
    workflow.connect(
        probabilityMapGeneratorND,
        "outputConfigurationFilename",
        applyND,
        "configurationFilename",
    )
    workflow.connect(vectorCreatorND, "outputNormalization", applyND, "normalization")
    workflow.connect(
        probabilityMapGeneratorND, "probabilityMapDict", applyND, "probabilityMapDict"
    )
    workflow.connect(
        trainND, "outputTrainedModelFilename", applyND, "inputModelFilename"
    )
    workflow.connect(trainND, "outputMethodParameter", applyND, "methodParameter")

    #####################################################################################
    # Data Sink
    #
    import os

    LabelsDS = pe.Node(nio.DataSink(), name="LabelDS")
    LabelsDS.inputs.base_directory = os.path.join(baseDir, "Result")
    LabelsDS.inputs.regexp_substitutions = [
        ("/_", "/"),
        ("configurationFilename.*_Test", "Test"),
        ("_configuration.config/normalization_", "/"),
        ("methodParameter_--method", ""),
        ("RandomForest", "RF/"),
        (".--randomTreeDepth", "TreeDepth"),
        (".--numberOfTrees", "_TreeNumber"),
        (
            "ANNContinuousPrediction(?P<roi>.+)(?P<session>\d\d\d\d\d).nii.gz",
            r"\g<session>_\g<roi>_ANNContinuous.nii.gz",
        ),
    ]
    # ANNContinuousPredictionl_accumben77478
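    # e.g. the named-group pattern above renames an output such as (assumed path)
    #   .../ANNContinuousPredictionl_accumben77478.nii.gz
    # to
    #   .../77478_l_accumben_ANNContinuous.nii.gz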

    workflow.connect(
        [(applyND, LabelsDS, [(("outputLabelDict", getDictionaryValues), "Labels")])]
    )

    #####################################################################################
    # analysis
    #

    #####################################################################################
    # Running
    #
    if runOption == "cluster":
        ############################################
        # Platform specific information
        #     Prepend the python search paths
        pythonPath = (
            BRAINSToolsSrcDir
            + "/BRAINSCut/BRAINSFeatureCreators/RobustStatisticComputations:"
            + BRAINSToolsSrcDir
            + "/AutoWorkup/:"
            + BRAINSToolsSrcDir
            + "/AutoWorkup/BRAINSTools/:"
            + BRAINSToolsBuildDir
            + "/SimpleITK-build/bin/"
            + BRAINSToolsBuildDir
            + "/SimpleITK-build/lib:"
            + PythonBinDir
        )
        binPath = BRAINSToolsBuildDir + "/bin:" + BRAINSToolsBuildDir + "/lib"

        import sys

        PYTHON_AUX_PATHS = pythonPath
        PYTHON_AUX_PATHS = PYTHON_AUX_PATHS.split(":")
        PYTHON_AUX_PATHS.extend(sys.path)
        sys.path = PYTHON_AUX_PATHS
        # print sys.path
        import SimpleITK as sitk

        #     Prepend the shell environment search paths
        PROGRAM_PATHS = binPath
        PROGRAM_PATHS = PROGRAM_PATHS.split(":")
        import os

        PROGRAM_PATHS.extend(os.environ["PATH"].split(":"))
        os.environ["PATH"] = ":".join(PROGRAM_PATHS)

        Cluster_Script = get_global_sge_script(PYTHON_AUX_PATHS, PROGRAM_PATHS, {})
        workflow.run(
            plugin="SGE",
            plugin_args=OrderedDict(
                template=Cluster_Script,
                qsub_args="-S /bin/bash -pe smp 4-8 -o /dev/null ",
            ),
        )
    else:
        print(
            """************************
              run
              """
        )
        try:
            workflow.write_graph(graph2use="flat")
        except Exception:
            pass
        workflow.run()
Example #5
datasource.inputs.template_args = info
datasource.inputs.sort_filelist = True

structural = create_connectivity_pipeline("structural", parcellation_name)
structural.inputs.mapping.inputnode_within.resolution_network_file = cmp_config._get_lausanne_parcellation(
    'Lausanne2008')[parcellation_name]['node_information_graphml']

# It's recommended to use low max harmonic order in damaged brains
lmax = 6
structural.inputs.mapping.csdeconv.maximum_harmonic_order = lmax
structural.inputs.mapping.estimateresponse.maximum_harmonic_order = lmax

# Required for scans from C.H.U Liege
structural.inputs.mapping.fsl2mrtrix.invert_x = True

datasink = pe.Node(interface=nio.DataSink(), name="datasink")
datasink.inputs.base_directory = output_dir

workflow = pe.Workflow(name='connectivity')
workflow.base_dir = output_dir

workflow.connect([(infosource, datasource, [('subject_id', 'subject_id')])])
workflow.connect([(infosource, datasink, [('subject_id', '@subject_id')])])

workflow.connect([(infosource, structural, [('subject_id',
                                             'inputnode.subject_id')])])
workflow.connect([(infosource, structural, [('subjects_dir',
                                             'inputnode.subjects_dir')])])
workflow.connect([(datasource, structural, [('dwi', 'inputnode.dwi')])])
workflow.connect([(datasource, structural, [('bvecs', 'inputnode.bvecs')])])
workflow.connect([(datasource, structural, [('bvals', 'inputnode.bvals')])])
Example #6
    sub_ids = args.participant_label
# for all subjects
else:
    subject_dirs = glob(os.path.join(args.bids_dir, "sub-*"))
    sub_ids = [subject_dir.split("-")[-1] for subject_dir in subject_dirs]

# Flexibly collect data from disk to feed into workflows.
NodeHash_1fc3610 = pe.Node(io.SelectFiles(templates={'anat': 'sub-{sub_id}/anat/sub-{sub_id}_T1w.nii.gz'}), name='NodeName_1fc3610')
NodeHash_1fc3610.inputs.base_directory = bids_dir
NodeHash_1fc3610.iterables = [('sub_id', sub_ids)]

# Wraps command **bet**
NodeHash_22ba310 = pe.Node(interface=fsl.BET(), name='NodeName_22ba310')
NodeHash_22ba310.inputs.frac = frac

# Generic datasink module to store structured outputs
NodeHash_317bde0 = pe.Node(interface=io.DataSink(), name='NodeName_317bde0')
NodeHash_317bde0.inputs.base_directory = out_dir
NodeHash_317bde0.inputs.parameterization = False

# Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_1fc3610, 'anat', NodeHash_22ba310, 'in_file')
analysisflow.connect(NodeHash_22ba310, 'out_file', NodeHash_317bde0, 'BET_results')

# Run the workflow
plugin = 'MultiProc'  # adjust your desired plugin here
plugin_args = {'n_procs': 1}  # adjust to your number of cores
analysisflow.write_graph(graph2use='flat', format='png', simple_form=False)
analysisflow.run(plugin=plugin, plugin_args=plugin_args)
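Because parameterization is disabled above, the DataSink does not recreate the iterable folders (e.g. _sub_id_01/), so every brain-extracted image lands directly in the BET_results folder named by the connection. A hedged sketch of the resulting layout (the subject IDs are assumptions; '_brain' is BET's default output suffix):

# out_dir/
# └── BET_results/
#     ├── sub-01_T1w_brain.nii.gz
#     └── sub-02_T1w_brain.nii.gz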
Example #7
    def build_output_node(self):
        """Build and connect an output node to the pipelines.
        """

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        import nipype.interfaces.io as nio
        from clinica.utils.io import zip_nii
        from clinica.utils.io import fix_join
        import re
        import clinica.pipelines.pet_volume.pet_volume_utils as utils

        # Find container path from pet filename
        # =====================================
        container_path = npe.Node(nutil.Function(
            input_names=['pet_filename'],
            output_names=['container'],
            function=utils.pet_container_from_filename),
                                  name='container_path')
        container_path.inputs.threshold = self.parameters['mask_threshold']

        # Writing all images into CAPS
        # ============================
        write_images_node = npe.Node(name='write_caps_node',
                                     interface=nio.DataSink())
        write_images_node.inputs.base_directory = self.caps_directory
        write_images_node.inputs.parameterization = False
        write_images_node.inputs.regexp_substitutions = [
            (r'(.*/)pet_t1_native/r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_space-T1w_pet\3'),
            (r'(.*/)pet_pvc/pvc-rbv_r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_space-T1w_pvc-rbv_pet\3'),
            (r'(.*/)pet_mni/wr(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_space-Ixi549Space_pet\3'),
            (r'(.*/)pet_pvc_mni/wpvc-rbv_r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_space-Ixi549Space_pvc-rbv_pet\3'),
            (r'(.*/)pet_suvr/suvr_wr(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_space-Ixi549Space_suvr-' + re.escape(self._suvr_region) +
             r'_pet\3'),
            (r'(.*/)pet_pvc_suvr/suvr_wpvc-rbv_r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_space-Ixi549Space_pvc-rbv_suvr-' +
             re.escape(self._suvr_region) + r'_pet\3'),
            (r'(.*/)pet_suvr_masked/masked_suvr_wr(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_space-Ixi549Space_suvr-' + re.escape(self._suvr_region) +
             r'_mask-brain_pet\3'),
            (r'(.*/)pet_pvc_suvr_masked/masked_suvr_wpvc-rbv_r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_space-Ixi549Space_pvc-rbv_suvr-' +
             re.escape(self._suvr_region) + r'_mask-brain_pet\3'),
            (r'(.*/)pet_suvr_masked_smoothed/(fwhm-[0-9]+mm)_masked_suvr_wr(sub-.*)(\.nii(\.gz)?)$',
             r'\1\3_space-Ixi549Space_suvr-' + re.escape(self._suvr_region) +
             r'_mask-brain_\2_pet\4'),
            (r'(.*/)pet_pvc_suvr_masked_smoothed/(fwhm-[0-9]+mm)_masked_suvr_wpvc-rbv_r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\3_space-Ixi549Space_pvc-rbv_suvr-' +
             re.escape(self._suvr_region) + r'_mask-brain_\2_pet\4'),
            (r'(.*/)binary_mask/(sub-.*_T1w_).*(space-[a-zA-Z0-9]+).*(_brainmask\.nii(\.gz)?)$',
             r'\1\2\3\4')
        ]
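        # Illustrative effect of the first pattern above on an output path
        # (the filename is an assumed example, not taken from the pipeline):
        #   .../pet_t1_native/rsub-01_ses-M00.nii.gz
        #   -> .../sub-01_ses-M00_space-T1w_pet.nii.gz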

        # Writing atlas statistics into CAPS
        # ==================================
        write_atlas_node = npe.Node(name='write_atlas_node',
                                    interface=nio.DataSink())
        write_atlas_node.inputs.base_directory = self.caps_directory
        write_atlas_node.inputs.parameterization = False
        write_atlas_node.inputs.regexp_substitutions = [
            (r'(.*/atlas_statistics/)suvr_wr(sub-.*)(_statistics\.tsv)$',
             r'\1\2' + r'_suvr-' + re.escape(self._suvr_region) + r'\3'),
            (r'(.*/)pvc_(atlas_statistics/)suvr_wpvc-rbv_r(sub-.*)(_statistics\.tsv)$',
             r'\1\2\3' + r'_pvc-rbv_suvr-' + re.escape(self._suvr_region) +
             r'\4')
        ]

        self.connect([
            (self.input_node, container_path, [('pet_image', 'pet_filename')]),
            (container_path, write_images_node,
             [(('container', fix_join, 'group-' + self._group_id), 'container')
              ]),
            (self.output_node, write_images_node,
             [(('pet_t1_native', zip_nii, True), 'pet_t1_native'),
              (('pet_mni', zip_nii, True), 'pet_mni'),
              (('pet_suvr', zip_nii, True), 'pet_suvr'),
              (('binary_mask', zip_nii, True), 'binary_mask'),
              (('pet_suvr_masked', zip_nii, True), 'pet_suvr_masked'),
              (('pet_suvr_masked_smoothed', zip_nii, True),
               'pet_suvr_masked_smoothed'),
              (('pet_pvc', zip_nii, True), 'pet_pvc'),
              (('pet_pvc_mni', zip_nii, True), 'pet_pvc_mni'),
              (('pet_pvc_suvr', zip_nii, True), 'pet_pvc_suvr'),
              (('pet_pvc_suvr_masked', zip_nii, True), 'pet_pvc_suvr_masked'),
              (('pet_pvc_suvr_masked_smoothed', zip_nii, True),
               'pet_pvc_suvr_masked_smoothed')]),
            (container_path, write_atlas_node,
             [(('container', fix_join, 'group-' + self._group_id), 'container')
              ]),
            (self.output_node, write_atlas_node,
             [('atlas_statistics', 'atlas_statistics'),
              ('pvc_atlas_statistics', 'pvc_atlas_statistics')])
        ])
Example #8
def analyze_openfmri_dataset(data_dir,
                             subject=None,
                             model_id=None,
                             task_id=None,
                             output_dir=None,
                             subj_prefix='*',
                             hpcutoff=120.,
                             use_derivatives=True,
                             fwhm=6.0,
                             subjects_dir=None,
                             target=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    work_dir : str
        Nipype working directory (defaults to cwd)
    """
    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()
    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')
    """
    Set up openfmri data specific components
    """

    subjects = sorted([
        path.split(os.path.sep)[-1]
        for path in glob(os.path.join(data_dir, subj_prefix))
    ])

    infosource = pe.Node(
        niu.IdentityInterface(fields=['subject_id', 'model_id', 'task_id']),
        name='infosource')
    if not subject:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]), ('task_id', task_id)]
    else:
        infosource.iterables = [
            ('subject_id',
             [subjects[subjects.index(subj)] for subj in subject]),
            ('model_id', [model_id]), ('task_id', task_id)
        ]

    subjinfo = pe.Node(niu.Function(
        input_names=['subject_id', 'base_dir', 'task_id', 'model_id'],
        output_names=['run_id', 'conds', 'TR'],
        function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir
    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(nio.DataGrabber(
            infields=['subject_id', 'run_id', 'task_id', 'model_id'],
            outfields=['anat', 'bold', 'behav', 'contrasts']),
                             name='datasource')
    else:
        datasource = pe.Node(nio.DataGrabber(
            infields=['subject_id', 'run_id', 'task_id', 'model_id'],
            outfields=['anat', 'bold', 'behav']),
                             name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'

    if has_contrast:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt'),
            'contrasts': ('models/model%03d/'
                          'task_contrasts.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']],
            'contrasts': [['model_id']]
        }
    else:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']]
        }

    datasource.inputs.sort_filelist = True
    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([
        (datasource, preproc, [('bold', 'inputspec.func')]),
    ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2. * TR)

    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')
    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([
                    np.array(row.split()) for row in fp.readlines()
                    if row.strip()
                ])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [
                row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                row[2:].astype(float).tolist()
            ]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts
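    # For example (assumed condition names), conds = ['house', 'face'] with no
    # contrast file yields the automatic t-contrasts
    #   [['house', 'T', ['cond001'], [1]], ['face', 'T', ['cond002'], [1]]]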

    contrastgen = pe.Node(niu.Function(
        input_names=['contrast_file', 'task_id', 'conds'],
        output_names=['contrasts'],
        function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(
        interface=ra.ArtifactDetect(use_differences=[True, False],
                                    use_norm=True,
                                    norm_threshold=1,
                                    zintensity_threshold=3,
                                    parameter_source='FSL',
                                    mask_type='file'),
        iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
        name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        import numpy as np
        num_conds = len(conds)
        if isinstance(behav, (str, bytes)):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        return behav_array.reshape(int(num_elements / num_conds),
                                   num_conds).tolist()

    reshape_behav = pe.Node(niu.Function(
        input_names=['behav', 'run_id', 'conds'],
        output_names=['behav'],
        function=check_behav_list),
                            name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')

    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art,
                 [('outputspec.motion_parameters', 'realignment_parameters'),
                  ('outputspec.realigned_files', 'realigned_files'),
                  ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec,
                 [('outputspec.highpassed_files', 'functional_runs'),
                  ('outputspec.motion_parameters', 'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])])

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(CalculateMedian(), name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        outcopes = all_copes.reshape(int(len(all_copes) / num_copes),
                                     num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(int(len(all_varcopes) / num_copes),
                                           num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs

    cope_sorter = pe.Node(niu.Function(
        input_names=['copes', 'varcopes', 'contrasts'],
        output_names=['copes', 'varcopes', 'n_runs'],
        function=sort_copes),
                          name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes', 'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [
                    ('outputspec.dof_file', 'inputspec.dof_files'),
                ])])

    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    if subjects_dir:
        wf.connect(infosource, 'subject_id', registration,
                   'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat', registration,
                   'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(
        input_names=['copes', 'varcopes', 'zstats'],
        output_names=['out_files', 'splits'],
        function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc, [
        ('copes', 'copes'),
        ('varcopes', 'varcopes'),
        ('zstats', 'zstats'),
    ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    splitfunc = pe.Node(niu.Function(
        input_names=['in_files', 'splits'],
        output_names=['copes', 'varcopes', 'zstats'],
        function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files', splitfunc,
               'in_files')

    if subjects_dir:
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node('outputspec'), 'copes', get_roi_mean,
                   'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_mean,
                   'segmentation_file')

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'],
                                  name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_tsnr,
                   'segmentation_file')
    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(
            ('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp', 'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i, '/cope%02d_' % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs

    subsgen = pe.Node(niu.Function(
        input_names=['subject_id', 'conds', 'run_id', 'model_id', 'task_id'],
        output_names=['substitutions'],
        function=get_subs),
                      name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'), ('copes', 'copes'),
                  ('varcopes', 'varcopes'), ('zstats', 'zstats'),
                  ('tstats', 'tstats')])])
    wf.connect([(modelfit.get_node('modelgen'), datasink, [
        ('design_cov', 'qa.model'),
        ('design_image', 'qa.model.@matrix_image'),
        ('design_file', 'qa.model.@matrix'),
    ])])
    wf.connect([(preproc, datasink,
                 [('outputspec.motion_parameters', 'qa.motion'),
                  ('outputspec.motion_plots', 'qa.motion.plots'),
                  ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask', datasink,
               'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file', datasink,
                   'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'qa.tsnr'),
                                              ('summary_file',
                                               'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink, [('avgwf_txt_file', 'copes.roi'),
                                              ('summary_file',
                                               'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink, [
        ('copes', 'copes.mni'),
        ('varcopes', 'varcopes.mni'),
        ('zstats', 'zstats.mni'),
    ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean', datasink,
               'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink,
               'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink,
               'xfm.anat2target')
    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
Example #9
def runWorkflow(SUBJECT_ID, INPUT_MRI_FILE, WORKFLOW_BASE_DIRECTORY,
                BIDS_DIRECTORY, **keyword_parameters):
    layout = BIDSLayout(BIDS_DIRECTORY)

    WORKFLOW_NAME = SUBJECT_ID + "_cse"

    brainsuite_workflow = pe.Workflow(name=WORKFLOW_NAME)
    brainsuite_workflow.base_dir = WORKFLOW_BASE_DIRECTORY

    bseObj = pe.Node(interface=bs.Bse(), name='BSE')
    bfcObj = pe.Node(interface=bs.Bfc(), name='BFC')
    pvcObj = pe.Node(interface=bs.Pvc(), name='PVC')
    cerebroObj = pe.Node(interface=bs.Cerebro(), name='CEREBRO')
    cortexObj = pe.Node(interface=bs.Cortex(), name='CORTEX')
    scrubmaskObj = pe.Node(interface=bs.Scrubmask(), name='SCRUBMASK')
    tcaObj = pe.Node(interface=bs.Tca(), name='TCA')
    dewispObj = pe.Node(interface=bs.Dewisp(), name='DEWISP')
    dfsObj = pe.Node(interface=bs.Dfs(), name='DFS')
    pialmeshObj = pe.Node(interface=bs.Pialmesh(), name='PIALMESH')
    hemisplitObj = pe.Node(interface=bs.Hemisplit(), name='HEMISPLIT')

    # =====Inputs=====

    # Provided input file
    bseObj.inputs.inputMRIFile = INPUT_MRI_FILE
    # Provided atlas files
    cerebroObj.inputs.inputAtlasMRIFile = (BRAINSUITE_ATLAS_DIRECTORY +
                                           ATLAS_MRI_SUFFIX)
    cerebroObj.inputs.inputAtlasLabelFile = (BRAINSUITE_ATLAS_DIRECTORY +
                                             ATLAS_LABEL_SUFFIX)

    # ====Parameters====
    bseObj.inputs.diffusionIterations = 5
    bseObj.inputs.diffusionConstant = 30  # -d
    bseObj.inputs.edgeDetectionConstant = 0.78  # -s

    cerebroObj.inputs.useCentroids = True
    pialmeshObj.inputs.tissueThreshold = 0.3

    brainsuite_workflow.connect(bseObj, 'outputMRIVolume', bfcObj,
                                'inputMRIFile')
    brainsuite_workflow.connect(bfcObj, 'outputMRIVolume', pvcObj,
                                'inputMRIFile')
    brainsuite_workflow.connect(pvcObj, 'outputTissueFractionFile', cortexObj,
                                'inputTissueFractionFile')

    brainsuite_workflow.connect(bfcObj, 'outputMRIVolume', cerebroObj,
                                'inputMRIFile')
    brainsuite_workflow.connect(cerebroObj, 'outputLabelVolumeFile', cortexObj,
                                'inputHemisphereLabelFile')

    brainsuite_workflow.connect(cortexObj, 'outputCerebrumMask', scrubmaskObj,
                                'inputMaskFile')
    brainsuite_workflow.connect(cortexObj, 'outputCerebrumMask', tcaObj,
                                'inputMaskFile')
    brainsuite_workflow.connect(tcaObj, 'outputMaskFile', dewispObj,
                                'inputMaskFile')
    brainsuite_workflow.connect(dewispObj, 'outputMaskFile', dfsObj,
                                'inputVolumeFile')
    brainsuite_workflow.connect(dfsObj, 'outputSurfaceFile', pialmeshObj,
                                'inputSurfaceFile')

    brainsuite_workflow.connect(pvcObj, 'outputTissueFractionFile',
                                pialmeshObj, 'inputTissueFractionFile')
    brainsuite_workflow.connect(cerebroObj, 'outputCerebrumMaskFile',
                                pialmeshObj, 'inputMaskFile')
    brainsuite_workflow.connect(pialmeshObj, 'outputSurfaceFile', hemisplitObj,
                                'pialSurfaceFile')

    brainsuite_workflow.connect(dfsObj, 'outputSurfaceFile', hemisplitObj,
                                'inputSurfaceFile')
    brainsuite_workflow.connect(cerebroObj, 'outputLabelVolumeFile',
                                hemisplitObj, 'inputHemisphereLabelFile')

    ds = pe.Node(io.DataSink(), name='DATASINK')
    ds.inputs.base_directory = WORKFLOW_BASE_DIRECTORY

    # **DataSink connections**
    brainsuite_workflow.connect(bseObj, 'outputMRIVolume', ds, '@')
    brainsuite_workflow.connect(bseObj, 'outputMaskFile', ds, '@1')
    brainsuite_workflow.connect(bfcObj, 'outputMRIVolume', ds, '@2')
    brainsuite_workflow.connect(pvcObj, 'outputLabelFile', ds, '@3')
    brainsuite_workflow.connect(pvcObj, 'outputTissueFractionFile', ds, '@4')
    brainsuite_workflow.connect(cerebroObj, 'outputCerebrumMaskFile', ds, '@5')
    brainsuite_workflow.connect(cerebroObj, 'outputLabelVolumeFile', ds, '@6')
    brainsuite_workflow.connect(cerebroObj, 'outputAffineTransformFile', ds,
                                '@7')
    brainsuite_workflow.connect(cerebroObj, 'outputWarpTransformFile', ds,
                                '@8')
    brainsuite_workflow.connect(cortexObj, 'outputCerebrumMask', ds, '@9')
    brainsuite_workflow.connect(scrubmaskObj, 'outputMaskFile', ds, '@10')
    brainsuite_workflow.connect(tcaObj, 'outputMaskFile', ds, '@11')
    brainsuite_workflow.connect(dewispObj, 'outputMaskFile', ds, '@12')
    brainsuite_workflow.connect(dfsObj, 'outputSurfaceFile', ds, '@13')
    brainsuite_workflow.connect(pialmeshObj, 'outputSurfaceFile', ds, '@14')
    brainsuite_workflow.connect(hemisplitObj, 'outputLeftHemisphere', ds,
                                '@15')
    brainsuite_workflow.connect(hemisplitObj, 'outputRightHemisphere', ds,
                                '@16')
    brainsuite_workflow.connect(hemisplitObj, 'outputLeftPialHemisphere', ds,
                                '@17')
    brainsuite_workflow.connect(hemisplitObj, 'outputRightPialHemisphere', ds,
                                '@18')

    if 'BDP' in keyword_parameters:
        INPUT_DWI_BASE = keyword_parameters['BDP']
        bdpObj = pe.Node(interface=bs.BDP(), name='BDP')
        bdpInputBase = WORKFLOW_BASE_DIRECTORY + os.sep + SUBJECT_ID + '_T1w'

        # bdp inputs that will be created. We delay execution of BDP until all CSE and datasink are done
        bdpObj.inputs.bfcFile = bdpInputBase + '.bfc.nii.gz'
        bdpObj.inputs.inputDiffusionData = INPUT_DWI_BASE + '.nii.gz'
        dwiabspath = os.path.abspath(
            os.path.dirname(INPUT_DWI_BASE + '.nii.gz'))
        # bdpObj.inputs.BVecBValPair = [dwiabspath+'/tmp.bvec', dwiabspath+'/tmp.bval']
        bdpObj.inputs.BVecBValPair = [
            keyword_parameters['BVEC'], keyword_parameters['BVAL']
        ]

        bdpObj.inputs.estimateTensors = True
        bdpObj.inputs.estimateODF_FRACT = True
        bdpObj.inputs.estimateODF_FRT = True

        brainsuite_workflow.connect(ds, 'out_file', bdpObj, 'dataSinkDelay')

    if 'SVREG' in keyword_parameters:
        svregObj = pe.Node(interface=bs.SVReg(), name='SVREG')
        svregInputBase = WORKFLOW_BASE_DIRECTORY + os.sep + SUBJECT_ID + '_T1w'

        # svreg inputs that will be created. We delay execution of SVReg until all CSE and datasink are done
        svregObj.inputs.subjectFilePrefix = svregInputBase
        # svregObj.inputs.useSingleThreading = True

        brainsuite_workflow.connect(ds, 'out_file', svregObj, 'dataSinkDelay')

    brainsuite_workflow.run(plugin='MultiProc', plugin_args={'n_procs': 2})

    # Print message when all processing is complete.
    print(
        'Processing for subject %s has completed. Nipype workflow is located at: %s'
        % (SUBJECT_ID, WORKFLOW_BASE_DIRECTORY))
Example #10
def calc_local_metrics(preprocessed_data_dir,
                       subject_id,
                       parcellations_dict,
                       bp_freq_list,
                       fd_thresh,
                       working_dir,
                       ds_dir,
                       use_n_procs,
                       plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    import utils as calc_metrics_utils





    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    wf = Workflow(name='LeiCA_LIFE_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'), execution={'stop_on_first_crash': True,
                                                                       'remove_unnecessary_outputs': True,
                                                                       'job_finished_timeout': 15})
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.regexp_substitutions = [('MNI_resampled_brain_mask_calc.nii.gz', 'falff.nii.gz'),
                                      ('residual_filtered_3dT.nii.gz', 'alff.nii.gz'),
                                      ('_parcellation_', ''),
                                      ('_bp_freqs_', 'bp_'),
                                      ]
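    # Illustrative effect of the folder-renaming pairs above on a parameterized
    # sink path (the folder names are assumed examples):
    #   .../alff/_bp_freqs_0.01.0.1/_parcellation_gordon/...
    #   -> .../alff/bp_0.01.0.1/gordon/...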



    #####################
    # ITERATORS
    #####################
    # PARCELLATION ITERATOR
    parcellation_infosource = Node(util.IdentityInterface(fields=['parcellation']), name='parcellation_infosource')
    parcellation_infosource.iterables = ('parcellation', list(parcellations_dict.keys()))

    bp_filter_infosource = Node(util.IdentityInterface(fields=['bp_freqs']), name='bp_filter_infosource')
    bp_filter_infosource.iterables = ('bp_freqs', bp_freq_list)

    selectfiles = Node(nio.SelectFiles(
        {
            'parcellation_time_series': '{subject_id}/con_mat/parcellated_time_series/bp_{bp_freqs}/{parcellation}/parcellation_time_series.npy'},
        base_directory=preprocessed_data_dir),
        name='selectfiles')
    selectfiles.inputs.subject_id = subject_id
    wf.connect(parcellation_infosource, 'parcellation', selectfiles, 'parcellation')
    wf.connect(bp_filter_infosource, 'bp_freqs', selectfiles, 'bp_freqs')

    fd_file = Node(nio.SelectFiles({'fd_p': '{subject_id}/QC/FD_P_ts'}, base_directory=preprocessed_data_dir),
                   name='fd_file')
    fd_file.inputs.subject_id = subject_id

    ##############
    ## CON MATS
    ##############
    ##############
    ## extract ts
    ##############

    get_good_trs = Node(util.Function(input_names=['fd_file', 'fd_thresh'],
                                      output_names=['good_trs', 'fd_scrubbed_file'],
                                      function=calc_metrics_utils.get_good_trs),
                        name='get_good_trs')
    wf.connect(fd_file, 'fd_p', get_good_trs, 'fd_file')
    get_good_trs.inputs.fd_thresh = fd_thresh

    parcellated_ts_scrubbed = Node(util.Function(input_names=['parcellation_time_series_file', 'good_trs'],
                                                 output_names=['parcellation_time_series_scrubbed'],
                                                 function=calc_metrics_utils.parcellation_time_series_scrubbing),
                                   name='parcellated_ts_scrubbed')

    wf.connect(selectfiles, 'parcellation_time_series', parcellated_ts_scrubbed, 'parcellation_time_series_file')
    wf.connect(get_good_trs, 'good_trs', parcellated_ts_scrubbed, 'good_trs')




    ##############
    ## get conmat
    ##############
    con_mat = Node(util.Function(input_names=['in_data', 'extraction_method'],
                                 output_names=['matrix', 'matrix_file'],
                                 function=calc_metrics_utils.calculate_connectivity_matrix),
                   name='con_mat')
    con_mat.inputs.extraction_method = 'correlation'
    wf.connect(parcellated_ts_scrubbed, 'parcellation_time_series_scrubbed', con_mat, 'in_data')


    ##############
    ## ds
    ##############

    wf.connect(get_good_trs, 'fd_scrubbed_file', ds, 'QC.@fd_scrubbed_file')
    fd_str = ('%.1f' % fd_thresh).replace('.', '_')
    wf.connect(con_mat, 'matrix_file', ds, 'con_mat.matrix_scrubbed_%s.@mat' % fd_str)

    # wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    # wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    # wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name, plugin_args={'initial_specs': 'request_memory = 1500'})
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
Example #11
            ])):
                #Run analysis
                analysis = Workflow(name="Target_GLM_Level1_ID")
                data = Node(ID(fields=[
                    'func', 'parfsf', 'design_mat', 'design_png', 'con_file'
                ],
                               mandatory_inputs=False),
                            name='Data')
                data.inputs.func = func
                data.inputs.parfsf = parfsf
                data.inputs.design_mat = design_mat
                data.inputs.design_png = design_png
                data.inputs.con_file = design_con

                #Use the DataSink function to store all outputs
                datasink = Node(nio.DataSink(), name='Output')
                datasink.inputs.base_directory = (
                    pardir + 'fMRI_Analyses/Conceptual_Change_ID/' +
                    word.capitalize().replace('_target', '') + str(runnum))

                #Output the fsf file
                analysis.connect(data, 'parfsf', datasink, 'FSF')

                #Output the design matrix
                analysis.connect(data, 'design_mat', datasink, 'Design_Matrix')

                #Output an image of the design matrix
                analysis.connect(data, 'design_png', datasink,
                                 'Design_Matrix_Img')

                #Now we will run the model on each voxel in the brain and output first level stats maps
                filmgls = MapNode(
Example #12
def init_gifti_surface_wf(name='gifti_surface_wf'):
    r"""
    Prepare GIFTI surfaces from a FreeSurfer subjects directory.

    If midthickness (or graymid) surfaces do not exist, they are generated and
    saved to the subject directory as ``lh/rh.midthickness``.
    These, along with the gray/white matter boundary (``lh/rh.smoothwm``), pial
    surfaces (``lh/rh.pial``) and inflated surfaces (``lh/rh.inflated``) are
    converted to GIFTI files.
    Additionally, the vertex coordinates are :py:class:`recentered
    <smriprep.interfaces.NormalizeSurf>` to align with native T1w space.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from smriprep.workflows.surfaces import init_gifti_surface_wf
            wf = init_gifti_surface_wf()

    Inputs
    ------
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    fsnative2t1w_xfm
        LTA formatted affine transform file (inverse)

    Outputs
    -------
    surfaces
        GIFTI surfaces for gray/white matter boundary, pial surface,
        midthickness (or graymid) surface, and inflated surfaces

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        ['subjects_dir', 'subject_id', 'fsnative2t1w_xfm']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['surfaces']),
                         name='outputnode')

    get_surfaces = pe.Node(nio.FreeSurferSource(), name='get_surfaces')

    midthickness = pe.MapNode(MakeMidthickness(thickness=True,
                                               distance=0.5,
                                               out_name='midthickness'),
                              iterfield='in_file',
                              name='midthickness')

    save_midthickness = pe.Node(nio.DataSink(parameterization=False),
                                name='save_midthickness')

    surface_list = pe.Node(niu.Merge(4, ravel_inputs=True),
                           name='surface_list',
                           run_without_submitting=True)
    fs2gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'),
                        iterfield='in_file',
                        name='fs2gii')
    fix_surfs = pe.MapNode(NormalizeSurf(),
                           iterfield='in_file',
                           name='fix_surfs')

    workflow.connect([
        (inputnode, get_surfaces, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, save_midthickness, [('subjects_dir', 'base_directory'),
                                        ('subject_id', 'container')]),
        # Generate midthickness surfaces and save to FreeSurfer derivatives
        (get_surfaces, midthickness, [('smoothwm', 'in_file'),
                                      ('graymid', 'graymid')]),
        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
        # Produce valid GIFTI surface files (dense mesh)
        (get_surfaces, surface_list, [('smoothwm', 'in1'), ('pial', 'in2'),
                                      ('inflated', 'in3')]),
        (save_midthickness, surface_list, [('out_file', 'in4')]),
        (surface_list, fs2gii, [('out', 'in_file')]),
        (fs2gii, fix_surfs, [('converted', 'in_file')]),
        (inputnode, fix_surfs, [('fsnative2t1w_xfm', 'transform_file')]),
        (fix_surfs, outputnode, [('out_file', 'surfaces')]),
    ])
    return workflow
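A minimal usage sketch for the workflow above; the working directory, FreeSurfer SUBJECTS_DIR, subject ID and LTA path are placeholders, not values taken from the original code:

gifti_wf = init_gifti_surface_wf()
gifti_wf.base_dir = '/tmp/work'  # hypothetical working directory
gifti_wf.inputs.inputnode.subjects_dir = '/data/freesurfer'  # hypothetical SUBJECTS_DIR
gifti_wf.inputs.inputnode.subject_id = 'sub-01'  # hypothetical subject
gifti_wf.inputs.inputnode.fsnative2t1w_xfm = '/data/xfm/sub-01_fsnative2t1w.lta'  # hypothetical LTA transform
gifti_wf.run()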
Example #13
def calc_local_metrics(brain_mask, preprocessed_data_dir, subject_id,
                       parcellations_dict, bp_freq_list, TR,
                       selectfiles_templates, working_dir, ds_dir, use_n_procs,
                       plugin_name):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow, MapNode
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.fsl as fsl
    from nipype.interfaces.freesurfer.preprocess import MRIConvert

    import CPAC.alff.alff as cpac_alff
    import CPAC.reho.reho as cpac_reho
    import CPAC.utils.utils as cpac_utils

    import utils as calc_metrics_utils
    from motion import calculate_FD_P, calculate_FD_J

    #####################################
    # GENERAL SETTINGS
    #####################################
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    wf = Workflow(name='LeiCA_LIFE_metrics')
    wf.base_dir = os.path.join(working_dir)

    nipype_cfg = dict(logging=dict(workflow_level='DEBUG'),
                      execution={
                          'stop_on_first_crash': True,
                          'remove_unnecessary_outputs': True,
                          'job_finished_timeout': 15
                      })
    config.update_config(nipype_cfg)
    wf.config['execution']['crashdump_dir'] = os.path.join(
        working_dir, 'crash')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.regexp_substitutions = [
        ('MNI_resampled_brain_mask_calc.nii.gz', 'falff.nii.gz'),
        ('residual_filtered_3dT.nii.gz', 'alff.nii.gz'),
        ('_parcellation_', ''),
        ('_bp_freqs_', 'bp_'),
    ]

    #####################
    # ITERATORS
    #####################
    # PARCELLATION ITERATOR
    parcellation_infosource = Node(
        util.IdentityInterface(fields=['parcellation']),
        name='parcellation_infosource')
    parcellation_infosource.iterables = ('parcellation',
                                         parcellations_dict.keys())

    # BP FILTER ITERATOR
    bp_filter_infosource = Node(util.IdentityInterface(fields=['bp_freqs']),
                                name='bp_filter_infosource')
    bp_filter_infosource.iterables = ('bp_freqs', bp_freq_list)

    selectfiles = Node(nio.SelectFiles(selectfiles_templates,
                                       base_directory=preprocessed_data_dir),
                       name='selectfiles')
    selectfiles.inputs.subject_id = subject_id

    ##############
    ## CON MATS
    ##############
    ##############
    ## extract ts
    ##############
    parcellated_ts = Node(util.Function(
        input_names=[
            'in_data', 'parcellation_name', 'parcellations_dict', 'bp_freqs',
            'tr'
        ],
        output_names=[
            'parcellation_time_series', 'parcellation_time_series_file',
            'masker_file'
        ],
        function=calc_metrics_utils.extract_parcellation_time_series),
                          name='parcellated_ts')

    parcellated_ts.inputs.parcellations_dict = parcellations_dict
    parcellated_ts.inputs.tr = TR
    wf.connect(selectfiles, 'epi_MNI_fullspectrum', parcellated_ts, 'in_data')
    wf.connect(parcellation_infosource, 'parcellation', parcellated_ts,
               'parcellation_name')
    wf.connect(bp_filter_infosource, 'bp_freqs', parcellated_ts, 'bp_freqs')

    ##############
    ## get conmat
    ##############
    con_mat = Node(util.Function(
        input_names=['in_data', 'extraction_method'],
        output_names=['matrix', 'matrix_file'],
        function=calc_metrics_utils.calculate_connectivity_matrix),
                   name='con_mat')
    con_mat.inputs.extraction_method = 'correlation'
    wf.connect(parcellated_ts, 'parcellation_time_series', con_mat, 'in_data')

    ##############
    ## ds
    ##############

    wf.connect(parcellated_ts, 'parcellation_time_series_file', ds,
               'con_mat.parcellated_time_series.@parc_ts')
    wf.connect(parcellated_ts, 'masker_file', ds,
               'con_mat.parcellated_time_series.@masker')
    wf.connect(con_mat, 'matrix_file', ds, 'con_mat.matrix.@mat')

    wf.write_graph(dotfilename=wf.name, graph2use='colored',
                   format='pdf')  # 'hierarchical')
    wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name,
               plugin_args={'initial_specs': 'request_memory = 1500'})
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})
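A hedged invocation sketch of calc_local_metrics; every path, the parcellations_dict structure and the SelectFiles template pattern are assumptions for illustration (only the 'epi_MNI_fullspectrum' key is taken from the connections above):

calc_local_metrics(
    brain_mask='/templates/MNI152_T1_3mm_brain_mask.nii.gz',  # placeholder
    preprocessed_data_dir='/data/preprocessed',  # placeholder
    subject_id='sub-01',
    parcellations_dict={'craddock_200': '/parcellations/craddock_200.nii.gz'},  # structure assumed
    bp_freq_list=[(0.01, 0.1)],
    TR=2.0,
    selectfiles_templates={'epi_MNI_fullspectrum': '{subject_id}/rsfMRI/epi_MNI_fullspectrum.nii.gz'},  # pattern assumed
    working_dir='/scratch/work',
    ds_dir='/data/metrics',
    use_n_procs=4,
    plugin_name='MultiProc')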
Example #14
l2source.inputs.template_args = dict(
    copes=[[subject_list, subject_list, 'con']],
    varcopes=[[subject_list, subject_list, 'con']],
    matrix=[[subject_list, subject_list]],
    field=[[subject_list, subject_list]],
    fieldcoeff=[[subject_list, subject_list]])
# iterate over all contrast images
l2source.iterables = [('con', contrast_ids)]
l2source.inputs.sort_filelist = True

# DataSink
#DataSink  --- stores important outputs
MFXdatasink = pe.Node(
    interface=nio.DataSink(
        base_directory=betweenSubjectResults_dir,
        # This keeps the DataSink from adding an additional level to the directory; I have no idea why this works.
        parameterization=True
    ),
    name="datasink")
MFXdatasink.inputs.substitutions = [('_subject_id_', ''),
                                    ('_flameo', 'contrast')]
'''
===============
Alignment Nodes
===============
'''
aligncope = pe.MapNode(interface=fsl.ApplyWarp(ref_file=mfxTemplateBrain),
                       iterfield=['in_file', 'postmat'],
                       name='aligncope')

alignvarcope = pe.MapNode(interface=fsl.ApplyWarp(ref_file=mfxTemplateBrain),
Example #15
So what did we run in this precanned workflow
"""

preproc.write_graph()
"""
Datasink
--------

Datasink is a special interface for copying and arranging results.
"""

import nipype.interfaces.io as nio

preproc.inputs.inputspec.func = os.path.abspath('data/s1/f3.nii')
preproc.inputs.inputspec.struct = os.path.abspath('data/s1/struct.nii')
datasink = pe.Node(interface=nio.DataSink(), name='sinker')
preprocess = pe.Workflow(name='preprocout')
preprocess.base_dir = os.path.abspath('.')
preprocess.connect([(preproc, datasink, [('meanfunc2.out_file', 'meanfunc'),
                                         ('maskfunc3.out_file', 'funcruns')])])
preprocess.run()
"""
Datagrabber
-----------

Datagrabber is (surprise, surprise) an interface for collecting files from the hard drive. It is very flexible and
supports almost any file organisation of your data that you can imagine.
"""

datasource1 = nio.DataGrabber()
datasource1.inputs.template = 'data/s1/f3.nii'
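"""
A further sketch of a parameterised grab, assuming ``os`` is imported as above; the
``subject_id`` field, template and paths are placeholders chosen for illustration.
"""

datasource2 = nio.DataGrabber(infields=['subject_id'], outfields=['func'])
datasource2.inputs.base_directory = os.path.abspath('data')
datasource2.inputs.template = '%s/f3.nii'  # %s is filled in from subject_id
datasource2.inputs.template_args = dict(func=[['subject_id']])
datasource2.inputs.sort_filelist = True
datasource2.inputs.subject_id = 's1'
results = datasource2.run()  # results.outputs.func points at data/s1/f3.nii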
Example #16
def create_workflow():
    featpreproc = pe.Workflow(name="featpreproc")

    featpreproc.base_dir = os.path.join(ds_root, 'workingdirs')

    # ===================================================================
    #                  _____                   _
    #                 |_   _|                 | |
    #                   | |  _ __  _ __  _   _| |_
    #                   | | | '_ \| '_ \| | | | __|
    #                  _| |_| | | | |_) | |_| | |_
    #                 |_____|_| |_| .__/ \__,_|\__|
    #                             | |
    #                             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'funcs',
        'subject_id',
        'session_id',
        'fwhm',  # smoothing
        'highpass'
    ]), name="inputspec")

    # SelectFiles
    templates = {
        'ref_manual_fmapmask':  # was: manual_fmapmask
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_manualmask.nii.gz',

        'ref_fmap_magnitude':
        'derivatives/manual-masks/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_magnitude1_res-1x1x1_reference.nii.gz',

        'ref_fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-eddy/ses-20170511/fmap/'
            'sub-eddy_ses-20170511_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        # 'manualweights':
        # 'manual-masks/sub-eddy/ses-20170511/func/'
        #     'sub-eddy_ses-20170511_task-curvetracing_run-01_frame-50_bold'
        #     '_res-1x1x1_manualweights.nii.gz',

        'ref_func':  # was: manualmask_func_ref
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_reference.nii.gz',

        'ref_funcmask':  # was: manualmask
        'derivatives/manual-masks/sub-eddy/ses-20170607/func/'
            'sub-eddy_ses-20170607_task-RestingPRF_run-02_bold_'
            'res-1x1x1_fnirt_mask.nii.gz',

        'ref_t1':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_reference.nii.gz',

        'ref_t1mask':
        'derivatives/manual-masks/sub-eddy/ses-20170511/anat/'
            'sub-eddy_ses-20170511_T1w_res-1x1x1_manualmask.nii.gz',

        # 'funcs':
        # 'resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/func/'
        #     # 'sub-{subject_id}_ses-{session_id}*_bold_res-1x1x1_preproc'
        #     'sub-{subject_id}_ses-{session_id}*run-01_bold_res-1x1x1_preproc'
        #     # '.nii.gz',
        #     '_nvol10.nii.gz',

        'fmap_phasediff':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_phasediff_res-1x1x1_preproc'
            '.nii.gz',

        'fmap_magnitude':
        'derivatives/resampled-isotropic-1mm/sub-{subject_id}/ses-{session_id}/fmap/'
            'sub-{subject_id}_ses-{session_id}_magnitude1_res-1x1x1_preproc'
            '.nii.gz',

        # 'fmap_mask':
        # 'transformed-manual-fmap-mask/sub-{subject_id}/ses-{session_id}/fmap/'
        #     'sub-{subject_id}_ses-{session_id}_'
        #     'magnitude1_res-1x1x1_preproc.nii.gz',
    }

    inputfiles = pe.Node(
        nio.SelectFiles(templates,
                        base_directory=data_dir), name="input_files")

    featpreproc.connect(
        [(inputnode, inputfiles,
         [('subject_id', 'subject_id'),
          ('session_id', 'session_id'),
          ])])

    # ===================================================================
    #                   ____        _               _
    #                  / __ \      | |             | |
    #                 | |  | |_   _| |_ _ __  _   _| |_
    #                 | |  | | | | | __| '_ \| | | | __|
    #                 | |__| | |_| | |_| |_) | |_| | |_
    #                  \____/ \__,_|\__| .__/ \__,_|\__|
    #                                  | |
    #                                  |_|
    # ===================================================================

    # ------------------ Output Files
    # Datasink
    outputfiles = pe.Node(nio.DataSink(
        base_directory=ds_root,
        container='derivatives/featpreproc',
        parameterization=True),
        name="output_files")

    # Use the following DataSink output substitutions
    # each tuple is only matched once per file
    outputfiles.inputs.substitutions = [
        ('/_mc_method_afni3dAllinSlices/', '/'),
        ('/_mc_method_afni3dAllinSlices/', '/'),  # needs to appear twice
        ('/oned_file/', '/'),
        ('/out_file/', '/'),
        ('/oned_matrix_save/', '/'),
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'/_addmean[0-9]+/', r'/func/'),
        (r'/_funcbrains[0-9]+/', r'/func/'),
        (r'/_maskfunc[0-9]+/', r'/func/'),
        (r'/_mc[0-9]+/', r'/func/'),
        (r'/_meanfunc[0-9]+/', r'/func/'),
        (r'/_outliers[0-9]+/', r'/func/'),
        (r'_run_id_[0-9][0-9]', r''),
    ]
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['motion_parameters',
                'motion_corrected',
                'motion_plots',
                'motion_outlier_files',
                'mask',
                'smoothed_files',
                'highpassed_files',
                'mean',
                'func_unwarp',
                'ref_func',
                'ref_funcmask',
                'ref_t1',
                'ref_t1mask',
                ]),
        name='outputspec')

    # ===================================================================
    #                  _____ _            _ _
    #                 |  __ (_)          | (_)
    #                 | |__) | _ __   ___| |_ _ __   ___
    #                 |  ___/ | '_ \ / _ \ | | '_ \ / _ \
    #                 | |   | | |_) |  __/ | | | | |  __/
    #                 |_|   |_| .__/ \___|_|_|_| |_|\___|
    #                         | |
    #                         |_|
    # ===================================================================

    #  ~|~ _ _  _  _ |` _  _ _ _    _ _  _  _|  _
    #   | | (_|| |_\~|~(_)| | | |  | | |(_|_\|<_\
    #
    # Transform manual skull-stripped masks to multiple images
    # --------------------------------------------------------
    # should just be used as input to motion correction,
    # after mc, all functionals should be aligned to reference
    transmanmask_mc = transform_manualmask.create_workflow()

    # - - - - - - Connections - - - - - - -
    featpreproc.connect(
        [(inputfiles, transmanmask_mc,
         [('subject_id', 'in.subject_id'),
          ('session_id', 'in.session_id'),
          ])])

    featpreproc.connect(inputfiles, 'ref_funcmask',
                        transmanmask_mc, 'in.manualmask')
    featpreproc.connect(inputnode, 'funcs',
                        transmanmask_mc, 'in.funcs')
    featpreproc.connect(inputfiles, 'ref_func',
                        transmanmask_mc, 'in.manualmask_func_ref')

    # fieldmaps not being used
    if False:
        trans_fmapmask = transmanmask_mc.clone('trans_fmapmask')
        featpreproc.connect(inputfiles, 'ref_manual_fmapmask',
                            trans_fmapmask, 'in.manualmask')
        featpreproc.connect(inputfiles, 'fmap_magnitude',
                            trans_fmapmask, 'in.funcs')
        featpreproc.connect(inputfiles, 'ref_func',
                            trans_fmapmask, 'in.manualmask_func_ref')

    #  |\/| _ _|_. _  _    _ _  _ _ _  __|_. _  _
    #  |  |(_) | |(_)| |  (_(_)| | (/_(_ | |(_)| |
    #
    # Perform motion correction, using some pipeline
    # --------------------------------------------------------
    # mc = motioncorrection_workflow.create_workflow_afni()

    # Register an image from the functionals to the reference image
    median_func = pe.MapNode(
        interface=fsl.maths.MedianImage(dimension="T"),
        name='median_func',
        iterfield=('in_file'),
    )
    pre_mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='premotioncorrection')

    featpreproc.connect(
        [
         (inputnode, median_func,
          [
           ('funcs', 'in_file'),
           ]),
         (median_func, pre_mc,
          [
           ('out_file', 'in.funcs'),
           ]),
         (inputfiles, pre_mc,
          [
           # median func image will be used a reference / base
           ('ref_func', 'in.ref_func'),
           ('ref_funcmask', 'in.ref_func_weights'),
          ]),
         (transmanmask_mc, pre_mc,
          [
           ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
         ]),
         (pre_mc, outputnode,
          [
           ('mc.out_file', 'pre_motion_corrected'),
           ('mc.oned_file', 'pre_motion_parameters.oned_file'),
           ('mc.oned_matrix_save', 'pre_motion_parameters.oned_matrix_save'),
         ]),
         (outputnode, outputfiles,
          [
           ('pre_motion_corrected', 'pre_motion_corrected.out_file'),
           ('pre_motion_parameters.oned_file', 'pre_motion_corrected.oned_file'), # warp parameters in ASCII (.1D)
           ('pre_motion_parameters.oned_matrix_save', 'pre_motion_corrected.oned_matrix_save'), # transformation matrices for each sub-brick
         ]),
    ])

    mc = motioncorrection_workflow.create_workflow_allin_slices(
        name='motioncorrection',
        iterfield=('in_file', 'ref_file', 'in_weight_file'))
    # - - - - - - Connections - - - - - - -
    featpreproc.connect(
        [(inputnode, mc,
          [
           ('funcs', 'in.funcs'),
           ]),
         (pre_mc, mc, [
             # the median image realigned to the reference functional will serve as reference
             #  this way motion correction is done to an image more similar to the functionals
             ('mc.out_file', 'in.ref_func'),
           ]),
         (inputfiles, mc, [
             # Check and make sure the ref func mask is close enough to the registered median
             # image.
             ('ref_funcmask', 'in.ref_func_weights'),
           ]),
         (transmanmask_mc, mc, [
             ('funcreg.out_file', 'in.funcs_masks'),  # use mask as weights
         ]),
         (mc, outputnode, [
             ('mc.out_file', 'motion_corrected'),
             ('mc.oned_file', 'motion_parameters.oned_file'),
             ('mc.oned_matrix_save', 'motion_parameters.oned_matrix_save'),
         ]),
         (outputnode, outputfiles, [
             ('motion_corrected', 'motion_corrected.out_file'),
             ('motion_parameters.oned_file', 'motion_corrected.oned_file'), # warp parameters in ASCII (.1D)
             ('motion_parameters.oned_matrix_save', 'motion_corrected.oned_matrix_save'), # transformation matrices for each sub-brick
         ]),
    ])

    #  |~. _ | _| _ _  _  _    _ _  _ _ _  __|_. _  _
    #  |~|(/_|(_|| | |(_||_)  (_(_)| | (/_(_ | |(_)| |
    #                    |
    # Unwarp EPI distortions
    # --------------------------------------------------------

    # Performing motion correction to a reference that is undistorted,
    # so b0_unwarp is currently not needed
    if False:
        b0_unwarp = undistort_workflow.create_workflow()

        featpreproc.connect(
            [(inputfiles, b0_unwarp,
              [  # ('subject_id', 'in.subject_id'),
               # ('session_id', 'in.session_id'),
               ('fmap_phasediff', 'in.fmap_phasediff'),
               ('fmap_magnitude', 'in.fmap_magnitude'),
               ]),
             (mc, b0_unwarp,
              [('mc.out_file', 'in.funcs'),
               ]),
             (transmanmask_mc, b0_unwarp,
              [('funcreg.out_file', 'in.funcmasks'),
               ]),
             (trans_fmapmask, b0_unwarp,
              [('funcreg.out_file', 'in.fmap_mask')]),
             (b0_unwarp, outputfiles,
              [('out.funcs', 'func_unwarp.funcs'),
               ('out.funcmasks', 'func_unwarp.funcmasks'),
               ]),
             (b0_unwarp, outputnode,
              [('out.funcs', 'func_unwarp.funcs'),
               ('out.funcmasks', 'mask'),
               ]),
             ])

    # undistort the reference images
    if False:
        b0_unwarp_ref = b0_unwarp.clone('b0_unwarp_ref')
        featpreproc.connect(
            [(inputfiles, b0_unwarp_ref,
              [  # ('subject_id', 'in.subject_id'),
               # ('session_id', 'in.session_id'),
               ('ref_fmap_phasediff', 'in.fmap_phasediff'),
               ('ref_fmap_magnitude', 'in.fmap_magnitude'),
               ('ref_manual_fmapmask', 'in.fmap_mask'),
               ('ref_func', 'in.funcs'),
               ('ref_funcmask', 'in.funcmasks'),
               ]),
             (b0_unwarp_ref, outputfiles,
              [('out.funcs', 'func_unwarp_ref.func'),
               ('out.funcmasks', 'func_unwarp_ref.funcmask'),
               ]),
             (b0_unwarp_ref, outputnode,
              [('out.funcs', 'ref_func'),
               ('out.funcmasks', 'ref_mask'),
               ]),
             ])
    else:
        featpreproc.connect(
            [(inputfiles, outputfiles,
              [('ref_func', 'reference/func'),
               ('ref_funcmask', 'reference/func_mask'),
               ]),
             (inputfiles, outputnode,
              [('ref_func', 'ref_func'),
               ('ref_funcmask', 'ref_funcmask'),
               ]),
             ])

    # |~) _  _ . __|_ _  _  _|_ _   |~) _  |` _  _ _  _  _ _ 
    # |~\(/_(_||_\ | (/_|    | (_)  |~\(/_~|~(/_| (/_| |(_(/_
    #        _|
    # Register all functionals to common reference
    # --------------------------------------------------------
    if False:  # this is now done during motion correction
        # FLIRT cost: intermodal: corratio, intramodal: least squares and normcorr
        reg_to_ref = pe.MapNode(  # intra-modal
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='normcorr'),
            name='reg_to_ref',
            iterfield=('in_file', 'in_weight'),
        )
        refEPI_to_refT1 = pe.Node(
            # some runs need to be scaled along the anterior-posterior direction
            interface=fsl.FLIRT(dof=12, cost='corratio'),
            name='refEPI_to_refT1',
        )
        # combine func -> ref_func and ref_func -> ref_T1
        reg_to_refT1 = pe.MapNode(
            interface=fsl.ConvertXFM(concat_xfm=True),
            name='reg_to_refT1',
            iterfield=('in_file'),
        )

        reg_funcs = pe.MapNode(
            interface=fsl.preprocess.ApplyXFM(),
            name='reg_funcs',
            iterfield=('in_file', 'in_matrix_file'),
        )
        reg_funcmasks = pe.MapNode(
            interface=fsl.preprocess.ApplyXFM(),
            name='reg_funcmasks',
            iterfield=('in_file', 'in_matrix_file')
        )

        def deref_list(x):
            assert len(x)==1
            return x[0]

        featpreproc.connect(
            [
             (b0_unwarp, reg_to_ref,  # --> reg_to_ref, (A)
              [
               ('out.funcs', 'in_file'),
               ('out.funcmasks', 'in_weight'),
              ]),
             (b0_unwarp_ref, reg_to_ref,
              [
               (('out.funcs', deref_list), 'reference'),
               (('out.funcmasks', deref_list), 'ref_weight'),
              ]),

             (b0_unwarp_ref, refEPI_to_refT1,  # --> refEPI_to_refT1 (B)
              [
               (('out.funcs', deref_list), 'in_file'),
               (('out.funcmasks', deref_list), 'in_weight'),
              ]),
             (inputfiles, refEPI_to_refT1,
              [
               ('ref_t1', 'reference'),
               ('ref_t1mask', 'ref_weight'),
              ]),

             (reg_to_ref, reg_to_refT1,  # --> reg_to_refT1 (A*B)
              [
               ('out_matrix_file', 'in_file'),
              ]),
             (refEPI_to_refT1, reg_to_refT1,
              [
               ('out_matrix_file', 'in_file2'),
              ]),

             (reg_to_refT1, reg_funcs,  # --> reg_funcs
              [
               # ('out_matrix_file', 'in_matrix_file'),
               ('out_file', 'in_matrix_file'),
              ]),
             (b0_unwarp, reg_funcs,
              [
               ('out.funcs', 'in_file'),
              ]),
             (b0_unwarp_ref, reg_funcs,
              [
               (('out.funcs', deref_list), 'reference'),
              ]),

             (reg_to_refT1, reg_funcmasks,  # --> reg_funcmasks
              [
               # ('out_matrix_file', 'in_matrix_file'),
               ('out_file', 'in_matrix_file'),
              ]),
             (b0_unwarp, reg_funcmasks,
              [
               ('out.funcmasks', 'in_file'),
              ]),
             (b0_unwarp_ref, reg_funcmasks,
              [
               (('out.funcs', deref_list), 'reference'),
              ]),

             (reg_funcs, outputfiles,
              [
               ('out_file', 'common_ref.func'),
              ]),
             (reg_funcmasks, outputfiles,
              [
               ('out_file', 'common_ref.funcmask'),
              ]),
        ])


    #  |\/| _ _|_. _  _    _   _|_|. _  _ _
    #  |  |(_) | |(_)| |  (_)|_|| ||(/_| _\
    #
    # --------------------------------------------------------

    # Apply brain masks to functionals
    # --------------------------------------------------------

    # Dilate mask
    """
    Dilate the mask
    """
    if False:
        dilatemask = pe.MapNode(interface=fsl.ImageMaths(suffix='_dil',
                                                         op_string='-dilF'),
                                iterfield=['in_file'],
                                name='dilatemask')
        featpreproc.connect(reg_funcmasks, 'out_file', dilatemask, 'in_file')
    else:
        dilatemask = pe.Node(
            interface=fsl.ImageMaths(suffix='_dil', op_string='-dilF'),
            name='dilatemask')
        featpreproc.connect(inputfiles, 'ref_funcmask', dilatemask, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', outputfiles, 'dilate_mask')

    funcbrains = pe.MapNode(
        fsl.BinaryMaths(operation='mul'),
        iterfield=('in_file', 'operand_file'),
        name='funcbrains'
    )

    featpreproc.connect(
        [(mc, funcbrains,
          [('mc.out_file', 'in_file'),
          ]),
         (dilatemask, funcbrains,
          [('out_file', 'operand_file'),
          ]),
         (funcbrains, outputfiles,
          [('out_file', 'funcbrains'),
           ]),
         ])
    # Detect motion outliers
    # --------------------------------------------------------

    import nipype.algorithms.rapidart as ra
    outliers = pe.MapNode(
        ra.ArtifactDetect(
            mask_type='file',
            # trying to "disable" `norm_threshold`:
            use_norm=True,
            norm_threshold=10.0,  # combines translations in mm and rotations
            # use_norm=Undefined,
            # translation_threshold=1.0,  # translation in mm
            # rotation_threshold=0.02,  # rotation in radians
            zintensity_threshold=3.0,  # z-score
            parameter_source='AFNI',
            save_plot=True),
        iterfield=('realigned_files', 'realignment_parameters', 'mask_file'),
        name='outliers')

    featpreproc.connect([
        (mc, outliers,
         [  # ('mc.par_file', 'realignment_parameters'),
             ('mc.oned_file', 'realignment_parameters'),
         ]),
        (funcbrains, outliers,
         [('out_file', 'realigned_files'),
          ]),
        (dilatemask, outliers,
         [('out_file', 'mask_file'),
          ]),
        (outliers, outputfiles,
         [('outlier_files', 'motion_outliers.@outlier_files'),
          ('plot_files', 'motion_outliers.@plot_files'),
          ('displacement_files', 'motion_outliers.@displacement_files'),
          ('intensity_files', 'motion_outliers.@intensity_files'),
          ('mask_files', 'motion_outliers.@mask_files'),
          ('statistic_files', 'motion_outliers.@statistic_files'),
          # ('norm_files', 'outliers.@norm_files'),
          ]),
        (mc, outputnode,
         [('mc.oned_file', 'motion_parameters'),
          ]),
        (outliers, outputnode,
         [('outlier_files', 'motion_outlier_files'),
          ('plot_files', 'motion_plots.@plot_files'),
          ('displacement_files', 'motion_outliers.@displacement_files'),
          ('intensity_files', 'motion_outliers.@intensity_files'),
          ('mask_files', 'motion_outliers.@mask_files'),
          ('statistic_files', 'motion_outliers.@statistic_files'),
          # ('norm_files', 'outliers.@norm_files'),
          ])
    ])

    """
    Determine the 2nd and 98th percentile intensities of each functional run
    """
    getthresh = pe.MapNode(interface=fsl.ImageStats(op_string='-p 2 -p 98'),
                           iterfield=['in_file'],
                           name='getthreshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', getthresh, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', getthresh, 'in_file')

    """
    Threshold the first run of functional data at 10% of the 98th percentile
    """

    threshold = pe.MapNode(interface=fsl.ImageMaths(out_data_type='char',
                                                    suffix='_thresh'),
                           iterfield=['in_file', 'op_string'],
                           name='threshold')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', threshold, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', threshold, 'in_file')

    """
    Define a function to get 10% of the intensity
    """
    def getthreshop(thresh):
        return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh]

    featpreproc.connect(
        getthresh, ('out_stat', getthreshop),
        threshold, 'op_string')

    """
    Determine the median value of the functional runs using the mask
    """
    medianval = pe.MapNode(interface=fsl.ImageStats(op_string='-k %s -p 50'),
                           iterfield=['in_file', 'mask_file'],
                           name='medianval')
    if False:
        featpreproc.connect(b0_unwarp, 'out.funcs', medianval, 'in_file')
    else:
        featpreproc.connect(mc, 'mc.out_file', medianval, 'in_file')

    featpreproc.connect(threshold, 'out_file', medianval, 'mask_file')

    # (~ _  _ _|_. _ |  (~ _ _  _  _ _|_|_ . _  _
    # _)|_)(_| | |(_||  _)| | |(_)(_) | | ||| |(_|
    #   |                                       _|
    # Spatial smoothing (SUSAN)
    # --------------------------------------------------------

    # create_susan_smooth takes care of calculating the mean and median
    #   functional, applying mask to functional, and running the smoothing
    smooth = create_susan_smooth(separate_masks=False)
    featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm')

    # featpreproc.connect(b0_unwarp, 'out.funcs', smooth, 'inputnode.in_files')
    if False:
        featpreproc.connect(reg_funcs, 'out_file', smooth, 'inputnode.in_files')
    else:
        featpreproc.connect(mc, 'mc.out_file', smooth, 'inputnode.in_files')

    featpreproc.connect(dilatemask, 'out_file',
                        smooth, 'inputnode.mask_file')

    # -------------------------------------------------------
    # The below is from workflows/fmri/fsl/preprocess.py

    """
    Mask the smoothed data with the dilated mask
    """

    maskfunc3 = pe.MapNode(interface=fsl.ImageMaths(suffix='_mask',
                                                    op_string='-mas'),
                           iterfield=['in_file', 'in_file2'],
                           name='maskfunc3')
    featpreproc.connect(
        smooth, 'outputnode.smoothed_files', maskfunc3, 'in_file')

    featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2')

    concatnode = pe.Node(interface=util.Merge(2),
                         name='concat')

    tolist = lambda x: [x]

    def chooseindex(fwhm):
        if fwhm < 1:
            return [0]
        else:
            return [1]

    # maskfunc2 is the functional data before SUSAN
    if False:
        featpreproc.connect(b0_unwarp, ('out.funcs', tolist), concatnode, 'in1')
    else:
        featpreproc.connect(mc, ('mc.out_file', tolist), concatnode, 'in1')
    # maskfunc3 is the functional data after SUSAN
    featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2')

    """
    The following nodes select smooth or unsmoothed data depending on the
    fwhm. This is because SUSAN defaults to smoothing the data with about the
    voxel size of the input data if the fwhm parameter is less than 1/3 of the
    voxel size.
    """
    selectnode = pe.Node(interface=util.Select(), name='select')

    featpreproc.connect(concatnode, 'out', selectnode, 'inlist')

    featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index')
    featpreproc.connect(selectnode, 'out', outputfiles, 'smoothed_files')

    """
    Scale each run so that its median value is set to 10000.
    """

    meanscale = pe.MapNode(interface=fsl.ImageMaths(suffix='_gms'),
                           iterfield=['in_file', 'op_string'],
                           name='meanscale')
    featpreproc.connect(selectnode, 'out', meanscale, 'in_file')

    """
    Define a function to get the scaling factor for intensity normalization
    """

    featpreproc.connect(
        medianval, ('out_stat', getmeanscale),
        meanscale, 'op_string')

    # |_|. _ |_  _  _  _ _
    # | ||(_|| ||_)(_|_\_\
    #      _|   |
    # Temporal filtering
    # --------------------------------------------------------

    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt'),
                          iterfield=['in_file'],
                          name='highpass')
    highpass_operand = lambda x: '-bptf %.10f -1' % x
    featpreproc.connect(
        inputnode, ('highpass', highpass_operand),
        highpass, 'op_string')
    featpreproc.connect(meanscale, 'out_file', highpass, 'in_file')

    version = 0
    if fsl.Info.version() and \
            LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    if version < 507:
        featpreproc.connect(
            highpass, 'out_file', outputnode, 'highpassed_files')
    else:
        """
        Add back the mean removed by the highpass filter operation as
            of FSL 5.0.7
        """
        meanfunc4 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                        suffix='_mean'),
                               iterfield=['in_file'],
                               name='meanfunc4')

        featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file')
        addmean = pe.MapNode(interface=fsl.BinaryMaths(operation='add'),
                             iterfield=['in_file', 'operand_file'],
                             name='addmean')
        featpreproc.connect(highpass, 'out_file', addmean, 'in_file')
        featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file')
        featpreproc.connect(
            addmean, 'out_file', outputnode, 'highpassed_files')

    """
    Generate a mean functional image from the first run
    """
    meanfunc3 = pe.MapNode(interface=fsl.ImageMaths(op_string='-Tmean',
                                                    suffix='_mean'),
                           iterfield=['in_file'],
                           name='meanfunc3')

    featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file')
    featpreproc.connect(meanfunc3, 'out_file', outputfiles, 'mean')

    featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean_highpassed')
    featpreproc.connect(outputnode, 'highpassed_files', outputfiles, 'highpassed_files')

    return(featpreproc)
Example #17
def create_cross_sectional_tbss_pipeline(in_files,
                                         output_dir,
                                         name='cross_sectional_tbss',
                                         skeleton_threshold=0.2,
                                         design_mat=None,
                                         design_con=None):
    workflow = pe.Workflow(name=name)
    workflow.base_dir = output_dir
    workflow.base_output_dir = name

    # Create the dtitk groupwise registration workflow
    groupwise_dtitk = create_dtitk_groupwise_workflow(in_files=in_files,
                                                      name="dtitk_groupwise",
                                                      rig_iteration=3,
                                                      aff_iteration=3,
                                                      nrr_iteration=6)

    # Create the average FA map
    mean_fa = pe.Node(interface=dtitk.TVtool(), name="mean_fa")
    workflow.connect(groupwise_dtitk, 'output_node.out_template', mean_fa,
                     'in_file')
    mean_fa.inputs.operation = 'fa'

    # Register the FMRIB58_FA_1mm.nii.gz atlas to the mean FA map
    reg_atlas = pe.Node(interface=niftyreg.RegAladin(), name='reg_atlas')
    workflow.connect(mean_fa, 'out_file', reg_atlas, 'ref_file')
    reg_atlas.inputs.flo_file = os.path.join(os.environ['FSLDIR'], 'data',
                                             'standard',
                                             'FMRIB58_FA_1mm.nii.gz')

    # Apply the transformation to the lower cingulum image
    war_atlas = pe.Node(interface=niftyreg.RegResample(), name='war_atlas')
    workflow.connect(mean_fa, 'out_file', war_atlas, 'ref_file')
    war_atlas.inputs.flo_file = os.path.join(os.environ['FSLDIR'], 'data',
                                             'standard',
                                             'LowerCingulum_1mm.nii.gz')
    workflow.connect(reg_atlas, 'aff_file', war_atlas, 'trans_file')
    war_atlas.inputs.inter_val = 'LIN'

    # Threshold the propagated lower cingulum
    thr_atlas = pe.Node(interface=niftyseg.BinaryMaths(), name='thr_atlas')
    workflow.connect(war_atlas, 'out_file', thr_atlas, 'in_file')
    thr_atlas.inputs.operation = 'thr'
    thr_atlas.inputs.operand_value = 0.5

    # Binarise the propagated lower cingulum
    bin_atlas = pe.Node(interface=niftyseg.UnaryMaths(), name='bin_atlas')
    workflow.connect(thr_atlas, 'out_file', bin_atlas, 'in_file')
    bin_atlas.inputs.operation = 'bin'

    # Create all the individual FA maps
    individual_fa = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_fa",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_fa,
                     'in_file')
    individual_fa.inputs.operation = 'fa'

    # Create all the individual MD maps
    individual_md = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_md",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_md,
                     'in_file')
    individual_md.inputs.operation = 'tr'

    # Create all the individual RD maps
    individual_rd = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_rd",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_rd,
                     'in_file')
    individual_rd.inputs.operation = 'rd'

    # Create all the individual AD maps
    individual_ad = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_ad",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_ad,
                     'in_file')
    individual_ad.inputs.operation = 'ad'

    # Combine all the warped FA images into a 4D image
    merged_4d_fa = pe.Node(interface=fsl.Merge(), name='merged_4d_fa')
    merged_4d_fa.inputs.dimension = 't'
    workflow.connect(individual_fa, 'out_file', merged_4d_fa, 'in_files')

    # Combine all the warped MD images into a 4D image
    merged_4d_md = pe.Node(interface=fsl.Merge(), name='merged_4d_md')
    merged_4d_md.inputs.dimension = 't'
    workflow.connect(individual_md, 'out_file', merged_4d_md, 'in_files')

    # Combine all the warped RD images into a 4D image
    merged_4d_rd = pe.Node(interface=fsl.Merge(), name='merged_4d_rd')
    merged_4d_rd.inputs.dimension = 't'
    workflow.connect(individual_rd, 'out_file', merged_4d_rd, 'in_files')

    # Combine all the warped AD images into a 4D image
    merged_4d_ad = pe.Node(interface=fsl.Merge(), name='merged_4d_ad')
    merged_4d_ad.inputs.dimension = 't'
    workflow.connect(individual_ad, 'out_file', merged_4d_ad, 'in_files')

    # Threshold the 4D FA image to 0
    merged_4d_fa_thresholded = pe.Node(interface=niftyseg.BinaryMaths(),
                                       name='merged_4d_fa_thresholded')
    merged_4d_fa_thresholded.inputs.operation = 'thr'
    merged_4d_fa_thresholded.inputs.operand_value = 0
    workflow.connect(merged_4d_fa, 'merged_file', merged_4d_fa_thresholded,
                     'in_file')

    # Extract the min value from the 4D FA image
    minimal_value_across_all_fa = pe.Node(interface=niftyseg.UnaryMaths(),
                                          name='minimal_value_across_all_fa')
    minimal_value_across_all_fa.inputs.operation = 'tmin'
    workflow.connect(merged_4d_fa_thresholded, 'out_file',
                     minimal_value_across_all_fa, 'in_file')

    # Create the mask image
    fa_mask = pe.Node(interface=niftyseg.UnaryMaths(), name='fa_mask')
    fa_mask.inputs.operation = 'bin'
    fa_mask.inputs.output_datatype = 'char'
    workflow.connect(minimal_value_across_all_fa, 'out_file', fa_mask,
                     'in_file')

    # Mask the mean FA image
    masked_mean_fa = pe.Node(interface=fsl.ApplyMask(), name='masked_mean_fa')
    workflow.connect(mean_fa, 'out_file', masked_mean_fa, 'in_file')
    workflow.connect(fa_mask, 'out_file', masked_mean_fa, 'mask_file')

    # Create the skeleton image
    skeleton = pe.Node(interface=fsl.TractSkeleton(), name='skeleton')
    skeleton.inputs.skeleton_file = True
    workflow.connect(masked_mean_fa, 'out_file', skeleton, 'in_file')

    # Threshold the skeleton image
    thresholded_skeleton = pe.Node(interface=niftyseg.BinaryMaths(),
                                   name='thresholded_skeleton')
    thresholded_skeleton.inputs.operation = 'thr'
    thresholded_skeleton.inputs.operand_value = skeleton_threshold
    workflow.connect(skeleton, 'skeleton_file', thresholded_skeleton,
                     'in_file')

    # Binarise the skeleton image
    binarised_skeleton = pe.Node(interface=niftyseg.UnaryMaths(),
                                 name='binarised_skeleton')
    binarised_skeleton.inputs.operation = 'bin'
    workflow.connect(thresholded_skeleton, 'out_file', binarised_skeleton,
                     'in_file')

    # Create skeleton distance map
    invert_mask1 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask1')
    invert_mask1.inputs.operation = 'mul'
    invert_mask1.inputs.operand_value = -1
    workflow.connect(fa_mask, 'out_file', invert_mask1, 'in_file')
    invert_mask2 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask2')
    invert_mask2.inputs.operation = 'add'
    invert_mask2.inputs.operand_value = 1
    workflow.connect(invert_mask1, 'out_file', invert_mask2, 'in_file')
    invert_mask3 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask3')
    invert_mask3.inputs.operation = 'add'
    workflow.connect(invert_mask2, 'out_file', invert_mask3, 'in_file')
    workflow.connect(binarised_skeleton, 'out_file', invert_mask3,
                     'operand_file')
    distance_map = pe.Node(interface=fsl.DistanceMap(), name='distance_map')
    workflow.connect(invert_mask3, 'out_file', distance_map, 'in_file')

    # Project the FA values onto the skeleton
    all_fa_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_fa_projected')
    all_fa_projected.inputs.threshold = skeleton_threshold
    all_fa_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_fa_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_fa_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_fa_projected,
                     'data_file')
    workflow.connect(bin_atlas, 'out_file', all_fa_projected,
                     'search_mask_file')

    # Project the MD values onto the skeleton
    all_md_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_md_projected')
    all_md_projected.inputs.threshold = skeleton_threshold
    all_md_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_md_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_md_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_md_projected,
                     'data_file')
    workflow.connect(merged_4d_md, 'merged_file', all_md_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_md_projected,
                     'search_mask_file')

    # Project the RD values onto the skeleton
    all_rd_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_rd_projected')
    all_rd_projected.inputs.threshold = skeleton_threshold
    all_rd_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_rd_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_rd_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_rd_projected,
                     'data_file')
    workflow.connect(merged_4d_rd, 'merged_file', all_rd_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_rd_projected,
                     'search_mask_file')

    # Project the AD values onto the skeleton
    all_ad_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_ad_projected')
    all_ad_projected.inputs.threshold = skeleton_threshold
    all_ad_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_ad_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_ad_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_ad_projected,
                     'data_file')
    workflow.connect(merged_4d_ad, 'merged_file', all_ad_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_ad_projected,
                     'search_mask_file')

    # Create an output node
    output_node = pe.Node(interface=niu.IdentityInterface(fields=[
        'mean_fa', 'all_fa_skeletonised', 'all_md_skeletonised',
        'all_rd_skeletonised', 'all_ad_skeletonised', 'skeleton',
        'skeleton_bin', 't_contrast_raw_stat', 't_contrast_uncorrected_pvalue',
        't_contrast_corrected_pvalue'
    ]),
                          name='output_node')

    # Connect the workflow to the output node
    workflow.connect(masked_mean_fa, 'out_file', output_node, 'mean_fa')
    workflow.connect(all_fa_projected, 'projected_data', output_node,
                     'all_fa_skeletonised')
    workflow.connect(all_md_projected, 'projected_data', output_node,
                     'all_md_skeletonised')
    workflow.connect(all_rd_projected, 'projected_data', output_node,
                     'all_rd_skeletonised')
    workflow.connect(all_ad_projected, 'projected_data', output_node,
                     'all_ad_skeletonised')
    workflow.connect(skeleton, 'skeleton_file', output_node, 'skeleton')
    workflow.connect(binarised_skeleton, 'out_file', output_node,
                     'skeleton_bin')

    # Run randomise if required and connect its output to the output node
    if design_mat is not None and design_con is not None:
        randomise = pe.Node(interface=fsl.Randomise(), name='randomise')
        randomise.inputs.base_name = 'stats_tbss'
        randomise.inputs.tfce2D = True
        randomise.inputs.num_perm = 5000
        workflow.connect(all_fa_projected, 'projected_data', randomise,
                         'in_file')
        randomise.inputs.design_mat = design_mat
        randomise.inputs.design_con = design_con
        workflow.connect(binarised_skeleton, 'out_file', randomise, 'mask')

        workflow.connect(randomise, 'tstat_files', output_node,
                         't_contrast_raw_stat')
        workflow.connect(randomise, 't_p_files', output_node,
                         't_contrast_uncorrected_pvalue')
        workflow.connect(randomise, 't_corrected_p_files', output_node,
                         't_contrast_corrected_pvalue')

    # Create nodes to rename the outputs
    mean_fa_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa', keep_ext=True),
                              name='mean_fa_renamer')
    workflow.connect(output_node, 'mean_fa', mean_fa_renamer, 'in_file')

    mean_sk_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa_skeleton', keep_ext=True),
                              name='mean_sk_renamer')
    workflow.connect(output_node, 'skeleton', mean_sk_renamer, 'in_file')

    bin_ske_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa_skeleton_mask', keep_ext=True),
                              name='bin_ske_renamer')
    workflow.connect(output_node, 'skeleton_bin', bin_ske_renamer, 'in_file')

    fa_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_fa_skeletonised', keep_ext=True),
                              name='fa_skel_renamer')
    workflow.connect(output_node, 'all_fa_skeletonised', fa_skel_renamer,
                     'in_file')
    md_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_md_skeletonised', keep_ext=True),
                              name='md_skel_renamer')
    workflow.connect(output_node, 'all_md_skeletonised', md_skel_renamer,
                     'in_file')
    rd_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_rd_skeletonised', keep_ext=True),
                              name='rd_skel_renamer')
    workflow.connect(output_node, 'all_rd_skeletonised', rd_skel_renamer,
                     'in_file')
    ad_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_ad_skeletonised', keep_ext=True),
                              name='ad_skel_renamer')
    workflow.connect(output_node, 'all_ad_skeletonised', ad_skel_renamer,
                     'in_file')

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='data_sink')
    ds.inputs.base_directory = os.path.abspath(output_dir)

    # Connect the data sink
    workflow.connect(mean_fa_renamer, 'out_file', ds, '@mean_fa')
    workflow.connect(mean_sk_renamer, 'out_file', ds, '@skel_fa')
    workflow.connect(bin_ske_renamer, 'out_file', ds, '@bkel_fa')
    workflow.connect(fa_skel_renamer, 'out_file', ds, '@all_fa')
    workflow.connect(md_skel_renamer, 'out_file', ds, '@all_md')
    workflow.connect(rd_skel_renamer, 'out_file', ds, '@all_rd')
    workflow.connect(ad_skel_renamer, 'out_file', ds, '@all_ad')

    if design_mat is not None and design_con is not None:
        workflow.connect(output_node, 't_contrast_raw_stat', ds,
                         '@t_contrast_raw_stat')
        workflow.connect(output_node, 't_contrast_uncorrected_pvalue', ds,
                         '@t_contrast_uncorrected_pvalue')
        workflow.connect(output_node, 't_contrast_corrected_pvalue', ds,
                         '@t_contrast_corrected_pvalue')

    return workflow
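A minimal, hypothetical invocation of the pipeline above; the tensor images, output directory and FSL design files are placeholders:

tbss_wf = create_cross_sectional_tbss_pipeline(
    in_files=['/data/dti/sub-01_tensor.nii.gz', '/data/dti/sub-02_tensor.nii.gz'],
    output_dir='/data/tbss',
    skeleton_threshold=0.2,
    design_mat='/data/tbss/design.mat',
    design_con='/data/tbss/design.con')
tbss_wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})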
Example #18
datasource.inputs.base_directory = data_path
if conductivity_tensor_included:
  datasource.inputs.field_template = dict(
      mesh_file='%s_gmsh_cond_elec.msh', electrode_name_file='%s.txt',
      dipole_file="%s*-lh.dip", leadfield="%s_aniso_leadfield.hdf5")
else:
  datasource.inputs.field_template = dict(
      mesh_file='%s_gmsh_cond_elec.msh', electrode_name_file='%s.txt',
      dipole_file="%s*-lh.dip", leadfield="%s_iso_leadfield.hdf5")
datasource.inputs.template_args = info

cost = create_cost_function_workflow("dipole_cost")
cost.inputs.inputnode.dipole_row = dipole_row
cost.inputs.inputnode.mesh_id = 1002

datasink = pe.Node(interface=nio.DataSink(),
                   name="datasink")
datasink.inputs.base_directory = op.abspath('forward_datasink')
datasink.inputs.container = 'subject'

cost_proc = pe.Workflow(name="cost_proc")
cost_proc.base_dir = os.path.abspath('cost_proc')
cost_proc.connect([
                    (infosource, datasource,[('subject_id', 'subject_id')]),
                    (datasource, cost,[('mesh_file','inputnode.mesh_file'),
                                               ('dipole_file','inputnode.dipole_file'),
                                               ('leadfield','inputnode.leadfield'),
                                               ])
                ])
cost_proc.connect([(cost, datasink, [("outputnode.mesh_file", "mesh_file")])])
cost_proc.connect([(cost, datasink, [("outputnode.dipole_geo_file", "dipole_geo_file")])])
Example #19
def run_dmri_normalization_wf(subjects_list, working_dir, ds_dir, use_n_procs,
                              plugin_name, file_templates, in_path):
    import os
    from nipype import config
    from nipype.pipeline.engine import Node, Workflow
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    from nipype.interfaces import fsl
    from nipype.interfaces.ants import ApplyTransforms

    #####################################
    # GENERAL SETTINGS
    #####################################
    wf = Workflow(name='dmri_normalization_wf')
    wf.base_dir = os.path.join(working_dir)
    wf.config['execution']['crashdump_dir'] = os.path.join(
        working_dir, 'crash')

    ds = Node(nio.DataSink(), name='ds')

    ds.inputs.regexp_substitutions = [
        ('_subject_id_[A0-9]*/', ''),
        ('_metric_.*dtifit__', ''),
    ]
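    # The substitutions above strip the iterable-generated folder names (e.g.
    # '_subject_id_A0001/') and the '_metric_...dtifit__' prefix from the paths
    # written by the DataSink.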

    infosource = Node(util.IdentityInterface(fields=['subject_id', 'metric']),
                      name='infosource')
    infosource.iterables = [('subject_id', subjects_list),
                            ('metric', ['FA', 'MD', 'L1', 'L2', 'L3'])]

    def add_subject_id_to_ds_dir_fct(subject_id, ds_dir):
        import os
        out_path = os.path.join(ds_dir, subject_id)
        return out_path

    wf.connect(infosource,
               ('subject_id', add_subject_id_to_ds_dir_fct, ds_dir), ds,
               'base_directory')
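    # Connecting via a ('field', function, extra_args) tuple makes nipype run the
    # function on the source value first, so each subject's DataSink
    # base_directory becomes <ds_dir>/<subject_id>.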

    # GET SUBJECT-SPECIFIC DIFFUSION METRIC DATA
    selectfiles = Node(nio.SelectFiles(file_templates, base_directory=in_path),
                       name="selectfiles")
    wf.connect(infosource, 'subject_id', selectfiles, 'subject_id')
    wf.connect(infosource, 'metric', selectfiles, 'metric')

    #####################################
    # WF
    #####################################

    # also transform to mni space
    collect_transforms = Node(interface=util.Merge(2),
                              name='collect_transforms')
    wf.connect([(selectfiles, collect_transforms, [('FA_2_MNI_warp', 'in1'),
                                                   ('FA_2_MNI_affine', 'in2')])
                ])

    mni = Node(interface=ApplyTransforms(), name='mni')
    #wf.connect(selectfiles, 'FA', mni, 'input_image')
    wf.connect(selectfiles, 'metric', mni, 'input_image')
    mni.inputs.reference_image = fsl.Info.standard_image(
        'FMRIB58_FA_1mm.nii.gz')
    wf.connect(collect_transforms, 'out', mni, 'transforms')
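    # ANTs applies the transform list in reverse order, so the affine (in2) is
    # composed before the warp (in1) when resampling the metric into FMRIB58 space.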

    wf.connect(mni, 'output_image', ds, 'dti_mni')

    #####################################
    # RUN WF
    #####################################
    # wf.write_graph(dotfilename=wf.name, graph2use='colored', format='pdf')  # 'hierarchical')
    # wf.write_graph(dotfilename=wf.name, graph2use='orig', format='pdf')
    # wf.write_graph(dotfilename=wf.name, graph2use='flat', format='pdf')

    if plugin_name == 'CondorDAGMan':
        wf.run(plugin=plugin_name,
               plugin_args={'initial_specs': 'request_memory = 1500'})
    if plugin_name == 'MultiProc':
        wf.run(plugin=plugin_name, plugin_args={'n_procs': use_n_procs})  #
Example #20
import os
import nipype.interfaces.io as nio
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

infosource = pe.Node(interface=niu.IdentityInterface(fields=['subject_id']),
                     name="infosource")
infosource.iterables = ('subject_id', [str(i) for i in range(3)])

datasink = pe.Node(name="datasink",
                   interface=nio.DataSink(base_directory=os.curdir))

wf = pe.Workflow(name='demo_wf')
wf.connect(infosource, 'subject_id', datasink, 'container')

test_workflow = wf
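
# A minimal usage sketch: running the workflow iterates over the three subject
# ids and routes each one into the DataSink 'container' input, so any files sunk
# through this node would land in ./0, ./1 and ./2.
# test_workflow.run()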
Example #21
hcp_thal_wf.connect(lh_thal_merge, 'merged_file', lh_thal_mean, 'in_file')

# create a node to calculate the mean image for RH thal mask
rh_thal_mean = pe.Node(fsl.MeanImage(), name='rh_thal_mean')
rh_thal_mean.inputs.dimension = 'T'
rh_thal_mean.inputs.output_type = 'NIFTI_GZ'
hcp_thal_wf.connect(rh_thal_merge, 'merged_file', rh_thal_mean, 'in_file')

# create a node to calculate the mean image for COMBINED thal mask
bi_thal_mean = pe.Node(fsl.MeanImage(), name='bi_thal_mean')
bi_thal_mean.inputs.dimension = 'T'
bi_thal_mean.inputs.output_type = 'NIFTI_GZ'
hcp_thal_wf.connect(bi_thal_merge, 'merged_file', bi_thal_mean, 'in_file') 

# create a datasink node to save everything
datasink = pe.Node(nio.DataSink(), name='datasink')
datasink.inputs.base_directory = os.path.abspath(sink_directory)
datasink.inputs.substitutions = [('_subject_id_', '')]
hcp_thal_wf.connect(subj_iterable, 'subject_id', datasink, 'container')
hcp_thal_wf.connect(datasource, 'mni_brain', datasink, 'anat.@mni_brain')
#hcp_thal_wf.connect(datasource, 'fs_brain', datasink, 'anat.@fs_brain')
hcp_thal_wf.connect(datasource, 'dmri_brain', datasink, 'anat.@dmri_brain')
hcp_thal_wf.connect(midline_thal_bin, 'binary_file', datasink, 'dmri_space.@limbic_thal')
hcp_thal_wf.connect(bi_limbic_thal_mask_combine, 'out_file', datasink, 'dmri_space.@bilimbic_thal')
#hcp_thal_wf.connect(limbic_thal_bin_dmri2acpc_flirt, 'out_file', datasink, 'fs_space.@limbic_thal')
#hcp_thal_wf.connect(bi_limbic_thal_mask_dmri2acpc_flirt, 'out_file', datasink, 'fs_space.@bilimbic_thal')
hcp_thal_wf.connect(limbic_thal_bin_acpc2mni_warp, 'out_file', datasink, 'mni_space.@limbic_thal')
hcp_thal_wf.connect(bi_limbic_thal_mask_acpc2mni_warp, 'out_file', datasink, 'mni_space.@bilimbic_thal')
hcp_thal_wf.connect(lh_thal_mean, 'out_file', datasink, 'avgmasks.@lhmask')
hcp_thal_wf.connect(rh_thal_mean, 'out_file', datasink, 'avgmasks.@rhmask')
hcp_thal_wf.connect(bi_thal_mean, 'out_file', datasink, 'avgmasks.@bimask')
Example #22
def matrixQC(qcname, tag="", SinkDir=".", QCDIR="QC"):
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    import PUMI.plot.connectivity as plot
    # 'globals' (PUMI's global settings module) is assumed to be imported at module level.

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['matrix_file', 'modules', 'atlas', 'output_file']),
                        name='inputspec')
    inputspec.inputs.modules = None
    #inputspec.inputs.atlas = False
    inputspec.inputs.output_file = "qc_matrix.png"

    plt = pe.MapNode(interface=Function(
        input_names=['matrix_file', 'modules', 'atlas', 'output_file'],
        output_names=['plotfile'],
        function=plot.plot_matrix),
                     iterfield=['matrix_file'],
                     name="qc_conn_matrix")

    plt_hist = pe.MapNode(interface=Function(
        input_names=['matrix_file', 'modules', 'atlas', 'output_file'],
        output_names=['plotfile'],
        function=plot.plot_conn_hist),
                          iterfield=['matrix_file'],
                          name="qc_conn_hist")

    plt_polar = pe.MapNode(interface=Function(
        input_names=['matrix_file', 'modules', 'atlas', 'output_file'],
        output_names=['plotfile'],
        function=plot.plot_conn_polar),
                           iterfield=['matrix_file'],
                           name="qc_conn_polar")

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".png")]

    # Save outputs which are important
    ds_qc_hist = pe.Node(interface=io.DataSink(), name='ds_qc_hist')
    ds_qc_hist.inputs.base_directory = QCDir
    ds_qc_hist.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".png")]

    # Save outputs which are important
    ds_qc_polar = pe.Node(interface=io.DataSink(), name='ds_qc_polar')
    ds_qc_polar.inputs.base_directory = QCDir
    ds_qc_polar.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".png")]

    # Create a workflow
    analysisflow = nipype.Workflow(name=qcname + tag + '_qc')

    analysisflow.connect(inputspec, 'matrix_file', plt, 'matrix_file')
    analysisflow.connect(inputspec, 'output_file', plt, 'output_file')
    analysisflow.connect(inputspec, 'modules', plt, 'modules')
    analysisflow.connect(inputspec, 'atlas', plt, 'atlas')
    analysisflow.connect(plt, 'plotfile', ds_qc, qcname)

    analysisflow.connect(inputspec, 'matrix_file', plt_hist, 'matrix_file')
    analysisflow.connect(inputspec, 'output_file', plt_hist, 'output_file')
    analysisflow.connect(inputspec, 'modules', plt_hist, 'modules')
    analysisflow.connect(inputspec, 'atlas', plt_hist, 'atlas')
    analysisflow.connect(plt_hist, 'plotfile', ds_qc_hist, qcname + "_hist")

    analysisflow.connect(inputspec, 'matrix_file', plt_polar, 'matrix_file')
    analysisflow.connect(inputspec, 'output_file', plt_polar, 'output_file')
    analysisflow.connect(inputspec, 'modules', plt_polar, 'modules')
    analysisflow.connect(inputspec, 'atlas', plt_polar, 'atlas')
    analysisflow.connect(plt_polar, 'plotfile', ds_qc_polar, qcname + "_polar")

    return analysisflow
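
# A minimal usage sketch (assumes PUMI's global sink/QC directories are
# configured and that 'matrix_files' is a list of connectivity-matrix files):
# qc = matrixQC("conn_matrix", tag="rest")
# qc.inputs.inputspec.matrix_file = matrix_files
# qc.run()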
Example #23
    (slicemoco, denoise, [('out_file', 'in_file')]),
    (struct2func, denoise, [(('output_image', selectindex, [0]), 'brain_mask'),
                            (('output_image', selectindex, [1]), 'wm_mask'),
                            (('output_image', selectindex, [2]), 'csf_mask')]),
    (motreg, denoise, [(('out_files', selectindex, [0]), 'motreg_file')]),
    (artefact, denoise, [('outlier_files', 'outlier_file')])
])


# make base directory for sinking
def makebase(subject, out_dir):
    return out_dir + subject
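
# e.g. makebase('sub001', '/scratch/out/') -> '/scratch/out/sub001'; note the
# plain concatenation, so out_dir should end with a path separator.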


# sink relevant files
sink = Node(nio.DataSink(parameterization=False), name='sink')

preproc.connect([
    (session_infosource, sink, [('session', 'container')]),
    (subject_infosource, sink, [(('subject', makebase, out_dir),
                                 'base_directory')]),
    (remove_vol, sink, [('out_file', 'realignment.@raw_file')]),
    (slicemoco, sink, [('out_file', 'realignment.@realigned_file'),
                       ('par_file', 'confounds.@orig_motion')]),
    (tsnr, sink, [('tsnr_file', 'realignment.@tsnr')]),
    (median, sink, [('median_file', 'realignment.@median')]),
    (biasfield, sink, [('output_image', 'realignment.@biasfield')]),
    (coreg, sink,
     [('outputnode.uni_lowres', 'registration.@uni_lowres'),
      ('outputnode.epi2lowres', 'registration.@epi2lowres'),
      ('outputnode.epi2lowres_mat', 'registration.@epi2lowres_mat'),
Example #24
def timecourse2png(qcname,
                   tag="",
                   type=TsPlotType.ALL,
                   SinkDir=".",
                   QCDIR="QC"):
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    # 'TsPlotType' and 'globals' are assumed to be defined/imported at module level.

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func', 'mask', 'x', 'y', 'z']),
        name='inputspec')

    if type == TsPlotType.VOX:
        voxroi = pe.MapNode(fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='voxroi')

        #TODO add voxel coordinates
        # TODO test
        def setInputs(x, y, z):
            return ('-roi ' + str(x) + ' 1 ' + str(y) + ' 1 '
                    + str(z) + ' 1 0 -1 -bin')
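        # e.g. setInputs(10, 20, 30) -> '-roi 10 1 20 1 30 1 0 -1 -bin'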

        voxroi_args = pe.Node(Function(input_names=['x', 'y', 'z'],
                                       output_names=['args'],
                                       function=setInputs),
                              name="voxroi_args")
    elif type == TsPlotType.ALL:
        voxroi = pe.MapNode(fsl.ImageMaths(op_string='-bin'),
                            iterfield=['in_file'],
                            name='voxroi')
    # elif type == TsPlotType.ROI: nothing to build here; the mask input is connected directly below

    meants = pe.MapNode(fsl.ImageMeants(),
                        iterfield=['in_file', 'mask'],
                        name='meants')

    plottimeser = pe.MapNode(fsl.PlotTimeSeries(),
                             iterfield=['in_file'],
                             name='plottimeser')

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", tag + ".png")]

    # Create a workflow
    analysisflow = nipype.Workflow(name=qcname + tag + '_qc')

    if type == TsPlotType.VOX:
        analysisflow.connect(inputspec, 'func', voxroi, 'in_file')
        analysisflow.connect(inputspec, 'x', voxroi_args, 'x')
        analysisflow.connect(inputspec, 'y', voxroi_args, 'y')
        analysisflow.connect(inputspec, 'z', voxroi_args, 'z')
        analysisflow.connect(voxroi_args, 'args', voxroi, 'args')
        analysisflow.connect(voxroi, 'out_file', meants, 'mask')
    elif type == TsPlotType.ALL:
        analysisflow.connect(inputspec, 'func', voxroi, 'in_file')
        analysisflow.connect(voxroi, 'out_file', meants, 'mask')
    elif type == TsPlotType.ROI:
        analysisflow.connect(inputspec, 'mask', meants, 'mask')

    analysisflow.connect(inputspec, 'func', meants, 'in_file')
    analysisflow.connect(meants, 'out_file', plottimeser, 'in_file')
    analysisflow.connect(plottimeser, 'out_file', ds_qc, qcname)

    return analysisflow
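
# A minimal usage sketch (assumes the module-level sink/QC globals are set and
# 'func_files' is a list of 4D functional images):
# ts_qc = timecourse2png("global_signal", type=TsPlotType.ALL)
# ts_qc.inputs.inputspec.func = func_files
# ts_qc.run()
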
def pbX_wf(subject_id, sink_directory, name='hcp_pbX'):

    hcp_pbX_wf = pe.Workflow(name='hcp_pbX_wf')

    #making all the keys for the dictionary
    info = dict(merged_thsamples=[['subject_id', 'merged_th']],
                merged_phsamples=[['subject_id', 'merged_ph']],
                merged_fsamples=[['subject_id', 'merged_f']],
                dmri_brain=[['subject_id', 'T1w_acpc_dc_restore_1.25']],
                fs_brain=[['subject_id', 'T1w_acpc_dc']],
                aparcaseg=[['subject_id', 'aparc+aseg']],
                mask=[['subject_id', 'nodif_brain_mask']])

    # Create a datasource node to grab the bedpostX samples, anatomical images, and mask
    # The keys of the 'info' dictionary above become the DataGrabber outfields
    datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                                   outfields=list(
                                                       info.keys())),
                         name='datasource')
    datasource.inputs.template = '%s/%s'
    datasource.inputs.subject_id = subject_id
    datasource.inputs.base_directory = os.path.abspath('/home/data/hcp')
    datasource.inputs.field_template = dict(
        merged_thsamples=
        '/home/data/madlab/data/mri/hcp/bedpostX/%s/hcpbpX/thsamples/%s*.nii.gz',
        merged_phsamples=
        '/home/data/madlab/data/mri/hcp/bedpostX/%s/hcpbpX/phsamples/%s*.nii.gz',
        merged_fsamples=
        '/home/data/madlab/data/mri/hcp/bedpostX/%s/hcpbpX/fsamples/%s*.nii.gz',
        dmri_brain='/home/data/hcp/%s/T1w/%s.nii.gz',
        fs_brain='/home/data/hcp/%s/T1w/%s.nii.gz',
        aparcaseg='/home/data/hcp/%s/T1w/%s.nii.gz',
        mask='/home/data/hcp/%s/T1w/Diffusion/%s.nii.gz')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
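    # field_template gives a printf-style path per output field; template_args
    # (the 'info' dict above) supplies the values substituted into each '%s'.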

    # Create a flirt node to calculate the dmri_brain to fs_brain xfm
    #Basically creating a conversion from DWI space to Freesurfer space
    dmri2fs_xfm = pe.Node(fsl.FLIRT(), name='dmri2fs_xfm')
    dmri2fs_xfm.inputs.out_matrix_file = 'dmri_2_fs_xfm.mat'
    hcp_pbX_wf.connect(datasource, 'dmri_brain', dmri2fs_xfm, 'in_file')
    hcp_pbX_wf.connect(datasource, 'fs_brain', dmri2fs_xfm, 'reference')

    # Create a convertxfm node to invert the dmri2fs affine,
    # i.e. a transform from FreeSurfer space back to DWI space
    invt_dmri2fs = pe.Node(fsl.ConvertXFM(), name='invt_dmri2fs')
    invt_dmri2fs.inputs.invert_xfm = True
    invt_dmri2fs.inputs.out_file = 'fs_2_dmri_xfm.mat'
    hcp_pbX_wf.connect(dmri2fs_xfm, 'out_matrix_file', invt_dmri2fs, 'in_file')

    # Extract thalamus seed masks from aparc+aseg.nii.gz file
    # Here 10 is the left thalamus, and 49 is the right thalamus
    thal_seed_mask = pe.MapNode(fs.Binarize(),
                                iterfield=['match', 'binary_file'],
                                name='thal_seed_mask')
    #thal_seed_mask.inputs.subject_dir = 'aparcaseg'
    thal_seed_mask.inputs.match = [[10], [49]]
    thal_seed_mask.inputs.binary_file = ['lft_thal.nii.gz', 'rt_thal.nii.gz']
    hcp_pbX_wf.connect(datasource, 'aparcaseg', thal_seed_mask, 'in_file')

    # Next we build an avoid mask so tracking steers clear of the ventricles
    # The 3rd and 4th ventricles have no left/right split, so a single combined mask is made
    avoid_mask = pe.Node(
        fs.Binarize(),
        #out_type='nii.gz',
        name='avoid_mask')
    #avoid_mask.inputs.subject_dir = 'aparcaseg'
    avoid_mask.inputs.match = [
        4, 14, 15, 43, 72
    ]  #lft_lat_ven, 3rd_ven, 4th_ven, rgt_lat_ven, 5th_ven
    avoid_mask.inputs.binary_file = 'ventricles.nii.gz'
    hcp_pbX_wf.connect(datasource, 'aparcaseg', avoid_mask, 'in_file')

    # Extract cortical target masks from the aparc+aseg.nii.gz file
    # 'match' holds the FreeSurfer label values; 'binary_file' names the output masks
    ctx_targ_mask = pe.MapNode(fs.Binarize(),
                               iterfield=['match', 'binary_file'],
                               name='ctx_targ_mask')
    #ctx_targ_mask.inputs.subject_dir = 'aparcaseg'
    ctx_targ_mask.inputs.match = [[1024], [1022],
                                  [1003, 1028, 1027, 1012, 1019, 1020, 1032],
                                  [1031, 1029, 1008],
                                  [1009, 1015, 1033, 1035, 1034, 1030], [1011],
                                  [1017], [1002], [1014], [1026], [1028],
                                  [1023, 1025, 1010], [1005, 1013,
                                                       1021], [1007], [1006],
                                  [1016], [17], [18], [26], [2024], [2022],
                                  [2003, 2028, 2027, 2012, 2019, 2020, 2032],
                                  [2031, 2029, 2008],
                                  [2009, 2015, 2033, 2035, 2034, 2030], [2011],
                                  [2017], [2002], [2014], [2026], [2028],
                                  [2023, 2025, 2010], [2005, 2013, 2021],
                                  [2007], [2006], [2016], [53], [54], [58]]
    ctx_targ_mask.inputs.binary_file = [
        'ctx_lh_precentral.nii.gz', 'ctx_lh_postcentral.nii.gz',
        'ctx_lh_latfront.nii.gz', 'ctx_lh_parietal.nii.gz',
        'ctx_lh_temporal.nii.gz', 'ctx_lh_occipital.nii.gz',
        'ctx_lh_paracentral.nii.gz', 'ctx_lh_caudantcing.nii.gz',
        'ctx_lh_medorbfront.nii.gz', 'ctx_lh_rostantcing.nii.gz',
        'ctx_lh_superfront.nii.gz', 'ctx_lh_medpost.nii.gz',
        'ctx_lh_medoccipital.nii.gz', 'ctx_lh_fusiform.nii.gz',
        'ctx_lh_entorhinal.nii.gz', 'ctx_lh_parahippocampal.nii.gz',
        'lh_hpc.nii.gz', 'lh_amy.nii.gz', 'lh_nacc.nii.gz',
        'ctx_rh_precentral.nii.gz', 'ctx_rh_postcentral.nii.gz',
        'ctx_rh_latfront.nii.gz', 'ctx_rh_parietal.nii.gz',
        'ctx_rh_temporal.nii.gz', 'ctx_rh_occipital.nii.gz',
        'ctx_rh_paracentral.nii.gz', 'ctx_rh_caudantcing.nii.gz',
        'ctx_rh_medorbfront.nii.gz', 'ctx_rh_rostantcing.nii.gz',
        'ctx_rh_superfront.nii.gz', 'ctx_rh_medpost.nii.gz',
        'ctx_rh_medoccipital.nii.gz', 'ctx_rh_fusiform.nii.gz',
        'ctx_rh_entorhinal.nii.gz', 'ctx_rh_parahippocampal.nii.gz',
        'rh_hpc.nii.gz', 'rh_amy.nii.gz', 'rh_nacc.nii.gz'
    ]
    hcp_pbX_wf.connect(datasource, 'aparcaseg', ctx_targ_mask, 'in_file')

    # Create a flirt node to apply inverse transform to seeds
    # Basically you convert the masks (seeds) that were in freesurfer space to the DWI space
    seedxfm_fs2dmri = pe.MapNode(fsl.FLIRT(),
                                 iterfield=['in_file'],
                                 name='seedxfm_fs2dmri')
    seedxfm_fs2dmri.inputs.apply_xfm = True
    seedxfm_fs2dmri.inputs.interp = 'nearestneighbour'
    hcp_pbX_wf.connect(thal_seed_mask, 'binary_file', seedxfm_fs2dmri,
                       'in_file')
    hcp_pbX_wf.connect(datasource, 'dmri_brain', seedxfm_fs2dmri, 'reference')
    hcp_pbX_wf.connect(invt_dmri2fs, 'out_file', seedxfm_fs2dmri,
                       'in_matrix_file')

    # Create a flirt node to apply inverse transform to targets
    # You do the same as the previous node, but to the target masks
    targxfm_fs2dmri = pe.MapNode(fsl.FLIRT(),
                                 iterfield=['in_file'],
                                 name='targxfm_fs2dmri')
    targxfm_fs2dmri.inputs.apply_xfm = True
    targxfm_fs2dmri.inputs.interp = 'nearestneighbour'
    hcp_pbX_wf.connect(ctx_targ_mask, 'binary_file', targxfm_fs2dmri,
                       'in_file')
    hcp_pbX_wf.connect(datasource, 'dmri_brain', targxfm_fs2dmri, 'reference')
    hcp_pbX_wf.connect(invt_dmri2fs, 'out_file', targxfm_fs2dmri,
                       'in_matrix_file')

    #Apply the inverse transform for the avoid masks from freesurfer to DWI space
    avoidmaskxfm_fs2dmri = pe.Node(fsl.FLIRT(), name='avoidmaskxfm_fs2dmri')
    avoidmaskxfm_fs2dmri.inputs.apply_xfm = True
    avoidmaskxfm_fs2dmri.inputs.interp = 'nearestneighbour'
    hcp_pbX_wf.connect(avoid_mask, 'binary_file', avoidmaskxfm_fs2dmri,
                       'in_file')
    hcp_pbX_wf.connect(datasource, 'dmri_brain', avoidmaskxfm_fs2dmri,
                       'reference')
    hcp_pbX_wf.connect(invt_dmri2fs, 'out_file', avoidmaskxfm_fs2dmri,
                       'in_matrix_file')

    # (Commented out) helper that would split the transformed target masks into two lists
    #make_targ_lists = pe.Node(util.Function(input_names=['in_files'],
    #                                        output_names='out_list',
    #                                        function=create_two_lists),
    #                          name='make_targ_lists')
    #hcp_pbX_wf.connect(targxfm_fs2dmri, 'out_file', make_targ_lists, 'in_files')

    #PROBTRACKX NODE
    pbx2 = pe.MapNode(
        fsl.ProbTrackX2(),
        iterfield=['seed',
                   'target_masks'],  #Should I have included avoid_mp here?
        name='pbx2')
    pbx2.inputs.c_thresh = 0.2
    pbx2.inputs.n_steps = 2000
    pbx2.inputs.step_length = 0.5
    pbx2.inputs.n_samples = 25000
    pbx2.inputs.opd = True
    pbx2.inputs.os2t = True
    pbx2.inputs.loop_check = True
    #pbx2.plugin_args = {'bsub_args': '-q PQ_madlab'} #old way new way below
    pbx2.plugin_args = {
        'sbatch_args':
        ('-p IB_40C_1.5T --qos pq_madlab --account iacc_madlab -N 1 -n 6')
    }
    hcp_pbX_wf.connect(datasource, 'merged_thsamples', pbx2, 'thsamples')
    hcp_pbX_wf.connect(datasource, 'merged_phsamples', pbx2, 'phsamples')
    hcp_pbX_wf.connect(datasource, 'merged_fsamples', pbx2, 'fsamples')
    hcp_pbX_wf.connect(seedxfm_fs2dmri, 'out_file', pbx2, 'seed')
    hcp_pbX_wf.connect(targxfm_fs2dmri, ('out_file', hemispherize), pbx2,
                       'target_masks')
    #hcp_pbX_wf.connect(make_targ_lists, 'out_list', pbx2, 'target_masks')
    hcp_pbX_wf.connect(avoidmaskxfm_fs2dmri, 'out_file', pbx2, 'avoid_mp')
    hcp_pbX_wf.connect(datasource, 'mask', pbx2, 'mask')

    # Create a findthebiggest node to do hard segmentation between seeds and targets:
    # it partitions the seed region based on the probtrackx2 outputs when
    # classification targets are used.
    findthebiggest = pe.MapNode(fsl.FindTheBiggest(),
                                iterfield=['in_files'],
                                name='findthebiggest')
    hcp_pbX_wf.connect(pbx2, 'targets', findthebiggest, 'in_files')

    # Create a datasink node to save outputs.
    datasink = pe.Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = os.path.abspath(sink_directory)
    datasink.inputs.container = subject_id + '/' + 'thal_seed'
    hcp_pbX_wf.connect(pbx2, 'log', datasink, 'hcpprobX.log')
    hcp_pbX_wf.connect(pbx2, 'fdt_paths', datasink, 'hcpprobX.fdt')
    hcp_pbX_wf.connect(pbx2, 'way_total', datasink, 'hcpprobX.waytotal')
    hcp_pbX_wf.connect(pbx2, 'targets', datasink, 'hcpprobX.targets')
    hcp_pbX_wf.connect(findthebiggest, 'out_file', datasink,
                       'hcpprobX.fbiggest.@biggestsegmentation')
    #hcp_pbX_wf.connect(thal_seed_mask, 'binary_file', datasink, 'hcpprobX.thal_mask')
    hcp_pbX_wf.connect(seedxfm_fs2dmri, 'out_file', datasink,
                       'hcpprobX.seed_masks')
    #from seed_xsfm(out_file) to datasink "seed_files"
    #do we need this - > emu_pbX_wf.connect(datasource, 'ref_b0', datasink, 'emuprobX.b0')
    #do we need this - > emu_pbX_wf.connect(thal_seed_mask, 'binary_file', datasink, 'emuprobX.thal_mask')

    return hcp_pbX_wf
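
# A minimal usage sketch (paths and plugin are illustrative only):
# wf = pbX_wf(subject_id='100307', sink_directory='/path/to/probtrackx_sink')
# wf.base_dir = '/path/to/workdir'
# wf.run(plugin='SLURM')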
Example #26
    def build_output_node(self):
        """Build and connect an output node to the pipelines.
        """
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        import nipype.interfaces.io as nio
        from os.path import join
        from clinica.utils.io import fix_join
        import clinica.pipelines.dwi_preprocessing_using_t1.dwi_preprocessing_using_t1_utils as utils

        # Find container path from DWI filename
        # =====================================
        container_path = npe.Node(nutil.Function(
            input_names=['bids_dwi_filename'],
            output_names=['container'],
            function=utils.dwi_container_from_filename),
                                  name='container_path')

        rename_into_caps = npe.Node(nutil.Function(
            input_names=[
                'in_bids_dwi', 'fname_dwi', 'fname_bval', 'fname_bvec',
                'fname_brainmask'
            ],
            output_names=[
                'out_caps_dwi', 'out_caps_bval', 'out_caps_bvec',
                'out_caps_brainmask'
            ],
            function=utils.rename_into_caps),
                                    name='rename_into_caps')

        # Writing results into CAPS
        # =========================
        write_results = npe.Node(name='write_results',
                                 interface=nio.DataSink())
        write_results.inputs.base_directory = self.caps_directory
        write_results.inputs.parameterization = False

        self.connect([
            (self.input_node, container_path, [('dwi', 'bids_dwi_filename')
                                               ]),  # noqa
            (self.input_node, rename_into_caps, [('dwi', 'in_bids_dwi')
                                                 ]),  # noqa
            (
                self.output_node,
                rename_into_caps,
                [
                    ('preproc_dwi', 'fname_dwi'),  # noqa
                    ('preproc_bval', 'fname_bval'),  # noqa
                    ('preproc_bvec', 'fname_bvec'),  # noqa
                    ('b0_mask', 'fname_brainmask')
                ]),  # noqa
            (container_path, write_results, [(('container', fix_join, 'dwi'),
                                              'container')]),  # noqa
            (
                rename_into_caps,
                write_results,
                [
                    ('out_caps_dwi', 'preprocessing.@preproc_dwi'),  # noqa
                    ('out_caps_bval', 'preprocessing.@preproc_bval'),  # noqa
                    ('out_caps_bvec', 'preprocessing.@preproc_bvec'),  # noqa
                    ('out_caps_brainmask', 'preprocessing.@b0_mask')
                ])  # noqa
        ])
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import nipype.interfaces.io as nio
        import nipype.interfaces.spm as spm
        from ..t1_volume_tissue_segmentation import t1_volume_tissue_segmentation_utils as seg_utils
        from clinica.utils.filemanip import unzip_nii, zip_nii
        from clinica.utils.nipype import fix_join

        # Get <subject_id> (e.g. sub-CLNC01_ses-M00) from input_node
        # and print begin message
        # =======================
        init_node = npe.Node(interface=nutil.Function(
            input_names=self.get_input_fields(),
            output_names=['subject_id'] + self.get_input_fields(),
            function=seg_utils.init_input_node),
                             name='0-InitNode')

        # Unzipping
        # =========
        unzip_node = npe.Node(nutil.Function(input_names=['in_file'],
                                             output_names=['out_file'],
                                             function=unzip_nii),
                              name='1-UnzipT1w')

        # Unified Segmentation
        # ====================
        new_segment = npe.Node(spm.NewSegment(), name='2-SpmSegmentation')
        new_segment.inputs.write_deformation_fields = [True, True]
        new_segment.inputs.tissues = seg_utils.get_tissue_tuples(
            self.parameters['tissue_probability_maps'],
            self.parameters['tissue_classes'],
            self.parameters['dartel_tissues'],
            self.parameters['save_warped_unmodulated'],
            self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # =====================================================
        t1_to_mni = npe.Node(seg_utils.ApplySegmentationDeformation(),
                             name='3-T1wToMni')

        # Print end message
        # =================
        print_end_message = npe.Node(interface=nutil.Function(
            input_names=['subject_id', 'final_file'],
            function=seg_utils.print_end_pipeline),
                                     name='WriteEndMessage')

        # Connection
        # ==========
        self.connect([
            (self.input_node, init_node, [('t1w', 't1w')]),
            (init_node, unzip_node, [('t1w', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (init_node, print_end_message, [('subject_id', 'subject_id')]),
            (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
            (new_segment, t1_to_mni, [('forward_deformation_field',
                                       'deformation_field')]),
            (new_segment, self.output_node,
             [('bias_corrected_images', 'bias_corrected_images'),
              ('bias_field_images', 'bias_field_images'),
              ('dartel_input_images', 'dartel_input_images'),
              ('forward_deformation_field', 'forward_deformation_field'),
              ('inverse_deformation_field', 'inverse_deformation_field'),
              ('modulated_class_images', 'modulated_class_images'),
              ('native_class_images', 'native_class_images'),
              ('normalized_class_images', 'normalized_class_images'),
              ('transformation_mat', 'transformation_mat')]),
            (t1_to_mni, self.output_node, [('out_files', 't1_mni')]),
            (self.output_node, print_end_message, [('t1_mni', 'final_file')]),
        ])

        # Find container path from t1w filename
        # =====================================
        container_path = npe.Node(nutil.Function(
            input_names=['t1w_filename'],
            output_names=['container'],
            function=seg_utils.t1w_container_from_filename),
                                  name='ContainerPath')

        # Writing CAPS
        # ============
        write_node = npe.Node(name='WriteCAPS', interface=nio.DataSink())
        write_node.inputs.base_directory = self.caps_directory
        write_node.inputs.parameterization = False
        write_node.inputs.regexp_substitutions = [
            (r'(.*)c1(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-graymatter\3'),
            (r'(.*)c2(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-whitematter\3'),
            (r'(.*)c3(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-csf\3'),
            (r'(.*)c4(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-bone\3'),
            (r'(.*)c5(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-softtissue\3'),
            (r'(.*)c6(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-background\3'),
            (r'(.*)(/native_space/sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_probability\3'),
            (r'(.*)(/([a-z]+)_deformation_field/)i?y_(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\4_target-Ixi549Space_transformation-\3_deformation\5'
             ),
            (r'(.*)(/t1_mni/)w(sub-.*)_T1w(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_T1w\4'),
            (r'(.*)(/modulated_normalized/)mw(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_modulated-on_probability\4'
             ),
            (r'(.*)(/normalized/)w(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_modulated-off_probability\4'
             ),
            (r'(.*/dartel_input/)r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_dartelinput\3'),
            # Will remove trait_added empty folder
            (r'trait_added', r'')
        ]
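        # With these substitutions, e.g. SPM's native-space 'c1<source>.nii.gz'
        # ends up in CAPS as '<source>_segm-graymatter_probability.nii.gz'.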

        self.connect([
            (self.input_node, container_path, [('t1w', 't1w_filename')]),
            (container_path, write_node, [(('container', fix_join, ''),
                                           'container')]),
            (self.output_node, write_node,
             [(('native_class_images', seg_utils.zip_list_files, True),
               'native_space'),
              (('dartel_input_images', seg_utils.zip_list_files, True),
               'dartel_input')]),
            (self.output_node, write_node, [(('inverse_deformation_field',
                                              zip_nii, True),
                                             'inverse_deformation_field')]),
            (self.output_node, write_node, [(('forward_deformation_field',
                                              zip_nii, True),
                                             'forward_deformation_field')]),
            (self.output_node, write_node, [(('t1_mni', zip_nii, True),
                                             't1_mni')]),
        ])
        if self.parameters['save_warped_unmodulated']:
            self.connect([
                (self.output_node, write_node,
                 [(('normalized_class_images', seg_utils.zip_list_files, True),
                   'normalized')]),
            ])
        if self.parameters['save_warped_modulated']:
            self.connect([
                (self.output_node, write_node,
                 [(('modulated_class_images', seg_utils.zip_list_files, True),
                   'modulated_normalized')]),
            ])
Example #28
def generate_single_session_template_WF(projectid,
                                        subjectid,
                                        sessionid,
                                        onlyT1,
                                        master_config,
                                        phase,
                                        interpMode,
                                        pipeline_name,
                                        doDenoise=True):
    """
    Run autoworkup on a single sessionid

    This is the main function to call when processing a data set with T1 & T2
    data.  ExperimentBaseDirectoryPrefix is the base of the directory to place results, T1Images & T2Images
    are the lists of images to be used in the auto-workup. atlas_fname_wpath is
    the path and filename of the atlas to use.
    """

    #if  not 'landmark' in master_config['components'] or not 'auxlmk' in master_config['components'] or not 'tissue_classify' in master_config['components']:
    #    print "Baseline DataSink requires 'AUXLMK' and/or 'TISSUE_CLASSIFY'!!!"
    #    raise NotImplementedError
    # master_config['components'].append('auxlmk')
    # master_config['components'].append('tissue_classify')

    assert phase in [
        'atlas-based-reference', 'subject-based-reference'
    ], "Unknown phase! Valid entries: 'atlas-based-reference', 'subject-based-reference'"

    if 'tissue_classify' in master_config['components']:
        assert ('landmark' in master_config['components']
                ), "tissue_classify Requires landmark step!"
    if 'landmark' in master_config['components']:
        assert 'denoise' in master_config[
            'components'], "landmark Requires denoise step!"

    from workflows.atlasNode import MakeAtlasNode

    baw201 = pe.Workflow(name=pipeline_name)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=[
        'atlasLandmarkFilename', 'atlasWeightFilename', 'LLSModel',
        'inputTemplateModel', 'template_t1', 'atlasDefinition', 'T1s', 'T2s',
        'PDs', 'FLs', 'OTHERs', 'hncma_atlas', 'template_rightHemisphere',
        'template_leftHemisphere', 'template_WMPM2_labels',
        'template_nac_labels', 'template_ventricles'
    ]),
                         run_without_submitting=True,
                         name='inputspec')

    outputsSpec = pe.Node(
        interface=IdentityInterface(fields=[
            't1_average',
            't2_average',
            'pd_average',
            'fl_average',
            'posteriorImages',
            'outputLabels',
            'outputHeadLabels',
            'atlasToSubjectTransform',
            'atlasToSubjectInverseTransform',
            'atlasToSubjectRegistrationState',
            'BCD_ACPC_T1_CROPPED',
            'outputLandmarksInACPCAlignedSpace',
            'outputLandmarksInInputSpace',
            'output_tx',
            'LMIatlasToSubject_tx',
            'writeBranded2DImage',
            'brainStemMask',
            'UpdatedPosteriorsList'  # Longitudinal
        ]),
        run_without_submitting=True,
        name='outputspec')

    dsName = "{0}_ds_{1}".format(phase, sessionid)
    DataSink = pe.Node(name=dsName, interface=nio.DataSink())
    DataSink.overwrite = master_config['ds_overwrite']
    DataSink.inputs.container = '{0}/{1}/{2}'.format(projectid, subjectid,
                                                     sessionid)
    DataSink.inputs.base_directory = master_config['resultdir']
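    # With this container, results land under
    # <resultdir>/<projectid>/<subjectid>/<sessionid>.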

    atlas_static_directory = master_config['atlascache']
    if master_config['workflow_phase'] == 'atlas-based-reference':
        atlas_warped_directory = master_config['atlascache']
        atlasABCNode_XML = MakeAtlasNode(atlas_warped_directory,
                                         'BABCXMLAtlas_{0}'.format(sessionid),
                                         ['W_BRAINSABCSupport'])
        baw201.connect(atlasABCNode_XML, 'ExtendedAtlasDefinition_xml',
                       inputsSpec, 'atlasDefinition')

        atlasABCNode_W = MakeAtlasNode(
            atlas_warped_directory, 'BABCAtlas_W{0}'.format(sessionid),
            ['W_BRAINSABCSupport', 'W_LabelMapsSupport'])
        baw201.connect([(atlasABCNode_W, inputsSpec, [
            ('hncma_atlas', 'hncma_atlas'),
            ('template_leftHemisphere', 'template_leftHemisphere'),
            ('template_rightHemisphere', 'template_rightHemisphere'),
            ('template_WMPM2_labels', 'template_WMPM2_labels'),
            ('template_nac_labels', 'template_nac_labels'),
            ('template_ventricles', 'template_ventricles')
        ])])
        ## These landmarks are only relevant for the atlas-based-reference case
        atlasBCDNode_W = MakeAtlasNode(atlas_warped_directory,
                                       'BBCDAtlas_W{0}'.format(sessionid),
                                       ['W_BCDSupport'])
        baw201.connect([
            (atlasBCDNode_W, inputsSpec, [
                ('template_t1', 'template_t1'),
                ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'),
            ]),
        ])
        ## Needed for both segmentation and template building prep
        atlasBCUTNode_W = MakeAtlasNode(atlas_warped_directory,
                                        'BBCUTAtlas_W{0}'.format(sessionid),
                                        ['W_BRAINSCutSupport'])

    elif master_config['workflow_phase'] == 'subject-based-reference':
        print(master_config['previousresult'])
        atlas_warped_directory = os.path.join(master_config['previousresult'],
                                              subjectid, 'Atlas')

        template_DG = pe.Node(interface=nio.DataGrabber(
            infields=['subject'],
            outfields=[
                'outAtlasXMLFullPath', 'hncma_atlas',
                'template_leftHemisphere', 'template_rightHemisphere',
                'template_WMPM2_labels', 'template_nac_labels',
                'template_ventricles', 'template_t1',
                'template_landmarks_50Lmks_fcsv'
            ]),
                              name='Template_DG')
        template_DG.inputs.base_directory = master_config['previousresult']
        template_DG.inputs.subject = subjectid
        template_DG.inputs.field_template = {
            'outAtlasXMLFullPath': '%s/Atlas/AtlasDefinition_%s.xml',
            'hncma_atlas': '%s/Atlas/AVG_hncma_atlas.nii.gz',
            'template_leftHemisphere':
            '%s/Atlas/AVG_template_leftHemisphere.nii.gz',
            'template_rightHemisphere':
            '%s/Atlas/AVG_template_rightHemisphere.nii.gz',
            'template_WMPM2_labels':
            '%s/Atlas/AVG_template_WMPM2_labels.nii.gz',
            'template_nac_labels': '%s/Atlas/AVG_template_nac_labels.nii.gz',
            'template_ventricles': '%s/Atlas/AVG_template_ventricles.nii.gz',
            'template_t1': '%s/Atlas/AVG_T1.nii.gz',
            'template_landmarks_50Lmks_fcsv': '%s/Atlas/AVG_LMKS.fcsv',
        }
        template_DG.inputs.template_args = {
            'outAtlasXMLFullPath': [['subject', 'subject']],
            'hncma_atlas': [['subject']],
            'template_leftHemisphere': [['subject']],
            'template_rightHemisphere': [['subject']],
            'template_WMPM2_labels': [['subject']],
            'template_nac_labels': [['subject']],
            'template_ventricles': [['subject']],
            'template_t1': [['subject']],
            'template_landmarks_50Lmks_fcsv': [['subject']]
        }
        template_DG.inputs.template = '*'
        template_DG.inputs.sort_filelist = True
        template_DG.inputs.raise_on_empty = True

        baw201.connect(template_DG, 'outAtlasXMLFullPath', inputsSpec,
                       'atlasDefinition')
        baw201.connect([(
            template_DG,
            inputsSpec,
            [
                ## Already connected ('template_t1','template_t1'),
                ('hncma_atlas', 'hncma_atlas'),
                ('template_leftHemisphere', 'template_leftHemisphere'),
                ('template_rightHemisphere', 'template_rightHemisphere'),
                ('template_WMPM2_labels', 'template_WMPM2_labels'),
                ('template_nac_labels', 'template_nac_labels'),
                ('template_ventricles', 'template_ventricles')
            ])])
        ## These landmarks are only relevant for the atlas-based-reference case
        baw201.connect([
            (template_DG, inputsSpec, [
                ('template_t1', 'template_t1'),
                ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'),
            ]),
        ])

    else:
        assert 0 == 1, "Invalid workflow type specified for singleSession"

    atlasBCDNode_S = MakeAtlasNode(atlas_static_directory,
                                   'BBCDAtlas_S{0}'.format(sessionid),
                                   ['S_BCDSupport'])
    baw201.connect([
        (atlasBCDNode_S, inputsSpec,
         [('template_weights_50Lmks_wts', 'atlasWeightFilename'),
          ('LLSModel_50Lmks_h5', 'LLSModel'),
          ('T1_50Lmks_mdl', 'inputTemplateModel')]),
    ])

    if doDenoise:
        print("\ndenoise image filter\n")
        makeDenoiseInImageList = pe.Node(Function(
            function=MakeOutFileList,
            input_names=[
                'T1List', 'T2List', 'PDList', 'FLList', 'OtherList', 'postfix',
                'PrimaryT1'
            ],
            output_names=['inImageList', 'outImageList', 'imageTypeList']),
                                         run_without_submitting=True,
                                         name="99_makeDenoiseInImageList")
        baw201.connect(inputsSpec, 'T1s', makeDenoiseInImageList, 'T1List')
        baw201.connect(inputsSpec, 'T2s', makeDenoiseInImageList, 'T2List')
        baw201.connect(inputsSpec, 'PDs', makeDenoiseInImageList, 'PDList')
        makeDenoiseInImageList.inputs.FLList = []  # an emptyList HACK
        makeDenoiseInImageList.inputs.PrimaryT1 = None  # an emptyList HACK
        makeDenoiseInImageList.inputs.postfix = "_UNM_denoised.nii.gz"
        # HACK baw201.connect( inputsSpec, 'FLList', makeDenoiseInImageList, 'FLList' )
        baw201.connect(inputsSpec, 'OTHERs', makeDenoiseInImageList,
                       'OtherList')

        print("\nDenoise:\n")
        DenoiseInputImgs = pe.MapNode(
            interface=UnbiasedNonLocalMeans(),
            name='denoiseInputImgs',
            iterfield=['inputVolume', 'outputVolume'])
        DenoiseInputImgs.inputs.rc = [1, 1, 1]
        DenoiseInputImgs.inputs.rs = [4, 4, 4]
        DenoiseInputImgs.plugin_args = {
            'qsub_args': modify_qsub_args(master_config['queue'], .2, 1, 1),
            'overwrite': True
        }
        baw201.connect([(makeDenoiseInImageList, DenoiseInputImgs,
                         [('inImageList', 'inputVolume')]),
                        (makeDenoiseInImageList, DenoiseInputImgs,
                         [('outImageList', 'outputVolume')])])
        print("\nMerge all T1 and T2 List\n")
        makePreprocessingOutList = pe.Node(Function(
            function=GenerateSeparateImageTypeList,
            input_names=['inFileList', 'inTypeList'],
            output_names=['T1s', 'T2s', 'PDs', 'FLs', 'OtherList']),
                                           run_without_submitting=True,
                                           name="99_makePreprocessingOutList")
        baw201.connect(DenoiseInputImgs, 'outputVolume',
                       makePreprocessingOutList, 'inFileList')
        baw201.connect(makeDenoiseInImageList, 'imageTypeList',
                       makePreprocessingOutList, 'inTypeList')

    else:
        makePreprocessingOutList = inputsSpec

    if 'landmark' in master_config['components']:
        DoReverseMapping = False  # Set to true for debugging outputs
        if 'auxlmk' in master_config['components']:
            DoReverseMapping = True
        myLocalLMIWF = CreateLandmarkInitializeWorkflow(
            "LandmarkInitialize", interpMode, DoReverseMapping)

        baw201.connect([
            (makePreprocessingOutList, myLocalLMIWF,
             [(('T1s', get_list_element, 0), 'inputspec.inputVolume')]),
            (inputsSpec, myLocalLMIWF,
             [('atlasLandmarkFilename', 'inputspec.atlasLandmarkFilename'),
              ('atlasWeightFilename', 'inputspec.atlasWeightFilename'),
              ('LLSModel', 'inputspec.LLSModel'),
              ('inputTemplateModel', 'inputspec.inputTemplateModel'),
              ('template_t1', 'inputspec.atlasVolume')]),
            (myLocalLMIWF, outputsSpec,
             [('outputspec.outputResampledCroppedVolume',
               'BCD_ACPC_T1_CROPPED'),
              ('outputspec.outputLandmarksInACPCAlignedSpace',
               'outputLandmarksInACPCAlignedSpace'),
              ('outputspec.outputLandmarksInInputSpace',
               'outputLandmarksInInputSpace'),
              ('outputspec.outputTransform', 'output_tx'),
              ('outputspec.atlasToSubjectTransform', 'LMIatlasToSubject_tx'),
              ('outputspec.writeBranded2DImage', 'writeBranded2DImage')])
        ])
        baw201.connect([(
            outputsSpec,
            DataSink,  # TODO: change to myLocalLMIWF -> DataSink
            [
                ('outputLandmarksInACPCAlignedSpace',
                 'ACPCAlign.@outputLandmarks_ACPC'),
                ('writeBranded2DImage', 'ACPCAlign.@writeBranded2DImage'),
                ('BCD_ACPC_T1_CROPPED', 'ACPCAlign.@BCD_ACPC_T1_CROPPED'),
                ('outputLandmarksInInputSpace',
                 'ACPCAlign.@outputLandmarks_Input'),
                ('output_tx', 'ACPCAlign.@output_tx'),
                ('LMIatlasToSubject_tx', 'ACPCAlign.@LMIatlasToSubject_tx'),
            ])])

    if 'tissue_classify' in master_config['components']:
        useRegistrationMask = master_config['use_registration_masking']

        myLocalTCWF = CreateTissueClassifyWorkflow("TissueClassify",
                                                   master_config, interpMode,
                                                   useRegistrationMask)
        baw201.connect([
            (makePreprocessingOutList, myLocalTCWF, [('T1s',
                                                      'inputspec.T1List')]),
            (makePreprocessingOutList, myLocalTCWF, [('T2s',
                                                      'inputspec.T2List')]),
            (inputsSpec, myLocalTCWF,
             [('atlasDefinition', 'inputspec.atlasDefinition'),
              ('template_t1', 'inputspec.atlasVolume'),
              (('T1s', getAllT1sLength), 'inputspec.T1_count'),
              ('PDs', 'inputspec.PDList'), ('FLs', 'inputspec.FLList'),
              ('OTHERs', 'inputspec.OtherList')]),
            (myLocalLMIWF, myLocalTCWF,
             [('outputspec.outputResampledCroppedVolume',
               'inputspec.PrimaryT1'),
              ('outputspec.atlasToSubjectTransform',
               'inputspec.atlasToSubjectInitialTransform')]),
            (myLocalTCWF, outputsSpec,
             [('outputspec.t1_average', 't1_average'),
              ('outputspec.t2_average', 't2_average'),
              ('outputspec.pd_average', 'pd_average'),
              ('outputspec.fl_average', 'fl_average'),
              ('outputspec.posteriorImages', 'posteriorImages'),
              ('outputspec.outputLabels', 'outputLabels'),
              ('outputspec.outputHeadLabels', 'outputHeadLabels'),
              ('outputspec.atlasToSubjectTransform',
               'atlasToSubjectTransform'),
              ('outputspec.atlasToSubjectInverseTransform',
               'atlasToSubjectInverseTransform'),
              ('outputspec.atlasToSubjectRegistrationState',
               'atlasToSubjectRegistrationState')]),
        ])

        baw201.connect([(
            outputsSpec,
            DataSink,  # TODO: change to myLocalTCWF -> DataSink
            [(('t1_average', convertToList), 'TissueClassify.@t1'),
             (('t2_average', convertToList), 'TissueClassify.@t2'),
             (('pd_average', convertToList), 'TissueClassify.@pd'),
             (('fl_average', convertToList), 'TissueClassify.@fl')])])

        currentFixWMPartitioningName = "_".join(
            ['FixWMPartitioning',
             str(subjectid),
             str(sessionid)])
        FixWMNode = pe.Node(interface=Function(
            function=FixWMPartitioning,
            input_names=['brainMask', 'PosteriorsList'],
            output_names=[
                'UpdatedPosteriorsList', 'MatchingFGCodeList',
                'MatchingLabelList', 'nonAirRegionMask'
            ]),
                            name=currentFixWMPartitioningName)

        baw201.connect([
            (myLocalTCWF, FixWMNode, [('outputspec.outputLabels', 'brainMask'),
                                      (('outputspec.posteriorImages',
                                        flattenDict), 'PosteriorsList')]),
            (FixWMNode, outputsSpec, [('UpdatedPosteriorsList',
                                       'UpdatedPosteriorsList')]),
        ])

        currentBRAINSCreateLabelMapName = 'BRAINSCreateLabelMapFromProbabilityMaps_' + str(
            subjectid) + "_" + str(sessionid)
        BRAINSCreateLabelMapNode = pe.Node(
            interface=BRAINSCreateLabelMapFromProbabilityMaps(),
            name=currentBRAINSCreateLabelMapName)

        ## TODO:  Fix the file names
        BRAINSCreateLabelMapNode.inputs.dirtyLabelVolume = 'fixed_headlabels_seg.nii.gz'
        BRAINSCreateLabelMapNode.inputs.cleanLabelVolume = 'fixed_brainlabels_seg.nii.gz'

        baw201.connect([
            (FixWMNode, BRAINSCreateLabelMapNode,
             [('UpdatedPosteriorsList', 'inputProbabilityVolume'),
              ('MatchingFGCodeList', 'foregroundPriors'),
              ('MatchingLabelList', 'priorLabelCodes'),
              ('nonAirRegionMask', 'nonAirRegionMask')]),
            (
                BRAINSCreateLabelMapNode,
                DataSink,
                [  # brainstem code below replaces this ('cleanLabelVolume', 'TissueClassify.@outputLabels'),
                    ('dirtyLabelVolume', 'TissueClassify.@outputHeadLabels')
                ]),
            (myLocalTCWF, DataSink,
             [('outputspec.atlasToSubjectTransform',
               'TissueClassify.@atlas2session_tx'),
              ('outputspec.atlasToSubjectInverseTransform',
               'TissueClassify.@atlas2sessionInverse_tx')]),
            (FixWMNode, DataSink, [('UpdatedPosteriorsList',
                                    'TissueClassify.@posteriors')]),
        ])

        currentAccumulateLikeTissuePosteriorsName = 'AccumulateLikeTissuePosteriors_' + str(
            subjectid) + "_" + str(sessionid)
        AccumulateLikeTissuePosteriorsNode = pe.Node(
            interface=Function(
                function=AccumulateLikeTissuePosteriors,
                input_names=['posteriorImages'],
                output_names=['AccumulatePriorsList',
                              'AccumulatePriorsNames']),
            name=currentAccumulateLikeTissuePosteriorsName)

        baw201.connect([
            (FixWMNode, AccumulateLikeTissuePosteriorsNode,
             [('UpdatedPosteriorsList', 'posteriorImages')]),
            (AccumulateLikeTissuePosteriorsNode, DataSink,
             [('AccumulatePriorsList',
               'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir'
               )])
        ])
        """
        brain stem adds on feature
        inputs:
            - landmark (fcsv) file
            - fixed brainlabels seg.nii.gz
        output:
            - complete_brainlabels_seg.nii.gz Segmentation
        """
        myLocalBrainStemWF = CreateBrainstemWorkflow(
            "BrainStem", master_config['queue'],
            "complete_brainlabels_seg.nii.gz")

        baw201.connect([(myLocalLMIWF, myLocalBrainStemWF,
                         [('outputspec.outputLandmarksInACPCAlignedSpace',
                           'inputspec.inputLandmarkFilename')]),
                        (BRAINSCreateLabelMapNode, myLocalBrainStemWF,
                         [('cleanLabelVolume',
                           'inputspec.inputTissueLabelFilename')])])

        baw201.connect(myLocalBrainStemWF,
                       'outputspec.ouputTissuelLabelFilename', DataSink,
                       'TissueClassify.@complete_brainlabels_seg')

    ###########################
    do_BRAINSCut_Segmentation = DetermineIfSegmentationShouldBeDone(
        master_config)
    if do_BRAINSCut_Segmentation:
        from workflows.segmentation import segmentation
        from workflows.WorkupT1T2BRAINSCut import GenerateWFName

        sname = 'segmentation'
        segWF = segmentation(projectid,
                             subjectid,
                             sessionid,
                             master_config,
                             onlyT1,
                             pipeline_name=sname)

        baw201.connect([(inputsSpec, segWF, [('template_t1',
                                              'inputspec.template_t1')])])
        atlasBCUTNode_W = pe.Node(interface=nio.DataGrabber(
            infields=['subject'],
            outfields=[
                "l_accumben_ProbabilityMap", "r_accumben_ProbabilityMap",
                "l_caudate_ProbabilityMap", "r_caudate_ProbabilityMap",
                "l_globus_ProbabilityMap", "r_globus_ProbabilityMap",
                "l_hippocampus_ProbabilityMap", "r_hippocampus_ProbabilityMap",
                "l_putamen_ProbabilityMap", "r_putamen_ProbabilityMap",
                "l_thalamus_ProbabilityMap", "r_thalamus_ProbabilityMap",
                "phi", "rho", "theta"
            ]),
                                  name='PerSubject_atlasBCUTNode_W')
        atlasBCUTNode_W.inputs.base_directory = master_config['previousresult']
        atlasBCUTNode_W.inputs.subject = subjectid
        atlasBCUTNode_W.inputs.field_template = {
            'l_accumben_ProbabilityMap':
            '%s/Atlas/AVG_l_accumben_ProbabilityMap.nii.gz',
            'r_accumben_ProbabilityMap':
            '%s/Atlas/AVG_r_accumben_ProbabilityMap.nii.gz',
            'l_caudate_ProbabilityMap':
            '%s/Atlas/AVG_l_caudate_ProbabilityMap.nii.gz',
            'r_caudate_ProbabilityMap':
            '%s/Atlas/AVG_r_caudate_ProbabilityMap.nii.gz',
            'l_globus_ProbabilityMap':
            '%s/Atlas/AVG_l_globus_ProbabilityMap.nii.gz',
            'r_globus_ProbabilityMap':
            '%s/Atlas/AVG_r_globus_ProbabilityMap.nii.gz',
            'l_hippocampus_ProbabilityMap':
            '%s/Atlas/AVG_l_hippocampus_ProbabilityMap.nii.gz',
            'r_hippocampus_ProbabilityMap':
            '%s/Atlas/AVG_r_hippocampus_ProbabilityMap.nii.gz',
            'l_putamen_ProbabilityMap':
            '%s/Atlas/AVG_l_putamen_ProbabilityMap.nii.gz',
            'r_putamen_ProbabilityMap':
            '%s/Atlas/AVG_r_putamen_ProbabilityMap.nii.gz',
            'l_thalamus_ProbabilityMap':
            '%s/Atlas/AVG_l_thalamus_ProbabilityMap.nii.gz',
            'r_thalamus_ProbabilityMap':
            '%s/Atlas/AVG_r_thalamus_ProbabilityMap.nii.gz',
            'phi': '%s/Atlas/AVG_phi.nii.gz',
            'rho': '%s/Atlas/AVG_rho.nii.gz',
            'theta': '%s/Atlas/AVG_theta.nii.gz'
        }
        atlasBCUTNode_W.inputs.template_args = {
            'l_accumben_ProbabilityMap': [['subject']],
            'r_accumben_ProbabilityMap': [['subject']],
            'l_caudate_ProbabilityMap': [['subject']],
            'r_caudate_ProbabilityMap': [['subject']],
            'l_globus_ProbabilityMap': [['subject']],
            'r_globus_ProbabilityMap': [['subject']],
            'l_hippocampus_ProbabilityMap': [['subject']],
            'r_hippocampus_ProbabilityMap': [['subject']],
            'l_putamen_ProbabilityMap': [['subject']],
            'r_putamen_ProbabilityMap': [['subject']],
            'l_thalamus_ProbabilityMap': [['subject']],
            'r_thalamus_ProbabilityMap': [['subject']],
            'phi': [['subject']],
            'rho': [['subject']],
            'theta': [['subject']]
        }
        atlasBCUTNode_W.inputs.template = '*'
        atlasBCUTNode_W.inputs.sort_filelist = True
        atlasBCUTNode_W.inputs.raise_on_empty = True
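
        # Note (illustration only, not in the original code): with the settings above,
        # the DataGrabber expands each outfield as
        #     base_directory / (field_template[outfield] % template_args[outfield]),
        # so the 'phi' outfield, for example, is expected to resolve to
        #     master_config['previousresult'] + '/<subjectid>/Atlas/AVG_phi.nii.gz',
        # and raise_on_empty=True makes the node fail if any of these globs is empty.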

        baw201.connect([(atlasBCUTNode_W, segWF, [
            ('rho', 'inputspec.rho'), ('phi', 'inputspec.phi'),
            ('theta', 'inputspec.theta'),
            ('l_caudate_ProbabilityMap', 'inputspec.l_caudate_ProbabilityMap'),
            ('r_caudate_ProbabilityMap', 'inputspec.r_caudate_ProbabilityMap'),
            ('l_hippocampus_ProbabilityMap',
             'inputspec.l_hippocampus_ProbabilityMap'),
            ('r_hippocampus_ProbabilityMap',
             'inputspec.r_hippocampus_ProbabilityMap'),
            ('l_putamen_ProbabilityMap', 'inputspec.l_putamen_ProbabilityMap'),
            ('r_putamen_ProbabilityMap', 'inputspec.r_putamen_ProbabilityMap'),
            ('l_thalamus_ProbabilityMap',
             'inputspec.l_thalamus_ProbabilityMap'),
            ('r_thalamus_ProbabilityMap',
             'inputspec.r_thalamus_ProbabilityMap'),
            ('l_accumben_ProbabilityMap',
             'inputspec.l_accumben_ProbabilityMap'),
            ('r_accumben_ProbabilityMap',
             'inputspec.r_accumben_ProbabilityMap'),
            ('l_globus_ProbabilityMap', 'inputspec.l_globus_ProbabilityMap'),
            ('r_globus_ProbabilityMap', 'inputspec.r_globus_ProbabilityMap')
        ])])

        atlasBCUTNode_S = MakeAtlasNode(atlas_static_directory,
                                        'BBCUTAtlas_S{0}'.format(sessionid),
                                        ['S_BRAINSCutSupport'])
        baw201.connect(atlasBCUTNode_S, 'trainModelFile_txtD0060NT0060_gz',
                       segWF, 'inputspec.trainModelFile_txtD0060NT0060_gz')

        ## baw201_outputspec = baw201.get_node('outputspec')
        baw201.connect([
            (myLocalTCWF, segWF,
             [('outputspec.t1_average', 'inputspec.t1_average'),
              ('outputspec.atlasToSubjectRegistrationState',
               'inputspec.atlasToSubjectRegistrationState'),
              ('outputspec.outputLabels', 'inputspec.inputLabels'),
              ('outputspec.posteriorImages', 'inputspec.posteriorImages'),
              ('outputspec.outputHeadLabels', 'inputspec.inputHeadLabels')]),
            (myLocalLMIWF, segWF, [('outputspec.atlasToSubjectTransform',
                                    'inputspec.LMIatlasToSubject_tx')]),
            (FixWMNode, segWF, [('UpdatedPosteriorsList',
                                 'inputspec.UpdatedPosteriorsList')]),
        ])
        if not onlyT1:
            baw201.connect([(myLocalTCWF, segWF, [('outputspec.t2_average',
                                                   'inputspec.t2_average')])])

    if 'warp_atlas_to_subject' in master_config['components']:
        ##
        ##~/src/NEP-build/bin/BRAINSResample
        # --warpTransform AtlasToSubjectPreBABC_Composite.h5
        #  --inputVolume  /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/Atlas/hncma-atlas.nii.gz
        #  --referenceVolume  /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/singleSession_KID1_KT1/LandmarkInitialize/BROIAuto_cropped/Cropped_BCD_ACPC_Aligned.nii.gz
        # !--outputVolume hncma.nii.gz
        # !--interpolationMode NearestNeighbor
        # !--pixelType short
        ##
        ##

        ## TODO : SHOULD USE BRAINSCut transform that was refined even further!
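
        ## Sketch for orientation only (mirroring the command line above): each
        ## BRAINSResample node built in the loops below is configured roughly as
        ##     rs = BRAINSResample()
        ##     rs.inputs.inputVolume       = <atlas image>
        ##     rs.inputs.referenceVolume   = <session t1_average>
        ##     rs.inputs.warpTransform     = <atlasToSubjectTransform>
        ##     rs.inputs.outputVolume      = '<atlas image>.nii.gz'
        ##     rs.inputs.interpolationMode = 'NearestNeighbor'  # label maps; 'Linear' for continuous maps
        ##     rs.inputs.pixelType         = 'short'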

        BResample = dict()
        AtlasLabelMapsToResample = [
            'hncma_atlas',
            'template_WMPM2_labels',
            'template_nac_labels',
        ]

        for atlasImage in AtlasLabelMapsToResample:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(),
                                            name="BRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {
                'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                'overwrite': True
            }
            BResample[atlasImage].inputs.pixelType = 'short'
            BResample[atlasImage].inputs.interpolationMode = 'NearestNeighbor'
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average',
                           BResample[atlasImage], 'referenceVolume')
            baw201.connect(inputsSpec, atlasImage, BResample[atlasImage],
                           'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink,
                           'WarpedAtlas2Subject.@' + atlasImage)

        AtlasBinaryMapsToResample = [
            'template_rightHemisphere', 'template_leftHemisphere',
            'template_ventricles'
        ]

        for atlasImage in AtlasBinaryMapsToResample:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(),
                                            name="BRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {
                'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                'overwrite': True
            }
            BResample[atlasImage].inputs.pixelType = 'binary'
            ## Conversion to distance map, so use linear to resample distance map
            BResample[atlasImage].inputs.interpolationMode = 'Linear'
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average',
                           BResample[atlasImage], 'referenceVolume')
            baw201.connect(inputsSpec, atlasImage, BResample[atlasImage],
                           'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink,
                           'WarpedAtlas2Subject.@' + atlasImage)

        BRAINSCutAtlasImages = [
            'rho', 'phi', 'theta', 'l_caudate_ProbabilityMap',
            'r_caudate_ProbabilityMap', 'l_hippocampus_ProbabilityMap',
            'r_hippocampus_ProbabilityMap', 'l_putamen_ProbabilityMap',
            'r_putamen_ProbabilityMap', 'l_thalamus_ProbabilityMap',
            'r_thalamus_ProbabilityMap', 'l_accumben_ProbabilityMap',
            'r_accumben_ProbabilityMap', 'l_globus_ProbabilityMap',
            'r_globus_ProbabilityMap'
        ]
        for atlasImage in BRAINSCutAtlasImages:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(),
                                            name="BCUTBRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {
                'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                'overwrite': True
            }
            BResample[atlasImage].inputs.pixelType = 'float'
            ## Conversion to distance map, so use linear to resample distance map
            BResample[atlasImage].inputs.interpolationMode = 'Linear'
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average',
                           BResample[atlasImage], 'referenceVolume')
            baw201.connect(atlasBCUTNode_W, atlasImage, BResample[atlasImage],
                           'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink,
                           'WarpedAtlas2Subject.@' + atlasImage)

        WhiteMatterHemisphereNode = pe.Node(
            interface=Function(
                function=CreateLeftRightWMHemispheres,
                input_names=[
                    'BRAINLABELSFile', 'HDCMARegisteredVentricleMaskFN',
                    'LeftHemisphereMaskName', 'RightHemisphereMaskName',
                    'WM_LeftHemisphereFileName', 'WM_RightHemisphereFileName'
                ],
                output_names=[
                    'WM_LeftHemisphereFileName', 'WM_RightHemisphereFileName'
                ]),
            name="WhiteMatterHemisphere")
        WhiteMatterHemisphereNode.inputs.WM_LeftHemisphereFileName = "left_hemisphere_wm.nii.gz"
        WhiteMatterHemisphereNode.inputs.WM_RightHemisphereFileName = "right_hemisphere_wm.nii.gz"

        baw201.connect(myLocalBrainStemWF,
                       'outputspec.ouputTissuelLabelFilename',
                       WhiteMatterHemisphereNode, 'BRAINLABELSFile')
        baw201.connect(BResample['hncma_atlas'], 'outputVolume',
                       WhiteMatterHemisphereNode,
                       'HDCMARegisteredVentricleMaskFN')
        baw201.connect(BResample['template_leftHemisphere'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'LeftHemisphereMaskName')
        baw201.connect(BResample['template_rightHemisphere'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'RightHemisphereMaskName')

        baw201.connect(WhiteMatterHemisphereNode, 'WM_LeftHemisphereFileName',
                       DataSink, 'WarpedAtlas2Subject.@LeftHemisphereWM')
        baw201.connect(WhiteMatterHemisphereNode, 'WM_RightHemisphereFileName',
                       DataSink, 'WarpedAtlas2Subject.@RightHemisphereWM')

    if 'malf_2012_neuro' in master_config['components']:  ## HACK Do MALF labeling
        good_subjects = [
            '1001', '1004', '1005', '1011', '1012', '1018', '1019', '1102',
            '1103', '1104', '1120', '1129', '1009', '1010', '1013', '1014',
            '1036', '1109', '1117', '1122'
        ]

        ## HACK FOR NOW SHOULD BE MORE ELEGANT FROM THE .config file
        BASE_DATA_GRABBER_DIR = '/Shared/johnsonhj/HDNI/Neuromorphometrics/20141116_Neuromorphometrics_base_Results/Neuromorphometrics/2012Subscription'

        myLocalMALF = CreateMALFWorkflow("MALF", master_config, good_subjects,
                                         BASE_DATA_GRABBER_DIR)
        baw201.connect(myLocalTCWF, 'outputspec.t1_average', myLocalMALF,
                       'inputspec.subj_t1_image')
        baw201.connect(myLocalLMIWF,
                       'outputspec.outputLandmarksInACPCAlignedSpace',
                       myLocalMALF, 'inputspec.subj_lmks')
        baw201.connect(atlasBCDNode_S, 'template_weights_50Lmks_wts',
                       myLocalMALF, 'inputspec.atlasWeightFilename')
        baw201.connect(myLocalMALF, 'outputspec.MALF_neuro2012_labelmap',
                       DataSink, 'TissueClassify.@MALF_neuro2012_labelmap')

    return baw201
예제 #29
0
Use :class:`nipype.interfaces.io.DataSink` to store selected outputs
from the pipeline in a specific location. This lets the user pick out
the important outputs of the analysis and keep them.

The first step is to create a datasink node and then to connect
outputs from the modules above to storage locations. Destinations take
the form directory_name[.[@]subdir], where the parts in brackets are
optional. For example, 'realign.@mean' below creates a directory called
realign in 'l1output/subject_id/' and stores the mean image output from
the Realign process directly in that realign directory. If the @ is
left out, a sub-directory named 'mean' is created inside realign and
the mean image is copied there.
"""

datasink = pe.Node(interface=nio.DataSink(), name="datasink")
datasink.inputs.base_directory = os.path.abspath('spm_tutorial2/l1output')
report = pe.Node(interface=nio.DataSink(), name='report')
report.inputs.base_directory = os.path.abspath('spm_tutorial2/report')
report.inputs.parameterization = False


def getstripdir(subject_id):
    import os
    return os.path.join(os.path.abspath('spm_tutorial2/workingdir'),
                        '_subject_id_%s' % subject_id)


# store relevant outputs from various stages of the 1st level analysis
level1.connect([
    (infosource, datasink, [('subject_id', 'container'),
예제 #30
0
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline.
        """

        import clinica.pipelines.machine_learning_spatial_svm.spatial_svm_utils as utils
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        import nipype.interfaces.io as nio

        fisher_tensor_generation = npe.Node(name="obtain_g_fisher_tensor",
                                            interface=nutil.Function(input_names=['dartel_input', 'FWHM'],
                                                                     output_names=['fisher_tensor', 'fisher_tensor_path'],
                                                                     function=utils.obtain_g_fisher_tensor))
        fisher_tensor_generation.inputs.FWHM = self.parameters['fwhm']

        time_step_generation = npe.Node(name='estimation_time_step',
                                        interface=nutil.Function(input_names=['dartel_input', 'FWHM', 'g'],
                                                                 output_names=['t_step', 'json_file'],
                                                                 function=utils.obtain_time_step_estimation))
        time_step_generation.inputs.FWHM = self.parameters['fwhm']

        heat_solver_equation = npe.MapNode(name='heat_solver_equation',
                                           interface=nutil.Function(input_names=['input_image', 'g',
                                                                                 'FWHM', 't_step', 'dartel_input'],
                                                                    output_names=['regularized_image'],
                                                                    function=utils.heat_solver_equation),
                                           iterfield=['input_image'])
        heat_solver_equation.inputs.FWHM = self.parameters['fwhm']

        datasink = npe.Node(nio.DataSink(),
                            name='sinker')
        datasink.inputs.base_directory = self.caps_directory
        datasink.inputs.parameterization = True
        if self.parameters['image_type'] == 't1':
            datasink.inputs.regexp_substitutions = [
                (r'(.*)/regularized_image/.*/(.*(sub-(.*)_ses-(.*))_T1w(.*)_probability(.*))$',
                 r'\1/subjects/sub-\4/ses-\5/machine_learning/input_spatial_svm/group-' + self.parameters[
                     'group_id'] + r'/\3_T1w\6_spatialregularization\7'),

                (r'(.*)json_file/(output_data.json)$',
                 r'\1/groups/group-' + self.parameters['group_id'] + r'/machine_learning/input_spatial_svm/group-' + self.parameters[
                     'group_id'] + r'_space-Ixi549Space_parameters.json'),

                (r'(.*)fisher_tensor_path/(output_fisher_tensor.npy)$',
                 r'\1/groups/group-' + self.parameters['group_id'] + r'/machine_learning/input_spatial_svm/group-' + self.parameters[
                     'group_id'] + r'_space-Ixi549Space_gram.npy')
            ]

        elif self.parameters['image_type'] == 'pet':
            datasink.inputs.regexp_substitutions = [
                (r'(.*)/regularized_image/.*/(.*(sub-(.*)_ses-(.*))_(task.*)_pet(.*))$',
                 r'\1/subjects/sub-\4/ses-\5/machine_learning/input_spatial_svm/group-' + self.parameters[
                     'group_id'] + r'/\3_\6_spatialregularization\7'),
                (r'(.*)json_file/(output_data.json)$',
                 r'\1/groups/group-' + self.parameters['group_id'] + r'/machine_learning/input_spatial_svm/group-' +
                 self.parameters['group_id'] + r'_space-Ixi549Space_parameters.json'),
                (r'(.*)fisher_tensor_path/(output_fisher_tensor.npy)$',
                 r'\1/groups/group-' + self.parameters['group_id'] + r'/machine_learning/input_spatial_svm/group-' +
                 self.parameters[
                     'group_id'] + r'_space-Ixi549Space_gram.npy')
            ]
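
        # Illustration only (assumed path shape, not generated by this code): DataSink
        # applies these regexp_substitutions to every path it writes, so a raw sink
        # path such as
        #     <caps>/regularized_image/<node_dir>/sub-01_ses-M00_T1w..._probability.nii.gz
        # is relocated inside the CAPS hierarchy as
        #     <caps>/subjects/sub-01/ses-M00/machine_learning/input_spatial_svm/
        #         group-<group_id>/sub-01_ses-M00_T1w..._spatialregularization.nii.gz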
        # Connection
        # ==========
        self.connect([
            (self.input_node, fisher_tensor_generation, [('dartel_input', 'dartel_input')]),
            (fisher_tensor_generation, time_step_generation, [('fisher_tensor', 'g')]),
            (self.input_node, time_step_generation, [('dartel_input', 'dartel_input')]),
            (self.input_node, heat_solver_equation, [('input_image', 'input_image')]),
            (fisher_tensor_generation, heat_solver_equation, [('fisher_tensor', 'g')]),
            (time_step_generation, heat_solver_equation, [('t_step', 't_step')]),
            (self.input_node, heat_solver_equation, [('dartel_input', 'dartel_input')]),
            (fisher_tensor_generation, datasink, [('fisher_tensor_path', 'fisher_tensor_path')]),
            (time_step_generation, datasink, [('json_file', 'json_file')]),
            (heat_solver_equation, datasink, [('regularized_image', 'regularized_image')])
        ])