Code example #1
 def __init__(self, in_file='path', format_string='', **options):
     from nipype.interfaces.utility import Rename
     rename = Rename()
     rename.inputs.in_file = in_file
     rename.inputs.format_string = format_string
     for key, value in options.items():
         setattr(rename.inputs, key, value)
     self.res = rename.run()
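A minimal standalone sketch of what this wrapper does, for reference. The input file name below is a placeholder; the run only succeeds if such a file actually exists in the working directory.

from nipype.interfaces.utility import Rename

rename = Rename()
rename.inputs.in_file = "sub-01_bold.nii.gz"  # hypothetical input file
rename.inputs.format_string = "rest_run-01"
rename.inputs.keep_ext = True                 # preserves ".nii.gz"
res = rename.run()                            # copies the file to the new name
print(res.outputs.out_file)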
Code example #2
File: pet_linear_utils.py  Project: ghisvail/clinica
def rename_into_caps(
    in_bids_pet,
    fname_pet,
    fname_trans,
    suvr_reference_region,
    uncropped_image,
    fname_pet_in_t1w=None,
):
    """
    Rename the outputs of the pipelines into CAPS format.
    Args:
        in_bids_pet (str): Input BIDS PET to extract the <source_file>
        fname_pet (str): Preprocessed PET file.
        fname_trans (str): Transformation file from PET to MRI space.
        suvr_reference_region (str): SUVR mask name used in the output file name.
        uncropped_image (bool): Pipeline argument for image cropping.
        fname_pet_in_t1w (str): Intermediate PET file in T1w MRI space (optional).
    Returns:
        The different outputs in CAPS format
    """

    from nipype.interfaces.utility import Rename
    from nipype.utils.filemanip import split_filename

    _, source_file_pet, _ = split_filename(in_bids_pet)

    # Rename into CAPS PET:
    rename_pet = Rename()
    rename_pet.inputs.in_file = fname_pet
    if not uncropped_image:
        suffix = f"_space-MNI152NLin2009cSym_desc-Crop_res-1x1x1_suvr-{suvr_reference_region}_pet.nii.gz"
        rename_pet.inputs.format_string = source_file_pet + suffix
    else:
        suffix = f"_space-MNI152NLin2009cSym_res-1x1x1_suvr-{suvr_reference_region}_pet.nii.gz"
        rename_pet.inputs.format_string = source_file_pet + suffix
    out_caps_pet = rename_pet.run().outputs.out_file

    # Rename into CAPS transformation file:
    rename_trans = Rename()
    rename_trans.inputs.in_file = fname_trans
    rename_trans.inputs.format_string = source_file_pet + "_space-T1w_rigid.mat"
    out_caps_trans = rename_trans.run().outputs.out_file

    # Rename intermediate PET in T1w MRI space
    if fname_pet_in_t1w is not None:
        rename_pet_in_t1w = Rename()
        rename_pet_in_t1w.inputs.in_file = fname_pet_in_t1w
        rename_pet_in_t1w.inputs.format_string = (source_file_pet +
                                                  "_space-T1w_pet.nii.gz")
        out_caps_pet_in_t1w = rename_pet_in_t1w.run().outputs.out_file
    else:
        out_caps_pet_in_t1w = None

    return out_caps_pet, out_caps_trans, out_caps_pet_in_t1w
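A hedged example of how this function might be called; every file name and the reference region below are placeholders, not real clinica data.

out_pet, out_trans, out_pet_in_t1w = rename_into_caps(
    in_bids_pet="sub-CLNC01_ses-M00_pet.nii.gz",  # hypothetical BIDS PET
    fname_pet="pet_preprocessed.nii.gz",          # hypothetical
    fname_trans="pet_to_t1w_rigid.mat",           # hypothetical
    suvr_reference_region="pons",
    uncropped_image=False,
)
# out_pet then ends in
# "..._space-MNI152NLin2009cSym_desc-Crop_res-1x1x1_suvr-pons_pet.nii.gz"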
Code example #3
def test_Rename_outputs():
    output_map = dict(out_file=dict(), )
    outputs = Rename.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Code example #4
File: test_auto_Rename.py  Project: adamatus/nipype
def test_Rename_outputs():
    output_map = dict(out_file=dict())
    outputs = Rename.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Code example #5
File: test_auto_Rename.py  Project: adamatus/nipype
def test_Rename_inputs():
    input_map = dict(
        use_fullpath=dict(usedefault=True),
        format_string=dict(mandatory=True),
        keep_ext=dict(),
        in_file=dict(mandatory=True),
        parse_string=dict(),
    )
    inputs = Rename.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Code example #6
def test_Rename_inputs():
    input_map = dict(
        format_string=dict(mandatory=True, ),
        in_file=dict(mandatory=True, ),
        keep_ext=dict(),
        parse_string=dict(),
        use_fullpath=dict(usedefault=True, ),
    )
    inputs = Rename.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
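The auto-generated tests in examples #3 through #6 use nose's generator style (yield assert_equal, ...), which modern pytest no longer collects. A roughly equivalent pytest parametrization, offered as a sketch rather than nipype's actual test code, could look like this:

import pytest
from nipype.interfaces.utility import Rename

@pytest.mark.parametrize("key, metakey, value", [
    ("format_string", "mandatory", True),
    ("in_file", "mandatory", True),
    ("use_fullpath", "usedefault", True),
])
def test_rename_input_traits(key, metakey, value):
    # Same check as above: compare trait metadata on the input spec.
    inputs = Rename.input_spec()
    assert getattr(inputs.traits()[key], metakey) == value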
Code example #7
File: test_node_neuro.py  Project: TimVanMourik/pydra
def test_neuro(change_dir, plugin):

    # wf = Workflow(name, mem_gb_node=DEFAULT_MEMORY_MIN_GB,
    #               inputs=['source_file', 't1_preproc', 'subject_id',
    #                       'subjects_dir', 't1_2_fsnative_forward_transform',
    #                       'mem_gb', 'output_spaces', 'medial_surface_nan'],
    #               outputs='surfaces')
    #
    # dj: why do I need outputs?

    wf = Workflow(
        name=Name,
        inputs=Inputs,
        workingdir="test_neuro_{}".format(plugin),
        write_state=False,
        wf_output_names=[
            ("sampler", "out_file", "sampler_out"),
            ("targets", "out", "target_out"),
        ],
    )

    # @interface
    # def select_target(subject_id, space):
    #     """ Given a source subject ID and a target space, get the target subject ID """
    #     return subject_id if space == 'fsnative' else space

    # wf.add('targets', select_target(subject_id=wf.inputs.subject_id))
    #   .split('space', space=[space for space in wf.inputs.output_spaces
    #                        if space.startswith('fs')])

    # dj: don't have option in split to connect with wf input

    wf.add(
        runnable=select_target,
        name="targets",
        subject_id="subject_id",
        input_names=["subject_id", "space"],
        output_names=["out"],
        write_state=False,
    ).split_node(
        splitter="space",
        inputs={
            "space": [
                space for space in Inputs["output_spaces"]
                if space.startswith("fs")
            ]
        },
    )

    # wf.add('rename_src', Rename(format_string='%(subject)s',
    #                             keep_ext=True,
    #                             in_file=wf.inputs.source_file))
    #   .split('subject')

    wf.add(
        name="rename_src",
        runnable=Rename(format_string="%(subject)s", keep_ext=True),
        in_file="source_file",
        output_names=["out_file"],
        write_state=False,
    ).split_node(
        "subject",
        inputs={
            "subject": [
                space for space in Inputs["output_spaces"]
                if space.startswith("fs")
            ]
        },
    )  # TODO: now it's only one subject

    # wf.add('resampling_xfm',
    #        fs.utils.LTAConvert(in_lta='identity.nofile',
    #                            out_lta=True,
    #                            source_file=wf.inputs.source_file,
    #                            target_file=wf.inputs.t1_preproc)
    #   .add('set_xfm_source', ConcatenateLTA(out_type='RAS2RAS',
    #                                         in_lta2=wf.inputs.t1_2_fsnative_forward_transform,
    #                                         in_lta1=wf.resampling_xfm.out_lta))

    wf.add(
        name="resampling_xfm",
        runnable=fs.utils.LTAConvert(in_lta="identity.nofile", out_lta=True),
        source_file="source_file",
        target_file="t1_preproc",
        output_names=["out_lta"],
        write_state=False,
    ).add(
        name="set_xfm_source",
        runnable=ConcatenateLTA(out_type="RAS2RAS"),
        in_lta2="t1_2_fsnative_forward_transform",
        in_lta1="resampling_xfm.out_lta",
        output_names=["out_file"],
        write_state=False,
    )

    # wf.add('sampler',
    #        fs.SampleToSurface(sampling_method='average', sampling_range=(0, 1, 0.2),
    #                           sampling_units='frac', interp_method='trilinear',
    #                           cortex_mask=True, override_reg_subj=True,
    #                           out_type='gii',
    #                           subjects_dir=wf.inputs.subjects_dir,
    #                           subject_id=wf.inputs.subject_id,
    #                           reg_file=wf.set_xfm_source.out_file,
    #                           target_subject=wf.targets.out,
    #                           source_file=wf.rename_src.out_file),
    #         mem_gb=mem_gb * 3)
    #        .split([('source_file', 'target_subject'), 'hemi'], hemi=['lh', 'rh'])

    wf.add(
        name="sampler",
        runnable=fs.SampleToSurface(
            sampling_method="average",
            sampling_range=(0, 1, 0.2),
            sampling_units="frac",
            interp_method="trilinear",
            cortex_mask=True,
            override_reg_subj=True,
            out_type="gii",
        ),
        write_state=False,
        subjects_dir="subjects_dir",
        subject_id="subject_id",
        reg_file="set_xfm_source.out_file",
        target_subject="targets.out",
        source_file="rename_src.out_file",
        output_names=["out_file"],
    ).split_node(splitter=[("_targets", "_rename_src"), "hemi"],
                 inputs={"hemi": ["lh", "rh"]})

    # dj: adding combiner to the last node
    wf.combine_node(combiner="hemi")

    sub = Submitter(plugin=plugin, runnable=wf)
    sub.run()
    sub.close()

    assert "target_out" in wf.output.keys()
    assert len(list(wf.output["target_out"].keys())) == 2
    assert "targets.space" in list(wf.output["target_out"].keys())[0]

    assert "sampler_out" in wf.output.keys()
    # length is 2 because of the combiner
    assert len(list(wf.output["sampler_out"].keys())) == 2
    assert "rename_src.subject" in list(wf.output["sampler_out"].keys())[0]
    assert "targets.space" in list(wf.output["sampler_out"].keys())[0]
    # hemi is eliminated from the state inputs after combiner
    assert "sampler.hemi" not in list(wf.output["sampler_out"].keys())[0]
Code example #8
def rename_into_caps(in_bids_dwi, fname_dwi, fname_bval, fname_bvec,
                     fname_brainmask):
    """
    Rename the outputs of the pipelines into CAPS format, namely:
    <source_file>_space-b0_preproc[.nii.gz|.bval|.bvec]

    Args:
        in_bids_dwi (str): Input BIDS DWI to extract the <source_file>
        fname_dwi (str): Preprocessed DWI.
        fname_bval (str): Preprocessed bval.
        fname_bvec (str): Preprocessed bvec.
        fname_brainmask (str): B0 mask.

    Returns:
        The different outputs in CAPS format
    """
    import os

    from nipype.interfaces.utility import Rename
    from nipype.utils.filemanip import split_filename

    # Extract <source_file> in format sub-CLNC01_ses-M00_[acq-label]_dwi
    _, source_file_dwi, _ = split_filename(in_bids_dwi)

    # Extract base path from fname:
    base_dir_dwi, _, _ = split_filename(fname_dwi)
    base_dir_bval, _, _ = split_filename(fname_bval)
    base_dir_bvec, _, _ = split_filename(fname_bvec)
    base_dir_brainmask, _, _ = split_filename(fname_brainmask)

    # Rename into CAPS DWI :
    rename_dwi = Rename()
    rename_dwi.inputs.in_file = fname_dwi
    rename_dwi.inputs.format_string = os.path.join(
        base_dir_dwi, source_file_dwi + "_space-b0_preproc.nii.gz")
    out_caps_dwi = rename_dwi.run()

    # Rename into CAPS bval :
    rename_bval = Rename()
    rename_bval.inputs.in_file = fname_bval
    rename_bval.inputs.format_string = os.path.join(
        base_dir_bval, source_file_dwi + "_space-b0_preproc.bval")
    out_caps_bval = rename_bval.run()

    # Rename into CAPS bvec :
    rename_bvec = Rename()
    rename_bvec.inputs.in_file = fname_bvec
    rename_bvec.inputs.format_string = os.path.join(
        base_dir_bvec, source_file_dwi + "_space-b0_preproc.bvec")
    out_caps_bvec = rename_bvec.run()

    # Rename into CAPS brainmask :
    rename_brainmask = Rename()
    rename_brainmask.inputs.in_file = fname_brainmask
    rename_brainmask.inputs.format_string = os.path.join(
        base_dir_brainmask, source_file_dwi + "_space-b0_brainmask.nii.gz")
    out_caps_brainmask = rename_brainmask.run()

    return out_caps_dwi.outputs.out_file, out_caps_bval.outputs.out_file, \
        out_caps_bvec.outputs.out_file, out_caps_brainmask.outputs.out_file
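Both rename_into_caps variants rely on nipype's split_filename treating compound extensions such as ".nii.gz" as a single unit, so <source_file> comes out with no extension at all. A small sketch of that behavior (the path is illustrative):

from nipype.utils.filemanip import split_filename

pth, base, ext = split_filename("/data/sub-CLNC01_ses-M00_dwi.nii.gz")
print(pth)   # /data
print(base)  # sub-CLNC01_ses-M00_dwi
print(ext)   # .nii.gz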
Code example #9
def create_stage(stage_nr, workflow, inputs, inputs_nr_slices, slice_name):
    """
    Don't use this directly, see build_workflow() instead.

    Create an interpolation stage. Mutates the 'workflow' argument.
    """

    # Selectors into 'inputs'
    select_inputs = {}
    for i in range(inputs_nr_slices):
        fi = Function(input_names=['x', 'i'],
                      output_names=['out_file'],
                      function=select_function)

        select_inputs[i] = pe.Node(interface=fi,
                                   name='select_inputs_%s_%d_%d' % (
                                       slice_name,
                                       stage_nr,
                                       i,
                                   ))
        select_inputs[i].inputs.i = i
        workflow.connect(inputs, 'out_files', select_inputs[i], 'x')

    # Interpolations.
    interp_nodes = []
    for i in range(inputs_nr_slices - 1):
        interp_node = pe.Node(interface=InterpolateBetweenSlices(),
                              name='interp_%s_%d_%08d' % (
                                  slice_name,
                                  stage_nr,
                                  i,
                              ))

        select_node = pe.Node(interface=Select(index=[i, i + 1]),
                              name='select_%s_%d_%d_%d' % (
                                  slice_name,
                                  stage_nr,
                                  i,
                                  i + 1,
                              ))
        workflow.connect(inputs, 'out_files', select_node, 'inlist')
        workflow.connect(select_node, 'out', interp_node, 'slices')
        interp_node.inputs.level = stage_nr

        interp_nodes.append(interp_node)

    # Rename slices.
    renamers = []
    k = 0
    rename = pe.Node(interface=Rename(),
                     name='rename_%s_%d_%08d' % (
                         slice_name,
                         stage_nr,
                         k,
                     ))
    rename.inputs.format_string = 'slice_%08d.npz' % k
    workflow.connect(select_inputs[0], 'out_file', rename, 'in_file')
    renamers.append(rename)
    k += 1

    for i in range(len(interp_nodes)):
        rename = pe.Node(interface=Rename(),
                         name='rename_%s_%d_%08d' % (
                             slice_name,
                             stage_nr,
                             k,
                         ))
        rename.inputs.format_string = 'slice_%08d.npz' % k
        workflow.connect(interp_nodes[i], 'interpolated_slice', rename,
                         'in_file')
        renamers.append(rename)
        k += 1

        rename = pe.Node(interface=Rename(),
                         name='rename_%s_%d_%08d' % (
                             slice_name,
                             stage_nr,
                             k,
                         ))
        rename.inputs.format_string = 'slice_%08d.npz' % k
        workflow.connect(select_inputs[i + 1], 'out_file', rename, 'in_file')
        renamers.append(rename)
        k += 1

    # Could skip this unless we want to see intermediate steps.
    datasink = pe.Node(nio.DataSink(),
                       name='sinker_%s_%d' % (
                           slice_name,
                           stage_nr,
                       ))
    for (i, rename) in enumerate(renamers):
        workflow.connect(rename, 'out_file', datasink, 'slices.@%d' % i)

    # If we want to do another stage, use the out_file's of renamers.
    # We need a single node with an output 'out_files' consisting of each of the files.
    merge_renamed_files = pe.Node(interface=Merge(len(renamers)),
                                  name='merge_renamed_files_%s_%d' % (
                                      slice_name,
                                      stage_nr,
                                  ))
    for (i, rename) in enumerate(renamers):
        workflow.connect(rename, 'out_file', merge_renamed_files,
                         'in%d' % (i + 1))

    # Now rename the output 'out' to 'out_files' so we can pass it in to a recursive
    # call to this function.
    out_to_out_files = pe.Node(interface=Function(input_names=['x'],
                                                  output_names=['out_files'],
                                                  function=identity_function),
                               name='out_to_out_files_%s_%d' % (
                                   slice_name,
                                   stage_nr,
                               ))
    workflow.connect(merge_renamed_files, 'out', out_to_out_files, 'x')

    return out_to_out_files
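create_stage assumes two helpers, select_function and identity_function, that are not shown in this excerpt. Minimal definitions consistent with how the Function nodes use them might look like this (a sketch, not the project's actual code):

def select_function(x, i):
    # Return the i-th file of the list 'x'; feeds the 'out_file' output.
    return x[i]

def identity_function(x):
    # Pass the merged list through unchanged, exposed as 'out_files'.
    return x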
Code example #10
    def make_workflow(self):
        # Infosource: Iterate through subject names
        infosource = Node(interface=IdentityInterface(fields=['subject_str']),
                          name="infosource")
        infosource.iterables = ('subject_str', self.subject_list)

        # Infosource: Iterate through subject names
        #imgsrc = Node(interface=IdentityInterface(fields=['img']), name="imgsrc")
        #imgsrc.iterables = ('img', ['uni'])

        parse_scanner_dir = Node(
            interface=ParseScannerDir(raw_data_dir=self.raw_data_dir),
            name='parse_scanner_dir')

        ro = Node(interface=fsl.Reorient2Std(), name='ro')

        mv_uni = Node(interface=Rename(format_string='uni_'), name='mv_uni')
        mv_uniden = Node(interface=Rename(format_string='uniden'),
                         name='mv_uniden')
        mv_flair = Node(interface=Rename(format_string='flair'),
                        name='mv_flair')
        mv_bold = Node(interface=Rename(format_string='bold_'), name='mv_bold')
        mv_boldmag1 = Node(interface=Rename(format_string='boldmag1'),
                           name='mv_boldmag1')
        mv_boldmag2 = Node(interface=Rename(format_string='boldmag2'),
                           name='mv_boldmag2')
        mv_phasediff = Node(interface=Rename(format_string='boldphdiff'),
                            name='mv_phasediff')
        sink = Node(interface=DataSink(), name='sink')
        sink.inputs.base_directory = self.bids_root
        #sink.inputs.substitutions = [('mp2rage075iso', '{}'.format(str(sink.inputs._outputs.keys()))),
        #                              ('uni', 'uni.nii.gz')]#,
        #                              ('_uniden_DEN', ''),
        #                              ('DEN_mp2rage_orig_reoriented_masked_maths', 'mUNIbrain_DENskull_SPMmasked'),
        #                              ('_mp2rage_orig_reoriented_maths_maths_bin', '_brain_bin')]
        sink.inputs.regexp_substitutions = [
            (r'_subject_str_2(?P<subid>[0-9][0-9][0-9])T(?P<sesid>[0-9])/uni_',
             r'sub-NeuroMET\g<subid>/ses-0\g<sesid>/anat/sub-NeuroMET\g<subid>_ses-0\g<sesid>_T1w.nii.gz'
             ),
            (r'_subject_str_2(?P<subid>[0-9][0-9][0-9])T(?P<sesid>[0-9])/uniden',
             r'/derivatives/Siemens/sub-NeuroMET\g<subid>/ses-0\g<sesid>/anat/sub-NeuroMET\g<subid>_ses-0\g<sesid>_desc-UNIDEN.nii.gz'
             ),
            (r'_subject_str_2(?P<subid>[0-9][0-9][0-9])T(?P<sesid>[0-9])/flair',
             r'sub-NeuroMET\g<subid>/ses-0\g<sesid>/anat/sub-NeuroMET\g<subid>_ses-0\g<sesid>_FLAIR.nii.gz'
             ),
            (r'_subject_str_2(?P<subid>[0-9][0-9][0-9])T(?P<sesid>[0-9])/bold_',
             r'sub-NeuroMET\g<subid>/ses-0\g<sesid>/func/sub-NeuroMET\g<subid>_ses-0\g<sesid>_task-rest_bold.nii.gz'
             ),
            (r'_subject_str_2(?P<subid>[0-9][0-9][0-9])T(?P<sesid>[0-9])/boldmag1',
             r'sub-NeuroMET\g<subid>/ses-0\g<sesid>/fmap/sub-NeuroMET\g<subid>_ses-0\g<sesid>_magnitude1.nii.gz'
             ),
            (r'_subject_str_2(?P<subid>[0-9][0-9][0-9])T(?P<sesid>[0-9])/boldmag2',
             r'sub-NeuroMET\g<subid>/ses-0\g<sesid>/fmap/sub-NeuroMET\g<subid>_ses-0\g<sesid>_magnitude2.nii.gz'
             ),
            (r'_subject_str_2(?P<subid>[0-9][0-9][0-9])T(?P<sesid>[0-9])/boldphdiff',
             r'sub-NeuroMET\g<subid>/ses-0\g<sesid>/fmap/sub-NeuroMET\g<subid>_ses-0\g<sesid>_phasediff.nii.gz'
             ),
        ]
        #    (r'c1{prefix}(.*).UNI_brain_bin.nii.gz'.format(prefix=self.project_prefix),
        #                                      r'{prefix}\1.UNI_brain_bin.nii.gz'.format(prefix=self.project_prefix)),
        #                                     (r'c1{prefix}(.*).DEN_brain_bin.nii.gz'.format(prefix=self.project_prefix),
        #                                      r'{prefix}\1.DEN_brain_bin.nii.gz'.format(prefix=self.project_prefix))]

        scanner_to_bids = Workflow(name='scanner_to_bids',
                                   base_dir=self.temp_dir)
        #scanner_to_bids.connect(imgsrc, 'img', mv, 'format_string')
        scanner_to_bids.connect(infosource, 'subject_str', parse_scanner_dir,
                                'subject_id')
        scanner_to_bids.connect(parse_scanner_dir, 'uni', mv_uni, 'in_file')
        scanner_to_bids.connect(parse_scanner_dir, 'uniden', mv_uniden,
                                'in_file')
        scanner_to_bids.connect(parse_scanner_dir, 'flair', mv_flair,
                                'in_file')
        scanner_to_bids.connect(parse_scanner_dir, 'bold', mv_bold, 'in_file')
        scanner_to_bids.connect(parse_scanner_dir, 'boldmag1', mv_boldmag1,
                                'in_file')
        scanner_to_bids.connect(parse_scanner_dir, 'boldmag2', mv_boldmag2,
                                'in_file')
        scanner_to_bids.connect(parse_scanner_dir, 'boldphdiff', mv_phasediff,
                                'in_file')
        scanner_to_bids.connect(mv_uni, 'out_file', sink, '@uni')
        scanner_to_bids.connect(mv_uniden, 'out_file', sink, '@uniden')
        scanner_to_bids.connect(mv_flair, 'out_file', sink, '@flair')
        scanner_to_bids.connect(mv_bold, 'out_file', sink, '@bold')
        scanner_to_bids.connect(mv_boldmag1, 'out_file', sink, '@boldmag1')
        scanner_to_bids.connect(mv_boldmag2, 'out_file', sink, '@boldmag2')
        scanner_to_bids.connect(mv_phasediff, 'out_file', sink, '@phasediff')

        return scanner_to_bids
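To make the regexp_substitutions above concrete, here is the first rule applied by hand with Python's re module; the subject string is a made-up example of the sink's internal path:

import re

pattern = r'_subject_str_2(?P<subid>[0-9][0-9][0-9])T(?P<sesid>[0-9])/uni_'
repl = (r'sub-NeuroMET\g<subid>/ses-0\g<sesid>/anat/'
        r'sub-NeuroMET\g<subid>_ses-0\g<sesid>_T1w.nii.gz')

print(re.sub(pattern, repl, '_subject_str_2042T1/uni_'))
# -> sub-NeuroMET042/ses-01/anat/sub-NeuroMET042_ses-01_T1w.nii.gz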
Code example #11
File: workflows.py  Project: MichlF/misc
def create_motion_correction_workflow(name='moco',
                                      method='AFNI',
                                      extend_moco_params=False):
    """uses sub-workflows to perform different registration steps.
    Requires fsl and freesurfer tools
    Parameters
    ----------
    name : string
        name of workflow
    method : string
        motion correction tool to use: 'AFNI' (default) or 'FSL'
    extend_moco_params : bool
        placeholder for extended motion parameters (currently not implemented)

    Example
    -------
    >>> motion_correction_workflow = create_motion_correction_workflow('motion_correction_workflow')
    >>> motion_correction_workflow.inputs.inputspec.output_directory = '/data/project/raw/BIDS/sj_1/'
    >>> motion_correction_workflow.inputs.inputspec.in_files = ['sub-001.nii.gz','sub-002.nii.gz']
    >>> motion_correction_workflow.inputs.inputspec.which_file_is_EPI_space = 'middle'

    Inputs::
          inputspec.output_directory : directory in which to sink the result files
          inputspec.in_files : list of functional files
          inputspec.which_file_is_EPI_space : determines which file is the 'standard EPI space'
    Outputs::
           outputspec.EPI_space_file : standard EPI space file, one timepoint
           outputspec.motion_corrected_files : motion corrected files
           outputspec.motion_correction_plots : motion correction plots
           outputspec.motion_correction_parameters : motion correction parameters
    """

    ### NODES
    input_node = pe.Node(IdentityInterface(fields=[
        'in_files', 'output_directory', 'which_file_is_EPI_space', 'sub_id',
        'tr'
    ]),
                         name='inputspec')
    output_node = pe.Node(IdentityInterface(fields=([
        'motion_corrected_files', 'EPI_space_file', 'mask_EPI_space_file',
        'motion_correction_plots', 'motion_correction_parameters',
        'extended_motion_correction_parameters',
        'new_motion_correction_parameters'
    ])),
                          name='outputspec')

    ########################################################################################
    # Invariant nodes
    ########################################################################################

    EPI_file_selector_node = pe.Node(interface=EPI_file_selector,
                                     name='EPI_file_selector_node')
    mean_bold = pe.Node(interface=fsl.maths.MeanImage(dimension='T'),
                        name='mean_space')
    rename_mean_bold = pe.Node(niu.Rename(format_string='session_EPI_space',
                                          keep_ext=True),
                               name='rename_mean_bold')

    ########################################################################################
    # Workflow
    ########################################################################################

    motion_correction_workflow = pe.Workflow(name=name)
    motion_correction_workflow.connect(input_node, 'which_file_is_EPI_space',
                                       EPI_file_selector_node, 'which_file')
    motion_correction_workflow.connect(input_node, 'in_files',
                                       EPI_file_selector_node, 'in_files')

    ########################################################################################
    # outputs via datasink
    ########################################################################################
    datasink = pe.Node(nio.DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    # first link the workflow's output_directory into the datasink.
    motion_correction_workflow.connect(input_node, 'output_directory',
                                       datasink, 'base_directory')
    motion_correction_workflow.connect(input_node, 'sub_id', datasink,
                                       'container')

    ########################################################################################
    # FSL MCFlirt
    ########################################################################################
    # new approach, which should aid in the joint motion correction of
    # multiple sessions together, by pre-registering each run.
    # the strategy would be to, for each run, take the first TR
    # and FLIRT-align (6dof) it to the EPI_space file.
    # then we can use this as an --infile argument to mcflirt.

    if method == 'FSL':

        rename_motion_files = pe.MapNode(
            niu.Rename(keep_ext=False),
            name='rename_motion_files',
            iterfield=['in_file', 'format_string'])

        remove_niigz_ext = pe.MapNode(interface=Remove_extension,
                                      name='remove_niigz_ext',
                                      iterfield=['in_file'])

        motion_correct_EPI_space = pe.Node(interface=fsl.MCFLIRT(
            cost='normcorr', interpolation='sinc', mean_vol=True),
                                           name='motion_correct_EPI_space')

        motion_correct_all = pe.MapNode(interface=fsl.MCFLIRT(
            save_mats=True,
            save_plots=True,
            cost='normcorr',
            interpolation='sinc',
            stats_imgs=True),
                                        name='motion_correct_all',
                                        iterfield=['in_file'])

        plot_motion = pe.MapNode(
            interface=fsl.PlotMotionParams(in_source='fsl'),
            name='plot_motion',
            iterfield=['in_file'])

        if extend_moco_params:
            # make extend_motion_pars node here
            # extend_motion_pars = pe.MapNode(Function(input_names=['moco_par_file', 'tr'], output_names=['new_out_file', 'ext_out_file'],
            # function=_extend_motion_parameters), name='extend_motion_pars', iterfield = ['moco_par_file'])
            pass

        # create reference:
        motion_correction_workflow.connect(EPI_file_selector_node, 'out_file',
                                           motion_correct_EPI_space, 'in_file')
        motion_correction_workflow.connect(motion_correct_EPI_space,
                                           'out_file', mean_bold, 'in_file')
        motion_correction_workflow.connect(mean_bold, 'out_file',
                                           motion_correct_all, 'ref_file')

        # motion correction across runs
        motion_correction_workflow.connect(input_node, 'in_files',
                                           motion_correct_all, 'in_file')
        #motion_correction_workflow.connect(motion_correct_all, 'out_file', output_node, 'motion_corrected_files')
        # motion_correction_workflow.connect(motion_correct_all, 'par_file', extend_motion_pars, 'moco_par_file')
        # motion_correction_workflow.connect(input_node, 'tr', extend_motion_pars, 'tr')
        # motion_correction_workflow.connect(extend_motion_pars, 'ext_out_file', output_node, 'extended_motion_correction_parameters')
        # motion_correction_workflow.connect(extend_motion_pars, 'new_out_file', output_node, 'new_motion_correction_parameters')

        ########################################################################################
        # Plot the estimated motion parameters
        ########################################################################################

        # rename:
        motion_correction_workflow.connect(mean_bold, 'out_file',
                                           rename_mean_bold, 'in_file')
        motion_correction_workflow.connect(motion_correct_all, 'par_file',
                                           rename_motion_files, 'in_file')
        motion_correction_workflow.connect(motion_correct_all, 'par_file',
                                           remove_niigz_ext, 'in_file')
        motion_correction_workflow.connect(remove_niigz_ext, 'out_file',
                                           rename_motion_files,
                                           'format_string')

        # plots:
        plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
        motion_correction_workflow.connect(rename_motion_files, 'out_file',
                                           plot_motion, 'in_file')
        motion_correction_workflow.connect(plot_motion, 'out_file',
                                           output_node,
                                           'motion_correction_plots')

        # output node:
        motion_correction_workflow.connect(mean_bold, 'out_file', output_node,
                                           'EPI_space_file')
        motion_correction_workflow.connect(rename_motion_files, 'out_file',
                                           output_node,
                                           'motion_correction_parameters')
        motion_correction_workflow.connect(motion_correct_all, 'out_file',
                                           output_node,
                                           'motion_corrected_files')

        # datasink:
        motion_correction_workflow.connect(rename_mean_bold, 'out_file',
                                           datasink, 'reg')
        motion_correction_workflow.connect(motion_correct_all, 'out_file',
                                           datasink, 'mcf')
        motion_correction_workflow.connect(rename_motion_files, 'out_file',
                                           datasink, 'mcf.motion_pars')
        motion_correction_workflow.connect(plot_motion, 'out_file', datasink,
                                           'mcf.motion_plots')
        # motion_correction_workflow.connect(extend_motion_pars, 'ext_out_file', datasink, 'mcf.ext_motion_pars')
        # motion_correction_workflow.connect(extend_motion_pars, 'new_out_file', datasink, 'mcf.new_motion_pars')

    ########################################################################################
    # AFNI 3DVolReg
    ########################################################################################
    # for speed, we use AFNI's 3DVolReg brute-force.
    # this loses plotting of motion parameters but increases speed
    # we hold on to the same setup, first moco the selected run
    # and then moco everything to that image, but without the
    # intermediate FLIRT step.

    if method == 'AFNI':
        motion_correct_EPI_space = pe.Node(
            interface=afni.Volreg(
                outputtype='NIFTI_GZ',
                zpad=5,
                args=' -cubic '  # -twopass -Fourier
            ),
            name='motion_correct_EPI_space')

        motion_correct_all = pe.MapNode(
            interface=afni.Volreg(
                outputtype='NIFTI_GZ',
                zpad=5,
                args=' -cubic '  # -twopass
            ),
            name='motion_correct_all',
            iterfield=['in_file'])

        # for renaming *_volreg.nii.gz to *_mcf.nii.gz
        set_postfix_mcf = pe.MapNode(interface=Set_postfix,
                                     name='set_postfix_mcf',
                                     iterfield=['in_file'])
        set_postfix_mcf.inputs.postfix = 'mcf'

        rename_volreg = pe.MapNode(interface=Rename(keep_ext=True),
                                   name='rename_volreg',
                                   iterfield=['in_file', 'format_string'])

        # curate for moco between sessions
        motion_correction_workflow.connect(EPI_file_selector_node, 'out_file',
                                           motion_correct_EPI_space, 'in_file')
        motion_correction_workflow.connect(motion_correct_EPI_space,
                                           'out_file', mean_bold, 'in_file')

        # motion correction across runs
        motion_correction_workflow.connect(input_node, 'in_files',
                                           motion_correct_all, 'in_file')
        motion_correction_workflow.connect(mean_bold, 'out_file',
                                           motion_correct_all, 'basefile')
        # motion_correction_workflow.connect(mean_bold, 'out_file', motion_correct_all, 'rotparent')
        # motion_correction_workflow.connect(mean_bold, 'out_file', motion_correct_all, 'gridparent')

        # output node:
        motion_correction_workflow.connect(mean_bold, 'out_file', output_node,
                                           'EPI_space_file')
        motion_correction_workflow.connect(motion_correct_all, 'md1d_file',
                                           output_node,
                                           'max_displacement_info')
        motion_correction_workflow.connect(motion_correct_all, 'oned_file',
                                           output_node,
                                           'motion_correction_parameter_info')
        motion_correction_workflow.connect(
            motion_correct_all, 'oned_matrix_save', output_node,
            'motion_correction_parameter_matrix')
        motion_correction_workflow.connect(input_node, 'in_files',
                                           set_postfix_mcf, 'in_file')
        motion_correction_workflow.connect(set_postfix_mcf, 'out_file',
                                           rename_volreg, 'format_string')
        motion_correction_workflow.connect(motion_correct_all, 'out_file',
                                           rename_volreg, 'in_file')
        motion_correction_workflow.connect(rename_volreg, 'out_file',
                                           output_node,
                                           'motion_corrected_files')

        # datasink:
        motion_correction_workflow.connect(mean_bold, 'out_file',
                                           rename_mean_bold, 'in_file')
        motion_correction_workflow.connect(rename_mean_bold, 'out_file',
                                           datasink, 'reg')
        motion_correction_workflow.connect(rename_volreg, 'out_file', datasink,
                                           'mcf')
        motion_correction_workflow.connect(motion_correct_all, 'md1d_file',
                                           datasink,
                                           'mcf.max_displacement_info')
        motion_correction_workflow.connect(motion_correct_all, 'oned_file',
                                           datasink, 'mcf.parameter_info')
        motion_correction_workflow.connect(motion_correct_all,
                                           'oned_matrix_save', datasink,
                                           'mcf.motion_pars')

    return motion_correction_workflow
Code example #12
def create_registration_workflow(analysis_info, name='reg'):
    """uses sub-workflows to perform different registration steps.
    Requires fsl and freesurfer tools
    Parameters
    ----------
    name : string
        name of workflow
    analysis_info : dict
        contains session information needed for workflow, such as
        whether to use FreeSurfer or FLIRT etc.
    Example
    -------
    >>> registration_workflow = create_registration_workflow(name = 'registration_workflow', analysis_info = {'use_FS':True})
    >>> registration_workflow.inputs.inputspec.output_directory = '/data/project/raw/BIDS/sj_1/'
    >>> registration_workflow.inputs.inputspec.EPI_space_file = 'example_func.nii.gz'
    >>> registration_workflow.inputs.inputspec.T1_file = 'T1.nii.gz' # if using freesurfer, this file will be created instead of used.
    >>> registration_workflow.inputs.inputspec.freesurfer_subject_ID = 'sub_01'
    >>> registration_workflow.inputs.inputspec.freesurfer_subject_dir = '$SUBJECTS_DIR'
    >>> registration_workflow.inputs.inputspec.reference_file = '/usr/local/fsl/data/standard/standard152_T1_2mm_brain.nii.gz'

    Inputs::
          inputspec.output_directory : directory in which to sink the result files
          inputspec.T1_file : T1 anatomy file
          inputspec.EPI_space_file : EPI session file
          inputspec.freesurfer_subject_ID : FS subject ID
          inputspec.freesurfer_subject_dir : $SUBJECTS_DIR
    Outputs::
           outputspec.out_reg_file : BBRegister registration file that maps EPI space to T1
           outputspec.out_matrix_file : FLIRT registration file that maps EPI space to T1
           outputspec.out_inv_matrix_file : FLIRT registration file that maps T1 space to EPI
    """

    ### NODES
    input_node = pe.Node(IdentityInterface(fields=[
        'EPI_space_file', 'output_directory', 'freesurfer_subject_ID',
        'freesurfer_subject_dir', 'T1_file', 'standard_file', 'sub_id'
    ]),
                         name='inputspec')

    ### Workflow to be returned
    registration_workflow = pe.Workflow(name=name)

    ### sub-workflows
    epi_2_T1 = create_epi_to_T1_workflow(name='epi',
                                         use_FS=analysis_info['use_FS'],
                                         do_FAST=analysis_info['do_FAST'])
    T1_to_standard = create_T1_to_standard_workflow(
        name='T1_to_standard',
        use_FS=analysis_info['use_FS'],
        do_fnirt=analysis_info['do_fnirt'],
        use_AFNI_ss=analysis_info['use_AFNI_ss'])
    concat_2_feat = create_concat_2_feat_workflow(name='concat_2_feat')

    output_node = pe.Node(IdentityInterface(
        fields=('EPI_T1_matrix_file', 'T1_EPI_matrix_file',
                'EPI_T1_register_file', 'T1_standard_matrix_file',
                'standard_T1_matrix_file', 'EPI_T1_matrix_file',
                'T1_EPI_matrix_file', 'T1_file', 'standard_file',
                'EPI_space_file')),
                          name='outputspec')

    ###########################################################################
    # EPI to T1
    ###########################################################################

    registration_workflow.connect([(input_node, epi_2_T1, [
        ('EPI_space_file', 'inputspec.EPI_space_file'),
        ('output_directory', 'inputspec.output_directory'),
        ('freesurfer_subject_ID', 'inputspec.freesurfer_subject_ID'),
        ('freesurfer_subject_dir', 'inputspec.freesurfer_subject_dir'),
        ('T1_file', 'inputspec.T1_file')
    ])])

    ###########################################################################
    # T1 to standard
    ###########################################################################

    registration_workflow.connect([(input_node, T1_to_standard, [
        ('freesurfer_subject_ID', 'inputspec.freesurfer_subject_ID'),
        ('freesurfer_subject_dir', 'inputspec.freesurfer_subject_dir'),
        ('T1_file', 'inputspec.T1_file'),
        ('standard_file', 'inputspec.standard_file')
    ])])

    ###########################################################################
    # concatenation of all matrices
    ###########################################################################

    # then, the inputs from the previous sub-workflows
    registration_workflow.connect([(epi_2_T1, concat_2_feat, [
        ('outputspec.EPI_T1_matrix_file', 'inputspec.EPI_T1_matrix_file'),
    ])])

    registration_workflow.connect([(T1_to_standard, concat_2_feat, [
        ('outputspec.T1_standard_matrix_file',
         'inputspec.T1_standard_matrix_file'),
    ])])

    ###########################################################################
    # Rename nodes, for the datasink
    ###########################################################################

    if analysis_info['use_FS']:
        rename_register = pe.Node(Rename(format_string='register.dat',
                                         keep_ext=False),
                                  name='rename_register')

        registration_workflow.connect(epi_2_T1,
                                      'outputspec.EPI_T1_register_file',
                                      rename_register, 'in_file')

    rename_example_func = pe.Node(Rename(format_string='example_func',
                                         keep_ext=True),
                                  name='rename_example_func')

    registration_workflow.connect(input_node, 'EPI_space_file',
                                  rename_example_func, 'in_file')

    rename_highres = pe.Node(Rename(format_string='highres', keep_ext=True),
                             name='rename_highres')
    registration_workflow.connect(T1_to_standard, 'outputspec.T1_file',
                                  rename_highres, 'in_file')

    rename_standard = pe.Node(Rename(format_string='standard', keep_ext=True),
                              name='rename_standard')

    registration_workflow.connect(input_node, 'standard_file', rename_standard,
                                  'in_file')

    rename_example_func2standard = pe.Node(Rename(
        format_string='example_func2standard.mat', keep_ext=False),
                                           name='rename_example_func2standard')

    registration_workflow.connect(concat_2_feat,
                                  'outputspec.EPI_standard_matrix_file',
                                  rename_example_func2standard, 'in_file')

    rename_example_func2highres = pe.Node(Rename(
        format_string='example_func2highres.mat', keep_ext=False),
                                          name='rename_example_func2highres')

    registration_workflow.connect(epi_2_T1, 'outputspec.EPI_T1_matrix_file',
                                  rename_example_func2highres, 'in_file')

    rename_highres2standard = pe.Node(Rename(
        format_string='highres2standard.mat', keep_ext=False),
                                      name='rename_highres2standard')
    registration_workflow.connect(T1_to_standard,
                                  'outputspec.T1_standard_matrix_file',
                                  rename_highres2standard, 'in_file')

    rename_standard2example_func = pe.Node(Rename(
        format_string='standard2example_func.mat', keep_ext=False),
                                           name='rename_standard2example_func')

    registration_workflow.connect(concat_2_feat,
                                  'outputspec.standard_EPI_matrix_file',
                                  rename_standard2example_func, 'in_file')

    rename_highres2example_func = pe.Node(Rename(
        format_string='highres2example_func.mat', keep_ext=False),
                                          name='rename_highres2example_func')

    registration_workflow.connect(epi_2_T1, 'outputspec.T1_EPI_matrix_file',
                                  rename_highres2example_func, 'in_file')

    rename_standard2highres = pe.Node(Rename(
        format_string='standard2highres.mat', keep_ext=False),
                                      name='rename_standard2highres')
    registration_workflow.connect(T1_to_standard,
                                  'outputspec.standard_T1_matrix_file',
                                  rename_standard2highres, 'in_file')

    # outputs via datasink
    datasink = pe.Node(DataSink(infields=['reg']), name='sinker')
    datasink.inputs.parameterization = False
    registration_workflow.connect(input_node, 'output_directory', datasink,
                                  'base_directory')
    registration_workflow.connect(input_node, 'sub_id', datasink, 'container')

    # NEW SETUP WITH RENAME (WITHOUT MERGER)
    if analysis_info['use_FS']:
        registration_workflow.connect(rename_register, 'out_file', datasink,
                                      'reg.@dat')

    registration_workflow.connect(rename_example_func, 'out_file', datasink,
                                  'reg.@example_func')
    registration_workflow.connect(rename_standard, 'out_file', datasink,
                                  'reg.@standard')
    registration_workflow.connect(rename_highres, 'out_file', datasink,
                                  'reg.@highres')
    registration_workflow.connect(rename_example_func2highres, 'out_file',
                                  datasink, 'reg.@example_func2highres')
    registration_workflow.connect(rename_highres2example_func, 'out_file',
                                  datasink, 'reg.@highres2example_func')
    registration_workflow.connect(rename_highres2standard, 'out_file',
                                  datasink, 'reg.@highres2standard')
    registration_workflow.connect(rename_standard2highres, 'out_file',
                                  datasink, 'reg.@standard2highres')
    registration_workflow.connect(rename_standard2example_func, 'out_file',
                                  datasink, 'reg.@standard2example_func')
    registration_workflow.connect(rename_example_func2standard, 'out_file',
                                  datasink, 'reg.@example_func2standard')

    registration_workflow.connect(rename_highres, 'out_file', output_node,
                                  'T1_file')

    # put the nifti and mat files, renamed above, in the reg/feat directory.
    # don't yet know what's wrong with this merge to datasink
    # registration_workflow.connect(merge_for_reg_N, 'out', datasink, 'reg')

    return registration_workflow
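Code example #13 below fills the template in Rename(format_string='rest_%(run)02d') through a 'run' input that Rename adds dynamically for each format key. A hedged standalone sketch of that mechanism (the file name is a placeholder):

from nipype.interfaces.utility import Rename

# Each %(key)... field in format_string becomes an input trait on the node.
rename = Rename(format_string="rest_%(run)02d", keep_ext=True)
rename.inputs.in_file = "sub-01_task-rest_bold.nii.gz"  # hypothetical
rename.inputs.run = 1   # fills the template -> "rest_01.nii.gz"
# res = rename.run()    # would perform the copy if the file existed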
Code example #13
def create_workflow(files,
                    target_file,
                    subject_id,
                    TR,
                    slice_times,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    subjects_dir=None,
                    sink_directory=os.getcwd(),
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.jobtype = 'estwrite'

    num_slices = len(slice_times)
    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.num_slices = num_slices
    slice_timing.inputs.time_repetition = TR
    slice_timing.inputs.time_acquisition = TR - TR / float(num_slices)
    slice_timing.inputs.slice_order = (np.argsort(slice_times) + 1).tolist()
    slice_timing.inputs.ref_slice = int(num_slices / 2)

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(slice_timing, 'timecorrected_files', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """Segment and Register
    """

    registration = create_reg_workflow(name='registration')
    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    registration.inputs.inputspec.subject_id = subject_id
    registration.inputs.inputspec.subjects_dir = subjects_dir
    registration.inputs.inputspec.target_image = target_file
    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, True]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 9
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'SPM'
    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([
        (name_unique, realign, [('out_file', 'in_files')]),
        (realign, slice_timing, [('realigned_files', 'in_files')]),
        (slice_timing, art, [('timecorrected_files', 'realigned_files')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters')]),
    ])

    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(calc_median, 'median_file', mask, 'in_file')

    # get segmentation in normalized functional space

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files

    # filter some noise

    # Compute motion regressors
    motreg = Node(Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors,
        imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1,
        imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
                              out_pf_name='pF_mcart.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')

    wf.connect(slice_timing, 'timecorrected_files', filter1, 'in_file')
    wf.connect(slice_timing, ('timecorrected_files', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')

    createfilter2 = MapNode(Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components

    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(registration,
               ('outputspec.segmentation_files', selectindex, [0, 2]),
               createfilter2, 'mask_file')

    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2,
               'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    bandpass = Node(Function(
        input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'],
        output_names=['out_files'],
        function=bandpass_filter,
        imports=imports),
                    name='bandpass_unsmooth')
    bandpass.inputs.fs = 1. / TR
    bandpass.inputs.highpass_freq = highpass_freq
    bandpass.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass, 'files')
    """Smooth the functional data using
    :class:`nipype.interfaces.spm.Smooth`.
    """

    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = vol_fwhm

    wf.connect(bandpass, 'out_files', smooth, 'in_files')

    collector = Node(Merge(2), name='collect_streams')
    wf.connect(smooth, 'smoothed_files', collector, 'in1')
    wf.connect(bandpass, 'out_files', collector, 'in2')
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = MapNode(ants.ApplyTransforms(),
                      iterfield=['input_image'],
                      name='warpall')
    warpall.inputs.input_image_type = 3
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'
    warpall.inputs.reference_image = target_file
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 1

    # transform to target
    wf.connect(collector, 'out', warpall, 'input_image')
    wf.connect(registration, 'outputspec.transforms', warpall, 'transforms')

    mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask')

    wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file')

    maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker')
    wf.connect(warpall, 'output_image', maskts, 'in_file')
    wf.connect(mask_target, 'out_file', maskts, 'mask_file')

    # map to surface
    # extract aparc+aseg ROIs
    # extract subcortical ROIs
    # extract target space ROIs
    # combine subcortical and cortical rois into a single cifti file

    #######
    # Convert aparc to subject functional space

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(
        freesurfer.SegStats(default_color_table=True),
        iterfield=['in_file', 'summary_file', 'avgwf_txt_file'],
        name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) +
                                     [17, 18, 26, 47] + list(range(49, 55)) +
                                     [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))

    wf.connect(registration, 'outputspec.aparc', sampleaparc,
               'segmentation_file')
    wf.connect(collector, 'out', sampleaparc, 'in_file')

    def get_names(files, suffix):
        """Generate appropriate names for output files
        """
        from nipype.utils.filemanip import split_filename, list_to_filename
        out_names = []
        for filename in files:
            _, name, _ = split_filename(filename)
            out_names.append(name + suffix)
        return list_to_filename(out_names)

    wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc,
               'avgwf_txt_file')
    wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc,
               'summary_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    samplerlh.inputs.smooth_surf = surf_fwhm
    # samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = subjects_dir

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(collector, 'out', samplerlh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(collector, 'out', samplerrh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(
        input_names=['timeseries_file', 'label_file', 'indices'],
        output_names=['out_file'],
        function=extract_subrois,
        imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
                             list(range(49, 55)) + [58])
    ts2txt.inputs.label_file = os.path.abspath(
        'OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_2mm_v2.nii.gz')
    wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')

    ######

    substitutions = [('_target_subject_', ''),
                     ('_filtermotart_cleaned_bp_trans_masked', ''),
                     ('_filtermotart_cleaned_bp', '')]
    regex_subs = [
        ('_ts_masker.*/sar', '/smooth/'),
        ('_ts_masker.*/ar', '/unsmooth/'),
        ('_combiner.*/sar', '/smooth/'),
        ('_combiner.*/ar', '/unsmooth/'),
        ('_aparc_ts.*/sar', '/smooth/'),
        ('_aparc_ts.*/ar', '/unsmooth/'),
        ('_getsubcortts.*/sar', '/smooth/'),
        ('_getsubcortts.*/ar', '/unsmooth/'),
        ('series/sar', 'series/smooth/'),
        ('series/ar', 'series/unsmooth/'),
        ('_inverse_transform./', ''),
    ]
    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = substitutions
    datasink.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'realignment_parameters', datasink,
               'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.segmentation_files', datasink,
               'resting.mask_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'resting.qa.ants')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(bandpass, 'out_files', datasink,
               'resting.timeseries.@bandpassed')
    wf.connect(smooth, 'smoothed_files', datasink,
               'resting.timeseries.@smoothed')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
    wf.connect(sampleaparc, 'summary_file', datasink,
               'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file', datasink,
               'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file', datasink,
               'resting.parcellations.grayo.@subcortical')

    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = substitutions
    datasink2.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file', datasink2,
               'resting.parcellations.grayo.@surface')
    return wf
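The connect tuples above, e.g. `('outputspec.segmentation_files', selectindex, [0, 2])` and `('out_res', rename, '_cleaned')`, route list outputs through small helper callables that are defined earlier in the script and not shown in this excerpt. A minimal sketch of what those two helpers look like in the nipype resting-state example this workflow follows (definitions assumed, not confirmed by this excerpt):

def selectindex(files, idx):
    """Select items from a file list by index before passing them on."""
    import numpy as np
    from nipype.utils.filemanip import filename_to_list, list_to_filename
    return list_to_filename(np.array(filename_to_list(files))[idx].tolist())


def rename(in_files, suffix=None):
    """Append a suffix to each file name, preserving the (double) extension."""
    from nipype.utils.filemanip import (filename_to_list, list_to_filename,
                                        split_filename)
    out_files = []
    for filename in filename_to_list(in_files):
        _, name, ext = split_filename(filename)
        out_files.append(name + (suffix or '') + ext)
    return list_to_filename(out_files)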
Code example #14
def mk_w_angio(freesurfer_dir, angiogram, out_dir):

    n_input = Node(IdentityInterface(fields=[
        'fs_dir',
        'fs_subj',
        'angiogram',
        'out_dir',
    ]),
                   name='input')

    n_input.inputs.fs_dir = str(freesurfer_dir.parent)
    n_input.inputs.fs_subj = freesurfer_dir.name
    n_input.inputs.angiogram = str(angiogram)
    n_input.inputs.out_dir = str(out_dir)

    n_coreg = Node(Registration(), name='antsReg')
    n_coreg.inputs.num_threads = 40
    n_coreg.inputs.use_histogram_matching = False
    n_coreg.inputs.dimension = 3
    n_coreg.inputs.winsorize_lower_quantile = 0.001
    n_coreg.inputs.winsorize_upper_quantile = 0.999
    n_coreg.inputs.float = True
    n_coreg.inputs.interpolation = 'Linear'
    n_coreg.inputs.transforms = ['Rigid']
    n_coreg.inputs.transform_parameters = [[0.1]]
    n_coreg.inputs.metric = ['MI']
    n_coreg.inputs.metric_weight = [1]
    n_coreg.inputs.radius_or_number_of_bins = [32]
    n_coreg.inputs.sampling_strategy = ['Regular']
    n_coreg.inputs.sampling_percentage = [0.5]
    n_coreg.inputs.sigma_units = ['mm']
    n_coreg.inputs.convergence_threshold = [1e-6]
    n_coreg.inputs.smoothing_sigmas = [[1, 0]]
    n_coreg.inputs.shrink_factors = [[1, 1]]
    n_coreg.inputs.convergence_window_size = [10]
    n_coreg.inputs.number_of_iterations = [[250, 100]]
    n_coreg.inputs.output_warped_image = True
    n_coreg.inputs.output_inverse_warped_image = True
    n_coreg.inputs.output_transform_prefix = 'angio_to_struct'

    n_apply = Node(ApplyTransforms(), name='ants_apply')
    n_apply.inputs.dimension = 3
    n_apply.inputs.interpolation = 'Linear'
    n_apply.inputs.default_value = 0

    n_convert = Node(MRIConvert(), 'convert')
    n_convert.inputs.out_type = 'niigz'

    n_binarize = Node(Threshold(), 'make_mask')
    n_binarize.inputs.thresh = .1
    n_binarize.inputs.args = '-bin'

    n_mask = Node(BinaryMaths(), 'mask')
    n_mask.inputs.operation = 'mul'

    n_veins = Node(Rename(), 'rename_veins')
    n_veins.inputs.format_string = 'angiogram.nii.gz'

    n_sink = Node(DataSink(), 'sink')
    n_sink.inputs.base_directory = '/Fridge/users/giovanni/projects/intraop/loenen/angiogram'  # note: 'base_directory' is also fed from n_input 'out_dir' below
    n_sink.inputs.remove_dest_dir = True

    fs = Node(FreeSurferSource(), 'freesurfer')

    n_split = Node(Split(), 'split_pca')
    n_split.inputs.dimension = 't'

    w = Workflow('tmp_angiogram')
    w.base_dir = str(out_dir)

    w.connect(n_input, 'fs_dir', fs, 'subjects_dir')
    w.connect(n_input, 'fs_subj', fs, 'subject_id')
    w.connect(n_input, 'angiogram', n_split, 'in_file')
    w.connect(n_split, ('out_files', select_file, 0), n_coreg, 'moving_image')
    w.connect(fs, 'T1', n_coreg, 'fixed_image')

    w.connect(n_coreg, 'forward_transforms', n_apply, 'transforms')
    w.connect(n_split, ('out_files', select_file, 1), n_apply, 'input_image')
    w.connect(fs, 'T1', n_apply, 'reference_image')
    w.connect(fs, 'brain', n_convert, 'in_file')
    w.connect(n_convert, 'out_file', n_binarize, 'in_file')
    w.connect(n_apply, 'output_image', n_mask, 'in_file')
    w.connect(n_binarize, 'out_file', n_mask, 'operand_file')
    w.connect(n_mask, 'out_file', n_veins, 'in_file')
    w.connect(n_input, 'out_dir', n_sink, 'base_directory')
    w.connect(n_veins, 'out_file', n_sink, '@angiogram')
    w.connect(n_convert, 'out_file', n_sink, '@brain')

    return w
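`mk_w_angio` feeds the `fsl.Split` output through a `select_file` callable that is not defined in this excerpt. A minimal sketch consistent with how it is connected (name and signature assumed):

def select_file(out_files, index):
    """Pick a single volume from the list produced by fsl.Split."""
    from nipype.utils.filemanip import filename_to_list
    return filename_to_list(out_files)[index]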
Code example #15
File: clipSeeds.py  Project: BRAINSia/rs-fMRI-pilot
def writeSeedFiles():
    CACHE_DIR = 'seeds_CACHE'
    RESULTS_DIR = 'seeds'
    REWRITE_DATASINKS = True
    nacAtlasFile = "/Shared/paulsen/Experiments/rsFMRI/rs-fMRI-pilot/ReferenceAtlas/template_t1.nii.gz"
    nacAtlasLabel = "/Shared/paulsen/Experiments/rsFMRI/rs-fMRI-pilot/ReferenceAtlas/template_nac_labels.nii.gz"
    nacResampleResolution = (2.0, 2.0, 2.0)
    downsampledNACfilename = 'downsampledNACatlas.nii.gz'

    preproc = pipe.Workflow(name=CACHE_DIR)
    preproc.base_dir = os.getcwd()

    labels, seeds = getAtlasPoints(
        '/Shared/paulsen/Experiments/rsFMRI/rs-fMRI-pilot/seeds.fcsv')

    seedsIdentity = pipe.Node(interface=IdentityInterface(fields=['index']),
                              name='seedsIdentity')
    seedsIdentity.iterables = ('index', range(len(labels)))

    selectSeed = pipe.Node(interface=Select(), name='selectSeed')
    selectSeed.inputs.inlist = seeds
    preproc.connect(seedsIdentity, 'index', selectSeed, 'index')

    selectLabel = pipe.Node(interface=Select(), name='selectLabel')
    selectLabel.inputs.inlist = labels
    preproc.connect(seedsIdentity, 'index', selectLabel, 'index')

    points = pipe.Node(interface=Function(
        function=createSphereExpression,
        input_names=['coordinates', 'radius'],
        output_names=['expression']),
                       name='createSphereExpression')
    preproc.connect(selectSeed, 'out', points, 'coordinates')

    downsampleAtlas = pipe.Node(interface=Function(
        function=resampleImage,
        input_names=['inputVolume', 'outputVolume', 'resolution'],
        output_names=['outputVolume']),
                                name="downsampleAtlas")
    downsampleAtlas.inputs.inputVolume = nacAtlasFile
    downsampleAtlas.inputs.outputVolume = downsampledNACfilename
    downsampleAtlas.inputs.resolution = [int(x) for x in nacResampleResolution]

    spheres = pipe.Node(interface=Calc(letters=['a']), name='afni3Dcalc_seeds')
    spheres.inputs.outputtype = 'NIFTI_GZ'
    preproc.connect(downsampleAtlas, 'outputVolume', spheres, 'in_file_a')
    spheres.inputs.args = '-nscale'

    preproc.connect(points, 'expression', spheres, 'expr')

    renameMasks = pipe.Node(interface=Rename(format_string='%(label)s_mask'),
                            name='renameMasksAtlas')
    renameMasks.inputs.keep_ext = True
    preproc.connect(selectLabel, 'out', renameMasks, 'label')
    preproc.connect(spheres, 'out_file', renameMasks, 'in_file')

    atlas_DataSink = pipe.Node(interface=DataSink(), name="atlas_DataSink")
    atlas_DataSink.inputs.base_directory = preproc.base_dir  # '/Shared/paulsen/Experiments/20130417_rsfMRI_Results'
    atlas_DataSink.inputs.container = RESULTS_DIR
    atlas_DataSink.inputs.parameterization = False
    atlas_DataSink.overwrite = REWRITE_DATASINKS
    preproc.connect(renameMasks, 'out_file', atlas_DataSink, 'Atlas')
    preproc.connect(downsampleAtlas, 'outputVolume', atlas_DataSink,
                    'Atlas.@resampled')
    preproc.run()
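`writeSeedFiles` depends on two project helpers that are not shown here: `getAtlasPoints`, which parses the Slicer .fcsv fiducial file into labels and seed coordinates, and `createSphereExpression`, which turns a coordinate into an AFNI 3dcalc expression. A sketch of the latter, assuming a voxel belongs to a seed when it lies within `radius` mm of the point (the project's real helper may handle sign conventions differently):

def createSphereExpression(coordinates, radius=5):
    """Return a 3dcalc expression that is 1 inside the sphere, 0 outside."""
    x, y, z = coordinates
    # step(t) is 1 for t > 0; x, y, z are voxel coordinates in mm inside 3dcalc
    return ('step({rsq} - (x{x:+.2f})*(x{x:+.2f}) - (y{y:+.2f})*(y{y:+.2f})'
            ' - (z{z:+.2f})*(z{z:+.2f}))').format(rsq=radius ** 2,
                                                  x=-x, y=-y, z=-z)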
Code example #16
File: workflows.py  Project: MichlF/misc
def create_compcor_workflow(name='compcor'):
    """ Creates A/T compcor workflow. """

    input_node = pe.Node(interface=IdentityInterface(fields=[
        'in_file', 'fast_files', 'highres2epi_mat', 'n_comp_tcompcor',
        'n_comp_acompcor', 'output_directory', 'sub_id'
    ]),
                         name='inputspec')

    output_node = pe.Node(interface=IdentityInterface(
        fields=['tcompcor_file', 'acompcor_file', 'epi_mask']),
                          name='outputspec')

    extract_task = pe.MapNode(interface=Extract_task,
                              iterfield=['in_file'],
                              name='extract_task')

    rename_acompcor = pe.MapNode(interface=Rename(
        format_string='task-%(task)s_acompcor.tsv', keep_ext=True),
                                 iterfield=['task', 'in_file'],
                                 name='rename_acompcor')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    average_func = pe.MapNode(interface=fsl.maths.MeanImage(dimension='T'),
                              name='average_func',
                              iterfield=['in_file'])

    epi_mask = pe.MapNode(interface=fsl.BET(frac=.3,
                                            mask=True,
                                            no_output=True,
                                            robust=True),
                          iterfield=['in_file'],
                          name='epi_mask')

    wm2epi = pe.MapNode(fsl.ApplyXFM(interp='nearestneighbour'),
                        iterfield=['reference'],
                        name='wm2epi')

    csf2epi = pe.MapNode(fsl.ApplyXFM(interp='nearestneighbour'),
                         iterfield=['reference'],
                         name='csf2epi')

    erode_csf = pe.MapNode(interface=Erode_mask,
                           name='erode_csf',
                           iterfield=['epi_mask', 'in_file'])
    erode_csf.inputs.erosion_mm = 0
    erode_csf.inputs.epi_mask_erosion_mm = 30

    erode_wm = pe.MapNode(interface=Erode_mask,
                          name='erode_wm',
                          iterfield=['epi_mask', 'in_file'])

    erode_wm.inputs.erosion_mm = 6
    erode_wm.inputs.epi_mask_erosion_mm = 10

    merge_wm_and_csf_masks = pe.MapNode(Merge(2),
                                        name='merge_wm_and_csf_masks',
                                        iterfield=['in1', 'in2'])

    # This should be fit on the 30mm eroded mask from CSF
    tcompcor = pe.MapNode(TCompCor(components_file='tcompcor_comps.txt'),
                          iterfield=['realigned_file', 'mask_files'],
                          name='tcompcor')

    # WM + CSF mask
    acompcor = pe.MapNode(ACompCor(components_file='acompcor_comps.txt',
                                   merge_method='union'),
                          iterfield=['realigned_file', 'mask_files'],
                          name='acompcor')

    compcor_wf = pe.Workflow(name=name)
    compcor_wf.connect(input_node, 'in_file', extract_task, 'in_file')
    compcor_wf.connect(extract_task, 'task_name', rename_acompcor, 'task')
    compcor_wf.connect(acompcor, 'components_file', rename_acompcor, 'in_file')

    compcor_wf.connect(input_node, 'sub_id', datasink, 'container')
    compcor_wf.connect(input_node, 'output_directory', datasink,
                       'base_directory')

    compcor_wf.connect(input_node, ('fast_files', pick_wm), wm2epi, 'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', wm2epi, 'reference')
    compcor_wf.connect(input_node, 'highres2epi_mat', wm2epi, 'in_matrix_file')

    compcor_wf.connect(input_node, ('fast_files', pick_csf), csf2epi,
                       'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', csf2epi, 'reference')
    compcor_wf.connect(input_node, 'highres2epi_mat', csf2epi,
                       'in_matrix_file')

    compcor_wf.connect(input_node, 'n_comp_tcompcor', tcompcor,
                       'num_components')
    compcor_wf.connect(input_node, 'n_comp_acompcor', acompcor,
                       'num_components')

    compcor_wf.connect(input_node, 'in_file', average_func, 'in_file')
    compcor_wf.connect(average_func, 'out_file', epi_mask, 'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', erode_csf, 'epi_mask')
    compcor_wf.connect(epi_mask, 'mask_file', erode_wm, 'epi_mask')

    compcor_wf.connect(wm2epi, 'out_file', erode_wm, 'in_file')
    compcor_wf.connect(csf2epi, 'out_file', erode_csf, 'in_file')

    compcor_wf.connect(erode_wm, 'roi_eroded', merge_wm_and_csf_masks, 'in1')
    compcor_wf.connect(erode_csf, 'roi_eroded', merge_wm_and_csf_masks, 'in2')
    compcor_wf.connect(merge_wm_and_csf_masks, 'out', acompcor, 'mask_files')

    compcor_wf.connect(input_node, 'in_file', acompcor, 'realigned_file')
    compcor_wf.connect(input_node, 'in_file', tcompcor, 'realigned_file')
    compcor_wf.connect(erode_csf, 'epi_mask_eroded', tcompcor, 'mask_files')

    # compcor_wf.connect(tcompcor, 'components_file', output_node, 'tcompcor_file')
    # compcor_wf.connect(acompcor, 'components_file', output_node, 'acompcor_file')
    compcor_wf.connect(epi_mask, 'mask_file', output_node, 'epi_mask')

    compcor_wf.connect(rename_acompcor, 'out_file', datasink, 'acompcor_file')

    #compcor_wf.connect(tcompcor, 'components_file', combine_files, 'tcomp')
    #compcor_wf.connect(acompcor, 'components_file', combine_files, 'acomp')
    #compcor_wf.connect(combine_files, 'out_file', datasink, 'confounds')

    return compcor_wf
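The workflow selects FSL FAST's partial-volume maps with `pick_wm` and `pick_csf`, which are imported from elsewhere in the project. FAST numbers its PVE outputs pve_0 (CSF), pve_1 (GM) and pve_2 (WM), so minimal sketches of the two selectors could look like this (exact implementations assumed):

def pick_wm(fast_files):
    """Select the white-matter PVE map (pve_2) from FAST's outputs."""
    return [f for f in fast_files if 'pve_2' in f]


def pick_csf(fast_files):
    """Select the CSF PVE map (pve_0) from FAST's outputs."""
    return [f for f in fast_files if 'pve_0' in f]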
Code example #17
File: dwi_dti_utils.py  Project: ghisvail/clinica
def rename_into_caps(
    in_caps_dwi,
    in_norm_fa,
    in_norm_md,
    in_norm_ad,
    in_norm_rd,
    in_b_spline_transform,
    in_affine_matrix,
):
    """Rename different outputs of the pipelines into CAPS format.

    Returns:
        The different outputs with CAPS naming convention
    """
    from nipype.interfaces.utility import Rename

    from clinica.pipelines.dwi_dti.dwi_dti_utils import (
        extract_bids_identifier_from_caps_filename, )

    bids_identifier = extract_bids_identifier_from_caps_filename(in_caps_dwi)

    # CAPS normalized FA
    rename_fa = Rename()
    rename_fa.inputs.in_file = in_norm_fa
    rename_fa.inputs.format_string = (
        f"{bids_identifier}_space-MNI152Lin_res-1x1x1_FA.nii.gz")
    out_caps_fa = rename_fa.run()
    # CAPS normalized MD
    rename_md = Rename()
    rename_md.inputs.in_file = in_norm_md
    rename_md.inputs.format_string = (
        f"{bids_identifier}_space-MNI152Lin_res-1x1x1_MD.nii.gz")
    out_caps_md = rename_md.run()
    # CAPS normalized AD
    rename_ad = Rename()
    rename_ad.inputs.in_file = in_norm_ad
    rename_ad.inputs.format_string = (
        f"{bids_identifier}_space-MNI152Lin_res-1x1x1_AD.nii.gz")
    out_caps_ad = rename_ad.run()
    # CAPS normalized RD
    rename_rd = Rename()
    rename_rd.inputs.in_file = in_norm_rd
    rename_rd.inputs.format_string = (
        f"{bids_identifier}_space-MNI152Lin_res-1x1x1_RD.nii.gz")
    out_caps_rd = rename_rd.run()
    # CAPS B-spline transform
    rename_b_spline = Rename()
    rename_b_spline.inputs.in_file = in_b_spline_transform
    rename_b_spline.inputs.format_string = (
        f"{bids_identifier}_space-MNI152Lin_res-1x1x1_deformation.nii.gz")
    out_caps_b_spline_transform = rename_b_spline.run()
    # CAPS Affine matrix
    rename_affine = Rename()
    rename_affine.inputs.in_file = in_affine_matrix
    rename_affine.inputs.format_string = (
        f"{bids_identifier}_space-MNI152Lin_res-1x1x1_affine.mat")
    out_caps_affine_matrix = rename_affine.run()

    return (
        out_caps_fa.outputs.out_file,
        out_caps_md.outputs.out_file,
        out_caps_ad.outputs.out_file,
        out_caps_rd.outputs.out_file,
        out_caps_b_spline_transform.outputs.out_file,
        out_caps_affine_matrix.outputs.out_file,
    )
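`extract_bids_identifier_from_caps_filename` is imported from clinica and, conceptually, pulls the sub-<label>_ses-<label>[_acq-<label>]_dwi prefix out of the CAPS path. A regex-based sketch of that behavior (the real clinica helper may differ in details):

import re


def extract_bids_identifier_from_caps_filename(caps_dwi_filename):
    """Extract the BIDS identifier, e.g. sub-CLNC01_ses-M00_dwi, from a CAPS DWI path."""
    match = re.search(r'sub-[a-zA-Z0-9]+_ses-[a-zA-Z0-9]+.*_dwi',
                      caps_dwi_filename)
    if match is None:
        raise ValueError(
            f'Could not extract the BIDS identifier from {caps_dwi_filename}')
    return match.group(0)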
Code example #18
def rename_into_caps(in_caps_dwi,
                     in_norm_fa, in_norm_md, in_norm_ad, in_norm_rd,
                     in_b_spline_transform, in_affine_matrix):
    """
    Rename the outputs of the pipelines into CAPS format namely:
    <source_file>_space-T1w_preproc[.nii.gz|bval|bvec]

    Args:

    Returns:
        The different outputs in CAPS format
    """
    from nipype.utils.filemanip import split_filename
    from nipype.interfaces.utility import Rename
    import os

    from clinica.pipelines.dwi_processing_dti.dwi_processing_dti_utils import extract_bids_identifier_from_caps_filename

    bids_identifier = extract_bids_identifier_from_caps_filename(in_caps_dwi)

    # Extract base path from fname:
    base_dir_norm_fa, _, _ = split_filename(in_norm_fa)
    base_dir_norm_md, _, _ = split_filename(in_norm_md)
    base_dir_norm_ad, _, _ = split_filename(in_norm_ad)
    base_dir_norm_rd, _, _ = split_filename(in_norm_rd)
    base_dir_b_spline_transform, _, _ = split_filename(in_b_spline_transform)
    base_dir_affine_matrix, _, _ = split_filename(in_affine_matrix)

    # Rename into CAPS FA:
    rename_fa = Rename()
    rename_fa.inputs.in_file = in_norm_fa
    rename_fa.inputs.format_string = os.path.join(
        base_dir_norm_fa,
        bids_identifier + "_space-MNI152Lin_res-1x1x1_fa.nii.gz")
    out_caps_fa = rename_fa.run()

    # Rename into CAPS MD:
    rename_md = Rename()
    rename_md.inputs.in_file = in_norm_md
    rename_md.inputs.format_string = os.path.join(
        base_dir_norm_md,
        bids_identifier + "_space-MNI152Lin_res-1x1x1_md.nii.gz")
    out_caps_md = rename_md.run()

    # Rename into CAPS AD:
    rename_ad = Rename()
    rename_ad.inputs.in_file = in_norm_ad
    rename_ad.inputs.format_string = os.path.join(
        base_dir_norm_ad,
        bids_identifier + "_space-MNI152Lin_res-1x1x1_ad.nii.gz")
    out_caps_ad = rename_ad.run()

    # Rename into CAPS RD:
    rename_rd = Rename()
    rename_rd.inputs.in_file = in_norm_rd
    rename_rd.inputs.format_string = os.path.join(
        base_dir_norm_rd,
        bids_identifier + "_space-MNI152Lin_res-1x1x1_rd.nii.gz")
    out_caps_rd = rename_rd.run()

    # Rename into CAPS B-spline transform:
    rename_b_spline = Rename()
    rename_b_spline.inputs.in_file = in_b_spline_transform
    rename_b_spline.inputs.format_string = os.path.join(
        base_dir_b_spline_transform,
        bids_identifier + "_space-MNI152Lin_res-1x1x1_deformation.nii.gz")
    out_caps_b_spline_transform = rename_b_spline.run()

    # Rename into CAPS Affine Matrix:
    rename_affine = Rename()
    rename_affine.inputs.in_file = in_affine_matrix
    rename_affine.inputs.format_string = os.path.join(
        base_dir_affine_matrix,
        bids_identifier + "_space-MNI152Lin_res-1x1x1_affine.mat")
    out_caps_affine_matrix = rename_affine.run()

    from clinica.utils.stream import cprint
    cprint("Renamed files:")
    cprint(out_caps_fa.outputs.out_file)
    cprint(out_caps_md.outputs.out_file)
    cprint(out_caps_ad.outputs.out_file)
    cprint(out_caps_rd.outputs.out_file)
    cprint(out_caps_b_spline_transform.outputs.out_file)
    cprint(out_caps_affine_matrix.outputs.out_file)

    return (
        out_caps_fa.outputs.out_file,
        out_caps_md.outputs.out_file,
        out_caps_ad.outputs.out_file,
        out_caps_rd.outputs.out_file,
        out_caps_b_spline_transform.outputs.out_file,
        out_caps_affine_matrix.outputs.out_file,
    )
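Both variants above spell out the destination name in full. When only part of the name changes, Rename can also capture pieces of the input name with `parse_string` (named regex groups) and splice them into `format_string`. A minimal sketch with a hypothetical input file (the file must exist when `run()` is called):

from nipype.interfaces.utility import Rename

rename = Rename()
rename.inputs.in_file = 'sub-01_ses-M00_dwi_FA.nii.gz'  # hypothetical input
rename.inputs.parse_string = r'(?P<subj>sub-[^_]+)_(?P<sess>ses-[^_]+)'
rename.inputs.format_string = '%(subj)s_%(sess)s_space-MNI152Lin_res-1x1x1_FA'
rename.inputs.keep_ext = True  # re-attaches .nii.gz to the new name
result = rename.run()  # result.outputs.out_file holds the renamed path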
Code example #19
def rename_into_caps(
    in_bids_dwi,
    fname_dwi,
    fname_bval,
    fname_bvec,
    fname_brainmask,
    fname_magnitude,
    fname_fmap,
    fname_smoothed_fmap,
):
    """Rename the outputs of the pipelines into CAPS.

    Args:
        in_bids_dwi (str): Input BIDS DWI to extract the <source_file>
        fname_dwi (str): Preprocessed DWI file.
        fname_bval (str): Preprocessed bval.
        fname_bvec (str): Preprocessed bvec.
        fname_brainmask (str): B0 mask.
        fname_magnitude (str): Magnitude image in b0 space.
        fname_fmap (str): Calibrated fmap in b0 space.
        fname_smoothed_fmap (str): Smoothed (calibrated) fmap in b0 space.

    Returns:
        Tuple[str, str, str, str, str, str, str]: The different outputs in CAPS format.
    """
    import os

    from nipype.interfaces.utility import Rename
    from nipype.utils.filemanip import split_filename

    # Extract <source_file> in format sub-CLNC01_ses-M00_[acq-label]_dwi
    _, source_file_dwi, _ = split_filename(in_bids_dwi)

    # Extract base path from fname:
    base_dir_dwi, _, _ = split_filename(fname_dwi)
    base_dir_bval, _, _ = split_filename(fname_bval)
    base_dir_bvec, _, _ = split_filename(fname_bvec)
    base_dir_brainmask, _, _ = split_filename(fname_brainmask)
    base_dir_smoothed_fmap, _, _ = split_filename(fname_smoothed_fmap)
    base_dir_calibrated_fmap, _, _ = split_filename(fname_fmap)
    base_dir_magnitude, _, _ = split_filename(fname_magnitude)

    # Rename into CAPS DWI:
    rename_dwi = Rename()
    rename_dwi.inputs.in_file = fname_dwi
    rename_dwi.inputs.format_string = os.path.join(
        base_dir_dwi, f"{source_file_dwi}_space-b0_preproc.nii.gz")
    out_caps_dwi = rename_dwi.run()

    # Rename into CAPS bval:
    rename_bval = Rename()
    rename_bval.inputs.in_file = fname_bval
    rename_bval.inputs.format_string = os.path.join(
        base_dir_bval, f"{source_file_dwi}_space-b0_preproc.bval")
    out_caps_bval = rename_bval.run()

    # Rename into CAPS bvec:
    rename_bvec = Rename()
    rename_bvec.inputs.in_file = fname_bvec
    rename_bvec.inputs.format_string = os.path.join(
        base_dir_bvec, f"{source_file_dwi}_space-b0_preproc.bvec")
    out_caps_bvec = rename_bvec.run()

    # Rename into CAPS brainmask:
    rename_brainmask = Rename()
    rename_brainmask.inputs.in_file = fname_brainmask
    rename_brainmask.inputs.format_string = os.path.join(
        base_dir_brainmask, f"{source_file_dwi}_space-b0_brainmask.nii.gz")
    out_caps_brainmask = rename_brainmask.run()

    # Rename into CAPS magnitude:
    rename_magnitude = Rename()
    rename_magnitude.inputs.in_file = fname_magnitude
    rename_magnitude.inputs.format_string = os.path.join(
        base_dir_magnitude, f"{source_file_dwi}_space-b0_magnitude1.nii.gz")
    out_caps_magnitude = rename_magnitude.run()

    # Rename into CAPS fmap:
    rename_calibrated_fmap = Rename()
    rename_calibrated_fmap.inputs.in_file = fname_fmap
    rename_calibrated_fmap.inputs.format_string = os.path.join(
        base_dir_calibrated_fmap, f"{source_file_dwi}_space-b0_fmap.nii.gz")
    out_caps_fmap = rename_calibrated_fmap.run()

    # Rename into CAPS smoothed fmap:
    rename_smoothed_fmap = Rename()
    rename_smoothed_fmap.inputs.in_file = fname_smoothed_fmap
    rename_smoothed_fmap.inputs.format_string = os.path.join(
        base_dir_smoothed_fmap,
        f"{source_file_dwi}_space-b0_fwhm-4_fmap.nii.gz")
    out_caps_smoothed_fmap = rename_smoothed_fmap.run()

    return (
        out_caps_dwi.outputs.out_file,
        out_caps_bval.outputs.out_file,
        out_caps_bvec.outputs.out_file,
        out_caps_brainmask.outputs.out_file,
        out_caps_magnitude.outputs.out_file,
        out_caps_fmap.outputs.out_file,
        out_caps_smoothed_fmap.outputs.out_file,
    )
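Inside the clinica pipelines a function like this is typically wrapped in a Function node so the renaming runs as a step of the workflow graph; a minimal sketch (node and output field names assumed):

import nipype.interfaces.utility as nutil
import nipype.pipeline.engine as npe

rename_node = npe.Node(
    nutil.Function(
        input_names=['in_bids_dwi', 'fname_dwi', 'fname_bval', 'fname_bvec',
                     'fname_brainmask', 'fname_magnitude', 'fname_fmap',
                     'fname_smoothed_fmap'],
        output_names=['out_dwi', 'out_bval', 'out_bvec', 'out_brainmask',
                      'out_magnitude', 'out_fmap', 'out_smoothed_fmap'],
        function=rename_into_caps),
    name='rename_into_caps')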
Code example #20
def pipeline(args):
    if args['debug']:
        config.enable_debug_mode()
    config.update_config(
        {'logging': {
            'log_directory': makeSupportDir(args['name'], "logs")
        }})
    logging.update_logging(config)

    # CONSTANTS
    sessionID = args['session']
    outputType = args['format'].upper()
    fOutputType = args['freesurfer']
    preprocessOn = args['preprocess']
    maskGM = args['maskgm']
    maskWholeBrain = args['maskwb']
    maskWhiteMatterFromSeeds = args['maskseeds']
    # print args['name']
    t1_experiment = "20141001_PREDICTHD_long_Results"  #"20130729_PREDICT_Results"
    atlasFile = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "ReferenceAtlas",
                     "template_t1.nii.gz"))
    wholeBrainFile = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "ReferenceAtlas",
                     "template_brain.nii.gz"))
    atlasLabel = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "ReferenceAtlas",
                     "template_nac_labels.nii.gz"))
    resampleResolution = (2.0, 2.0, 2.0)
    downsampledfilename = 'downsampled_atlas.nii.gz'

    master = pipe.Workflow(name=args['name'] + "_CACHE")
    master.base_dir = os.path.abspath("/Shared/sinapse/CACHE")

    sessions = pipe.Node(interface=IdentityInterface(fields=['session_id']),
                         name='sessionIDs')
    sessions.iterables = ('session_id', sessionID)
    downsampleAtlas = pipe.Node(interface=Function(
        function=resampleImage,
        input_names=['inputVolume', 'outputVolume', 'resolution'],
        output_names=['outputVolume']),
                                name="downsampleAtlas")
    downsampleAtlas.inputs.inputVolume = atlasFile
    downsampleAtlas.inputs.outputVolume = downsampledfilename
    downsampleAtlas.inputs.resolution = [int(x) for x in resampleResolution]

    # HACK: Remove node from pipeline until Nipype/AFNI file copy issue is resolved
    # fmri_DataSink = pipe.Node(interface=DataSink(), name="fmri_DataSink")
    # fmri_DataSink.overwrite = REWRITE_DATASINKS
    # Output to: /Shared/paulsen/Experiments/YYYYMMDD_<experiment>_Results/fmri
    # fmri_DataSink.inputs.base_directory = os.path.join(master.base_dir, RESULTS_DIR, 'fmri')
    # fmri_DataSink.inputs.substitutions = [('to_3D_out+orig', 'to3D')]
    # fmri_DataSink.inputs.parameterization = False
    #
    # master.connect([(sessions, fmri_DataSink, [('session_id', 'container')])])
    # END HACK

    registration = registrationWorkflow.workflow(t1_experiment,
                                                 outputType,
                                                 name="registration_wkfl")
    master.connect([(sessions, registration, [('session_id',
                                               "inputs.session_id")])])

    detrend = afninodes.detrendnode(outputType, 'afni3Ddetrend')
    # define grabber
    site = "*"
    subject = "*"
    if preprocessOn:
        grabber = dataio.iowaGrabber(t1_experiment, site, subject, maskGM,
                                     maskWholeBrain)
        master.connect([(sessions, grabber, [('session_id', 'session_id')]),
                        (grabber, registration, [('t1_File', 'inputs.t1')])])
        # Why isn't preprocessWorkflow.workflow() used instead? It would avoid most of the nuisance connections here...
        preprocessing = preprocessWorkflow.prepWorkflow(skipCount=6,
                                                        outputType=outputType)
        name = args.pop(
            'name')  # HACK: prevent name conflict with nuisance workflow
        nuisance = nuisanceWorkflow.workflow(outputType=outputType, **args)
        args['name'] = name  # END HACK
        master.connect([
            (grabber, preprocessing, [('fmri_dicom_dir', 'to_3D.infolder'),
                                      ('fmri_dicom_dir',
                                       'formatFMRINode.dicomDirectory')]),
            (grabber, nuisance, [('whmFile', 'wm.warpWMtoFMRI.input_image')]),
            (
                preprocessing,
                registration,
                [
                    ('merge.out_file', 'inputs.fmri'),  # 7
                    ('automask.out_file', 'tstat.mask_file')
                ]),  # *optional*
            (
                registration,
                nuisance,
                [
                    ('outputs.fmri_reference',
                     'csf.warpCSFtoFMRI.reference_image'),  # CSF
                    ('outputs.nac2fmri_list', 'csf.warpCSFtoFMRI.transforms'),
                    ('outputs.fmri_reference',
                     'wm.warpWMtoFMRI.reference_image'),  # WM
                    ('outputs.t12fmri_list', 'wm.warpWMtoFMRI.transforms')
                ]),
        ])
        warpCSFtoFMRInode = nuisance.get_node('csf').get_node('warpCSFtoFMRI')
        warpCSFtoFMRInode.inputs.input_image = atlasFile
        if maskGM:
            master.connect([
                (grabber, nuisance, [('gryFile', 'gm.warpGMtoFMRI.input_image')
                                     ]),
                (registration, nuisance, [('outputs.fmri_reference',
                                           'gm.warpGMtoFMRI.reference_image'),
                                          ('outputs.t12fmri_list',
                                           'gm.warpGMtoFMRI.transforms')]),
                (preprocessing, nuisance,
                 [('calc.out_file', 'gm.afni3DmaskAve_grm.in_file'),
                  ('volreg.oned_file', 'afni3Ddeconvolve.stim_file_4')])
            ])
        elif maskWholeBrain:
            master.connect([
                (registration, nuisance,
                 [('outputs.fmri_reference',
                   'wb.warpBraintoFMRI.reference_image'),
                  ('outputs.nac2fmri_list', 'wb.warpBraintoFMRI.transforms')]),
                (preprocessing, nuisance,
                 [('calc.out_file', 'wb.afni3DmaskAve_whole.in_file'),
                  ('volreg.oned_file', 'afni3Ddeconvolve.stim_file_4')])
            ])
            warpBraintoFMRInode = nuisance.get_node('wb').get_node(
                'warpBraintoFMRI')
            warpBraintoFMRInode.inputs.input_image = wholeBrainFile
        else:
            master.connect([(preprocessing, nuisance, [
                ('volreg.oned_file', 'afni3Ddeconvolve.stim_file_3')
            ])])

        master.connect([(preprocessing, nuisance,
                         [('calc.out_file', 'wm.afni3DmaskAve_wm.in_file'),
                          ('calc.out_file', 'csf.afni3DmaskAve_csf.in_file'),
                          ('calc.out_file', 'afni3Ddeconvolve.in_file')]),
                        (nuisance, detrend, [('afni3Ddeconvolve.out_errts',
                                              'in_file')])])  # 13
    else:
        cleveland_grabber = dataio.clevelandGrabber()
        grabber = dataio.autoworkupGrabber(t1_experiment, site, subject)
        converter = pipe.Node(interface=Copy(),
                              name='converter')  # Convert ANALYZE to AFNI

        master.connect([
            (sessions, grabber, [('session_id', 'session_id')]),
            (grabber, registration, [('t1_File', 'inputs.t1')]),
            (sessions, cleveland_grabber, [('session_id', 'session_id')]),
            (cleveland_grabber, converter, [('fmriHdr', 'in_file')]),
            (converter, registration, [('out_file', 'inputs.fmri')]),
            (converter, detrend, [('out_file', 'in_file')]),  # in fMRI_space
        ])

    t1_wf = registrationWorkflow.t1Workflow()
    babc_wf = registrationWorkflow.babcWorkflow()
    # HACK: No EPI
    # epi_wf = registrationWorkflow.epiWorkflow()
    lb_wf = registrationWorkflow.labelWorkflow()
    seed_wf = registrationWorkflow.seedWorkflow()
    bandpass = afninodes.fouriernode(
        outputType, 'fourier'
    )  # Fourier is the last NIFTI file format in the AFNI pipeline

    master.connect([
        (detrend, bandpass, [('out_file', 'in_file')
                             ]),  # Per Dawei, bandpass after running 3dDetrend
        (grabber, t1_wf, [('t1_File', 'warpT1toFMRI.input_image')]),
        (
            registration,
            t1_wf,
            [
                ('outputs.fmri_reference',
                 'warpT1toFMRI.reference_image'),  # T1
                ('outputs.t12fmri_list', 'warpT1toFMRI.transforms')
            ]),
        (grabber, babc_wf, [('csfFile', 'warpBABCtoFMRI.input_image')]),
        (
            registration,
            babc_wf,
            [
                ('outputs.fmri_reference',
                 'warpBABCtoFMRI.reference_image'),  # Labels
                ('outputs.t12fmri_list', 'warpBABCtoFMRI.transforms')
            ]),
        # HACK: No EPI
        # (downsampleAtlas, epi_wf, [('outputVolume', 'warpEPItoNAC.reference_image')]),
        # (registration, epi_wf,    [('outputs.fmri2nac_list', 'warpEPItoNAC.transforms')]),
        # (bandpass, epi_wf,         [('out_file', 'warpEPItoNAC.input_image')]),
        # END HACK
        (downsampleAtlas, lb_wf, [('outputVolume',
                                   'warpLabeltoNAC.reference_image')]),
        (registration, lb_wf, [('outputs.fmri2nac_list',
                                'warpLabeltoNAC.transforms')]),
        (t1_wf, seed_wf, [('warpT1toFMRI.output_image',
                           'warpSeedtoFMRI.reference_image')]),
        (registration, seed_wf, [('outputs.nac2fmri_list',
                                  'warpSeedtoFMRI.transforms')]),
    ])

    renameMasks = pipe.Node(interface=Rename(format_string='%(label)s_mask'),
                            name='renameMasksAtlas')
    renameMasks.inputs.keep_ext = True
    atlas_DataSink = dataio.atlasSink(base_directory=master.base_dir, **args)
    master.connect([
        (renameMasks, atlas_DataSink, [('out_file', 'Atlas')]),
        (downsampleAtlas, atlas_DataSink, [('outputVolume', 'Atlas.@resampled')
                                           ]),
    ])

    renameMasks2 = pipe.Node(
        interface=Rename(format_string='%(session)s_%(label)s_mask'),
        name='renameMasksFMRI')
    renameMasks2.inputs.keep_ext = True
    master.connect(sessions, 'session_id', renameMasks2, 'session')

    clipSeedWithVentriclesNode = pipe.Node(interface=Function(
        function=clipSeedWithVentricles,
        input_names=['seed', 'label', 'outfile'],
        output_names=['clipped_seed_fn']),
                                           name='clipSeedWithVentriclesNode')
    clipSeedWithVentriclesNode.inputs.outfile = "clipped_seed.nii.gz"

    master.connect(seed_wf, 'warpSeedtoFMRI.output_image',
                   clipSeedWithVentriclesNode, 'seed')
    master.connect(babc_wf, 'warpBABCtoFMRI.output_image',
                   clipSeedWithVentriclesNode, 'label')
    if not maskWhiteMatterFromSeeds:
        master.connect(clipSeedWithVentriclesNode, 'clipped_seed_fn',
                       renameMasks2, 'in_file')
    else:
        clipSeedWithWhiteMatterNode = pipe.Node(
            interface=Function(function=clipSeedWithWhiteMatter,
                               input_names=['seed', 'mask', 'outfile'],
                               output_names=['outfile']),
            name='clipSeedWithWhiteMatterNode')
        clipSeedWithWhiteMatterNode.inputs.outfile = 'clipped_wm_seed.nii.gz'
        master.connect(babc_wf, 'warpBABCtoFMRI.output_image',
                       clipSeedWithWhiteMatterNode, 'mask')
        master.connect(clipSeedWithVentriclesNode, 'clipped_seed_fn',
                       clipSeedWithWhiteMatterNode, 'seed')
        master.connect(clipSeedWithWhiteMatterNode, 'outfile', renameMasks2,
                       'in_file')
    # Labels are iterated over, so we need a separate datasink to avoid overwriting any
    # preprocessing results when the labels are iterated (e.g. To3d output)
    # Write out to: /Shared/sinapse/CACHE/YYYYMMDD_<experiment>_Results/<SESSION>
    fmri_label_DataSink = dataio.fmriSink(master.base_dir, **args)
    master.connect(sessions, 'session_id', fmri_label_DataSink, 'container')
    master.connect(renameMasks2, 'out_file', fmri_label_DataSink, 'masks')
    master.connect(bandpass, 'out_file', fmri_label_DataSink,
                   'masks.@bandpass')

    roiMedian = afninodes.maskavenode('AFNI_1D', 'afni_roiMedian',
                                      '-mrange 1 1')
    master.connect(renameMasks2, 'out_file', roiMedian, 'mask')
    master.connect(bandpass, 'out_file', roiMedian, 'in_file')

    correlate = afninodes.fimnode('Correlation', 'afni_correlate')
    master.connect(roiMedian, 'out_file', correlate, 'ideal_file')
    master.connect(bandpass, 'out_file', correlate, 'in_file')

    regionLogCalc = afninodes.logcalcnode(outputType, 'afni_regionLogCalc')
    master.connect(correlate, 'out_file', regionLogCalc, 'in_file_a')

    renameZscore = pipe.Node(
        interface=Rename(format_string="%(session)s_%(label)s_zscore"),
        name='renameZscore')
    renameZscore.inputs.keep_ext = True
    master.connect(sessions, 'session_id', renameZscore, 'session')
    master.connect(regionLogCalc, 'out_file', renameZscore, 'in_file')
    master.connect(renameZscore, 'out_file', fmri_label_DataSink, 'zscores')
    master.connect(t1_wf, 'warpT1toFMRI.output_image', fmri_label_DataSink,
                   'zscores.@t1Underlay')

    # Move z values back into NAC atlas space
    # master.connect(downsampleAtlas, 'outputVolume', lb_wf, 'warpLabeltoNAC.reference_image')
    master.connect(regionLogCalc, 'out_file', lb_wf,
                   'warpLabeltoNAC.input_image')

    renameZscore2 = pipe.Node(
        interface=Rename(format_string="%(session)s_%(label)s_result"),
        name='renameZscore2')
    renameZscore2.inputs.keep_ext = True
    master.connect(sessions, 'session_id', renameZscore2, 'session')
    master.connect(lb_wf, 'warpLabeltoNAC.output_image', renameZscore2,
                   'in_file')
    master.connect(renameZscore2, 'out_file', atlas_DataSink, 'Atlas.@zscore')
    # Connect seed subworkflow
    seedSubflow = seedWorkflow.workflow(args['seeds'],
                                        outputType='NIFTI_GZ',
                                        name='seed_wkfl')
    master.connect([
        (downsampleAtlas, seedSubflow, [('outputVolume',
                                         'afni3Dcalc_seeds.in_file_a')]),
        (seedSubflow, renameMasks, [('afni3Dcalc_seeds.out_file', 'in_file'),
                                    ('selectLabel.out', 'label')]),
        (seedSubflow, renameMasks2, [('selectLabel.out', 'label')]),
        (seedSubflow, renameZscore, [('selectLabel.out', 'label')]),
        (seedSubflow, renameZscore2, [('selectLabel.out', 'label')]),
        (seedSubflow, seed_wf, [('afni3Dcalc_seeds.out_file',
                                 'warpSeedtoFMRI.input_image')])
    ])
    imageDir = makeSupportDir(args['name'], "images")
    if args['plot']:
        registration.write_graph(dotfilename=os.path.join(
            imageDir, 'register.dot'),
                                 graph2use='orig',
                                 format='png',
                                 simple_form=False)
        if preprocessOn:
            preprocessing.write_graph(dotfilename=os.path.join(
                imageDir, 'preprocess.dot'),
                                      graph2use='orig',
                                      format='png',
                                      simple_form=False)
            nuisance.write_graph(dotfilename=os.path.join(
                imageDir, 'nuisance.dot'),
                                 graph2use='orig',
                                 format='png',
                                 simple_form=False)
        seedSubflow.write_graph(dotfilename=os.path.join(imageDir, 'seed.dot'),
                                graph2use='orig',
                                format='png',
                                simple_form=False)
        master.write_graph(dotfilename=os.path.join(imageDir, 'master.dot'),
                           graph2use="orig",
                           format='png',
                           simple_form=False)
    elif args['debug']:
        try:
            master.run(updatehash=True)
            # Run restingState on the all threads
            # Setup environment for CPU load balancing of ITK based programs.
            # --------
            # import multiprocessing
            # total_CPUS = 10  # multiprocessing.cpu_count()
            # master.run(plugin='MultiProc', plugin_args={'n_proc': total_CPUS})  #, updatehash=True)
            # --------
            # Run restingState on the local cluster
            # master.run(plugin='SGE', plugin_args={'template': os.path.join(os.getcwd(), 'ENV/bin/activate'),
            #                                        'qsub_args': '-S /bin/bash -cwd'})  #, updatehash=True)
        except Exception:
            pass  # keep going so the debug graphs below are still written
        master.name = "master"  # HACK: Bug in Graphviz for nodes beginning with numbers
        master.write_graph(dotfilename=os.path.join(imageDir,
                                                    'debug_hier.dot'),
                           graph2use="colored",
                           format='png')
        master.write_graph(dotfilename=os.path.join(imageDir,
                                                    'debug_orig.dot'),
                           graph2use="flat",
                           format='png')
    else:
        import multiprocessing
        total_CPUS = multiprocessing.cpu_count()
        master.run(plugin='MultiProc',
                   plugin_args={'n_proc': total_CPUS})  #, updatehash=True)
    return 0
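`pipeline` expects a plain dictionary of options, normally assembled by the script's command-line front end. A hand-built sketch with hypothetical values, keys inferred from the lookups in the function body:

args = {
    'name': 'restingState',   # experiment name; also names the CACHE directory
    'session': ['0001'],      # session IDs iterated by the sessionIDs node
    'format': 'nifti_gz',     # upper-cased into the output type, e.g. NIFTI_GZ
    'freesurfer': 'nifti_gz',
    'preprocess': True,       # run preprocessing and nuisance regression
    'maskgm': False,
    'maskwb': False,
    'maskseeds': False,
    'seeds': 'seeds.fcsv',    # hypothetical seed list for the seed subworkflow
    'plot': False,            # write workflow graphs instead of running
    'debug': False,
}
pipeline(args)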