Example 1
    def __init__(self, in_files=['path'], **options):
        import nipype.interfaces.spm as spm
        smooth = spm.Smooth()
        smooth.inputs.in_files = in_files
        for key, value in options.items():
            setattr(smooth.inputs, key, value)
        self.res = smooth.run()
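
A hedged usage sketch for the constructor above; the enclosing class is not shown in the snippet, so the name SmoothRunner is hypothetical, and a configured MATLAB/SPM installation is assumed:

# Hypothetical: 'SmoothRunner' stands in for the unnamed class above.
runner = SmoothRunner(in_files=['functional.nii'], fwhm=[8, 8, 8])
print(runner.res.outputs.smoothed_files)  # 's'-prefixed smoothed images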
Example 2
import os

import nipype.interfaces.spm as spm


def spm_smooth(infiles, fwhm=8):
    startdir = os.getcwd()
    basedir = os.path.split(infiles[0])[0]
    os.chdir(basedir)
    smth = spm.Smooth(matlab_cmd='matlab-spm8')
    smth.inputs.in_files = infiles
    smth.inputs.fwhm = fwhm
    #smth.inputs.ignore_exception = True
    sout = smth.run()
    os.chdir(startdir)
    return sout
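
A usage sketch for the helper above, assuming a matlab-spm8 wrapper script on the PATH and uncompressed NIfTI inputs:

# Hypothetical input path; SPM writes 's'-prefixed files next to the inputs.
result = spm_smooth(['/data/sub01/func.nii'], fwhm=6)
print(result.outputs.smoothed_files)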
Example 3
    def __init__(self, experiment_dir, output_dir, working_dir, func_source,
                 struct_source, datasink):
        self.experiment_dir = experiment_dir
        self.output_dir = output_dir
        self.working_dir = working_dir

        # specify input and output nodes
        self.func_source = func_source
        self.struct_source = struct_source
        self.datasink = datasink

        # specify workflow instance
        self.workflow = pe.Workflow(name='workflow')

        # specify nodes
        self.realign = pe.Node(interface=spm.Realign(), name='realign')

        self.coregister = pe.Node(interface=spm.Coregister(),
                                  name="coregister")
        self.coregister.inputs.jobtype = 'estimate'

        self.segment = pe.Node(interface=spm.Segment(), name="segment")

        self.normalize_func = pe.Node(interface=spm.Normalize(),
                                      name="normalize_func")
        self.normalize_func.inputs.jobtype = "write"

        self.normalize_struc = pe.Node(interface=spm.Normalize(),
                                       name="normalize_struc")
        self.normalize_struc.inputs.jobtype = "write"

        self.smooth = pe.Node(interface=spm.Smooth(), name="smooth")

        # connect the nodes to complete the workflow
        self.workflow.connect([
            (self.func_source, self.realign, [('outfiles', 'in_files')]),
            (self.struct_source, self.coregister, [('outfiles', 'source')]),
            (self.realign, self.coregister, [('mean_image', 'target')]),
            (self.coregister, self.segment, [('coregistered_source', 'data')]),
            (self.segment, self.normalize_func, [('transformation_mat',
                                                  'parameter_file')]),
            (self.realign, self.normalize_func, [('realigned_files',
                                                  'apply_to_files')]),
            (self.normalize_func, self.smooth, [('normalized_files',
                                                 'in_files')]),
            #(self.realign, self.datasink, [('realigned_files', 'realign')]),
            #(self.realign, self.datasink, [('mean_image', 'mean')]),
            (self.normalize_func, self.datasink, [('normalized_files', 'norm')]),
            (self.smooth, self.datasink, [('smoothed_files', 'smooth')])
        ])
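
A wiring sketch for the constructor above, assuming it belongs to a pipeline class (called Preprocessing here purely for illustration) and that the source nodes are DataGrabbers exposing the outfiles field the connections expect:

import nipype.interfaces.io as nio
import nipype.pipeline.engine as pe

# Hypothetical setup; templates and base_directory are omitted for brevity.
func_source = pe.Node(nio.DataGrabber(outfields=['outfiles'], sort_filelist=True),
                      name='func_source')
struct_source = pe.Node(nio.DataGrabber(outfields=['outfiles'], sort_filelist=True),
                        name='struct_source')
datasink = pe.Node(nio.DataSink(), name='datasink')
pipeline = Preprocessing('/experiment', 'output', 'working',
                         func_source, struct_source, datasink)
pipeline.workflow.run()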
Example 4
def test_spm(name='test_spm_3d'):
    """
    A simple workflow to test SPM's installation. By default will split the 4D volume in
    time-steps.
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']),
                        name='inputnode')
    dgr = pe.Node(nio.DataGrabber(template="feeds/data/fmri.nii.gz",
                                  outfields=['out_file'],
                                  sort_filelist=False),
                  name='datasource')

    stc = pe.Node(spm.SliceTiming(num_slices=21,
                                  time_repetition=1.0,
                                  time_acquisition=2. - 2. / 32,
                                  slice_order=list(range(21, 0, -1)),
                                  ref_slice=10),
                  name='stc')
    realign_estimate = pe.Node(spm.Realign(jobtype='estimate'),
                               name='realign_estimate')
    realign_write = pe.Node(spm.Realign(jobtype='write'), name='realign_write')
    realign_estwrite = pe.Node(spm.Realign(jobtype='estwrite'),
                               name='realign_estwrite')
    smooth = pe.Node(spm.Smooth(fwhm=[6, 6, 6]), name='smooth')

    if name == 'test_spm_3d':
        split = pe.Node(fsl.Split(dimension="t", output_type="NIFTI"),
                        name="split")
        workflow.connect([(dgr, split, [(('out_file', _get_first), 'in_file')
                                        ]),
                          (split, stc, [("out_files", "in_files")])])
    elif name == 'test_spm_4d':
        gunzip = pe.Node(Gunzip(), name="gunzip")
        workflow.connect([(dgr, gunzip, [(('out_file', _get_first), 'in_file')
                                         ]),
                          (gunzip, stc, [("out_file", "in_files")])])
    else:
        raise NotImplementedError(
            'No implementation of the test workflow \'{}\' was found'.format(
                name))

    workflow.connect([
        (inputnode, dgr, [('in_data', 'base_directory')]),
        (stc, realign_estimate, [('timecorrected_files', 'in_files')]),
        (realign_estimate, realign_write, [('modified_in_files', 'in_files')]),
        (stc, realign_estwrite, [('timecorrected_files', 'in_files')]),
        (realign_write, smooth, [('realigned_files', 'in_files')])
    ])
    return workflow
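
A usage sketch, assuming the FSL FEEDS dataset is unpacked so that feeds/data/fmri.nii.gz resolves under the chosen base directory:

wf = test_spm(name='test_spm_3d')      # or 'test_spm_4d'
wf.inputs.inputnode.in_data = '/data'  # hypothetical directory containing feeds/
wf.base_dir = '/tmp/test_spm'          # hypothetical scratch directory
wf.run()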
Example 5
    def __init__(self):
        super(Smooth, self).__init__()
        self.requirement = ['matlab', 'spm']

        # Inputs description
        in_files_desc = 'List of files to smooth. A list of items which are an existing, uncompressed file (valid extensions: [.img, .nii, .hdr]).'
        fwhm_desc = 'Full-width at half maximum (FWHM) of the Gaussian smoothing kernel in mm. A list of 3 items which are a float of fwhm for each dimension.'
        data_type_desc = 'Data type of the output images (an integer [int or long]).'
        implicit_masking_desc = 'A mask implied by a particular voxel value (a boolean).'
        out_prefix_desc = 'Specify the string to be prepended to the filenames of the smoothed image file(s) (a string).'

        # Outputs description
        smoothed_files_desc = 'The smoothed files (a list of items which are an existing file name).'

        # Input traits
        self.add_trait(
            "in_files",
            InputMultiPath(ImageFileSPM(),
                           copyfile=False,
                           output=False,
                           desc=in_files_desc))
        self.add_trait(
            "fwhm",
            traits.List([6, 6, 6], output=False, optional=True,
                        desc=fwhm_desc))

        self.add_trait(
            "data_type",
            traits.Int(output=False, optional=True, desc=data_type_desc))

        self.add_trait(
            "implicit_masking",
            traits.Bool(output=False,
                        optional=True,
                        desc=implicit_masking_desc))

        self.add_trait(
            "out_prefix",
            traits.String('s',
                          usedefault=True,
                          output=False,
                          optional=True,
                          desc=out_prefix_desc))

        # Output traits
        self.add_trait(
            "smoothed_files",
            OutputMultiPath(File(), output=True, desc=smoothed_files_desc))

        self.process = spm.Smooth()
        self.change_dir = True
Example 6
def test_smooth():
    yield assert_equal, spm.Smooth._jobtype, 'spatial'
    yield assert_equal, spm.Smooth._jobname, 'smooth'
    input_map = dict(
        data_type=dict(field='dtype', ),
        fwhm=dict(field='fwhm', ),
        in_files=dict(
            copyfile=False,
            mandatory=True,
            field='data',
        ),
    )
    instance = spm.Smooth()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(instance.inputs.traits()[key],
                                        metakey), value
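
The test above uses the old nose yield idiom; a rough pytest-style equivalent with the same metadata expectations might look like this:

import nipype.interfaces.spm as spm


def test_smooth_metadata():
    # Same checks as above, written as plain assertions.
    assert spm.Smooth._jobtype == 'spatial'
    assert spm.Smooth._jobname == 'smooth'
    instance = spm.Smooth()
    input_map = dict(
        data_type=dict(field='dtype'),
        fwhm=dict(field='fwhm'),
        in_files=dict(copyfile=False, mandatory=True, field='data'),
    )
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            assert getattr(instance.inputs.traits()[key], metakey) == value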
Example 7
def smooth_images(write_dir, **template_dict):
    """This function runs smoothing on input images. Ex: modulated images"""
    import glob
    import os
    import sys

    import nipype.pipeline.engine as pe
    from nipype.interfaces import spm
    from nipype.interfaces.io import DataSink
    smooth = pe.Node(interface=spm.Smooth(), name='smooth')
    smooth.inputs.paths = template_dict['spm_path']
    smooth.inputs.implicit_masking = template_dict['implicit_masking']
    smooth.inputs.in_files = glob.glob(os.path.join(write_dir, 'mwc*.nii'))
    smooth.inputs.fwhm = template_dict['FWHM_SMOOTH']
    vbm_smooth_modulated_images = pe.Workflow(
        name="vbm_smooth_modulated_images")
    datasink = pe.Node(interface=DataSink(), name='datasink')
    datasink.inputs.base_directory = write_dir
    vbm_smooth_modulated_images.connect([(smooth, datasink, [('smoothed_files',
                                                              write_dir)])])
    with stdchannel_redirected(sys.stderr, os.devnull):
        vbm_smooth_modulated_images.run()
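
A usage sketch, assuming template_dict carries the keys the function reads and that modulated mwc*.nii images already exist in write_dir (stdchannel_redirected is a helper defined elsewhere in the source module):

# Hypothetical values for the keys consumed above.
template_dict = {
    'spm_path': '/opt/spm12',
    'implicit_masking': False,
    'FWHM_SMOOTH': [6, 6, 6],
}
smooth_images('/data/vbm_outputs', **template_dict)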
Example 8
    def analysis_steps(self):
        self.analysis = type('', (), {})()
        # Get files
        subj_list = [
            subj.split('_')[:-1] for subj in next(os.walk(self.proj_dir))[1]
        ]
        # TODO limit the subj_list to those without sw processed files.

        # for parallelization by subject, use IdentityInterface
        self.analysis.infosource = Node(
            IdentityInterface(fields=['subj_id', 'task']), name="infosource")
        self.analysis.infosource.iterables = [('subj_id', subj_list),
                                              ('task', self.task_names)]

        templates = {
            'anat': '{subj_id}/t1/{subj_id}_t1*.nii',
            'func': '{subj_id}/{task}*/{subj_id}_{task}*.nii'
        }
        self.analysis.sf = Node(SelectFiles(templates), name='selectfiles')
        self.analysis.sf.inputs.base_directory = self.proj_dir

        # Realign
        self.analysis.realign = Node(spm.Realign(register_to_mean=True,
                                                 fwhm=self.opts.fwhm),
                                     name='realign')

        # Coregister
        self.analysis.coreg = Node(spm.Coregister(), name='coregistration')
        # Normalize
        self.analysis.norm12 = Node(spm.Normalize12(
            bias_regularization=1e-05, affine_regularization_type='mni'),
                                    name='normalize')

        #Smooth
        self.analysis.smooth = Node(spm.Smooth(), name='smooth')
        #smooth.inputs.in_files = 'functional.nii'
        self.analysis.smooth.inputs.fwhm = self.opts.smooth_fwhm
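
A resolution sketch for the SelectFiles templates above, with a hypothetical project root and subject; at run time infosource supplies these values through its iterables:

from nipype.interfaces.io import SelectFiles
from nipype.pipeline.engine import Node

templates = {
    'anat': '{subj_id}/t1/{subj_id}_t1*.nii',
    'func': '{subj_id}/{task}*/{subj_id}_{task}*.nii',
}
sf = Node(SelectFiles(templates), name='selectfiles')
sf.inputs.base_directory = '/data/proj'  # hypothetical
sf.inputs.subj_id = 'sub01'              # hypothetical
sf.inputs.task = 'rest'                  # hypothetical
# sf.run().outputs.anat would match /data/proj/sub01/t1/sub01_t1*.nii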
Example 9
    def build_core_nodes(self):
        """Build and connect an output node to the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.interfaces.spm.utils as spmutils
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from nipype.interfaces.petpvc import PETPVC

        from clinica.utils.filemanip import unzip_nii
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        from .pet_volume_utils import (
            apply_binary_mask,
            atlas_statistics,
            create_binary_mask,
            create_pvc_mask,
            get_from_list,
            init_input_node,
            normalize_to_reference,
            pet_pvc_name,
        )

        if spm_standalone_is_available():
            use_spm_standalone()

        # Initialize pipeline
        # ===================
        init_node = npe.Node(
            interface=nutil.Function(
                input_names=["pet_nii"],
                output_names=["pet_nii"],
                function=init_input_node,
            ),
            name="init_pipeline",
        )

        # Unzipping
        # =========
        unzip_pet_image = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_pet_image",
        )

        unzip_t1_image_native = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_t1_image_native",
        )

        unzip_flow_fields = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_flow_fields",
        )

        unzip_dartel_template = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_dartel_template",
        )

        unzip_reference_mask = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_reference_mask",
        )

        unzip_mask_tissues = npe.MapNode(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="unzip_mask_tissues",
            iterfield=["in_file"],
        )

        # Coregister PET into T1 native space
        # ===================================
        coreg_pet_t1 = npe.Node(spm.Coregister(), name="coreg_pet_t1")

        # Spatially normalize PET into MNI
        # ================================
        dartel_mni_reg = npe.Node(spm.DARTELNorm2MNI(), name="dartel_mni_reg")
        dartel_mni_reg.inputs.modulate = False
        dartel_mni_reg.inputs.fwhm = 0

        # Reslice reference region mask into PET
        # ======================================
        reslice = npe.Node(spmutils.Reslice(), name="reslice")

        # Normalize PET values according to reference region
        # ==================================================
        norm_to_ref = npe.Node(
            nutil.Function(
                input_names=["pet_image", "region_mask"],
                output_names=["suvr_pet_path"],
                function=normalize_to_reference,
            ),
            name="norm_to_ref",
        )

        # Create binary mask from segmented tissues
        # =========================================
        binary_mask = npe.Node(
            nutil.Function(
                input_names=["tissues", "threshold"],
                output_names=["out_mask"],
                function=create_binary_mask,
            ),
            name="binary_mask",
        )
        binary_mask.inputs.threshold = self.parameters["mask_threshold"]

        # Mask PET image
        # ==============
        apply_mask = npe.Node(
            nutil.Function(
                input_names=["image", "binary_mask"],
                output_names=["masked_image_path"],
                function=apply_binary_mask,
            ),
            name="apply_mask",
        )

        # Smoothing
        # =========
        if self.parameters["smooth"] is not None and len(
                self.parameters["smooth"]) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name="smoothing_node",
                                         iterfield=["fwhm", "out_prefix"])
            smoothing_node.inputs.fwhm = [[x, x, x]
                                          for x in self.parameters["smooth"]]
            smoothing_node.inputs.out_prefix = [
                "fwhm-" + str(x) + "mm_" for x in self.parameters["smooth"]
            ]
            # fmt: off
            self.connect([
                (apply_mask, smoothing_node, [("masked_image_path", "in_files")
                                              ]),
                (smoothing_node, self.output_node,
                 [("smoothed_files", "pet_suvr_masked_smoothed")]),
            ])
            # fmt: on
        else:
            self.output_node.inputs.pet_suvr_masked_smoothed = [[]]

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(
            nutil.Function(
                input_names=["in_image", "in_atlas_list"],
                output_names=["atlas_statistics"],
                function=atlas_statistics,
            ),
            name="atlas_stats_node",
            iterfield=["in_image"],
        )
        atlas_stats_node.inputs.in_atlas_list = self.parameters["atlases"]

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, init_node, [("pet_image", "pet_nii")]),
            (init_node, unzip_pet_image, [("pet_nii", "in_file")]),
            (self.input_node, unzip_t1_image_native, [("t1_image_native",
                                                       "in_file")]),
            (self.input_node, unzip_flow_fields, [("flow_fields", "in_file")]),
            (self.input_node, unzip_dartel_template, [("dartel_template",
                                                       "in_file")]),
            (self.input_node, unzip_reference_mask, [("reference_mask",
                                                      "in_file")]),
            (self.input_node, unzip_mask_tissues, [("mask_tissues", "in_file")
                                                   ]),
            (unzip_pet_image, coreg_pet_t1, [("out_file", "source")]),
            (unzip_t1_image_native, coreg_pet_t1, [("out_file", "target")]),
            (unzip_flow_fields, dartel_mni_reg, [("out_file",
                                                  "flowfield_files")]),
            (unzip_dartel_template, dartel_mni_reg, [("out_file",
                                                      "template_file")]),
            (unzip_reference_mask, reslice, [("out_file", "in_file")]),
            (unzip_mask_tissues, binary_mask, [("out_file", "tissues")]),
            (coreg_pet_t1, dartel_mni_reg, [("coregistered_source",
                                             "apply_to_files")]),
            (dartel_mni_reg, reslice, [("normalized_files", "space_defining")
                                       ]),
            (dartel_mni_reg, norm_to_ref, [("normalized_files", "pet_image")]),
            (reslice, norm_to_ref, [("out_file", "region_mask")]),
            (norm_to_ref, apply_mask, [("suvr_pet_path", "image")]),
            (binary_mask, apply_mask, [("out_mask", "binary_mask")]),
            (norm_to_ref, atlas_stats_node, [("suvr_pet_path", "in_image")]),
            (coreg_pet_t1, self.output_node, [("coregistered_source",
                                               "pet_t1_native")]),
            (dartel_mni_reg, self.output_node, [("normalized_files", "pet_mni")
                                                ]),
            (norm_to_ref, self.output_node, [("suvr_pet_path", "pet_suvr")]),
            (binary_mask, self.output_node, [("out_mask", "binary_mask")]),
            (apply_mask, self.output_node, [("masked_image_path",
                                             "pet_suvr_masked")]),
            (atlas_stats_node, self.output_node, [("atlas_statistics",
                                                   "atlas_statistics")]),
        ])
        # fmt: on

        # PVC
        # ==========
        if self.parameters["apply_pvc"]:
            # Unzipping
            # =========
            unzip_pvc_mask_tissues = npe.MapNode(
                nutil.Function(
                    input_names=["in_file"],
                    output_names=["out_file"],
                    function=unzip_nii,
                ),
                name="unzip_pvc_mask_tissues",
                iterfield=["in_file"],
            )

            # Creating Mask to use in PVC
            # ===========================
            pvc_mask = npe.Node(
                nutil.Function(
                    input_names=["tissues"],
                    output_names=["out_mask"],
                    function=create_pvc_mask,
                ),
                name="pvc_mask",
            )
            # PET PVC
            # =======
            petpvc = npe.Node(PETPVC(), name="pvc")
            petpvc.inputs.pvc = "RBV"
            petpvc.inputs.out_file = "pvc.nii"

            # Spatially normalize PET into MNI
            # ================================
            dartel_mni_reg_pvc = npe.Node(spm.DARTELNorm2MNI(),
                                          name="dartel_mni_reg_pvc")
            dartel_mni_reg_pvc.inputs.modulate = False
            dartel_mni_reg_pvc.inputs.fwhm = 0

            # Reslice reference region mask into PET
            # ======================================
            reslice_pvc = npe.Node(spmutils.Reslice(), name="reslice_pvc")

            # Normalize PET values according to reference region
            # ==================================================
            norm_to_ref_pvc = npe.Node(
                nutil.Function(
                    input_names=["pet_image", "region_mask"],
                    output_names=["suvr_pet_path"],
                    function=normalize_to_reference,
                ),
                name="norm_to_ref_pvc",
            )

            # Mask PET image
            # ==============
            apply_mask_pvc = npe.Node(
                nutil.Function(
                    input_names=["image", "binary_mask"],
                    output_names=["masked_image_path"],
                    function=apply_binary_mask,
                ),
                name="apply_mask_pvc",
            )
            # Smoothing
            # =========
            if (self.parameters["smooth"] is not None
                    and len(self.parameters["smooth"]) > 0):
                smoothing_pvc = npe.MapNode(spm.Smooth(),
                                            name="smoothing_pvc",
                                            iterfield=["fwhm", "out_prefix"])
                smoothing_pvc.inputs.fwhm = [[x, x, x]
                                             for x in self.parameters["smooth"]
                                             ]
                smoothing_pvc.inputs.out_prefix = [
                    "fwhm-" + str(x) + "mm_" for x in self.parameters["smooth"]
                ]
                # fmt: off
                self.connect([
                    (apply_mask_pvc, smoothing_pvc, [("masked_image_path",
                                                      "in_files")]),
                    (smoothing_pvc, self.output_node,
                     [("smoothed_files", "pet_pvc_suvr_masked_smoothed")]),
                ])
                # fmt: on
            else:
                self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
            # Atlas Statistics
            # ================
            atlas_stats_pvc = npe.MapNode(
                nutil.Function(
                    input_names=["in_image", "in_atlas_list"],
                    output_names=["atlas_statistics"],
                    function=atlas_statistics,
                ),
                name="atlas_stats_pvc",
                iterfield=["in_image"],
            )
            atlas_stats_pvc.inputs.in_atlas_list = self.parameters["atlases"]

            # Connection
            # ==========
            # fmt: off
            self.connect([
                (self.input_node, unzip_pvc_mask_tissues, [("pvc_mask_tissues",
                                                            "in_file")]),
                (unzip_pvc_mask_tissues, pvc_mask, [("out_file", "tissues")]),
                (unzip_flow_fields, dartel_mni_reg_pvc, [("out_file",
                                                          "flowfield_files")]),
                (unzip_dartel_template, dartel_mni_reg_pvc,
                 [("out_file", "template_file")]),
                (unzip_reference_mask, reslice_pvc, [("out_file", "in_file")]),
                (coreg_pet_t1, petpvc, [("coregistered_source", "in_file"),
                                        (("coregistered_source", pet_pvc_name,
                                          "RBV"), "out_file")]),
                (pvc_mask, petpvc, [("out_mask", "mask_file")]),
                (self.input_node, petpvc,
                 [(("psf", get_from_list, 0), "fwhm_x"),
                  (("psf", get_from_list, 1), "fwhm_y"),
                  (("psf", get_from_list, 2), "fwhm_z")]),
                (petpvc, dartel_mni_reg_pvc, [("out_file", "apply_to_files")]),
                (dartel_mni_reg_pvc, reslice_pvc, [("normalized_files",
                                                    "space_defining")]),
                (dartel_mni_reg_pvc, norm_to_ref_pvc, [("normalized_files",
                                                        "pet_image")]),
                (reslice_pvc, norm_to_ref_pvc, [("out_file", "region_mask")]),
                (norm_to_ref_pvc, apply_mask_pvc, [("suvr_pet_path", "image")
                                                   ]),
                (binary_mask, apply_mask_pvc, [("out_mask", "binary_mask")]),
                (norm_to_ref_pvc, atlas_stats_pvc, [("suvr_pet_path",
                                                     "in_image")]),
                (petpvc, self.output_node, [("out_file", "pet_pvc")]),
                (dartel_mni_reg_pvc, self.output_node, [("normalized_files",
                                                         "pet_pvc_mni")]),
                (norm_to_ref_pvc, self.output_node, [("suvr_pet_path",
                                                      "pet_pvc_suvr")]),
                (apply_mask_pvc, self.output_node, [("masked_image_path",
                                                     "pet_pvc_suvr_masked")]),
                (atlas_stats_pvc, self.output_node,
                 [("atlas_statistics", "pvc_atlas_statistics")]),
            ])
            # fmt: on
        else:
            self.output_node.inputs.pet_pvc = [[]]
            self.output_node.inputs.pet_pvc_mni = [[]]
            self.output_node.inputs.pet_pvc_suvr = [[]]
            self.output_node.inputs.pet_pvc_suvr_masked = [[]]
            self.output_node.inputs.pvc_atlas_statistics = [[]]
            self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
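
The smoothing MapNodes above pair each fwhm with a matching out_prefix through iterfield; a minimal standalone sketch of that pattern, with illustrative kernel sizes:

import nipype.interfaces.spm as spm
import nipype.pipeline.engine as npe

kernels = [4, 8]  # hypothetical smoothing sizes in mm
smoothing = npe.MapNode(spm.Smooth(), name="smoothing",
                        iterfield=["fwhm", "out_prefix"])
smoothing.inputs.in_files = 'suvr_pet.nii'  # hypothetical input image
smoothing.inputs.fwhm = [[k, k, k] for k in kernels]
smoothing.inputs.out_prefix = ["fwhm-%dmm_" % k for k in kernels]
# Both iterfield lists must have the same length; each pair yields one run.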
Example 10
"""Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid
body registration of the functional data to the structural data.
"""

coregister = pe.Node(spm.Coregister(), name="coregister")
coregister.inputs.jobtype = 'estimate'
"""Warp functional and structural data to SPM's T1 template using
:class:`nipype.interfaces.spm.Normalize`.  The tutorial data set
includes the template image, T1.nii.
"""

normalize = pe.Node(spm.Normalize(), name="normalize")
"""Smooth the functional data using
:class:`nipype.interfaces.spm.Smooth`.
"""

smooth = pe.Node(spm.Smooth(), name="smooth")
fwhmlist = [4]
smooth.iterables = ('fwhm', fwhmlist)

preproc.connect([
    (inputnode, normalize, [(('in_data', _template_path), 'template')]),
    (realign, coregister, [('mean_image', 'source'),
                           ('realigned_files', 'apply_to_files')]),
    (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
    (normalize, smooth, [('normalized_files', 'in_files')]),
    (normalize, skullstrip, [('normalized_source', 'in_file')]),
    (realign, art, [('realignment_parameters', 'realignment_parameters')]),
    (normalize, art, [('normalized_files', 'realigned_files')]),
    (skullstrip, art, [('mask_file', 'mask_file')]),
])
"""
Example 11
iter_fwhm = pe.Node(
    interface=util.IdentityInterface(fields=["fwhm"]), name="iter_fwhm")
iter_fwhm.iterables = [('fwhm', [4, 8])]

iter_smoothing_method = pe.Node(
    interface=util.IdentityInterface(fields=["smoothing_method"]),
    name="iter_smoothing_method")
iter_smoothing_method.iterables = [('smoothing_method', [
    'isotropic_voxel', 'anisotropic_voxel', 'isotropic_surface'
])]

realign = pe.Node(interface=spm.Realign(), name="realign")
realign.inputs.register_to_mean = True

isotropic_voxel_smooth = pe.Node(
    interface=spm.Smooth(), name="isotropic_voxel_smooth")
preprocessing.connect(realign, "realigned_files", isotropic_voxel_smooth,
                      "in_files")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_voxel_smooth, "fwhm")

compute_mask = pe.Node(interface=nipy.ComputeMask(), name="compute_mask")
preprocessing.connect(realign, "mean_image", compute_mask, "mean_volume")

anisotropic_voxel_smooth = fsl_wf.create_susan_smooth(
    name="anisotropic_voxel_smooth", separate_masks=False)
anisotropic_voxel_smooth.inputs.smooth.output_type = 'NIFTI'
preprocessing.connect(realign, "realigned_files", anisotropic_voxel_smooth,
                      "inputnode.in_files")
preprocessing.connect(iter_fwhm, "fwhm", anisotropic_voxel_smooth,
                      "inputnode.fwhm")
preprocessing.connect(compute_mask, "brain_mask", anisotropic_voxel_smooth,
                      "inputnode.mask_file")
Example 12
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.
        """

        import os
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.io import unzip_nii
        from clinica.pipelines.t1_volume_dartel2mni.t1_volume_dartel2mni_utils import (
            prepare_flowfields, join_smoothed_files, atlas_statistics,
            select_gm_images)

        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        if 'SPMSTANDALONE_HOME' in os.environ:
            if 'MCR_HOME' in os.environ:
                matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'],
                                          'run_spm12.sh') \
                             + ' ' + os.environ['MCR_HOME'] \
                             + ' script'
                spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd,
                                              use_mcr=True)
                version = spm.SPMCommand().version
            else:
                raise EnvironmentError(
                    'MCR_HOME variable not in environment, although '
                    'SPMSTANDALONE_HOME has been found.')
        else:
            version = spm.Info.getinfo()

        if version:
            if isinstance(version, dict):
                spm_path = version['path']
                if version['name'] == 'SPM8':
                    print(
                        'You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                        + 'Please upgrade your SPM toolbox.')
                    tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
                elif version['name'] == 'SPM12':
                    tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'Unsupported SPM version: expected SPM8 or SPM12. '
                        'Please upgrade your SPM toolbox.')
            if isinstance(version, str):
                if float(version) >= 12.7169:
                    tissue_map = os.path.join(
                        str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM standalone version not supported. Please upgrade SPM standalone.'
                    )
        else:
            raise RuntimeError(
                'SPM could not be found. Please verify your SPM_HOME environment variable.'
            )

        # Unzipping
        # =========
        unzip_tissues_node = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_tissues_node',
                                         iterfield=['in_file'])
        unzip_flowfields_node = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                            name='unzip_flowfields_node',
                                            iterfield=['in_file'])
        unzip_template_node = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                       name='unzip_template_node')

        # DARTEL2MNI Registration
        # =======================
        dartel2mni_node = npe.MapNode(
            spm.DARTELNorm2MNI(),
            name='dartel2MNI',
            iterfield=['apply_to_files', 'flowfield_files'])

        if self.parameters['bounding_box'] is not None:
            dartel2mni_node.inputs.bounding_box = self.parameters[
                'bounding_box']
        if self.parameters['voxel_size'] is not None:
            dartel2mni_node.inputs.voxel_size = self.parameters['voxel_size']
        dartel2mni_node.inputs.modulate = self.parameters['modulation']
        dartel2mni_node.inputs.fwhm = 0

        # Smoothing
        # =========
        if self.parameters['fwhm'] is not None and len(
                self.parameters['fwhm']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['in_files'])

            smoothing_node.iterables = [
                ('fwhm', [[x, x, x] for x in self.parameters['fwhm']]),
                ('out_prefix',
                 ['fwhm-' + str(x) + 'mm_' for x in self.parameters['fwhm']])
            ]
            smoothing_node.synchronize = True

            join_smoothing_node = npe.JoinNode(
                interface=nutil.Function(
                    input_names=['smoothed_normalized_files'],
                    output_names=['smoothed_normalized_files'],
                    function=join_smoothed_files),
                joinsource='smoothing_node',
                joinfield='smoothed_normalized_files',
                name='join_smoothing_node')
            self.connect([(dartel2mni_node, smoothing_node,
                           [('normalized_files', 'in_files')]),
                          (smoothing_node, join_smoothing_node,
                           [('smoothed_files', 'smoothed_normalized_files')]),
                          (join_smoothing_node, self.output_node,
                           [('smoothed_normalized_files',
                             'smoothed_normalized_files')])])
        else:
            self.output_node.inputs.smoothed_normalized_files = []

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(nutil.Function(
            input_names=['in_image', 'in_atlas_list'],
            output_names=['atlas_statistics'],
            function=atlas_statistics),
                                       name='atlas_stats_node',
                                       iterfield=['in_image'])
        atlas_stats_node.inputs.in_atlas_list = self.parameters['atlas_list']

        # Connection
        # ==========
        self.connect([(self.input_node, unzip_tissues_node, [('apply_to_files',
                                                              'in_file')]),
                      (self.input_node, unzip_flowfields_node,
                       [('flowfield_files', 'in_file')]),
                      (self.input_node, unzip_template_node, [('template_file',
                                                               'in_file')]),
                      (unzip_tissues_node, dartel2mni_node,
                       [('out_file', 'apply_to_files')]),
                      (unzip_flowfields_node, dartel2mni_node,
                       [(('out_file', prepare_flowfields,
                          self.parameters['tissues']), 'flowfield_files')]),
                      (unzip_template_node, dartel2mni_node,
                       [('out_file', 'template_file')]),
                      (dartel2mni_node, self.output_node,
                       [('normalized_files', 'normalized_files')]),
                      (dartel2mni_node, atlas_stats_node,
                       [(('normalized_files', select_gm_images), 'in_image')]),
                      (atlas_stats_node, self.output_node,
                       [('atlas_statistics', 'atlas_statistics')])])
Example 13
#         roi_files.append(roi_file)  
#     return roi_files

        
# extract = pe.Node(util.Function(
#         input_names = ['in_files'],
#         function = extract_all, output_names = ['roi_files']),
#         name = 'extract')
        
extract = pe.MapNode(fsl.ExtractROI(), name="extract", iterfield=['in_file'])
extract.inputs.t_min = del_scan
extract.inputs.t_size = -1
extract.inputs.output_type = 'NIFTI'

# smoothing
smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")

# set contrasts, depend on the condition
cond_names = ['Med_amb', 'Med_ambxMed_amb_reward_prob^1', 'Med_risk', 'Med_riskxMed_risk_reward_prob^1',
              'Mon_amb', 'Mon_ambxMon_amb_reward_prob^1', 'Mon_risk', 'Mon_riskxMon_risk_reward_prob^1']

# general activation
cont1 = ['Med_Amb', 'T', cond_names, [1, 0, 0, 0, 0, 0, 0, 0]]
cont2 = ['Med_Risk', 'T', cond_names, [0, 0, 1, 0, 0, 0, 0, 0]]
cont3 = ['Med_Amb>Risk', 'T', cond_names, [1, 0, -1, 0, 0, 0, 0, 0]]

cont4 = ['Mon_Amb', 'T', cond_names, [0, 0, 0, 0, 1, 0, 0, 0]]
cont5 = ['Mon_Risk', 'T', cond_names, [0, 0, 0, 0, 0, 0, 1, 0]]
cont6 = ['Mon_Amb>Risk', 'T', cond_names, [0, 0, 0, 0, 1, 0, -1, 0]]

cont7 = ['Med>Mon_Amb', 'T', cond_names, [1, 0, 0, 0, -1, 0, 0, 0]]
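
These contrast tuples follow nipype's ('name', 'T', conditions, weights) convention; a sketch of handing them to SPM contrast estimation, assuming a fitted model upstream:

import nipype.interfaces.spm as spm
import nipype.pipeline.engine as pe

contrast_estimate = pe.Node(spm.EstimateContrast(), name="contrast_estimate")
contrast_estimate.inputs.contrasts = [cont1, cont2, cont3, cont4,
                                      cont5, cont6, cont7]
# spm_mat_file, beta_images and residual_image come from EstimateModel upstream.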
Example 14
preprocessing = pe.Workflow(name="preprocessing")

iter_fwhm = pe.Node(interface=util.IdentityInterface(fields=["fwhm"]),
                    name="iter_fwhm")
iter_fwhm.iterables = [('fwhm', [4, 8])]

iter_smoothing_method = pe.Node(interface=util.IdentityInterface(fields=["smoothing_method"]),
                                name="iter_smoothing_method")
iter_smoothing_method.iterables = [('smoothing_method', ['isotropic_voxel',
                                                         'anisotropic_voxel',
                                                         'isotropic_surface'])]

realign = pe.Node(interface=spm.Realign(), name="realign")
realign.inputs.register_to_mean = True

isotropic_voxel_smooth = pe.Node(interface=spm.Smooth(),
                                 name="isotropic_voxel_smooth")
preprocessing.connect(realign, "realigned_files", isotropic_voxel_smooth,
                      "in_files")
preprocessing.connect(iter_fwhm, "fwhm", isotropic_voxel_smooth, "fwhm")

compute_mask = pe.Node(interface=nipy.ComputeMask(), name="compute_mask")
preprocessing.connect(realign, "mean_image", compute_mask, "mean_volume")

anisotropic_voxel_smooth = fsl_wf.create_susan_smooth(name="anisotropic_voxel_smooth",
                                                      separate_masks=False)
anisotropic_voxel_smooth.inputs.smooth.output_type = 'NIFTI'
preprocessing.connect(realign, "realigned_files", anisotropic_voxel_smooth,
                      "inputnode.in_files")
preprocessing.connect(iter_fwhm, "fwhm", anisotropic_voxel_smooth,
                      "inputnode.fwhm")
Example 15
def create_spm_preproc(c, name='preproc'):
    """Create an spm preprocessing workflow with freesurfer registration and
artifact detection.

The workflow realigns and smooths and registers the functional images with
the subject's freesurfer space.

Example
-------

>>> preproc = create_spm_preproc()
>>> preproc.base_dir = '.'
>>> preproc.inputs.inputspec.fwhm = 6
>>> preproc.inputs.inputspec.subject_id = 's1'
>>> preproc.inputs.inputspec.subjects_dir = '.'
>>> preproc.inputs.inputspec.functionals = ['f3.nii', 'f5.nii']
>>> preproc.inputs.inputspec.norm_threshold = 1
>>> preproc.inputs.inputspec.zintensity_threshold = 3

Inputs::

inputspec.functionals : functional runs use 4d nifti
inputspec.subject_id : freesurfer subject id
inputspec.subjects_dir : freesurfer subjects dir
inputspec.fwhm : smoothing fwhm
inputspec.norm_threshold : norm threshold for outliers
inputspec.zintensity_threshold : intensity threshold in z-score

Outputs::

outputspec.realignment_parameters : realignment parameter files
outputspec.smoothed_files : smoothed functional files
outputspec.outlier_files : list of outliers
outputspec.outlier_stats : statistics of outliers
outputspec.outlier_plots : images of outliers
outputspec.mask_file : binary mask file in reference image space
outputspec.reg_file : registration file that maps reference image to
freesurfer space
outputspec.reg_cost : cost of registration (useful for detecting misalignment)
"""
    from nipype.workflows.smri.freesurfer.utils import create_getmask_flow
    import nipype.algorithms.rapidart as ra
    import nipype.interfaces.spm as spm
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    """
Initialize the workflow
"""

    workflow = pe.Workflow(name=name)
    """
Define the inputs to this workflow
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'functionals', 'subject_id', 'subjects_dir', 'fwhm', 'norm_threshold',
        'zintensity_threshold', 'tr', 'do_slicetime', 'sliceorder', 'node',
        'csf_prob', 'wm_prob', 'gm_prob'
    ]),
                        name='inputspec')
    """
Setup the processing nodes and create the mask generation and coregistration
workflow
"""

    poplist = lambda x: x.pop()
    #realign = pe.Node(spm.Realign(), name='realign')

    sym_func = pe.Node(niu.Function(input_names=['in_file'],
                                    output_names=['out_link'],
                                    function=do_symlink),
                       name='func_symlink')

    realign = pe.Node(niu.Function(
        input_names=['node', 'in_file', 'tr', 'do_slicetime', 'sliceorder'],
        output_names=['out_file', 'par_file'],
        function=mod_realign),
                      name="mod_realign")

    mean = art_mean_workflow()
    workflow.connect(realign, 'out_file', mean, 'inputspec.realigned_files')
    workflow.connect(realign, 'par_file', mean,
                     'inputspec.realignment_parameters')
    mean.inputs.inputspec.parameter_source = 'FSL'  # Modular realign puts it in FSL format for consistency

    #workflow.connect(inputnode, 'functionals', realign, 'in_file')
    workflow.connect(inputnode, 'functionals', sym_func, 'in_file')
    workflow.connect(sym_func, 'out_link', realign, 'in_file')

    workflow.connect(inputnode, 'tr', realign, 'tr')
    workflow.connect(inputnode, 'do_slicetime', realign, 'do_slicetime')
    workflow.connect(inputnode, 'sliceorder', realign, 'sliceorder')
    workflow.connect(inputnode, 'node', realign, 'node')

    maskflow = create_getmask_flow()
    workflow.connect([(inputnode, maskflow,
                       [('subject_id', 'inputspec.subject_id'),
                        ('subjects_dir', 'inputspec.subjects_dir')])])
    maskflow.inputs.inputspec.contrast_type = 't2'
    workflow.connect(mean, 'outputspec.mean_image', maskflow,
                     'inputspec.source_file')
    smooth = pe.Node(spm.Smooth(), name='smooth')

    normalize = pe.Node(spm.Normalize(jobtype='write'), name='normalize')
    normalize_struct = normalize.clone('normalize_struct')
    segment = pe.Node(spm.Segment(csf_output_type=[True, True, False],
                                  gm_output_type=[True, True, False],
                                  wm_output_type=[True, True, False]),
                      name='segment')

    mergefunc = lambda in1, in2, in3: [in1, in2, in3]

    # merge = pe.Node(niu.Merge(),name='merge')
    merge = pe.Node(niu.Function(input_names=['in1', 'in2', 'in3'],
                                 output_names=['out'],
                                 function=mergefunc),
                    name='merge')
    workflow.connect(inputnode, 'csf_prob', merge, 'in3')
    workflow.connect(inputnode, 'wm_prob', merge, 'in2')
    workflow.connect(inputnode, 'gm_prob', merge, 'in1')

    #workflow.connect(merge,'out', segment,'tissue_prob_maps')

    sym_prob = sym_func.clone('sym_prob')
    workflow.connect(merge, 'out', sym_prob, 'in_file')
    workflow.connect(sym_prob, 'out_link', segment, 'tissue_prob_maps')

    workflow.connect(maskflow, ('outputspec.mask_file', pickfirst), segment,
                     'mask_image')
    workflow.connect(inputnode, 'fwhm', smooth, 'fwhm')

    #sym_brain = sym_func.clone('sym_brain')
    #workflow.connect(realign, 'mean_image', normalize, 'source')
    #workflow.connect(maskflow,'fssource.brain',segment,'data')
    fssource = maskflow.get_node('fssource')
    import nipype.interfaces.freesurfer as fs
    convert_brain = pe.Node(interface=fs.ApplyVolTransform(inverse=True),
                            name='convert')
    workflow.connect(fssource, 'brain', convert_brain, 'target_file')
    workflow.connect(maskflow, ('outputspec.reg_file', pickfirst),
                     convert_brain, 'reg_file')
    workflow.connect(mean, 'outputspec.mean_image', convert_brain,
                     'source_file')
    convert2nii = pe.Node(fs.MRIConvert(in_type='mgz', out_type='nii'),
                          name='convert2nii')
    workflow.connect(convert_brain, 'transformed_file', convert2nii, 'in_file')
    workflow.connect(convert2nii, 'out_file', segment, 'data')

    workflow.connect(segment, 'transformation_mat', normalize,
                     'parameter_file')
    workflow.connect(segment, 'transformation_mat', normalize_struct,
                     'parameter_file')
    workflow.connect(convert2nii, 'out_file', normalize_struct,
                     'apply_to_files')
    workflow.connect(realign, 'out_file', normalize, 'apply_to_files')
    #normalize.inputs.template='/software/spm8/templates/EPI.nii'
    workflow.connect(normalize, 'normalized_files', smooth, 'in_files')
    #workflow.connect(realign, 'realigned_files', smooth, 'in_files')

    artdetect = pe.Node(ra.ArtifactDetect(mask_type='file',
                                          parameter_source='FSL',
                                          use_differences=[True, False],
                                          use_norm=True,
                                          save_plot=True),
                        name='artdetect')
    workflow.connect([(inputnode, artdetect,
                       [('norm_threshold', 'norm_threshold'),
                        ('zintensity_threshold', 'zintensity_threshold')])])
    workflow.connect([(realign, artdetect, [('out_file', 'realigned_files'),
                                            ('par_file',
                                             'realignment_parameters')])])
    workflow.connect(maskflow, ('outputspec.mask_file', poplist), artdetect,
                     'mask_file')
    """
Define the outputs of the workflow and connect the nodes to the outputnode
"""

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        "realignment_parameters", "smoothed_files", "mask_file", "mean_image",
        "reg_file", "reg_cost", 'outlier_files', 'outlier_stats',
        'outlier_plots', 'norm_components', 'mod_csf', 'unmod_csf', 'mod_wm',
        'unmod_wm', 'mod_gm', 'unmod_gm', 'mean', 'normalized_struct',
        'struct_in_functional_space', 'normalization_parameters',
        'reverse_normalize_parameters'
    ]),
                         name="outputspec")
    workflow.connect([
        (maskflow, outputnode, [("outputspec.reg_file", "reg_file")]),
        (maskflow, outputnode, [("outputspec.reg_cost", "reg_cost")]),
        (maskflow, outputnode, [(("outputspec.mask_file", poplist),
                                 "mask_file")]),
        (realign, outputnode, [('par_file', 'realignment_parameters')]),
        (smooth, outputnode, [('smoothed_files', 'smoothed_files')]),
        (artdetect, outputnode, [('outlier_files', 'outlier_files'),
                                 ('statistic_files', 'outlier_stats'),
                                 ('plot_files', 'outlier_plots'),
                                 ('norm_files', 'norm_components')])
    ])
    workflow.connect(segment, 'modulated_csf_image', outputnode, 'mod_csf')
    workflow.connect(segment, 'modulated_wm_image', outputnode, 'mod_wm')
    workflow.connect(segment, 'modulated_gm_image', outputnode, 'mod_gm')
    workflow.connect(segment, 'normalized_csf_image', outputnode, 'unmod_csf')
    workflow.connect(segment, 'normalized_wm_image', outputnode, 'unmod_wm')
    workflow.connect(segment, 'normalized_gm_image', outputnode, 'unmod_gm')
    workflow.connect(mean, 'outputspec.mean_image', outputnode, 'mean')
    workflow.connect(normalize_struct, 'normalized_files', outputnode,
                     'normalized_struct')
    workflow.connect(segment, 'transformation_mat', outputnode,
                     'normalization_parameters')
    workflow.connect(segment, 'inverse_transformation_mat', outputnode,
                     'reverse_normalize_parameters')
    workflow.connect(convert2nii, 'out_file', outputnode,
                     'struct_in_functional_space')

    workflow.inputs.inputspec.fwhm = c.fwhm
    workflow.inputs.inputspec.subjects_dir = c.surf_dir
    workflow.inputs.inputspec.norm_threshold = c.norm_thresh
    workflow.inputs.inputspec.zintensity_threshold = c.z_thresh
    workflow.inputs.inputspec.node = c.motion_correct_node
    workflow.inputs.inputspec.tr = c.TR
    workflow.inputs.inputspec.do_slicetime = c.do_slicetiming
    workflow.inputs.inputspec.sliceorder = c.SliceOrder
    workflow.inputs.inputspec.csf_prob = c.csf_prob
    workflow.inputs.inputspec.gm_prob = c.grey_prob
    workflow.inputs.inputspec.wm_prob = c.white_prob
    workflow.base_dir = c.working_dir
    workflow.config = {'execution': {'crashdump_dir': c.crash_dir}}

    datagrabber = get_dataflow(c)

    workflow.connect(datagrabber, 'func', inputnode, 'functionals')

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id']),
                         name='subject_names')
    if not c.test_mode:
        infosource.iterables = ('subject_id', c.subjects)
    else:
        infosource.iterables = ('subject_id', c.subjects[:1])

    workflow.connect(infosource, 'subject_id', inputnode, 'subject_id')
    workflow.connect(infosource, 'subject_id', datagrabber, 'subject_id')
    sub = lambda x: [('_subject_id_%s' % x, '')]

    sinker = pe.Node(nio.DataSink(), name='sinker')
    workflow.connect(infosource, 'subject_id', sinker, 'container')
    workflow.connect(infosource, ('subject_id', sub), sinker, 'substitutions')
    sinker.inputs.base_directory = c.sink_dir
    outputspec = workflow.get_node('outputspec')
    workflow.connect(outputspec, 'realignment_parameters', sinker,
                     'spm_preproc.realignment_parameters')
    workflow.connect(outputspec, 'smoothed_files', sinker,
                     'spm_preproc.smoothed_outputs')
    workflow.connect(outputspec, 'outlier_files', sinker,
                     'spm_preproc.art.@outlier_files')
    workflow.connect(outputspec, 'outlier_stats', sinker,
                     'spm_preproc.art.@outlier_stats')
    workflow.connect(outputspec, 'outlier_plots', sinker,
                     'spm_preproc.art.@outlier_plots')
    workflow.connect(outputspec, 'norm_components', sinker,
                     'spm_preproc.art.@norm')
    workflow.connect(outputspec, 'reg_file', sinker,
                     'spm_preproc.bbreg.@reg_file')
    workflow.connect(outputspec, 'reg_cost', sinker,
                     'spm_preproc.bbreg.@reg_cost')
    workflow.connect(outputspec, 'mask_file', sinker,
                     'spm_preproc.mask.@mask_file')
    workflow.connect(outputspec, 'mod_csf', sinker,
                     'spm_preproc.segment.mod.@csf')
    workflow.connect(outputspec, 'mod_wm', sinker,
                     'spm_preproc.segment.mod.@wm')
    workflow.connect(outputspec, 'mod_gm', sinker,
                     'spm_preproc.segment.mod.@gm')
    workflow.connect(outputspec, 'unmod_csf', sinker,
                     'spm_preproc.segment.unmod.@csf')
    workflow.connect(outputspec, 'unmod_wm', sinker,
                     'spm_preproc.segment.unmod.@wm')
    workflow.connect(outputspec, 'unmod_gm', sinker,
                     'spm_preproc.segment.unmod.@gm')
    workflow.connect(outputspec, 'mean', sinker, 'spm_preproc.mean')
    workflow.connect(outputspec, 'normalized_struct', sinker,
                     'spm_preproc.normalized_struct')
    workflow.connect(outputspec, 'normalization_parameters', sinker,
                     'spm_preproc.normalization_parameters.@forward')
    workflow.connect(outputspec, 'reverse_normalize_parameters', sinker,
                     'spm_preproc.normalization_parameters.@reverse')
    workflow.connect(outputspec, 'struct_in_functional_space', sinker,
                     'spm_preproc.struct_in_func_space')

    return workflow
Example 16
def create_spm_preproc_func_pipeline(data_dir=None,
                                     subject_id=None,
                                     task_list=None):

    ###############################
    ## Set up Nodes
    ###############################

    ds = Node(nio.DataGrabber(infields=['subject_id', 'task_id'],
                              outfields=['func', 'struc']),
              name='datasource')
    ds.inputs.base_directory = os.path.abspath(data_dir + '/' + subject_id)
    ds.inputs.template = '*'
    ds.inputs.sort_filelist = True
    ds.inputs.template_args = {'func': [['task_id']], 'struc': []}
    ds.inputs.field_template = {
        'func': 'Functional/Raw/%s/func.nii',
        'struc': 'Structural/SPGR/spgr.nii'
    }
    ds.inputs.subject_id = subject_id
    ds.inputs.task_id = task_list
    ds.iterables = ('task_id', task_list)
    # ds.run().outputs #show datafiles

    # #Setup Data Sinker for writing output files
    # datasink = Node(nio.DataSink(), name='sinker')
    # datasink.inputs.base_directory = '/path/to/output'
    # workflow.connect(realigner, 'realignment_parameters', datasink, 'motion.@par')
    # datasink.inputs.substitutions = [('_variable', 'variable'),('file_subject_', '')]

    #Get Timing Acquisition for slice timing
    tr = 2
    ta = Node(interface=util.Function(input_names=['tr', 'n_slices'],
                                      output_names=['ta'],
                                      function=get_ta),
              name="ta")
    ta.inputs.tr = tr

    #Slice Timing: sequential ascending
    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.time_repetition = tr
    slice_timing.inputs.ref_slice = 1

    #Realignment - 6 parameters - realign to first image of very first series.
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    #Plot Realignment
    plot_realign = Node(interface=PlotRealignmentParameters(),
                        name="plot_realign")

    #Artifact Detection
    art = Node(interface=ra.ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    #Coregister - 12 parameters, cost function = 'nmi', fwhm 7, interpolate, don't mask
    #anatomical to functional mean across all available data.
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estimate'

    # Segment structural, gray/white/csf,mni,
    segment = Node(interface=spm.Segment(), name="segment")
    segment.inputs.save_bias_corrected = True

    #Normalize - structural to MNI - then apply this to the coregistered functionals
    normalize = Node(interface=spm.Normalize(), name="normalize")
    normalize.inputs.template = os.path.abspath(t1_template_file)

    #Plot normalization Check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    #Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    #remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    #Smooth
    #implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    fwhmlist = [0, 5, 8]
    smooth.iterables = ('fwhm', fwhmlist)

    #Create Covariate matrix
    make_covariates = Node(interface=Create_Covariates(),
                           name="make_covariates")

    ###############################
    ## Create Pipeline
    ###############################

    Preprocessed = Workflow(name="Preprocessed")
    Preprocessed.base_dir = os.path.abspath(data_dir + '/' + subject_id +
                                            '/Functional')

    Preprocessed.connect([
        (ds, ta, [(('func', get_n_slices), "n_slices")]),
        (ta, slice_timing, [("ta", "time_acquisition")]),
        (ds, slice_timing, [
            ('func', 'in_files'),
            (('func', get_n_slices), "num_slices"),
            (('func', get_slice_order), "slice_order"),
        ]),
        (slice_timing, realign, [('timecorrected_files', 'in_files')]),
        (realign, compute_mask, [('mean_image', 'mean_volume')]),
        (realign, coregister, [('mean_image', 'target')]),
        (ds, coregister, [('struc', 'source')]),
        (coregister, segment, [('coregistered_source', 'data')]),
        (segment, normalize, [
            ('transformation_mat', 'parameter_file'),
            ('bias_corrected_image', 'source'),
        ]),
        (realign, normalize, [('realigned_files', 'apply_to_files'),
                              (('realigned_files', get_vox_dims),
                               'write_voxel_sizes')]),
        (normalize, smooth, [('normalized_files', 'in_files')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters')]),
        (realign, art, [('realigned_files', 'realigned_files')]),
        (realign, plot_realign, [('realignment_parameters',
                                  'realignment_parameters')]),
        (normalize, plot_normalization_check, [('normalized_files', 'wra_img')
                                               ]),
        (realign, make_covariates, [('realignment_parameters',
                                     'realignment_parameters')]),
        (art, make_covariates, [('outlier_files', 'spike_id')]),
    ])
    return Preprocessed
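
A minimal sketch of driving the factory above (its def line sits earlier in this listing, so a placeholder name is used; the paths and task names are hypothetical):

preproc = build_preproc_workflow(  # placeholder for the factory defined above
    data_dir='/data/study',        # hypothetical study directory
    subject_id='s01',
    task_list=['task1', 'task2'])
preproc.run(plugin='MultiProc', plugin_args={'n_procs': 2})
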
Example n. 17

    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.filemanip import unzip_nii
        from ..t1_volume_dartel2mni import t1_volume_dartel2mni_utils as dartel2mni_utils

        # Unzipping
        # =========
        unzip_tissues_node = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_tissues_node',
                                         iterfield=['in_file'])
        unzip_flowfields_node = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                            name='unzip_flowfields_node',
                                            iterfield=['in_file'])
        unzip_template_node = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                       name='unzip_template_node')

        # DARTEL2MNI Registration
        # =======================
        dartel2mni_node = npe.MapNode(
            spm.DARTELNorm2MNI(),
            name='dartel2MNI',
            iterfield=['apply_to_files', 'flowfield_files'])
        if self.parameters['voxel_size'] is not None:
            dartel2mni_node.inputs.voxel_size = tuple(
                self.parameters['voxel_size'])
        dartel2mni_node.inputs.modulate = self.parameters['modulate']
        dartel2mni_node.inputs.fwhm = 0

        # Smoothing
        # =========
        if self.parameters['smooth'] is not None and len(
                self.parameters['smooth']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['in_files'])

            smoothing_node.iterables = [
                ('fwhm', [[x, x, x] for x in self.parameters['smooth']]),
                ('out_prefix',
                 ['fwhm-' + str(x) + 'mm_' for x in self.parameters['smooth']])
            ]
            smoothing_node.synchronize = True
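            # synchronize=True pairs the two iterables element-wise (fwhm[i]
            # with out_prefix[i]) instead of taking their Cartesian product.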

            join_smoothing_node = npe.JoinNode(
                interface=nutil.Function(
                    input_names=['smoothed_normalized_files'],
                    output_names=['smoothed_normalized_files'],
                    function=dartel2mni_utils.join_smoothed_files),
                joinsource='smoothing_node',
                joinfield='smoothed_normalized_files',
                name='join_smoothing_node')
            self.connect([(dartel2mni_node, smoothing_node,
                           [('normalized_files', 'in_files')]),
                          (smoothing_node, join_smoothing_node,
                           [('smoothed_files', 'smoothed_normalized_files')]),
                          (join_smoothing_node, self.output_node,
                           [('smoothed_normalized_files',
                             'smoothed_normalized_files')])])
        else:
            self.output_node.inputs.smoothed_normalized_files = []

        # Connection
        # ==========
        self.connect([(self.input_node, unzip_tissues_node,
                       [('native_segmentations', 'in_file')]),
                      (self.input_node, unzip_flowfields_node,
                       [('flowfield_files', 'in_file')]),
                      (self.input_node, unzip_template_node, [('template_file',
                                                               'in_file')]),
                      (unzip_tissues_node, dartel2mni_node,
                       [('out_file', 'apply_to_files')]),
                      (unzip_flowfields_node, dartel2mni_node,
                       [(('out_file', dartel2mni_utils.prepare_flowfields,
                          self.parameters['tissues']), 'flowfield_files')]),
                      (unzip_template_node, dartel2mni_node,
                       [('out_file', 'template_file')]),
                      (dartel2mni_node, self.output_node,
                       [('normalized_files', 'normalized_files')])])
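
The join helper imported above is not shown in this excerpt; a minimal sketch of what such a flattening function might look like (hypothetical, not the Clinica source):

def join_smoothed_files(smoothed_normalized_files):
    """Flatten the per-FWHM lists gathered by the JoinNode into one list."""
    return [f for sublist in smoothed_normalized_files for f in sublist]
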
Example n. 18
def create_spm_preproc(name='preproc'):
    """Create an spm preprocessing workflow with freesurfer registration and
    artifact detection.

    The workflow realigns and smooths and registers the functional images with
    the subject's freesurfer space.

    Example
    -------

    >>> preproc = create_spm_preproc()
    >>> preproc.base_dir = '.'
    >>> preproc.inputs.inputspec.fwhm = 6
    >>> preproc.inputs.inputspec.subject_id = 's1'
    >>> preproc.inputs.inputspec.subjects_dir = '.'
    >>> preproc.inputs.inputspec.functionals = ['f3.nii', 'f5.nii']
    >>> preproc.inputs.inputspec.norm_threshold = 1
    >>> preproc.inputs.inputspec.zintensity_threshold = 3

    Inputs::

         inputspec.functionals : functional runs use 4d nifti
         inputspec.subject_id : freesurfer subject id
         inputspec.subjects_dir : freesurfer subjects dir
         inputspec.fwhm : smoothing fwhm
         inputspec.norm_threshold : norm threshold for outliers
         inputspec.zintensity_threshold : intensity threshold in z-score

    Outputs::

         outputspec.realignment_parameters : realignment parameter files
         outputspec.smoothed_files : smoothed functional files
         outputspec.outlier_files : list of outliers
         outputspec.outlier_stats : statistics of outliers
         outputspec.outlier_plots : images of outliers
         outputspec.mask_file : binary mask file in reference image space
         outputspec.reg_file : registration file that maps reference image to
                                 freesurfer space
         outputspec.reg_cost : cost of registration (useful for detecting misalignment)
    """
    """
    Initialize the workflow
    """

    workflow = pe.Workflow(name=name)
    """
    Define the inputs to this workflow
    """

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'functionals', 'subject_id', 'subjects_dir', 'fwhm', 'norm_threshold',
        'zintensity_threshold'
    ]),
                        name='inputspec')
    """
    Setup the processing nodes and create the mask generation and coregistration
    workflow
    """

    poplist = lambda x: x.pop()
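    # create_getmask_flow returns list-valued outputs; poplist extracts the
    # single file so interfaces expecting one path receive a plain string.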
    realign = pe.Node(spm.Realign(), name='realign')
    workflow.connect(inputnode, 'functionals', realign, 'in_files')
    maskflow = create_getmask_flow()
    workflow.connect([(inputnode, maskflow,
                       [('subject_id', 'inputspec.subject_id'),
                        ('subjects_dir', 'inputspec.subjects_dir')])])
    maskflow.inputs.inputspec.contrast_type = 't2'
    workflow.connect(realign, 'mean_image', maskflow, 'inputspec.source_file')
    smooth = pe.Node(spm.Smooth(), name='smooth')
    workflow.connect(inputnode, 'fwhm', smooth, 'fwhm')
    workflow.connect(realign, 'realigned_files', smooth, 'in_files')
    artdetect = pe.Node(ra.ArtifactDetect(mask_type='file',
                                          parameter_source='SPM',
                                          use_differences=[True, False],
                                          use_norm=True,
                                          save_plot=True),
                        name='artdetect')
    workflow.connect([(inputnode, artdetect,
                       [('norm_threshold', 'norm_threshold'),
                        ('zintensity_threshold', 'zintensity_threshold')])])
    workflow.connect([(realign, artdetect, [
        ('realigned_files', 'realigned_files'),
        ('realignment_parameters', 'realignment_parameters')
    ])])
    workflow.connect(maskflow, ('outputspec.mask_file', poplist), artdetect,
                     'mask_file')
    """
    Define the outputs of the workflow and connect the nodes to the outputnode
    """

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        "realignment_parameters", "smoothed_files", "mask_file", "reg_file",
        "reg_cost", 'outlier_files', 'outlier_stats', 'outlier_plots'
    ]),
                         name="outputspec")
    workflow.connect([
        (maskflow, outputnode, [("outputspec.reg_file", "reg_file")]),
        (maskflow, outputnode, [("outputspec.reg_cost", "reg_cost")]),
        (maskflow, outputnode, [(("outputspec.mask_file", poplist),
                                 "mask_file")]),
        (realign, outputnode, [('realignment_parameters',
                                'realignment_parameters')]),
        (smooth, outputnode, [('smoothed_files', 'smoothed_files')]),
        (artdetect, outputnode, [('outlier_files', 'outlier_files'),
                                 ('statistic_files', 'outlier_stats'),
                                 ('plot_files', 'outlier_plots')])
    ])
    return workflow
Example n. 19
# contrasts of interest
contrasts_of_interest = pe.Node(niu.Function(input_names=['subject_id',
                                                          'conditions'],
                                             output_names=['contrasts'],
                                             function=_specify_contrast),
                                name='contrasts_of_interest')
contrasts_of_interest.inputs.conditions = CONDITIONS
workflow.connect(infosource, 'subject_id', contrasts_of_interest, 'subject_id')

# fmri model specifications
unzip_source = pe.MapNode(misc.Gunzip(),
                          iterfield=['in_file'],
                          name='unzip_source')
workflow.connect(datasource, 'func', unzip_source, 'in_file')

smooth = pe.Node(interface=spm.Smooth(fwhm=[8, 8, 8]),
                    name='smooth')
workflow.connect(unzip_source, 'out_file', smooth, 'in_files')

modelspec = pe.Node(interface=modelgen.SpecifySPMModel(),
                    name='modelspec')
modelspec.inputs.input_units = 'secs'
modelspec.inputs.output_units = 'secs'
modelspec.inputs.time_repetition = TR
modelspec.inputs.high_pass_filter_cutoff = HIGHPASS_CUTOFF
workflow.connect(get_session_informations, 'informations', modelspec, 'subject_info')
workflow.connect(smooth, 'smoothed_files', modelspec, 'functional_runs')

# merge runs's masks
merge_masks = pe.Node(interface=fsl.Merge(dimension='t'),
                     name='merge_masks')
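
The fragment stops after merging the run masks; a hedged sketch of the usual next step (not part of the original snippet) would collapse the merged 4D mask into a single intersection mask:

mask_intersect = pe.Node(fsl.ImageMaths(op_string='-Tmin -bin'),
                         name='mask_intersect')
workflow.connect(merge_masks, 'merged_file', mask_intersect, 'in_file')
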
Example n. 20

def create_spm_preproc(c, name='preproc'):
    """Create an SPM preprocessing workflow configured from a study config object `c`."""

    from nipype.workflows.smri.freesurfer.utils import create_getmask_flow
    import nipype.algorithms.rapidart as ra
    import nipype.interfaces.spm as spm
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    import nipype.interfaces.freesurfer as fs

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'functionals', 'subject_id', 'subjects_dir', 'fwhm', 'norm_threshold',
        'zintensity_threshold', 'tr', 'do_slicetime', 'sliceorder',
        'parameters', 'node', 'csf_prob', 'wm_prob', 'gm_prob'
    ]),
                        name='inputspec')

    poplist = lambda x: x.pop()

    sym_func = pe.Node(niu.Function(input_names=['in_file'],
                                    output_names=['out_link'],
                                    function=do_symlink),
                       name='func_symlink')

    # REALIGN

    realign = pe.Node(niu.Function(
        input_names=[
            'node', 'in_file', 'tr', 'do_slicetime', 'sliceorder', 'parameters'
        ],
        output_names=['out_file', 'par_file', 'parameter_source'],
        function=mod_realign),
                      name="mod_realign")
    workflow.connect(inputnode, 'parameters', realign, 'parameters')
    workflow.connect(inputnode, 'functionals', realign, 'in_file')
    workflow.connect(inputnode, 'tr', realign, 'tr')
    workflow.connect(inputnode, 'do_slicetime', realign, 'do_slicetime')
    workflow.connect(inputnode, 'sliceorder', realign, 'sliceorder')
    workflow.connect(inputnode, 'node', realign, 'node')

    # TAKE MEAN IMAGE

    mean = art_mean_workflow()
    workflow.connect(realign, 'out_file', mean, 'inputspec.realigned_files')
    workflow.connect(realign, 'par_file', mean,
                     'inputspec.realignment_parameters')
    workflow.connect(realign, 'parameter_source', mean,
                     'inputspec.parameter_source')

    # CREATE BRAIN MASK

    maskflow = create_getmask_flow()
    workflow.connect([(inputnode, maskflow,
                       [('subject_id', 'inputspec.subject_id'),
                        ('subjects_dir', 'inputspec.subjects_dir')])])
    maskflow.inputs.inputspec.contrast_type = 't2'
    workflow.connect(mean, 'outputspec.mean_image', maskflow,
                     'inputspec.source_file')

    # SEGMENT

    segment = pe.Node(spm.Segment(csf_output_type=[True, True, False],
                                  gm_output_type=[True, True, False],
                                  wm_output_type=[True, True, False]),
                      name='segment')
    mergefunc = lambda in1, in2, in3: [in1, in2, in3]

    merge = pe.Node(niu.Function(input_names=['in1', 'in2', 'in3'],
                                 output_names=['out'],
                                 function=mergefunc),
                    name='merge')
    workflow.connect(inputnode, 'csf_prob', merge, 'in3')
    workflow.connect(inputnode, 'wm_prob', merge, 'in2')
    workflow.connect(inputnode, 'gm_prob', merge, 'in1')

    sym_prob = sym_func
    workflow.connect(merge, 'out', sym_prob, 'in_file')
    workflow.connect(sym_prob, 'out_link', segment, 'tissue_prob_maps')

    xform_mask = pe.Node(fs.ApplyVolTransform(fs_target=True),
                         name='transform_mask')
    workflow.connect(maskflow, ('outputspec.reg_file', pickfirst), xform_mask,
                     'reg_file')
    workflow.connect(maskflow, ('outputspec.mask_file', pickfirst), xform_mask,
                     'source_file')
    workflow.connect(xform_mask, "transformed_file", segment, 'mask_image')

    fssource = maskflow.get_node('fssource')
    convert2nii = pe.Node(fs.MRIConvert(in_type='mgz', out_type='nii'),
                          name='convert2nii')
    workflow.connect(fssource, 'brain', convert2nii, 'in_file')
    workflow.connect(convert2nii, 'out_file', segment, 'data')

    # NORMALIZE

    normalize = pe.MapNode(spm.Normalize(jobtype='write'),
                           name='normalize',
                           iterfield=['apply_to_files'])
    normalize_struct = normalize.clone('normalize_struct')
    normalize_mask = normalize.clone('normalize_mask')

    workflow.connect(segment, 'transformation_mat', normalize,
                     'parameter_file')
    workflow.connect(segment, 'transformation_mat', normalize_mask,
                     'parameter_file')
    workflow.connect(segment, 'transformation_mat', normalize_struct,
                     'parameter_file')
    workflow.connect(convert2nii, 'out_file', normalize_struct,
                     'apply_to_files')
    workflow.connect(xform_mask, "transformed_file", normalize_mask,
                     'apply_to_files')

    xform_image = pe.MapNode(fs.ApplyVolTransform(fs_target=True),
                             name='xform_image',
                             iterfield=['source_file'])
    workflow.connect(maskflow, ('outputspec.reg_file', pickfirst), xform_image,
                     'reg_file')
    workflow.connect(realign, 'out_file', xform_image, "source_file")
    workflow.connect(xform_image, "transformed_file", normalize,
                     "apply_to_files")

    #SMOOTH

    smooth = pe.Node(spm.Smooth(), name='smooth')

    workflow.connect(inputnode, 'fwhm', smooth, 'fwhm')
    workflow.connect(normalize, 'normalized_files', smooth, 'in_files')

    # ART

    artdetect = pe.Node(ra.ArtifactDetect(mask_type='file',
                                          use_differences=[True, False],
                                          use_norm=True,
                                          save_plot=True),
                        name='artdetect')
    workflow.connect(realign, 'parameter_source', artdetect,
                     'parameter_source')
    workflow.connect([(inputnode, artdetect,
                       [('norm_threshold', 'norm_threshold'),
                        ('zintensity_threshold', 'zintensity_threshold')])])
    workflow.connect([(realign, artdetect, [('out_file', 'realigned_files'),
                                            ('par_file',
                                             'realignment_parameters')])])
    workflow.connect(maskflow, ('outputspec.mask_file', poplist), artdetect,
                     'mask_file')

    # OUTPUTS

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        "realignment_parameters", "smoothed_files", "mask_file", "mean_image",
        "reg_file", "reg_cost", 'outlier_files', 'outlier_stats',
        'outlier_plots', 'norm_components', 'mod_csf', 'unmod_csf', 'mod_wm',
        'unmod_wm', 'mod_gm', 'unmod_gm', 'mean', 'normalized_struct',
        'normalization_parameters', 'reverse_normalize_parameters'
    ]),
                         name="outputspec")
    workflow.connect([
        (maskflow, outputnode, [("outputspec.reg_file", "reg_file")]),
        (maskflow, outputnode, [("outputspec.reg_cost", "reg_cost")]),
        (realign, outputnode, [('par_file', 'realignment_parameters')]),
        (smooth, outputnode, [('smoothed_files', 'smoothed_files')]),
        (artdetect, outputnode, [('outlier_files', 'outlier_files'),
                                 ('statistic_files', 'outlier_stats'),
                                 ('plot_files', 'outlier_plots'),
                                 ('norm_files', 'norm_components')])
    ])
    workflow.connect(normalize_mask, "normalized_files", outputnode,
                     "mask_file")
    workflow.connect(segment, 'modulated_csf_image', outputnode, 'mod_csf')
    workflow.connect(segment, 'modulated_wm_image', outputnode, 'mod_wm')
    workflow.connect(segment, 'modulated_gm_image', outputnode, 'mod_gm')
    workflow.connect(segment, 'normalized_csf_image', outputnode, 'unmod_csf')
    workflow.connect(segment, 'normalized_wm_image', outputnode, 'unmod_wm')
    workflow.connect(segment, 'normalized_gm_image', outputnode, 'unmod_gm')
    workflow.connect(mean, 'outputspec.mean_image', outputnode, 'mean')
    workflow.connect(normalize_struct, 'normalized_files', outputnode,
                     'normalized_struct')
    workflow.connect(segment, 'transformation_mat', outputnode,
                     'normalization_parameters')
    workflow.connect(segment, 'inverse_transformation_mat', outputnode,
                     'reverse_normalize_parameters')

    # CONNECT TO CONFIG

    workflow.inputs.inputspec.fwhm = c.fwhm
    workflow.inputs.inputspec.subjects_dir = c.surf_dir
    workflow.inputs.inputspec.norm_threshold = c.norm_thresh
    workflow.inputs.inputspec.zintensity_threshold = c.z_thresh
    workflow.inputs.inputspec.node = c.motion_correct_node
    workflow.inputs.inputspec.tr = c.TR
    workflow.inputs.inputspec.do_slicetime = c.do_slicetiming
    workflow.inputs.inputspec.sliceorder = c.SliceOrder
    workflow.inputs.inputspec.csf_prob = c.csf_prob
    workflow.inputs.inputspec.gm_prob = c.grey_prob
    workflow.inputs.inputspec.wm_prob = c.white_prob
    workflow.inputs.inputspec.parameters = {"order": c.order}
    workflow.base_dir = c.working_dir
    workflow.config = {'execution': {'crashdump_dir': c.crash_dir}}

    datagrabber = get_dataflow(c)

    workflow.connect(datagrabber, 'func', inputnode, 'functionals')

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id']),
                         name='subject_names')
    if not c.test_mode:
        infosource.iterables = ('subject_id', c.subjects)
    else:
        infosource.iterables = ('subject_id', c.subjects[:1])

    workflow.connect(infosource, 'subject_id', inputnode, 'subject_id')
    workflow.connect(infosource, 'subject_id', datagrabber, 'subject_id')
    sub = lambda x: [('_subject_id_%s' % x, '')]

    sinker = pe.Node(nio.DataSink(), name='sinker')
    workflow.connect(infosource, 'subject_id', sinker, 'container')
    workflow.connect(infosource, ('subject_id', sub), sinker, 'substitutions')
    sinker.inputs.base_directory = c.sink_dir
    outputspec = workflow.get_node('outputspec')
    workflow.connect(outputspec, 'realignment_parameters', sinker,
                     'spm_preproc.realignment_parameters')
    workflow.connect(outputspec, 'smoothed_files', sinker,
                     'spm_preproc.smoothed_outputs')
    workflow.connect(outputspec, 'outlier_files', sinker,
                     'spm_preproc.art.@outlier_files')
    workflow.connect(outputspec, 'outlier_stats', sinker,
                     'spm_preproc.art.@outlier_stats')
    workflow.connect(outputspec, 'outlier_plots', sinker,
                     'spm_preproc.art.@outlier_plots')
    workflow.connect(outputspec, 'norm_components', sinker,
                     'spm_preproc.art.@norm')
    workflow.connect(outputspec, 'reg_file', sinker,
                     'spm_preproc.bbreg.@reg_file')
    workflow.connect(outputspec, 'reg_cost', sinker,
                     'spm_preproc.bbreg.@reg_cost')
    workflow.connect(outputspec, 'mask_file', sinker,
                     'spm_preproc.mask.@mask_file')
    workflow.connect(outputspec, 'mod_csf', sinker,
                     'spm_preproc.segment.mod.@csf')
    workflow.connect(outputspec, 'mod_wm', sinker,
                     'spm_preproc.segment.mod.@wm')
    workflow.connect(outputspec, 'mod_gm', sinker,
                     'spm_preproc.segment.mod.@gm')
    workflow.connect(outputspec, 'unmod_csf', sinker,
                     'spm_preproc.segment.unmod.@csf')
    workflow.connect(outputspec, 'unmod_wm', sinker,
                     'spm_preproc.segment.unmod.@wm')
    workflow.connect(outputspec, 'unmod_gm', sinker,
                     'spm_preproc.segment.unmod.@gm')
    workflow.connect(outputspec, 'mean', sinker, 'spm_preproc.mean')
    workflow.connect(outputspec, 'normalized_struct', sinker,
                     'spm_preproc.normalized_struct')
    workflow.connect(outputspec, 'normalization_parameters', sinker,
                     'spm_preproc.normalization_parameters.@forward')
    workflow.connect(outputspec, 'reverse_normalize_parameters', sinker,
                     'spm_preproc.normalization_parameters.@reverse')

    return workflow
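
One way to drive this config-based factory (the config module and process count are hypothetical):

if __name__ == '__main__':
    from study_config import c  # hypothetical module exposing the attributes used above
    wf = create_spm_preproc(c)
    wf.write_graph(graph2use='colored')  # inspect the wiring before running
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})
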
Example n. 21
    def build_core_nodes(self):
        """Build and connect an output node to the pipeline."""
        import nipype.interfaces.spm as spm
        import nipype.interfaces.spm.utils as spmutils
        from nipype.interfaces.petpvc import PETPVC
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.filemanip import unzip_nii
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone
        import clinica.pipelines.pet_volume.pet_volume_utils as utils

        if spm_standalone_is_available():
            use_spm_standalone()

        # Initialize pipeline
        # ===================
        init_node = npe.Node(interface=nutil.Function(
            input_names=['pet_nii'],
            output_names=['pet_nii'],
            function=utils.init_input_node),
                             name='init_pipeline')

        # Unzipping
        # =========
        unzip_pet_image = npe.Node(nutil.Function(input_names=['in_file'],
                                                  output_names=['out_file'],
                                                  function=unzip_nii),
                                   name='unzip_pet_image')

        unzip_t1_image_native = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_t1_image_native')

        unzip_flow_fields = npe.Node(nutil.Function(input_names=['in_file'],
                                                    output_names=['out_file'],
                                                    function=unzip_nii),
                                     name='unzip_flow_fields')

        unzip_dartel_template = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_dartel_template')

        unzip_reference_mask = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                        name='unzip_reference_mask')

        unzip_mask_tissues = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_mask_tissues',
                                         iterfield=['in_file'])

        # Coregister PET into T1 native space
        # ===================================
        coreg_pet_t1 = npe.Node(spm.Coregister(), name='coreg_pet_t1')

        # Spatially normalize PET into MNI
        # ================================
        dartel_mni_reg = npe.Node(spm.DARTELNorm2MNI(), name='dartel_mni_reg')
        dartel_mni_reg.inputs.modulate = False
        dartel_mni_reg.inputs.fwhm = 0

        # Reslice reference region mask into PET
        # ======================================
        reslice = npe.Node(spmutils.Reslice(), name='reslice')

        # Normalize PET values according to reference region
        # ==================================================
        norm_to_ref = npe.Node(nutil.Function(
            input_names=['pet_image', 'region_mask'],
            output_names=['suvr_pet_path'],
            function=utils.normalize_to_reference),
                               name='norm_to_ref')

        # Create binary mask from segmented tissues
        # =========================================
        binary_mask = npe.Node(nutil.Function(
            input_names=['tissues', 'threshold'],
            output_names=['out_mask'],
            function=utils.create_binary_mask),
                               name='binary_mask')
        binary_mask.inputs.threshold = self.parameters['mask_threshold']

        # Mask PET image
        # ==============
        apply_mask = npe.Node(nutil.Function(
            input_names=['image', 'binary_mask'],
            output_names=['masked_image_path'],
            function=utils.apply_binary_mask),
                              name='apply_mask')

        # Smoothing
        # =========
        if self.parameters['smooth'] is not None and len(
                self.parameters['smooth']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['fwhm', 'out_prefix'])
            smoothing_node.inputs.fwhm = [[x, x, x]
                                          for x in self.parameters['smooth']]
            smoothing_node.inputs.out_prefix = [
                'fwhm-' + str(x) + 'mm_' for x in self.parameters['smooth']
            ]
            self.connect([(apply_mask, smoothing_node, [('masked_image_path',
                                                         'in_files')]),
                          (smoothing_node, self.output_node,
                           [('smoothed_files', 'pet_suvr_masked_smoothed')])])
        else:
            self.output_node.inputs.pet_suvr_masked_smoothed = [[]]

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(nutil.Function(
            input_names=['in_image', 'in_atlas_list'],
            output_names=['atlas_statistics'],
            function=utils.atlas_statistics),
                                       name='atlas_stats_node',
                                       iterfield=['in_image'])
        atlas_stats_node.inputs.in_atlas_list = self.parameters['atlases']

        # Connection
        # ==========
        self.connect([
            (self.input_node, init_node, [('pet_image', 'pet_nii')]),
            (init_node, unzip_pet_image, [('pet_nii', 'in_file')]),
            (self.input_node, unzip_t1_image_native, [('t1_image_native',
                                                       'in_file')]),
            (self.input_node, unzip_flow_fields, [('flow_fields', 'in_file')]),
            (self.input_node, unzip_dartel_template, [('dartel_template',
                                                       'in_file')]),
            (self.input_node, unzip_reference_mask, [('reference_mask',
                                                      'in_file')]),
            (self.input_node, unzip_mask_tissues, [('mask_tissues', 'in_file')
                                                   ]),
            (unzip_pet_image, coreg_pet_t1, [('out_file', 'source')]),
            (unzip_t1_image_native, coreg_pet_t1, [('out_file', 'target')]),
            (unzip_flow_fields, dartel_mni_reg, [('out_file',
                                                  'flowfield_files')]),
            (unzip_dartel_template, dartel_mni_reg, [('out_file',
                                                      'template_file')]),
            (unzip_reference_mask, reslice, [('out_file', 'in_file')]),
            (unzip_mask_tissues, binary_mask, [('out_file', 'tissues')]),
            (coreg_pet_t1, dartel_mni_reg, [('coregistered_source',
                                             'apply_to_files')]),
            (dartel_mni_reg, reslice, [('normalized_files', 'space_defining')
                                       ]),
            (dartel_mni_reg, norm_to_ref, [('normalized_files', 'pet_image')]),
            (reslice, norm_to_ref, [('out_file', 'region_mask')]),
            (norm_to_ref, apply_mask, [('suvr_pet_path', 'image')]),
            (binary_mask, apply_mask, [('out_mask', 'binary_mask')]),
            (norm_to_ref, atlas_stats_node, [('suvr_pet_path', 'in_image')]),
            (coreg_pet_t1, self.output_node, [('coregistered_source',
                                               'pet_t1_native')]),
            (dartel_mni_reg, self.output_node, [('normalized_files', 'pet_mni')
                                                ]),
            (norm_to_ref, self.output_node, [('suvr_pet_path', 'pet_suvr')]),
            (binary_mask, self.output_node, [('out_mask', 'binary_mask')]),
            (apply_mask, self.output_node, [('masked_image_path',
                                             'pet_suvr_masked')]),
            (atlas_stats_node, self.output_node, [('atlas_statistics',
                                                   'atlas_statistics')])
        ])

        # PVC
        # ==========
        if self.parameters['apply_pvc']:
            # Unzipping
            # =========
            unzip_pvc_mask_tissues = npe.MapNode(nutil.Function(
                input_names=['in_file'],
                output_names=['out_file'],
                function=unzip_nii),
                                                 name='unzip_pvc_mask_tissues',
                                                 iterfield=['in_file'])

            # Creating Mask to use in PVC
            # ===========================
            pvc_mask = npe.Node(nutil.Function(input_names=['tissues'],
                                               output_names=['out_mask'],
                                               function=utils.create_pvc_mask),
                                name='pvc_mask')
            # PET PVC
            # =======
            petpvc = npe.Node(PETPVC(), name='pvc')
            petpvc.inputs.pvc = 'RBV'
            petpvc.inputs.out_file = 'pvc.nii'
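            # 'RBV' selects PETPVC's region-based voxel-wise correction method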

            # Spatially normalize PET into MNI
            # ================================
            dartel_mni_reg_pvc = npe.Node(spm.DARTELNorm2MNI(),
                                          name='dartel_mni_reg_pvc')
            dartel_mni_reg_pvc.inputs.modulate = False
            dartel_mni_reg_pvc.inputs.fwhm = 0

            # Reslice reference region mask into PET
            # ======================================
            reslice_pvc = npe.Node(spmutils.Reslice(), name='reslice_pvc')

            # Normalize PET values according to reference region
            # ==================================================
            norm_to_ref_pvc = npe.Node(nutil.Function(
                input_names=['pet_image', 'region_mask'],
                output_names=['suvr_pet_path'],
                function=utils.normalize_to_reference),
                                       name='norm_to_ref_pvc')

            # Mask PET image
            # ==============
            apply_mask_pvc = npe.Node(nutil.Function(
                input_names=['image', 'binary_mask'],
                output_names=['masked_image_path'],
                function=utils.apply_binary_mask),
                                      name='apply_mask_pvc')
            # Smoothing
            # =========
            if self.parameters['smooth'] is not None and len(
                    self.parameters['smooth']) > 0:
                smoothing_pvc = npe.MapNode(spm.Smooth(),
                                            name='smoothing_pvc',
                                            iterfield=['fwhm', 'out_prefix'])
                smoothing_pvc.inputs.fwhm = [[x, x, x]
                                             for x in self.parameters['smooth']
                                             ]
                smoothing_pvc.inputs.out_prefix = [
                    'fwhm-' + str(x) + 'mm_' for x in self.parameters['smooth']
                ]
                self.connect([(apply_mask_pvc, smoothing_pvc,
                               [('masked_image_path', 'in_files')]),
                              (smoothing_pvc, self.output_node,
                               [('smoothed_files',
                                 'pet_pvc_suvr_masked_smoothed')])])
            else:
                self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
            # Atlas Statistics
            # ================
            atlas_stats_pvc = npe.MapNode(nutil.Function(
                input_names=['in_image', 'in_atlas_list'],
                output_names=['atlas_statistics'],
                function=utils.atlas_statistics),
                                          name='atlas_stats_pvc',
                                          iterfield=['in_image'])
            atlas_stats_pvc.inputs.in_atlas_list = self.parameters['atlases']

            # Connection
            # ==========
            self.connect([
                (self.input_node, unzip_pvc_mask_tissues, [('pvc_mask_tissues',
                                                            'in_file')]),
                (unzip_pvc_mask_tissues, pvc_mask, [('out_file', 'tissues')]),
                (unzip_flow_fields, dartel_mni_reg_pvc, [('out_file',
                                                          'flowfield_files')]),
                (unzip_dartel_template, dartel_mni_reg_pvc,
                 [('out_file', 'template_file')]),
                (unzip_reference_mask, reslice_pvc, [('out_file', 'in_file')]),
                (coreg_pet_t1, petpvc, [('coregistered_source', 'in_file'),
                                        (('coregistered_source',
                                          utils.pet_pvc_name, 'RBV'),
                                         'out_file')]),
                (pvc_mask, petpvc, [('out_mask', 'mask_file')]),
                (self.input_node, petpvc,
                 [(('psf', utils.get_from_list, 0), 'fwhm_x'),
                  (('psf', utils.get_from_list, 1), 'fwhm_y'),
                  (('psf', utils.get_from_list, 2), 'fwhm_z')]),
                (petpvc, dartel_mni_reg_pvc, [('out_file', 'apply_to_files')]),
                (dartel_mni_reg_pvc, reslice_pvc, [('normalized_files',
                                                    'space_defining')]),
                (dartel_mni_reg_pvc, norm_to_ref_pvc, [('normalized_files',
                                                        'pet_image')]),
                (reslice_pvc, norm_to_ref_pvc, [('out_file', 'region_mask')]),
                (norm_to_ref_pvc, apply_mask_pvc, [('suvr_pet_path', 'image')
                                                   ]),
                (binary_mask, apply_mask_pvc, [('out_mask', 'binary_mask')]),
                (norm_to_ref_pvc, atlas_stats_pvc, [('suvr_pet_path',
                                                     'in_image')]),
                (petpvc, self.output_node, [('out_file', 'pet_pvc')]),
                (dartel_mni_reg_pvc, self.output_node, [('normalized_files',
                                                         'pet_pvc_mni')]),
                (norm_to_ref_pvc, self.output_node, [('suvr_pet_path',
                                                      'pet_pvc_suvr')]),
                (apply_mask_pvc, self.output_node, [('masked_image_path',
                                                     'pet_pvc_suvr_masked')]),
                (atlas_stats_pvc, self.output_node, [('atlas_statistics',
                                                      'pvc_atlas_statistics')])
            ])
        else:
            self.output_node.inputs.pet_pvc = [[]]
            self.output_node.inputs.pet_pvc_mni = [[]]
            self.output_node.inputs.pet_pvc_suvr = [[]]
            self.output_node.inputs.pet_pvc_suvr_masked = [[]]
            self.output_node.inputs.pvc_atlas_statistics = [[]]
            self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
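
The SUVR helper used above comes from clinica's pet_volume utils and is not shown; a minimal sketch of the underlying idea (a hypothetical nibabel re-implementation, not the Clinica source):

import nibabel as nib
import numpy as np

def normalize_to_reference(pet_image, region_mask):
    # Scale the PET image by its mean uptake inside the reference region.
    pet = nib.load(pet_image)
    data = pet.get_fdata()
    mask = nib.load(region_mask).get_fdata() > 0
    suvr = data / np.mean(data[mask])
    out_path = 'suvr_pet.nii.gz'
    nib.save(nib.Nifti1Image(suvr, pet.affine, pet.header), out_path)
    return out_path
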
Example n. 22
# Start at the slice-time corrected image
base_fname = 'afds114_sub009_t2r1.nii'
structural_fname = 'ds114_sub009_highres.nii'

# Realign
realign = spm.Realign()
realign.inputs.in_files = base_fname
# Do not write resliced files, do write mean image
realign.inputs.write_which = [0, 1]
realign.run()

# Coregistration
coreg = spm.Coregister()
# Coregister structural to mean image from realignment
coreg.inputs.target = 'mean' + base_fname
coreg.inputs.source = structural_fname
coreg.run()

# Normalization / resampling with normalization + realign params
seg_norm = spm.Normalize12()
seg_norm.inputs.image_to_align = structural_fname
seg_norm.inputs.apply_to_files = base_fname
seg_norm.run()

# Smoothing
smooth = spm.Smooth()
smooth.inputs.in_files = 'w' + base_fname
smooth.inputs.fwhm = [8, 8, 8]
smooth.run()
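
# Each SPM step writes its output beside the input with a prefix: realignment
# produces 'mean' + base_fname, Normalize12 writes 'w' + base_fname, and
# smoothing yields 'sw' + base_fname.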
Example n. 23
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.
        """

        import os
        import platform
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import clinica.pipelines.t1_volume_tissue_segmentation.t1_volume_tissue_segmentation_utils as seg_utils
        import clinica.pipelines.t1_volume_create_dartel.t1_volume_create_dartel_utils as dartel_utils
        import clinica.pipelines.t1_volume_dartel2mni.t1_volume_dartel2mni_utils as dartel2mni_utils
        from clinica.utils.io import unzip_nii

        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        if 'SPMSTANDALONE_HOME' in os.environ:
            if 'MCR_HOME' in os.environ:
                matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'],
                                          'run_spm12.sh') \
                             + ' ' + os.environ['MCR_HOME'] \
                             + ' script'
                spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
                version = spm.SPMCommand().version
            else:
                raise EnvironmentError('MCR_HOME variable not in environment, although '
                                       + 'SPMSTANDALONE_HOME has been found')
        else:
            version = spm.Info.getinfo()

        if version:
            if isinstance(version, dict):
                spm_path = version['path']
                if version['name'] == 'SPM8':
                    print('You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                          + 'Please upgrade your SPM toolbox.')
                    tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
                elif version['name'] == 'SPM12':
                    tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
                else:
                    raise RuntimeError('SPM version could not be identified as SPM8 or SPM12. Please check your SPM installation.')
            if isinstance(version, str):
                if float(version) >= 12.7169:
                    if platform.system() == 'Darwin':
                        tissue_map = os.path.join(str(spm_home), 'spm12.app/Contents/MacOS/spm12_mcr/spm12/spm12/tpm/TPM.nii')
                    else:
                        tissue_map = os.path.join(str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
                else:
                    raise RuntimeError('SPM standalone version not supported. Please upgrade SPM standalone.')
        else:
            raise RuntimeError('SPM could not be found. Please verify your SPM_HOME environment variable.')
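        # At this point tissue_map points at the TPM.nii shipped with the
        # detected SPM installation; the 'tpm' parameter below may override it.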

        # Unzipping
        # ===============================
        unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_node', iterfield=['in_file'])

        # Unified Segmentation
        # ===============================
        new_segment = npe.MapNode(spm.NewSegment(),
                                  name='new_segment',
                                  iterfield=['channel_files'])

        if self.parameters['affine_regularization'] is not None:
            new_segment.inputs.affine_regularization = self.parameters['affine_regularization']
        if self.parameters['channel_info'] is not None:
            new_segment.inputs.channel_info = self.parameters['channel_info']
        if self.parameters['sampling_distance'] is not None:
            new_segment.inputs.sampling_distance = self.parameters['sampling_distance']
        if self.parameters['warping_regularization'] is not None:
            new_segment.inputs.warping_regularization = self.parameters['warping_regularization']

        # Check if we need to save the forward transformation for registering the T1 to the MNI space
        if self.parameters['save_t1_mni'] is not None and self.parameters['save_t1_mni']:
            if self.parameters['write_deformation_fields'] is not None:
                self.parameters['write_deformation_fields'][1] = True
            else:
                self.parameters['write_deformation_fields'] = [False, True]

        if self.parameters['write_deformation_fields'] is not None:
            new_segment.inputs.write_deformation_fields = self.parameters['write_deformation_fields']

        if self.parameters['tpm'] is not None:
            tissue_map = self.parameters['tpm']

        new_segment.inputs.tissues = seg_utils.get_tissue_tuples(tissue_map,
                                                                 self.parameters['tissue_classes'],
                                                                 self.parameters['dartel_tissues'],
                                                                 self.parameters['save_warped_unmodulated'],
                                                                 self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # ========================================================
        if self.parameters['save_t1_mni'] is not None and self.parameters['save_t1_mni']:

            t1_to_mni = npe.MapNode(seg_utils.ApplySegmentationDeformation(),
                                    name='t1_to_mni',
                                    iterfield=['deformation_field', 'in_files'])
            self.connect([
                (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
                (new_segment, t1_to_mni, [('forward_deformation_field', 'deformation_field')]),
                (t1_to_mni, self.output_node, [('out_files', 't1_mni')])
            ])

        # DARTEL template
        # ===============================
        dartel_template = npe.Node(spm.DARTEL(),
                                   name='dartel_template')

        if self.parameters['iteration_parameters'] is not None:
            dartel_template.inputs.iteration_parameters = self.parameters['iteration_parameters']
        if self.parameters['optimization_parameters'] is not None:
            dartel_template.inputs.optimization_parameters = self.parameters['optimization_parameters']
        if self.parameters['regularization_form'] is not None:
            dartel_template.inputs.regularization_form = self.parameters['regularization_form']

        # DARTEL2MNI Registration
        # =======================
        dartel2mni_node = npe.MapNode(spm.DARTELNorm2MNI(),
                                      name='dartel2MNI',
                                      iterfield=['apply_to_files', 'flowfield_files'])

        if self.parameters['bounding_box'] is not None:
            dartel2mni_node.inputs.bounding_box = self.parameters['bounding_box']
        if self.parameters['voxel_size'] is not None:
            dartel2mni_node.inputs.voxel_size = self.parameters['voxel_size']
        dartel2mni_node.inputs.modulate = self.parameters['modulation']
        dartel2mni_node.inputs.fwhm = 0

        # Smoothing
        # =========
        if self.parameters['fwhm'] is not None and len(self.parameters['fwhm']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['in_files'])

            smoothing_node.iterables = [('fwhm', [[x, x, x] for x in self.parameters['fwhm']]),
                                        ('out_prefix', ['fwhm-' + str(x) + 'mm_' for x in self.parameters['fwhm']])]
            smoothing_node.synchronize = True

            join_smoothing_node = npe.JoinNode(interface=nutil.Function(input_names=['smoothed_normalized_files'],
                                                                        output_names=['smoothed_normalized_files'],
                                                                        function=dartel2mni_utils.join_smoothed_files),
                                               joinsource='smoothing_node',
                                               joinfield='smoothed_normalized_files',
                                               name='join_smoothing_node')
            self.connect([
                (dartel2mni_node, smoothing_node, [('normalized_files', 'in_files')]),
                (smoothing_node, join_smoothing_node, [('smoothed_files', 'smoothed_normalized_files')]),
                (join_smoothing_node, self.output_node, [('smoothed_normalized_files', 'smoothed_normalized_files')])
            ])
        else:
            self.output_node.inputs.smoothed_normalized_files = []

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(nutil.Function(input_names=['in_image',
                                                                   'in_atlas_list'],
                                                      output_names=['atlas_statistics'],
                                                      function=dartel2mni_utils.atlas_statistics),
                                       name='atlas_stats_node',
                                       iterfield=['in_image'])
        atlas_stats_node.inputs.in_atlas_list = self.parameters['atlas_list']

        # Connection
        # ==========
        self.connect([
            (self.input_node, unzip_node, [('input_images', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (new_segment, self.output_node, [('bias_corrected_images', 'bias_corrected_images'),
                                             ('bias_field_images', 'bias_field_images'),
                                             ('dartel_input_images', 'dartel_input_images'),
                                             ('forward_deformation_field', 'forward_deformation_field'),
                                             ('inverse_deformation_field', 'inverse_deformation_field'),
                                             ('modulated_class_images', 'modulated_class_images'),
                                             ('native_class_images', 'native_class_images'),
                                             ('normalized_class_images', 'normalized_class_images'),
                                             ('transformation_mat', 'transformation_mat')]),
            (new_segment, dartel_template, [(('dartel_input_images', dartel_utils.get_class_images,
                                              self.parameters['dartel_tissues']), 'image_files')]),
            (dartel_template, self.output_node, [('dartel_flow_fields', 'dartel_flow_fields'),
                                                 ('final_template_file', 'final_template_file'),
                                                 ('template_files', 'template_files')]),
            (new_segment, dartel2mni_node, [(('native_class_images', seg_utils.group_nested_images_by_subject),
                                             'apply_to_files')]),
            (dartel_template, dartel2mni_node, [(('dartel_flow_fields', dartel2mni_utils.prepare_flowfields,
                                                  self.parameters['tissue_classes']), 'flowfield_files')]),
            (dartel_template, dartel2mni_node, [('final_template_file', 'template_file')]),
            (dartel2mni_node, self.output_node, [('normalized_files', 'normalized_files')]),
            (dartel2mni_node, atlas_stats_node, [(('normalized_files', dartel2mni_utils.select_gm_images),
                                                  'in_image')]),
            (atlas_stats_node, self.output_node, [('atlas_statistics', 'atlas_statistics')])
        ])
Example n. 24
#Slice timing corrected (gets timing from header)
st_corr = Node(spm.SliceTiming(), name='slicetiming_correction')
st_corr.inputs.ref_slice = 1
#Outputs: timecorrected_files

#Realignment using SPM <--- Maybe just estimate and apply all transforms at the end?
realign = Node(spm.Realign(), name='realign')
realign.inputs.register_to_mean = False
realign.inputs.quality = 1.0
#Outputs: realignment_parameters, resliced epi images (motion corrected)

tsnr = Node(misc.TSNR(), name='tsnr')
tsnr.inputs.regress_poly = 2
#Outputs: detrended_file, mean_file, stddev_file, tsnr_file

smooth = Node(spm.Smooth(), name='smooth')
smooth.inputs.fwhm = fwhm

####Anatomical preprocessing####

#dcmstack - Convert dicoms to nii (with embedded metadata)
anat_stack = Node(dcmstack.DcmStack(), name='anatstack')
anat_stack.inputs.embed_meta = True
anat_stack.inputs.out_format = 'anat'
anat_stack.inputs.out_ext = '.nii'
#Outputs: out_file

#Coregisters FLAIR & mask to T1 (NOTE: settings taken from Clinical Toolbox)
flaircoreg = Node(spm.Coregister(), name='coreg2anat')
flaircoreg.inputs.cost_function = 'nmi'
flaircoreg.inputs.separation = [4, 2]
Example n. 25

# The opening of this plotting helper was lost in extraction; the signature
# below is reconstructed from the Function node's input_names further down.
def Plot_Motion(motion_par, abs_disp, rel_disp):
    # nipype Function nodes run in isolation, so the import lives inside
    import matplotlib.pyplot as plt
    plt.plot(abs_disp)
    plt.plot(rel_disp)
    plt.legend(['abs', 'rel'])

    plt.savefig('Motion')


Plot_Motion = Node(name='Plot_Motion',
                   interface=Function(input_names=['motion_par', 'abs_disp', 'rel_disp'],
                                      function=Plot_Motion))

# In[12]:

#-----------------------------------------------------------------------------------------------------
#Use SPM smoothing here because FSL does not support anisotropic smoothing
Spm_Smoothing = Node(spm.Smooth(), name='Smoothing')
#Of the kernels tried below, this one gave the most reasonable results
Spm_Smoothing.inputs.fwhm = [5.75, 5.75, 8]
#Spm_Smoothing.iterables = ('fwhm', [[5,5,8],[5.75,5.75,8],[5.75,5.75,10], [5.75,5.75,16]])

#-----------------------------------------------------------------------------------------------------
#Getting median intensity
Median_Intensity = Node(fsl.ImageStats(), name='Median_Intensity')
#-k (mask) must come before -p 50 (50th percentile) in the op_string
Median_Intensity.inputs.op_string = '-k %s -p 50'

#Scale median intensity
def Scale_Median_Intensity(median_intensity):
    scaling = 10000 / median_intensity
    return scaling
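
# The returned factor presumably feeds an fslmaths '-mul' step downstream,
# grand-mean scaling each run to a median intensity of 10000.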
Example n. 26
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.
        """

        import fmri_preprocessing_workflows as utils
        import nipype.interfaces.utility as nutil
        import nipype.interfaces.spm as spm
        import nipype.pipeline.engine as npe
        from clinica.utils.filemanip import zip_nii, unzip_nii

        # Zipping
        # =======
        unzip_node = npe.MapNode(name='Unzipping',
                                 iterfield=['in_file'],
                                 interface=nutil.Function(
                                     input_names=['in_file'],
                                     output_names=['out_file'],
                                     function=unzip_nii))

        unzip_T1w = unzip_node.clone('UnzippingT1w')
        unzip_phasediff = unzip_node.clone('UnzippingPhasediff')
        unzip_bold = unzip_node.clone('UnzippingBold')
        unzip_magnitude1 = unzip_node.clone('UnzippingMagnitude1')

        # FieldMap calculation
        # ====================
        if self.parameters['unwarping']:
            fm_node = npe.MapNode(name="FieldMapCalculation",
                                  iterfield=[
                                      'phase', 'magnitude', 'epi', 'et',
                                      'blipdir', 'tert'
                                  ],
                                  interface=spm.FieldMap())

        # Slice timing correction
        # =======================
        st_node = npe.MapNode(name="SliceTimingCorrection",
                              iterfield=[
                                  'in_files', 'time_repetition', 'slice_order',
                                  'num_slices', 'ref_slice', 'time_acquisition'
                              ],
                              interface=spm.SliceTiming())

        # Motion correction and unwarping
        # ===============================

        if self.parameters['unwarping']:
            mc_node = npe.MapNode(name="MotionCorrectionUnwarping",
                                  iterfield=["scans", "pmscan"],
                                  interface=spm.RealignUnwarp())
            mc_node.inputs.register_to_mean = True
            mc_node.inputs.reslice_mask = False
        else:
            mc_node = npe.MapNode(name="MotionCorrection",
                                  iterfield=["in_files"],
                                  interface=spm.Realign())
            mc_node.inputs.register_to_mean = True

        # Brain extraction
        # ================
        import os.path as path
        from nipype.interfaces.freesurfer import MRIConvert
        if self.parameters['freesurfer_brain_mask']:
            brain_masks = [
                path.join(self.caps_directory, 'subjects', self.subjects[i],
                          self.sessions[i], 't1/freesurfer_cross_sectional',
                          self.subjects[i] + '_' + self.sessions[i],
                          'mri/brain.mgz') for i in range(len(self.subjects))
            ]
            conv_brain_masks = [
                str(self.subjects[i] + '_' + self.sessions[i] + '.nii')
                for i in range(len(self.subjects))
            ]
            bet_node = npe.MapNode(interface=MRIConvert(),
                                   iterfield=["in_file", "out_file"],
                                   name="BrainConversion")
            bet_node.inputs.in_file = brain_masks
            bet_node.inputs.out_file = conv_brain_masks
            bet_node.inputs.out_type = 'nii'
        else:
            bet_node = utils.BrainExtractionWorkflow(name="BrainExtraction")

        # Registration
        # ============
        reg_node = npe.MapNode(
            interface=spm.Coregister(),
            iterfield=["apply_to_files", "source", "target"],
            name="Registration")

        # Normalization
        # =============
        norm_node = npe.MapNode(interface=spm.Normalize12(),
                                iterfield=['image_to_align', 'apply_to_files'],
                                name='Normalization')

        # Smoothing
        # =========
        smooth_node = npe.MapNode(interface=spm.Smooth(),
                                  iterfield=['in_files'],
                                  name='Smoothing')
        smooth_node.inputs.fwhm = self.parameters['full_width_at_half_maximum']

        # Zipping
        # =======
        zip_node = npe.MapNode(name='Zipping',
                               iterfield=['in_file'],
                               interface=nutil.Function(
                                   input_names=['in_file'],
                                   output_names=['out_file'],
                                   function=zip_nii))

        zip_bet_node = zip_node.clone('ZippingBET')
        zip_mc_node = zip_node.clone('ZippingMC')
        zip_reg_node = zip_node.clone('ZippingRegistration')
        zip_norm_node = zip_node.clone('ZippingNormalization')
        zip_smooth_node = zip_node.clone('ZippingSmoothing')

        # Connections
        # ===========

        if self.parameters['freesurfer_brain_mask']:
            self.connect([
                # Brain extraction
                (bet_node, reg_node, [('out_file', 'target')]),
                (bet_node, zip_bet_node, [('out_file', 'in_file')]),
            ])
        else:
            self.connect([
                # Brain extraction
                (unzip_T1w, bet_node, [('out_file', 'Segmentation.data')]),
                (unzip_T1w, bet_node, [('out_file', 'ApplyMask.in_file')]),
                (bet_node, reg_node, [('ApplyMask.out_file', 'target')]),
                (bet_node, zip_bet_node, [('Fill.out_file', 'in_file')]),
            ])

        if self.parameters['unwarping']:
            self.connect([
                # FieldMap calculation
                (self.input_node, fm_node, [('et', 'et')]),
                (self.input_node, fm_node, [('blipdir', 'blipdir')]),
                (self.input_node, fm_node, [('tert', 'tert')]),
                (self.input_node, unzip_phasediff, [('phasediff', 'in_file')]),
                (self.input_node, unzip_magnitude1, [('magnitude1', 'in_file')
                                                     ]),
                (unzip_magnitude1, fm_node, [('out_file', 'magnitude')]),
                (unzip_phasediff, fm_node, [('out_file', 'phase')]),
                (unzip_bold, fm_node, [('out_file', 'epi')]),
                # Motion correction and unwarping
                (st_node, mc_node, [('timecorrected_files', 'scans')]),
                (fm_node, mc_node, [('vdm', 'pmscan')]),
                (mc_node, reg_node, [('realigned_unwarped_files',
                                      'apply_to_files')]),
                (mc_node, zip_mc_node, [('realigned_unwarped_files', 'in_file')
                                        ]),
            ])
        else:
            self.connect([
                # Motion correction and unwarping
                (st_node, mc_node, [('timecorrected_files', 'in_files')]),
                (mc_node, reg_node, [('realigned_files', 'apply_to_files')]),
                (mc_node, zip_mc_node, [('realigned_files', 'in_file')]),
            ])
        self.connect([
            # Unzipping
            (self.input_node, unzip_T1w, [('T1w', 'in_file')]),
            (self.input_node, unzip_bold, [('bold', 'in_file')]),
            # Slice timing correction
            (unzip_bold, st_node, [('out_file', 'in_files')]),
            (self.input_node, st_node, [('time_repetition', 'time_repetition')
                                        ]),
            (self.input_node, st_node, [('num_slices', 'num_slices')]),
            (self.input_node, st_node, [('slice_order', 'slice_order')]),
            (self.input_node, st_node, [('ref_slice', 'ref_slice')]),
            (self.input_node, st_node, [('time_acquisition',
                                         'time_acquisition')]),
            # Registration
            (mc_node, reg_node, [('mean_image', 'source')]),
            # Normalization
            (unzip_T1w, norm_node, [('out_file', 'image_to_align')]),
            (reg_node, norm_node, [('coregistered_files', 'apply_to_files')]),
            # Smoothing
            (norm_node, smooth_node, [('normalized_files', 'in_files')]),
            # Zipping
            (reg_node, zip_reg_node, [('coregistered_files', 'in_file')]),
            (norm_node, zip_norm_node, [('normalized_files', 'in_file')]),
            (smooth_node, zip_smooth_node, [('smoothed_files', 'in_file')]),
            # Returning output
            (zip_bet_node, self.output_node, [('out_file', 't1_brain_mask')]),
            (mc_node, self.output_node, [('realignment_parameters',
                                          'mc_params')]),
            (zip_mc_node, self.output_node, [('out_file', 'native_fmri')]),
            (zip_reg_node, self.output_node, [('out_file', 't1_fmri')]),
            (zip_norm_node, self.output_node, [('out_file', 'mni_fmri')]),
            (zip_smooth_node, self.output_node, [('out_file',
                                                  'mni_smoothed_fmri')]),
        ])
Example no. 27
0
def create_workflow(files,
                    target_file,
                    subject_id,
                    TR,
                    slice_times,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    subjects_dir=None,
                    sink_directory=os.getcwd(),
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.jobtype = 'estwrite'

    num_slices = len(slice_times)
    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.num_slices = num_slices
    slice_timing.inputs.time_repetition = TR
    slice_timing.inputs.time_acquisition = TR - TR / float(num_slices)
    slice_timing.inputs.slice_order = (np.argsort(slice_times) + 1).tolist()
    slice_timing.inputs.ref_slice = int(num_slices / 2)
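    # Worked example (illustrative numbers only): with TR = 2.0 s and
    # slice_times = [0.0, 1.0, 0.5, 1.5] (4 slices),
    #   time_acquisition = 2.0 - 2.0/4 = 1.5 s
    #   slice_order      = (np.argsort([0.0, 1.0, 0.5, 1.5]) + 1).tolist() = [1, 3, 2, 4]
    #   ref_slice        = int(4 / 2) = 2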

    # Compute TSNR on realigned data, regressing polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(slice_timing, 'timecorrected_files', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """Segment and Register
    """

    registration = create_reg_workflow(name='registration')
    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    registration.inputs.inputspec.subject_id = subject_id
    registration.inputs.inputspec.subjects_dir = subjects_dir
    registration.inputs.inputspec.target_image = target_file
    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, True]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 9
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'SPM'
    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([
        (name_unique, realign, [('out_file', 'in_files')]),
        (realign, slice_timing, [('realigned_files', 'in_files')]),
        (slice_timing, art, [('timecorrected_files', 'realigned_files')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters')]),
    ])

    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())
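    # e.g. (illustrative): selectindex(['gm.nii', 'wm.nii', 'csf.nii'], [0, 2])
    # returns ['gm.nii', 'csf.nii']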

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(calc_median, 'median_file', mask, 'in_file')

    # get segmentation in normalized functional space

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files
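    # e.g. (illustrative): merge_files('motion.par', ['comp1.txt', 'comp2.txt'])
    # returns ['motion.par', 'comp1.txt', 'comp2.txt']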

    # filter some noise

    # Compute motion regressors
    motreg = Node(Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors,
        imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1,
        imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
                              out_pf_name='pF_mcart.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')

    wf.connect(slice_timing, 'timecorrected_files', filter1, 'in_file')
    wf.connect(slice_timing, ('timecorrected_files', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')

    createfilter2 = MapNode(Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components

    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(registration,
               ('outputspec.segmentation_files', selectindex, [0, 2]),
               createfilter2, 'mask_file')

    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2,
               'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    bandpass = Node(Function(
        input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'],
        output_names=['out_files'],
        function=bandpass_filter,
        imports=imports),
                    name='bandpass_unsmooth')
    bandpass.inputs.fs = 1. / TR
    bandpass.inputs.highpass_freq = highpass_freq
    bandpass.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass, 'files')
    """Smooth the functional data using
    :class:`nipype.interfaces.spm.Smooth`.
    """

    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = vol_fwhm

    wf.connect(bandpass, 'out_files', smooth, 'in_files')

    collector = Node(Merge(2), name='collect_streams')
    wf.connect(smooth, 'smoothed_files', collector, 'in1')
    wf.connect(bandpass, 'out_files', collector, 'in2')
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = MapNode(ants.ApplyTransforms(),
                      iterfield=['input_image'],
                      name='warpall')
    warpall.inputs.input_image_type = 3
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'
    warpall.inputs.reference_image = target_file
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 1

    # transform to target
    wf.connect(collector, 'out', warpall, 'input_image')
    wf.connect(registration, 'outputspec.transforms', warpall, 'transforms')

    mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask')

    wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file')

    maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker')
    wf.connect(warpall, 'output_image', maskts, 'in_file')
    wf.connect(mask_target, 'out_file', maskts, 'mask_file')

    # map to surface
    # extract aparc+aseg ROIs
    # extract subcortical ROIs
    # extract target space ROIs
    # combine subcortical and cortical rois into a single cifti file

    #######
    # Convert aparc to subject functional space

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(
        freesurfer.SegStats(default_color_table=True),
        iterfield=['in_file', 'summary_file', 'avgwf_txt_file'],
        name='aparc_ts')
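    # These are FreeSurfer color-table IDs (a descriptive gloss, not from the
    # original source): 8/47 cerebellar cortex, 10-13 and 49-54 subcortical
    # gray (thalamus, caudate, putamen, pallidum; the right-hemisphere range
    # also covers hippocampus and amygdala), 17/18 left hippocampus/amygdala,
    # 26/58 accumbens, and 1001-1035/2001-2035 Desikan-Killiany cortical parcels.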
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) +
                                     [17, 18, 26, 47] + list(range(49, 55)) +
                                     [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))

    wf.connect(registration, 'outputspec.aparc', sampleaparc,
               'segmentation_file')
    wf.connect(collector, 'out', sampleaparc, 'in_file')

    def get_names(files, suffix):
        """Generate appropriate names for output files
        """
        from nipype.utils.filemanip import (split_filename, filename_to_list,
                                            list_to_filename)
        out_names = []
        for filename in files:
            _, name, _ = split_filename(filename)
            out_names.append(name + suffix)
        return list_to_filename(out_names)
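    # e.g. (illustrative): get_names(['/tmp/srest_01.nii.gz'], '_avgwf.txt')
    # returns 'srest_01_avgwf.txt'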

    wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc,
               'avgwf_txt_file')
    wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc,
               'summary_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    samplerlh.inputs.smooth_surf = surf_fwhm
    # samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = subjects_dir

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(collector, 'out', samplerlh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(collector, 'out', samplerrh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(
        input_names=['timeseries_file', 'label_file', 'indices'],
        output_names=['out_file'],
        function=extract_subrois,
        imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
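    # The same FreeSurfer subcortical label IDs as in sampleaparc above
    # (cerebellum, thalamus, caudate, putamen, pallidum, hippocampus,
    # amygdala, accumbens); a descriptive gloss, not from the original source.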
    ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\
        list(range(49, 55)) + [58]
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm_v2.nii.gz'))
    wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')

    ######

    substitutions = [('_target_subject_', ''),
                     ('_filtermotart_cleaned_bp_trans_masked', ''),
                     ('_filtermotart_cleaned_bp', '')]
    regex_subs = [
        ('_ts_masker.*/sar', '/smooth/'),
        ('_ts_masker.*/ar', '/unsmooth/'),
        ('_combiner.*/sar', '/smooth/'),
        ('_combiner.*/ar', '/unsmooth/'),
        ('_aparc_ts.*/sar', '/smooth/'),
        ('_aparc_ts.*/ar', '/unsmooth/'),
        ('_getsubcortts.*/sar', '/smooth/'),
        ('_getsubcortts.*/ar', '/unsmooth/'),
        ('series/sar', 'series/smooth/'),
        ('series/ar', 'series/unsmooth/'),
        ('_inverse_transform./', ''),
    ]
    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = substitutions
    datasink.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'realignment_parameters', datasink,
               'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.segmentation_files', datasink,
               'resting.mask_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'resting.qa.ants')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(bandpass, 'out_files', datasink,
               'resting.timeseries.@bandpassed')
    wf.connect(smooth, 'smoothed_files', datasink,
               'resting.timeseries.@smoothed')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
    wf.connect(sampleaparc, 'summary_file', datasink,
               'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file', datasink,
               'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file', datasink,
               'resting.parcellations.grayo.@subcortical')

    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = substitutions
    datasink2.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file', datasink2,
               'resting.parcellations.grayo.@surface')
    return wf
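# A hedged usage sketch (all arguments illustrative, not from the source):
# wf = create_workflow(files=['rest_run1.nii'], target_file='T1_target.nii',
#                      subject_id='sub001', TR=2.0,
#                      slice_times=[0.0, 1.0, 0.5, 1.5], vol_fwhm=6,
#                      subjects_dir='/opt/freesurfer/subjects',
#                      sink_directory='/data/output')
# wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})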
Example no. 28
0
extract = Node(ExtractROI(t_min=init_volume, t_size=-1, output_type='NIFTI'),
               name="extract")

# MCFLIRT - motion correction
mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True, output_type='NIFTI'),
               name="motion_correction")

# SliceTimer - correct for slice wise acquisition
slicetimer = Node(SliceTimer(index_dir=False,
                             interleaved=True,
                             output_type='NIFTI',
                             time_repetition=TR),
                  name="slice_timing_correction")

# Smooth - image smoothing
smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")

# Artifact Detection - determines outliers in functional images
art = Node(ArtifactDetect(norm_threshold=2,
                          zintensity_threshold=3,
                          mask_type='spm_global',
                          parameter_source='FSL',
                          use_differences=[True, False],
                          plot_type='svg'),
           name="artifact_detection")

extract_confounds_ws_csf = Node(ExtractConfounds(out_file='ev_without_gs.csv'),
                                name='extract_confounds_ws_csf')

extract_confounds_gs = Node(ExtractConfounds(out_file='ev_with_gs.csv',
                                             delimiter=','),
                            name='extract_confounds_gs')  #name assumed; the snippet is truncated here
Example no. 29
0
# segment.inputs.gaussians_per_class = [1, 1, 1, 4]
"""Warp functional and structural data to SPM's T1 template using
:class:`nipype.interfaces.spm.Normalize`.  The tutorial data set
includes the template image, T1.nii.
"""

normalize_func = pe.Node(interface=spm.Normalize(), name="normalize_func")
normalize_func.inputs.jobtype = "write"

normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc")
normalize_struc.inputs.jobtype = "write"
"""Smooth the functional data using
:class:`nipype.interfaces.spm.Smooth`.
"""

smooth = pe.Node(interface=spm.Smooth(), name="smooth")
"""`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to
the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data
using the following function:
"""


def get_vox_dims(volume):
    import nibabel as nb
    from nipype.utils import NUMPY_MMAP
    if isinstance(volume, list):
        volume = volume[0]
    nii = nb.load(volume, mmap=NUMPY_MMAP)
    hdr = nii.header
    voxdims = hdr.get_zooms()
    return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])]
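# e.g. (illustrative): get_vox_dims('wmeanfunc.nii') might return
# [3.0, 3.0, 3.0] for a 3 mm isotropic volume, which is what normalize's
# write_voxel_sizes input expects.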
Example no. 30
0
def Couple_Preproc_Pipeline(base_dir=None,
                            output_dir=None,
                            subject_id=None,
                            spm_path=None):
    """ Create a preprocessing workflow for the Couples Conflict Study using nipype

    Args:
        base_dir: path to data folder where raw subject folder is located
        output_dir: path to where key output files should be saved
        subject_id: subject_id (str)
        spm_path: path to spm folder

    Returns:
        workflow: a nipype workflow that can be run
        
    """

    from nipype.interfaces.dcm2nii import Dcm2nii
    from nipype.interfaces.fsl import Merge, TOPUP, ApplyTOPUP
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as util
    from nipype.interfaces.utility import Merge as Merge_List
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.fsl.maths import UnaryMaths
    from nipype.interfaces.nipy.preprocess import Trim
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces import spm
    from nipype.interfaces.spm import Normalize12
    from nipype.algorithms.misc import Gunzip
    from nipype.interfaces.nipy.preprocess import ComputeMask
    import nipype.interfaces.matlab as mlab
    from nltools.utils import get_resource_path, get_vox_dims, get_n_volumes
    from nltools.interfaces import Plot_Coregistration_Montage, PlotRealignmentParameters, Create_Covariates
    import os
    import glob

    ########################################
    ## Setup Paths and Nodes
    ########################################

    # Specify Paths
    canonical_file = os.path.join(spm_path, 'canonical', 'single_subj_T1.nii')
    template_file = os.path.join(spm_path, 'tpm', 'TPM.nii')

    # Set the way matlab should be called
    mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
    mlab.MatlabCommand.set_default_paths(spm_path)

    # Get File Names for different types of scans.  Parse into separate processing streams
    datasource = Node(interface=nio.DataGrabber(
        infields=['subject_id'], outfields=['struct', 'ap', 'pa']),
                      name='datasource')
    datasource.inputs.base_directory = base_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {
        'struct': '%s/Study*/t1w_32ch_mpr_08mm*',
        'ap': '%s/Study*/distortion_corr_32ch_ap*',
        'pa': '%s/Study*/distortion_corr_32ch_pa*'
    }
    datasource.inputs.template_args = {
        'struct': [['subject_id']],
        'ap': [['subject_id']],
        'pa': [['subject_id']]
    }
    datasource.inputs.subject_id = subject_id
    datasource.inputs.sort_filelist = True

    # iterate over functional scans to define paths
    scan_file_list = glob.glob(
        os.path.join(base_dir, subject_id, 'Study*', '*'))
    func_list = [s for s in scan_file_list if "romcon_ap_32ch_mb8" in s]
    func_list = [s for s in func_list
                 if "SBRef" not in s]  # Exclude sbref for now.
    func_source = Node(interface=util.IdentityInterface(fields=['scan']),
                       name="func_source")
    func_source.iterables = ('scan', func_list)

    # Create Separate Converter Nodes for each different type of file. (dist corr scans need to be done before functional)
    ap_dcm2nii = Node(interface=Dcm2nii(), name='ap_dcm2nii')
    ap_dcm2nii.inputs.gzip_output = True
    ap_dcm2nii.inputs.output_dir = '.'
    ap_dcm2nii.inputs.date_in_filename = False

    pa_dcm2nii = Node(interface=Dcm2nii(), name='pa_dcm2nii')
    pa_dcm2nii.inputs.gzip_output = True
    pa_dcm2nii.inputs.output_dir = '.'
    pa_dcm2nii.inputs.date_in_filename = False

    f_dcm2nii = Node(interface=Dcm2nii(), name='f_dcm2nii')
    f_dcm2nii.inputs.gzip_output = True
    f_dcm2nii.inputs.output_dir = '.'
    f_dcm2nii.inputs.date_in_filename = False

    s_dcm2nii = Node(interface=Dcm2nii(), name='s_dcm2nii')
    s_dcm2nii.inputs.gzip_output = True
    s_dcm2nii.inputs.output_dir = '.'
    s_dcm2nii.inputs.date_in_filename = False

    ########################################
    ## Setup Nodes for distortion correction
    ########################################

    # merge output files into list
    merge_to_file_list = Node(interface=Merge_List(2),
                              infields=['in1', 'in2'],
                              name='merge_to_file_list')

    # fsl merge AP + PA files (depends on direction)
    merger = Node(interface=Merge(dimension='t'), name='merger')
    merger.inputs.output_type = 'NIFTI_GZ'

    # use topup to create distortion correction map
    topup = Node(interface=TOPUP(), name='topup')
    topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                              'epi_params_APPA_MB8.txt')
    topup.inputs.output_type = "NIFTI_GZ"
    topup.inputs.config = 'b02b0.cnf'

    # apply topup to all functional images
    apply_topup = Node(interface=ApplyTOPUP(), name='apply_topup')
    apply_topup.inputs.in_index = [1]
    apply_topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                                    'epi_params_APPA_MB8.txt')
    apply_topup.inputs.output_type = "NIFTI_GZ"
    apply_topup.inputs.method = 'jac'
    apply_topup.inputs.interp = 'spline'

    # Clear out Zeros from spline interpolation using absolute value.
    abs_maths = Node(interface=UnaryMaths(), name='abs_maths')
    abs_maths.inputs.operation = 'abs'

    ########################################
    ## Preprocessing
    ########################################

    # Trim - remove first 10 TRs
    n_vols = 10
    trim = Node(interface=Trim(), name='trim')
    trim.inputs.begin_index = n_vols

    #Realignment - 6-parameter rigid body; register_to_mean performs a two-pass registration to the mean image
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    #Coregister - rigid-body registration of functional to structural (estimate & reslice)
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estwrite'

    #Plot Realignment
    plot_realign = Node(interface=PlotRealignmentParameters(),
                        name="plot_realign")

    #Artifact Detection
    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    # Gunzip - unzip the functional and structural images
    gunzip_struc = Node(Gunzip(), name="gunzip_struc")
    gunzip_func = Node(Gunzip(), name="gunzip_func")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize = Node(interface=Normalize12(jobtype='estwrite',
                                           tpm=template_file),
                     name="normalize")

    #Plot normalization Check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    #Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    #remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    #Smooth
    #implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = 6

    #Create Covariate matrix
    make_cov = Node(interface=Create_Covariates(), name="make_cov")

    # Create a datasink to clean up output files
    datasink = Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id

    ########################################
    # Create Workflow
    ########################################

    workflow = Workflow(name='Preprocessed')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    workflow.connect([
        (datasource, ap_dcm2nii, [('ap', 'source_dir')]),
        (datasource, pa_dcm2nii, [('pa', 'source_dir')]),
        (datasource, s_dcm2nii, [('struct', 'source_dir')]),
        (func_source, f_dcm2nii, [('scan', 'source_dir')]),
        (ap_dcm2nii, merge_to_file_list, [('converted_files', 'in1')]),
        (pa_dcm2nii, merge_to_file_list, [('converted_files', 'in2')]),
        (merge_to_file_list, merger, [('out', 'in_files')]),
        (merger, topup, [('merged_file', 'in_file')]),
        (topup, apply_topup, [('out_fieldcoef', 'in_topup_fieldcoef'),
                              ('out_movpar', 'in_topup_movpar')]),
        (f_dcm2nii, trim, [('converted_files', 'in_file')]),
        (trim, apply_topup, [('out_file', 'in_files')]),
        (apply_topup, abs_maths, [('out_corrected', 'in_file')]),
        (abs_maths, gunzip_func, [('out_file', 'in_file')]),
        (gunzip_func, realign, [('out_file', 'in_files')]),
        (s_dcm2nii, gunzip_struc, [('converted_files', 'in_file')]),
        (gunzip_struc, coregister, [('out_file', 'source')]),
        (coregister, normalize, [('coregistered_source', 'image_to_align')]),
        (realign, coregister, [('mean_image', 'target'),
                               ('realigned_files', 'apply_to_files')]),
        (realign, normalize, [(('mean_image', get_vox_dims),
                               'write_voxel_sizes')]),
        (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
        (normalize, smooth, [('normalized_files', 'in_files')]),
        (realign, compute_mask, [('mean_image', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters'),
                        ('realigned_files', 'realigned_files')]),
        (realign, plot_realign, [('realignment_parameters',
                                  'realignment_parameters')]),
        (normalize, plot_normalization_check, [('normalized_files', 'wra_img')
                                               ]),
        (realign, make_cov, [('realignment_parameters',
                              'realignment_parameters')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (normalize, datasink, [('normalized_files', 'structural.@normalize')]),
        (coregister, datasink, [('coregistered_source', 'structural.@struct')
                                ]),
        (topup, datasink, [('out_fieldcoef', 'distortion.@fieldcoef')]),
        (topup, datasink, [('out_movpar', 'distortion.@movpar')]),
        (smooth, datasink, [('smoothed_files', 'functional.@smooth')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_normalization_check, datasink,
         [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')])
    ])
    return workflow
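# A hedged usage sketch (paths illustrative, not from the source):
# wf = Couple_Preproc_Pipeline(base_dir='/data/couples', output_dir='/data/out',
#                              subject_id='sub01', spm_path='/opt/spm12')
# wf.run(plugin='MultiProc')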