Exemplo n.º 1
0
def test_newsegment():
    """NewSegment must map onto the correct SPM batch job for the installed SPM.

    SPM12 ships the (new) segmentation as the spatial/preproc job; older
    releases expose it as the tools/preproc8 toolbox job.
    """
    if spm.Info.name() == "SPM12":
        expected_jobtype, expected_jobname = "spatial", "preproc"
    else:
        expected_jobtype, expected_jobname = "tools", "preproc8"
    interface = spm.NewSegment()
    assert interface._jobtype == expected_jobtype
    assert interface._jobname == expected_jobname
Exemplo n.º 2
0
def test_newsegment():
    """Check NewSegment's jobtype/jobname against the detected SPM version."""
    # SPM12 moved "New Segment" out of the toolbox and into spatial/preproc.
    is_spm12 = spm.Info.version()['name'] == "SPM12"
    expected = ('spatial', 'preproc') if is_spm12 else ('tools', 'preproc8')
    seg = spm.NewSegment()
    assert (seg._jobtype, seg._jobname) == expected
Exemplo n.º 3
0
def test_newsegment():
    # Old nose-style generator test: each ``yield`` produces one test case
    # of the form (check_function, actual, expected).
    if spm.Info.version()['name'] == "SPM12":
        # SPM12 exposes the segmentation as the spatial/preproc batch job.
        yield assert_equal, spm.NewSegment()._jobtype, 'spatial'
        yield assert_equal, spm.NewSegment()._jobname, 'preproc'
    else:
        # Older SPM releases ship "New Segment" as the tools/preproc8 toolbox job.
        yield assert_equal, spm.NewSegment()._jobtype, 'tools'
        yield assert_equal, spm.NewSegment()._jobname, 'preproc8'
Exemplo n.º 4
0
def do_spm_new_segment(tpm_file, infile):
    '''Segment a T1-weighted image into tissue maps with SPM NewSegment.

    Parameters
    ----------
    tpm_file : str
        full path to the TPM.nii file in the SPM12 directory.
    infile : str
        full path to the T1-weighted image.

    Returns
    -------
    nipype InterfaceResult whose outputs hold the gray matter, white matter
    and CSF probability maps in native space.

    '''
    print(f'doing gray, white and CSF segmentation of {infile}\n')
    seg = spm.NewSegment()
    seg.inputs.affine_regularization = 'mni'
    seg.inputs.sampling_distance = 2
    seg.inputs.channel_files = infile  # T1-weighted image
    # (bias regularisation, bias FWHM, (save bias field, save corrected image))
    seg.inputs.channel_info = (0.0001, 60, (False, False))
    # Two Gaussians per class; write native-space maps only for tissues 1-3
    # (GM, WM, CSF) — bone and soft tissue are modelled but not written.
    seg.inputs.tissues = [
        ((tpm_file, tissue_id), 2, (tissue_id <= 3, False), (False, False))
        for tissue_id in range(1, 6)
    ]
    seg.inputs.use_mcr = True
    # Bug fix: the run result was previously discarded, so the "Returns"
    # promised by the docstring was never produced.
    return seg.run()
Exemplo n.º 5
0
def test_newsegment():
    # Old nose-style generator test: each ``yield`` emits one
    # (check_function, actual, expected) test case.
    yield assert_equal, spm.NewSegment._jobtype, 'tools'
    yield assert_equal, spm.NewSegment._jobname, 'preproc8'
    # Expected trait metadata for every NewSegment input; ``field`` is the
    # location inside the SPM batch structure the trait is written to.
    input_map = dict(
        affine_regularization=dict(field='warp.affreg', ),
        channel_files=dict(
            copyfile=False,
            mandatory=True,
            field='channel',
        ),
        channel_info=dict(field='channel', ),
        ignore_exception=dict(usedefault=True, ),
        matlab_cmd=dict(),
        mfile=dict(usedefault=True, ),
        paths=dict(),
        sampling_distance=dict(field='warp.samp', ),
        tissues=dict(field='tissue', ),
        use_mcr=dict(),
        warping_regularization=dict(field='warp.reg', ),
        write_deformation_fields=dict(field='warp.write', ),
    )
    instance = spm.NewSegment()
    # Compare each expected metadata value against the live trait definition.
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(instance.inputs.traits()[key],
                                        metakey), value
Exemplo n.º 6
0
 def __init__(self, channel_files=['path'], **options):
     """Run SPM NewSegment on *channel_files* and store the result on self.res.

     Every extra keyword in ``options`` is forwarded verbatim onto the
     interface inputs.

     NOTE(review): the mutable default ``['path']`` is shared across calls
     and is not a real file path — confirm callers always pass
     channel_files explicitly.
     """
     import nipype.interfaces.spm as spm
     seg = spm.NewSegment()
     seg.inputs.channel_files = channel_files
     # Forward arbitrary interface options straight onto the inputs spec.
     for ef in options:
         setattr(seg.inputs, ef, options[ef])
     self.res = seg.run()
    def make_segment(self):
        """Build and return the 'Segment' workflow.

        The workflow reorients the input to standard orientation, gunzips
        it, runs SPM NewSegment, and splits the native class images into
        separate GM/WM/CSF outputs.
        """
        # Ref: http://nipype.readthedocs.io/en/0.12.1/interfaces/generated/nipype.interfaces.fsl.utils.html#reorient2std
        ro = Node(interface=fsl.Reorient2Std(), name='ro')

        # Ref: http://nipype.readthedocs.io/en/latest/interfaces/generated/interfaces.spm/preprocess.html#segment
        # channel_info = (bias regularisation, bias FWHM,
        #                 (save bias field, save bias-corrected image))
        seg = Node(interface=spm.NewSegment(channel_info=(0.0001, 60, (True,
                                                                       True))),
                   name="seg")

        spm_tissues_split = Node(Function(['in_list'], ['gm', 'wm', 'csf'],
                                          self.spm_tissues),
                                 name='spm_tissues_split')

        # NOTE(review): this gzip node is created but never connected below —
        # confirm whether it should be wired into the workflow.
        gzip = Node(Function(['in_list'], ['out_list'], self.gzip_spm),
                    name='gzip')

        segment = Workflow(name='Segment', base_dir=self.temp_dir)

        gunzip = Node(interface=Gunzip(), name='gunzip')
        # for new segment
        segment.connect(ro, 'out_file', gunzip, 'in_file')
        segment.connect(gunzip, 'out_file', seg, 'channel_files')
        segment.connect(seg, 'native_class_images', spm_tissues_split,
                        'in_list')
        return segment
Exemplo n.º 8
0
def spm_segment(anat_filepath=traits.Undefined, priors_path=None):
    """ SPM12 New Segment interface.

    Parameters
    ----------
    anat_filepath: str
        Path to the anatomical file path

    priors_path: str
        Path to the tissue probability maps file

    Returns
    -------
    seg: NewSegment interface
    """
    if priors_path is None:
        priors_path = spm_tpm_priors_path()

    seg = spm.NewSegment()

    # One spec per tissue class 1..6:
    # (n_gaussians, (write native, write DARTEL), (unmodulated, modulated))
    tissue_specs = [
        (1, (True, True), (True, True)),
        (1, (True, True), (True, True)),
        (2, (True, True), (True, True)),
        (3, (True, True), (True, True)),
        (4, (True, False), (False, False)),
        (2, (False, False), (False, False)),
    ]
    seg.inputs.tissues = [
        ((priors_path, idx), ngaus, native, warped)
        for idx, (ngaus, native, warped) in enumerate(tissue_specs, start=1)
    ]
    seg.inputs.channel_info = (0.0001, 60, (True, True))
    #seg.inputs.warping_regularization = [0, 0.001, 0.5, 0.05, 0.2]
    seg.inputs.write_deformation_fields = [True, True]

    seg.inputs.channel_files = anat_filepath

    #seg.run()
    return seg
Exemplo n.º 9
0
 def __init__(self, **template_dict):
     """Build an SPM NewSegment pipeline node configured from *template_dict*.

     Required keys: ``spm_path``, ``BIAS_REGULARISATION``,
     ``FWHM_GAUSSIAN_SMOOTH_BIAS``, ``affine_regularization``,
     ``warping_regularization``, ``sampling_distance``, ``mrf_weighting``,
     ``cleanup``.
     """
     self.node = pe.Node(interface=spm.NewSegment(), name='segmentation')
     self.node.inputs.paths = template_dict['spm_path']
     # channel_info = (bias regularisation, bias FWHM,
     #                 (save bias field, save corrected image)) — both off here.
     self.node.inputs.channel_info = (
         template_dict['BIAS_REGULARISATION'],
         template_dict['FWHM_GAUSSIAN_SMOOTH_BIAS'], (False, False))
     self.node.inputs.affine_regularization = template_dict[
         'affine_regularization']
     self.node.inputs.warping_regularization = template_dict[
         'warping_regularization']
     self.node.inputs.sampling_distance = template_dict['sampling_distance']
     self.node.inputs.mrf_weighting = template_dict['mrf_weighting']
     self.node.inputs.cleanup = template_dict['cleanup']
def run_spm_segmentT1(PVE_segmentation_dir):
    """Run SPM NewSegment on the T1 image found in *PVE_segmentation_dir*.

    Parameters
    ----------
    PVE_segmentation_dir : str
        Directory containing the acpc-aligned T1 as a ``.nii`` file.

    Raises
    ------
    FileNotFoundError
        If no ``.nii`` file is present in the directory.
    """
    # Go into dir with acpc_aligned t1 and find the t1 filename
    os.chdir(PVE_segmentation_dir)
    T1 = None
    for entry in os.listdir(os.getcwd()):
        if entry.endswith(".nii"):
            T1 = entry
    # Bug fix: previously T1 stayed unbound (NameError) when no .nii existed;
    # fail fast with a clear message instead.
    if T1 is None:
        raise FileNotFoundError(
            'no .nii T1 image found in %s' % PVE_segmentation_dir)

    # Run Spm_NewSegment on the T1 to get Gm,wm,ventricles
    # (Python-3 print; the original used a Python-2 print statement.)
    print("Segmenting Grey matter and White matter from T1 structural...")
    seg = spm.NewSegment()
    seg.inputs.channel_files = T1
    # (bias regularisation, bias FWHM, (save bias field, save corrected image))
    seg.inputs.channel_info = (0.0001, 60, (True, True))
    seg.run()
Exemplo n.º 11
0
#Coregisters T1, FLAIR + mask to EPI (NOTE: settings taken from Clinical Toolbox)
coreg = MapNode(spm.Coregister(), iterfield='apply_to_files', name='coreg2epi')
coreg.inputs.cost_function = 'nmi'  # normalised mutual information
coreg.inputs.separation = [4, 2]  # coarse-to-fine sampling separation (mm)
coreg.inputs.tolerance = [
    0.02, 0.02, 0.02, 0.001, 0.001, 0.001, 0.01, 0.01, 0.01, 0.001, 0.001,
    0.001
]
coreg.inputs.fwhm = [7, 7]  # smoothing applied to the joint histogram
coreg.inputs.write_interp = 1
coreg.inputs.write_wrap = [0, 0, 0]  # no wrap-around in any direction
coreg.inputs.write_mask = False
#Output: coregistered_files

#Segment anatomical
seg = Node(spm.NewSegment(), name='segment')
#Outputs:

#Warps to MNI space using a 3mm template image
#Note - The template is warped to subj space (with mask as
#cost function region) then the inverse transform (subj space > MNI) is used
#to warp the data.
antsnorm = Node(ants.Registration(), name='antsnorm')
antsnorm.inputs.output_transform_prefix = "new"
antsnorm.inputs.collapse_output_transforms = True
antsnorm.inputs.initial_moving_transform_com = True
antsnorm.inputs.num_threads = 1
antsnorm.inputs.output_inverse_warped_image = True
antsnorm.inputs.output_warped_image = True
antsnorm.inputs.sigma_units = ['vox'] * 3  # one per registration stage
antsnorm.inputs.transforms = ['Rigid', 'Affine', 'SyN']  # three-stage registration
Exemplo n.º 12
0
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline.

        Wires: init -> unzip -> SPM NewSegment -> T1-to-MNI (via the forward
        deformation field), then sinks all segmentation outputs into the
        CAPS directory with BIDS-style renaming.
        """
        import nipype.interfaces.io as nio
        import nipype.interfaces.spm as spm
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe

        from clinica.utils.filemanip import unzip_nii, zip_nii
        from clinica.utils.nipype import container_from_filename, fix_join
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        from .t1_volume_tissue_segmentation_utils import (
            ApplySegmentationDeformation,
            get_tissue_tuples,
            init_input_node,
            print_end_pipeline,
            zip_list_files,
        )

        # Switch nipype over to the compiled SPM standalone when present.
        if spm_standalone_is_available():
            use_spm_standalone()

        # Get <subject_id> (e.g. sub-CLNC01_ses-M00) from input_node
        # and print begin message
        # =======================
        init_node = npe.Node(
            interface=nutil.Function(
                input_names=self.get_input_fields(),
                output_names=["subject_id"] + self.get_input_fields(),
                function=init_input_node,
            ),
            name="0-InitNode",
        )

        # Unzipping
        # =========
        unzip_node = npe.Node(
            nutil.Function(input_names=["in_file"],
                           output_names=["out_file"],
                           function=unzip_nii),
            name="1-UnzipT1w",
        )

        # Unified Segmentation
        # ====================
        new_segment = npe.Node(spm.NewSegment(), name="2-SpmSegmentation")
        # Write both inverse and forward deformation fields; the forward one
        # is reused below to warp the T1 into MNI space.
        new_segment.inputs.write_deformation_fields = [True, True]
        new_segment.inputs.tissues = get_tissue_tuples(
            self.parameters["tissue_probability_maps"],
            self.parameters["tissue_classes"],
            self.parameters["dartel_tissues"],
            self.parameters["save_warped_unmodulated"],
            self.parameters["save_warped_modulated"],
        )

        # Apply segmentation deformation to T1 (into MNI space)
        # =====================================================
        t1_to_mni = npe.Node(ApplySegmentationDeformation(), name="3-T1wToMni")

        # Print end message
        # =================
        print_end_message = npe.Node(
            interface=nutil.Function(
                input_names=["subject_id", "final_file"],
                function=print_end_pipeline,
            ),
            name="WriteEndMessage",
        )

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, init_node, [("t1w", "t1w")]),
            (init_node, unzip_node, [("t1w", "in_file")]),
            (unzip_node, new_segment, [("out_file", "channel_files")]),
            (init_node, print_end_message, [("subject_id", "subject_id")]),
            (unzip_node, t1_to_mni, [("out_file", "in_files")]),
            (new_segment, t1_to_mni, [("forward_deformation_field",
                                       "deformation_field")]),
            (new_segment, self.output_node,
             [("bias_corrected_images", "bias_corrected_images"),
              ("bias_field_images", "bias_field_images"),
              ("dartel_input_images", "dartel_input_images"),
              ("forward_deformation_field", "forward_deformation_field"),
              ("inverse_deformation_field", "inverse_deformation_field"),
              ("modulated_class_images", "modulated_class_images"),
              ("native_class_images", "native_class_images"),
              ("normalized_class_images", "normalized_class_images"),
              ("transformation_mat", "transformation_mat")]),
            (t1_to_mni, self.output_node, [("out_files", "t1_mni")]),
            (self.output_node, print_end_message, [("t1_mni", "final_file")]),
        ])
        # fmt: on

        # Find container path from t1w filename
        # =====================================
        container_path = npe.Node(
            nutil.Function(
                input_names=["bids_or_caps_filename"],
                output_names=["container"],
                function=container_from_filename,
            ),
            name="ContainerPath",
        )

        # Writing CAPS
        # ============
        write_node = npe.Node(name="WriteCAPS", interface=nio.DataSink())
        write_node.inputs.base_directory = self.caps_directory
        write_node.inputs.parameterization = False
        # Rename SPM's output files (c1..c6 tissue maps, y_/iy_ deformation
        # fields, w*/mw* normalized images, r* DARTEL inputs) to CAPS naming.
        write_node.inputs.regexp_substitutions = [
            (r"(.*)c1(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-graymatter\3"),
            (r"(.*)c2(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-whitematter\3"),
            (r"(.*)c3(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-csf\3"),
            (r"(.*)c4(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-bone\3"),
            (r"(.*)c5(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-softtissue\3"),
            (r"(.*)c6(sub-.*)(\.nii(\.gz)?)$", r"\1\2_segm-background\3"),
            (r"(.*)(/native_space/sub-.*)(\.nii(\.gz)?)$",
             r"\1\2_probability\3"),
            (
                r"(.*)(/([a-z]+)_deformation_field/)i?y_(sub-.*)(\.nii(\.gz)?)$",
                r"\1/normalized_space/\4_target-Ixi549Space_transformation-\3_deformation\5",
            ),
            (
                r"(.*)(/t1_mni/)w(sub-.*)_T1w(\.nii(\.gz)?)$",
                r"\1/normalized_space/\3_space-Ixi549Space_T1w\4",
            ),
            (
                r"(.*)(/modulated_normalized/)mw(sub-.*)(\.nii(\.gz)?)$",
                r"\1/normalized_space/\3_space-Ixi549Space_modulated-on_probability\4",
            ),
            (
                r"(.*)(/normalized/)w(sub-.*)(\.nii(\.gz)?)$",
                r"\1/normalized_space/\3_space-Ixi549Space_modulated-off_probability\4",
            ),
            (r"(.*/dartel_input/)r(sub-.*)(\.nii(\.gz)?)$",
             r"\1\2_dartelinput\3"),
            # Will remove trait_added empty folder
            (r"trait_added", r""),
        ]

        # fmt: off
        self.connect([
            (self.input_node, container_path, [("t1w", "bids_or_caps_filename")
                                               ]),
            (container_path, write_node, [(("container", fix_join, "t1", "spm",
                                            "segmentation"), "container")]),
            (self.output_node, write_node,
             [(("native_class_images", zip_list_files, True), "native_space"),
              (("dartel_input_images", zip_list_files, True), "dartel_input")
              ]),
            (self.output_node, write_node, [(("inverse_deformation_field",
                                              zip_nii, True),
                                             "inverse_deformation_field")]),
            (self.output_node, write_node, [(("forward_deformation_field",
                                              zip_nii, True),
                                             "forward_deformation_field")]),
            (self.output_node, write_node, [(("t1_mni", zip_nii, True),
                                             "t1_mni")]),
        ])
        # Warped (normalized) tissue maps are only sunk when requested.
        if self.parameters["save_warped_unmodulated"]:
            self.connect([
                (self.output_node, write_node, [(("normalized_class_images",
                                                  zip_list_files, True),
                                                 "normalized")]),
            ])
        if self.parameters["save_warped_modulated"]:
            self.connect([
                (self.output_node, write_node, [(("modulated_class_images",
                                                  zip_list_files, True),
                                                 "modulated_normalized")]),
            ])
def _threshold_binarize(in_file, thresh, out_file):
    """Threshold *in_file* at *thresh* and binarize it into *out_file* via FSL."""
    node = fsl.Threshold()
    node.inputs.in_file = in_file
    node.inputs.thresh = thresh
    node.inputs.args = '-bin'
    node.inputs.ignore_exception = True
    node.inputs.out_file = out_file
    node.run()


def segment_spm(population, workspace_dir):
    """Segment each subject's anatomical image with SPM12 NewSegment and
    binarize the resulting tissue probability maps with FSL.

    Parameters
    ----------
    population : iterable of str
        Subject identifiers; each subject must provide
        ``<workspace_dir>/<subject>/anatomical_original/ANATOMICAL.nii``.
    workspace_dir : str
        Root directory holding one sub-directory per subject.

    Notes
    -----
    Already-segmented / already-binarized subjects are detected via their
    output files and skipped. (Python-3 prints; the original used
    Python-2 print statements.)
    """
    count = 0
    for subject in population:
        count += 1
        print('========================================================================================')
        print('%s- Runnning SPM12 NewSegment on subject %s_%s' % (
            count, subject, workspace_dir[-1]))
        print('')

        # define subject directory and anatomical file path
        subject_dir = os.path.join(workspace_dir, subject)
        anatomical_dir = os.path.join(subject_dir, 'anatomical_original')
        anatomical_file = os.path.join(anatomical_dir, 'ANATOMICAL.nii')

        # all derived files end up in the subject's SPM segmentation dir
        # (computed once; the original recomputed it in several places)
        out_spm_dir = os.path.join(subject_dir, 'segmentation_spm')

        # skip segmentation if the GM map already exists
        if os.path.isfile(os.path.join(out_spm_dir, 'TISSUE_CLASS_1_GM.nii')):
            print('Brain already segmented......... moving on ')
        else:
            print('..... Segmenting Brain with SPM12-NewSegment')

            # create destination directory; ignore "already exists" errors
            try:
                os.makedirs(out_spm_dir)
            except OSError:
                pass

            # run SPM segmentation
            print('..... Starting matlab no splash to run segmentation')
            seg = spm.NewSegment()
            seg.inputs.channel_files = anatomical_file
            # (bias regularisation, bias FWHM, (save bias field, save corrected))
            seg.inputs.channel_info = (0.0001, 60, (True, True))
            seg.out_dir = out_spm_dir
            seg.run()

            # rename output files
            print('..... Renaming outputs and dumping into SPM segmenation dir')

            # substring marker -> final filename; checked in order, first
            # match wins (mirrors the original if/elif chain)
            renames = [
                ('c1', 'TISSUE_CLASS_1_GM.nii'),
                ('c2', 'TISSUE_CLASS_2_WM.nii'),
                ('c3', 'TISSUE_CLASS_3_CSF.nii'),
                ('c4', '___Skull.nii'),
                ('c5', '___SoftTissue.nii'),
                ('BiasField', '___BiasFieldMap.nii'),
                ('mANATOMICAL', '___mFile.nii'),
                ('ANATOMICAL_seg8', '___seg8.mat'),
            ]
            for fname in os.listdir(anatomical_dir):
                for marker, new_name in renames:
                    if marker in fname:
                        shutil.move(os.path.join(anatomical_dir, fname),
                                    os.path.join(out_spm_dir, new_name))
                        break

        # threshold and binarize spm tissue masks
        gm_mask = os.path.join(out_spm_dir, 'TISSUE_CLASS_1_GM.nii')
        wm_mask = os.path.join(out_spm_dir, 'TISSUE_CLASS_2_WM.nii')
        csf_mask = os.path.join(out_spm_dir, 'TISSUE_CLASS_3_CSF.nii')

        if os.path.isfile(
                os.path.join(out_spm_dir,
                             'TISSUE_CLASS_1_GM_BINARY05.nii.gz')):
            print('Tissues already binned.......... moving on ')
        else:
            print('..... Thresholding and binazing tissue probablity maps ')

            # two binary masks per tissue: a loose and a strict threshold
            binarizations = [
                (gm_mask, 0.5, 'TISSUE_CLASS_1_GM_BINARY05.nii.gz'),
                (gm_mask, 0.7, 'TISSUE_CLASS_1_GM_BINARY07.nii.gz'),
                (wm_mask, 0.5, 'TISSUE_CLASS_2_WM_BINARY05.nii.gz'),
                (wm_mask, 0.9, 'TISSUE_CLASS_2_WM_BINARY09.nii.gz'),
                (csf_mask, 0.5, 'TISSUE_CLASS_3_CSF_BINARY05.nii.gz'),
                (csf_mask, 0.9, 'TISSUE_CLASS_3_CSF_BINARY09.nii.gz'),
            ]
            for mask, thresh, out_name in binarizations:
                _threshold_binarize(mask, thresh,
                                    os.path.join(out_spm_dir, out_name))

        print('========================================================================================')
Exemplo n.º 14
0
def create_DARTEL_template(name='dartel_template'):
    """Create a vbm workflow that generates DARTEL-based template


    Example
    -------

    >>> preproc = create_DARTEL_template()
    >>> preproc.inputs.inputspec.structural_files = [
    ...     os.path.abspath('s1.nii'), os.path.abspath('s3.nii')]
    >>> preproc.inputs.inputspec.template_prefix = 'Template'
    >>> preproc.run() # doctest: +SKIP

    Inputs::

         inputspec.structural_files : structural data to be used to create templates
         inputspec.template_prefix : prefix for dartel template

    Outputs::

         outputspec.template_file : DARTEL template
         outputspec.flow_fields : warps from input struct files to the template

    """

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['structural_files', 'template_prefix']),
        name='inputspec')

    segment = pe.MapNode(spm.NewSegment(),
                         iterfield=['channel_files'],
                         name='segment')
    workflow.connect(inputnode, 'structural_files', segment, 'channel_files')

    # Per-version TPM location and tissue specs, one entry per class 1..6:
    # (n_gaussians, (write native, write DARTEL), (unmodulated, modulated)).
    tissue_specs = {
        'SPM8': ('toolbox/Seg/TPM.nii', [
            (2, (True, True), (False, False)),
            (2, (True, True), (False, False)),
            (2, (True, False), (False, False)),
            (3, (False, False), (False, False)),
            (4, (False, False), (False, False)),
            (2, (False, False), (False, False)),
        ]),
        'SPM12': ('tpm/TPM.nii', [
            (1, (True, True), (False, False)),
            (1, (True, True), (False, False)),
            (2, (True, False), (False, False)),
            (3, (False, False), (False, False)),
            (4, (False, False), (False, False)),
            (2, (False, False), (False, False)),
        ]),
    }

    spm_info = spm.Info.getinfo()
    if spm_info:
        if spm_info['name'] in tissue_specs:
            tpm_rel, specs = tissue_specs[spm_info['name']]
            tpm = os.path.join(spm_info['path'], tpm_rel)
            segment.inputs.tissues = [
                ((tpm, idx), ngaus, native, warped)
                for idx, (ngaus, native, warped) in enumerate(specs, start=1)
            ]
        else:
            # Bug fix: the original logged the error and then crashed with a
            # NameError on the undefined tissue variables; now tissues are
            # simply left unset for unsupported versions.
            logger.critical('Unsupported version of SPM')
    else:
        logger.critical('SPM not found')

    dartel = pe.Node(spm.DARTEL(), name='dartel')

    def get2classes(dartel_files):
        """Get the gray and white segmentation classes generated by NewSegment."""
        class1images = []
        class2images = []
        for session in dartel_files:
            class1images.extend(session[0])
            class2images.extend(session[1])
        return [class1images, class2images]

    workflow.connect(segment, ('dartel_input_images', get2classes), dartel,
                     'image_files')
    workflow.connect(inputnode, 'template_prefix', dartel, 'template_prefix')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=["template_file", "flow_fields"]),
        name="outputspec")
    workflow.connect([
        (dartel, outputnode, [('final_template_file', 'template_file'),
                              ('dartel_flow_fields', 'flow_fields')]),
    ])

    return workflow
    os.makedirs(outDirSubj)

# finally, output directory for the results in MNI space
outDir = outDirSubj
# if the directory doesn't exist, create it
if not os.path.exists(outDir):
    os.makedirs(outDir)

#
#    T1 Normalization nodes
#
# gunzip node: decompress the input T1 before handing it to the SPM nodes
gunzip_T1w = Node(Gunzip(in_file=imageT1), name="gunzip_T1w")

# Segmentation, native space
segNative = Node(spm.NewSegment(), name='segNative')

# Normalize - normalizes structural images to the MNI template
# ('estwrite' both estimates the deformation and writes the warped image)
normalizeT1 = Node(spm.Normalize12(jobtype='estwrite',
                                   tpm=fTPM,
                                   write_bounding_box=[[-90, -120, -70],
                                                       [90, 90, 105]]),
                   name="normalizeT1")

# Segmentation, template space
segMNI = Node(spm.NewSegment(), name='segMNI')

#
#   fMRI pre-processing
#
# skip dummy scans
Exemplo n.º 16
0
def t1_pipeline(name='t1_preproc'):
    """T1 preprocessing workflow.

    Runs SPM NewSegment on the input T1, builds a brain mask by summing
    the first three tissue classes and thresholding at 0.8, then crops the
    corrected T1, class images and mask to an autoboxed bounding box.

    NOTE(review): the outputspec declares 'corrected_t1_brain' but no node
    is connected to it — confirm whether that output was ever wired up.
    """
    inputnode = pe.Node(utility.IdentityInterface(fields=['t1_file']),
                        run_without_submitting=True,
                        name='inputspec')
    outputnode = pe.Node(utility.IdentityInterface(
        fields=['mask', 'corrected_t1', 'corrected_t1_brain']),
                         run_without_submitting=True,
                         name='outputspec')

    spm_path = spm.Info.version()['path']

    # SPM8-style TPM location (toolbox/Seg); classes 1..6 with
    # (n_gaussians, (native, dartel), (unmodulated, modulated)) settings.
    tpm = os.path.join(spm_path, 'toolbox', 'Seg', 'TPM.nii')
    n_newsegment_t1 = pe.Node(spm.NewSegment(
        write_deformation_fields=[False, True],
        channel_info=(0.0001, 60, (True, True)),
        tissues=[
            ((tpm, 1), 2, (True, True), (True, True)),
            ((tpm, 2), 2, (True, True), (True, True)),
            ((tpm, 3), 2, (True, False), (True, True)),
            ((tpm, 4), 3, (True, False), (False, False)),
            ((tpm, 5), 4, (True, False), (False, False)),
            ((tpm, 6), 2, (False, False), (False, False)),
        ]),
                              name='newsegment_t1')

    # Sum GM+WM+CSF, threshold at 0.8, binarize, then erode/dilate to clean up.
    n_seg2mask = pe.Node(fsl.MultiImageMaths(
        output_type='NIFTI',
        output_datatype='char',
        op_string=' -add %s -add %s -thr 0.8 -bin -eroF -dilF -dilF'),
                         name='seg2mask')

    n_autobox_mask = pe.Node(afni.Autobox(padding=3, out_name='%s_crop'),
                             name='autobox_mask')

    n_merge_crop = pe.Node(utility.Merge(3), name='merge_crop')

    n_crop_all = pe.MapNode(nipy.Crop(out_file='%s_crop.nii.gz',
                                      outputtype='NIFTI_GZ'),
                            iterfield=['in_file'],
                            name='crop_all')

    w = pe.Workflow(name=name)

    w.connect([
        (inputnode, n_newsegment_t1, [('t1_file', 'channel_files')]),
        (n_newsegment_t1, n_seg2mask, [(('native_class_images', getitem_rec, 0,
                                         0), 'in_file'),
                                       (('native_class_images', getitem_rec,
                                         slice(1, 3), 0), 'operand_files')]),
        (inputnode, n_seg2mask, [(('t1_file', fname_presuffix_basename, '',
                                   '_mask', '.'), 'out_file')]),
        (n_seg2mask, n_autobox_mask, [('out_file', 'in_file')]),
        (n_newsegment_t1, n_merge_crop,
         [('bias_corrected_images', 'in1'),
          (('native_class_images', utility.select, [0, 1, 2]), 'in2')]),
        (n_seg2mask, n_merge_crop, [('out_file', 'in3')]),
        (n_merge_crop, n_crop_all, [(('out', utility.flatten), 'in_file')]),
        (n_autobox_mask, n_crop_all, [
            ('x_min', 'x_min'),
            ('x_max', 'x_max'),
            ('y_min', 'y_min'),
            ('y_max', 'y_max'),
            ('z_min', 'z_min'),
            ('z_max', 'z_max'),
        ]), (n_seg2mask, outputnode, [('out_file', 'mask')]),
        (n_newsegment_t1, outputnode, [('bias_corrected_images',
                                        'corrected_t1')])
    ])
    return w
Exemplo n.º 17
0
File: t1.py — Project: bpinsard/misc
def t1_pipeline(name='t1_preproc'):
    """Build a T1 preprocessing nipype workflow.

    Converts a T1 DICOM series to NIfTI, reorients it to RPI, crops the
    neck, runs SPM NewSegment for bias correction and tissue
    segmentation, builds a brain mask from the GM/WM/CSF native class
    images, and applies that mask to the bias-corrected T1.

    Parameters
    ----------
    name : str
        Name given to the returned Workflow.

    Returns
    -------
    nipype.pipeline.engine.Workflow
        Workflow with ``inputspec.t1_dicom_dir`` as input and
        ``outputspec`` fields ``mask``, ``corrected_t1`` and
        ``corrected_t1_brain``.
    """
    inputnode = pe.Node(utility.IdentityInterface(fields=['t1_dicom_dir']),
                        name='inputspec')
    outputnode = pe.Node(utility.IdentityInterface(
        fields=['mask', 'corrected_t1', 'corrected_t1_brain']),
                         name='outputspec')

    n_t1_dicom_files = pe.Node(nio.DataGrabber(sort_filelist=True, ),
                               name='t1_dicom_files')

    # DICOM -> NIfTI conversion; rescale DICOM values on the fly.
    n_to3d_t1 = pe.Node(afni.To3D(filetype='anat',
                                  environ=dict(AFNI_DICOM_RESCALE='YES')),
                        name='to3d_t1')

    n_reorient_t1 = pe.Node(afni.Resample(orientation='RPI'),
                            name='reorient_t1')

    n_autobox_t1 = pe.Node(afni.Autobox(padding=5), name='autobox_t1')

    n_zcut_t1 = pe.Node(afni.ZCutUp(outputtype='NIFTI'), name='zcut_t1')

    n_newsegment_t1 = pe.Node(spm.NewSegment(
        write_deformation_fields=[True, True],
        channel_info=(0.0001, 60, (True, True))),
                              name='newsegment_t1')

    # GM + WM + CSF summed, thresholded at 0.8, binarized, then
    # eroded/dilated to produce a clean brain mask.
    n_seg2mask = pe.Node(fsl.MultiImageMaths(
        output_type='NIFTI',
        op_string=' -add %s -add %s -thr 0.8 -bin -eroF -dilF -dilF'),
                         name='seg2mask')

    n_mask_brain = pe.Node(interface=fsl.ImageMaths(op_string='-mul',
                                                    suffix='_brain',
                                                    output_type='NIFTI'),
                           name='mask_brain')

    w = pe.Workflow(name=name)

    def zmax2keep(z):
        # Keep at most the 175 most superior axial slices (removes neck).
        return '%d %d' % (max(0, z - 174), z)

    w.connect([
        (inputnode, n_t1_dicom_files, [('t1_dicom_dir', 'base_directory')]),
        (n_t1_dicom_files, n_to3d_t1,
         [(('outfiles', sort_t1_files), 'in_files'),
          (('outfiles', t1_filename, 'nii.gz'), 'out_file')]),
        (n_to3d_t1, n_reorient_t1, [('out_file', 'in_file')]),
        (n_reorient_t1, n_autobox_t1, [('out_file', 'in_file')]),
        (n_reorient_t1, n_zcut_t1, [('out_file', 'in_file')]),
        (n_autobox_t1, n_zcut_t1, [(('z_max', zmax2keep), 'keep')]),
        (n_zcut_t1, n_newsegment_t1, [('out_file', 'channel_files')]),
        # First native class image (GM) is the base; WM and CSF are the
        # two %s operands of the seg2mask op_string.
        (n_newsegment_t1, n_seg2mask, [(('native_class_images', getitem_rec, 0,
                                         0), 'in_file'),
                                       (('native_class_images', getitem_rec,
                                         slice(1, 3), 0), 'operand_files')]),
        (n_zcut_t1, n_seg2mask, [(('out_file', fname_presuffix_basename, '',
                                   '_mask', '.'), 'out_file')]),
        (n_newsegment_t1, n_mask_brain, [('bias_corrected_images', 'in_file')
                                         ]),
        (n_seg2mask, n_mask_brain, [('out_file', 'in_file2')]),
        (n_seg2mask, outputnode, [('out_file', 'mask')]),
        # BUGFIX: 'corrected_t1_brain' was declared on outputspec but never
        # populated; wire the skull-stripped image to it.
        (n_mask_brain, outputnode, [('out_file', 'corrected_t1_brain')]),
        (n_newsegment_t1, outputnode, [('bias_corrected_images',
                                        'corrected_t1')])
    ])
    return w
Exemplo n.º 18
0
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.

        Locates the SPM installation (via the SPM_HOME and MATLABCMD
        environment variables), selects the tissue probability map that
        matches the detected SPM version, then wires:
        unzip -> SPM NewSegment (unified segmentation) -> optional warp
        of the T1 into MNI space, exposing all segmentation outputs on
        ``self.output_node``.

        Raises
        ------
        RuntimeError
            If SPM cannot be found or its version is neither 8 nor 12.
        """

        import os
        import os.path as op
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import t1_volume_tissue_segmentation_utils as utils
        from clinica.utils.io import unzip_nii

        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        version = spm.Info.version()

        if version:
            spm_path = version['path']
            if version['name'] == 'SPM8':
                # BUGFIX: was a Python 2 `print` statement (syntax error on
                # Python 3); the function form is valid on both.
                print('You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                      'Please upgrade your SPM toolbox.')
                # SPM8 ships its TPM inside the Seg toolbox.
                tissue_map = op.join(spm_path, 'toolbox/Seg/TPM.nii')
            elif version['name'] == 'SPM12':
                tissue_map = op.join(spm_path, 'tpm/TPM.nii')
            else:
                raise RuntimeError(
                    'SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.'
                )
        else:
            raise RuntimeError(
                'SPM could not be found. Please verify your SPM_HOME environment variable.'
            )

        # Unzipping
        # ===============================
        unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_node',
                                 iterfield=['in_file'])

        # Unified Segmentation
        # ===============================
        new_segment = npe.MapNode(spm.NewSegment(),
                                  name='new_segment',
                                  iterfield=['channel_files'])

        # Only forward explicitly-set parameters so NewSegment keeps its
        # own defaults otherwise.
        if self.parameters['affine_regularization'] is not None:
            new_segment.inputs.affine_regularization = self.parameters[
                'affine_regularization']
        if self.parameters['channel_info'] is not None:
            new_segment.inputs.channel_info = self.parameters['channel_info']
        if self.parameters['sampling_distance'] is not None:
            new_segment.inputs.sampling_distance = self.parameters[
                'sampling_distance']
        if self.parameters['warping_regularization'] is not None:
            new_segment.inputs.warping_regularization = self.parameters[
                'warping_regularization']

        # Check if we need to save the forward transformation for registering the T1 to the MNI space
        if self.parameters['save_t1_mni'] is not None and self.parameters[
                'save_t1_mni']:
            if self.parameters['write_deformation_fields'] is not None:
                self.parameters['write_deformation_fields'][1] = True
            else:
                self.parameters['write_deformation_fields'] = [False, True]

        if self.parameters['write_deformation_fields'] is not None:
            new_segment.inputs.write_deformation_fields = self.parameters[
                'write_deformation_fields']

        # A user-supplied TPM overrides the one found with the SPM install.
        if self.parameters['tpm'] is not None:
            tissue_map = self.parameters['tpm']

        new_segment.inputs.tissues = utils.get_tissue_tuples(
            tissue_map, self.parameters['tissue_classes'],
            self.parameters['dartel_tissues'],
            self.parameters['save_warped_unmodulated'],
            self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # ========================================================
        if self.parameters['save_t1_mni'] is not None and self.parameters[
                'save_t1_mni']:

            t1_to_mni = npe.MapNode(
                utils.ApplySegmentationDeformation(),
                name='t1_to_mni',
                iterfield=['deformation_field', 'in_files'])
            self.connect([
                (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
                (new_segment, t1_to_mni, [('forward_deformation_field',
                                           'deformation_field')]),
                (t1_to_mni, self.output_node, [('out_files', 't1_mni')])
            ])

        # Connection
        # ==========
        self.connect([
            (self.input_node, unzip_node, [('input_images', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (new_segment, self.output_node,
             [('bias_corrected_images', 'bias_corrected_images'),
              ('bias_field_images', 'bias_field_images'),
              ('dartel_input_images', 'dartel_input_images'),
              ('forward_deformation_field', 'forward_deformation_field'),
              ('inverse_deformation_field', 'inverse_deformation_field'),
              ('modulated_class_images', 'modulated_class_images'),
              ('native_class_images', 'native_class_images'),
              ('normalized_class_images', 'normalized_class_images'),
              ('transformation_mat', 'transformation_mat')])
        ])
Exemplo n.º 19
0
def segment_spm(population, workspace_dir):
    """Segment each subject's T1 with SPM12 NewSegment and refine the masks.

    Per subject the function:
      1. runs SPM12 NewSegment on ``anatomical_original/ANATOMICAL.nii``
         and renames the c1..c5 / bias-field outputs into
         ``segmentation_spm``;
      2. thresholds the GM/WM/CSF probability maps at 0.5 and binarizes
         them with FSL;
      3. builds a brain mask, deskulls the T1, runs FLIRT + FIRST, and
         merges the subcortical segmentation into "optimized" tissue
         masks.
    Each stage is skipped when its outputs already exist, so the
    function can be re-run safely. All results are written to disk;
    nothing is returned.

    Parameters
    ----------
    population : iterable of str
        Subject identifiers; each is a sub-directory of ``workspace_dir``.
    workspace_dir : str
        Root directory containing one directory per subject.
    """
    # BUGFIX throughout: Python 2 `print` statements were syntax errors on
    # Python 3; converted to the function form (message text unchanged).
    print('#############################################################################')
    print('')
    print('                 RUNNNING PROJECT NMR-093%s %s' % (workspace_dir[-10:-9], workspace_dir[-8:]))
    print('')
    print('#############################################################################')

    count = 0

    for subject in population:
        count += 1
        print('========================================================================================')
        print('%s- Runnning SPM12 NewSegment on subject %s_%s' % (count, subject, workspace_dir[-10:-9]))
        print('')

        # define subject directory and anatomical file path
        subject_dir     = os.path.join(workspace_dir ,  subject)
        anatomical_dir  = os.path.join(subject_dir   , 'anatomical_original')
        anatomical_file = os.path.join(anatomical_dir, 'ANATOMICAL.nii')

        # skip segmentation if the first tissue class already exists
        if os.path.isfile(os.path.join(workspace_dir, subject, 'segmentation_spm', 'TISSUE_CLASS_1_GM_prob.nii')):
            print('Brain already segmented......... moving on')

        else:
            print('..... Segmenting Brain with SPM12-NewSegment')

            # define destination directory for spm segmentation outputs
            mkdir_path(os.path.join(subject_dir, 'segmentation_spm'))
            out_seg_dir  = str(os.path.join(subject_dir, 'segmentation_spm'))

            # run SPM segmentation
            print('..... Starting matlab no splash to run segmentation')
            seg                      = spm.NewSegment()
            seg.inputs.channel_files = anatomical_file
            seg.inputs.channel_info  = (0.0001, 60, (True, True))
            # NOTE(review): `out_dir` is not a NewSegment input trait, so this
            # assignment appears to have no effect — SPM writes next to the
            # input image, which is why the renames below read from
            # `anatomical_dir`. Kept for backward compatibility; confirm and
            # remove.
            seg.out_dir              = out_seg_dir
            seg.run()

            # rename output files
            print('..... Renaming outputs and dumping into SPM segmenation dir')

            shutil.move(str(os.path.join(anatomical_dir, 'c1ANATOMICAL.nii')),
                        str(os.path.join(out_seg_dir, 'TISSUE_CLASS_1_GM_prob.nii')))

            shutil.move(str(os.path.join(anatomical_dir, 'c2ANATOMICAL.nii')),
                        str(os.path.join(out_seg_dir, 'TISSUE_CLASS_2_WM_prob.nii')))

            shutil.move(str(os.path.join(anatomical_dir, 'c3ANATOMICAL.nii')),
                        str(os.path.join(out_seg_dir, 'TISSUE_CLASS_3_CSF_prob.nii')))

            shutil.move(str(os.path.join(anatomical_dir, 'c4ANATOMICAL.nii')),
                        str(os.path.join(out_seg_dir, '___Skull.nii')))

            shutil.move(str(os.path.join(anatomical_dir, 'c5ANATOMICAL.nii')),
                        str(os.path.join(out_seg_dir, '___SoftTissue.nii')))

            shutil.move((os.path.join(anatomical_dir, 'BiasField_ANATOMICAL.nii')),
                        (os.path.join(out_seg_dir, '___BiasFieldMap.nii')))

            shutil.move((os.path.join(anatomical_dir, 'mANATOMICAL.nii')),
                        (os.path.join(out_seg_dir, '___mFile.nii')))

            shutil.move((os.path.join(anatomical_dir, 'ANATOMICAL_seg8.mat')),
                        (os.path.join(out_seg_dir, '___seg8.mat')))

            # threshold and binarize spm tissue masks
            # (previously a no-op bare string statement here)
            print('..... Thresholding and binazing tissue probablity maps ')
            out_seg_dir  = str(os.path.join(subject_dir, 'segmentation_spm'))
            gm_mask  = str(os.path.join(out_seg_dir, 'TISSUE_CLASS_1_GM_prob.nii'))
            wm_mask  = str(os.path.join(out_seg_dir, 'TISSUE_CLASS_2_WM_prob.nii'))
            csf_mask = str(os.path.join(out_seg_dir, 'TISSUE_CLASS_3_CSF_prob.nii'))

            thr_hbin_GM1                          = fsl.Threshold()
            thr_hbin_GM1.inputs.in_file           = gm_mask
            thr_hbin_GM1.inputs.thresh            = 0.5
            thr_hbin_GM1.inputs.args              = '-bin'
            thr_hbin_GM1.inputs.ignore_exception  = True
            thr_hbin_GM1.inputs.out_file          = str(os.path.join(out_seg_dir, 'TISSUE_CLASS_1_GM_BIN.nii.gz'))
            thr_hbin_GM1.run()

            thr_hbin_WM1                          = fsl.Threshold()
            thr_hbin_WM1.inputs.in_file           = wm_mask
            thr_hbin_WM1.inputs.thresh            = 0.5
            thr_hbin_WM1.inputs.args              = '-bin'
            thr_hbin_WM1.inputs.ignore_exception  = True
            thr_hbin_WM1.inputs.out_file          = str(os.path.join(out_seg_dir, 'TISSUE_CLASS_2_WM_BIN.nii.gz'))
            thr_hbin_WM1.run()

            thr_hbin_CSF1                         = fsl.Threshold()
            thr_hbin_CSF1.inputs.in_file          = csf_mask
            thr_hbin_CSF1.inputs.thresh           = 0.5
            thr_hbin_CSF1.inputs.args             = '-bin'
            thr_hbin_CSF1.inputs.ignore_exception = True
            thr_hbin_CSF1.inputs.out_file         = str(os.path.join(out_seg_dir, 'TISSUE_CLASS_3_CSF_BIN.nii.gz'))
            thr_hbin_CSF1.run()

        # subcortical segmentation / optimized tissue masks
        # (previously a no-op bare string statement here)
        out_seg_dir = os.path.join(subject_dir, 'segmentation_spm')
        if os.path.isfile(os.path.join(out_seg_dir, 'TISSUE_CLASS_1_GM_OPTIMIZED.nii.gz')):
            print('Optimized Tissue masks already created......... moving on')

        else:
            print('..... Segmentatiing Subcortex and creating optimized tissue masks')
            # create brain mask from GM, WM, CSF
            gm_bin = os.path.join(out_seg_dir, 'TISSUE_CLASS_1_GM_BIN.nii.gz')
            wm_bin = os.path.join(out_seg_dir, 'TISSUE_CLASS_2_WM_BIN.nii.gz')
            cm_bin = os.path.join(out_seg_dir, 'TISSUE_CLASS_3_CSF_BIN.nii.gz')
            brain_mask = os.path.join(anatomical_dir, 'ANATOMICAL_brain_mask.nii.gz')
            os.system('fslmaths %s -add %s -add %s -fillh -dilM %s'%(gm_bin,wm_bin, cm_bin,brain_mask))

            # deskull anatomical
            anatomical_deskull = os.path.join(anatomical_dir, 'ANATOMICAL_DESKULL.nii.gz')
            anatomical_deskull_rpi = os.path.join(anatomical_dir, 'ANATOMICAL_DESKULL_RPI.nii.gz')
            os.system('fslmaths %s -mul %s %s' %(anatomical_file, brain_mask, anatomical_deskull))
            os.system('fslswapdim %s RL PA IS %s'%(anatomical_deskull, anatomical_deskull_rpi))

            # run FLIRT and FIRST
            mkdir_path(os.path.join(out_seg_dir, 'FIRST_subcortical'))
            out_first_dir  = os.path.join(out_seg_dir, 'FIRST_subcortical')
            first_seg = os.path.join(out_first_dir, 'FIRST_all_fast_firstseg.nii.gz')

            if not os.path.isfile(first_seg):
                ref = '/usr/share/fsl/5.0/data/standard/MNI152_T1_1mm_brain.nii.gz'
                omat = os.path.join(anatomical_dir, 'ANATOMICAL_DESKULL_RPI_MNI.mat')
                anat2mni = os.path.join(anatomical_dir, 'ANATOMICAL_DESKULL_RPI_MNI.nii.gz')
                print('running flirt')
                os.system('flirt -in %s -ref %s -out %s -omat %s -cost mutualinfo -dof 12'%(anatomical_deskull_rpi, ref, anat2mni, omat))
                print('running first')
                os.system('run_first_all -v -i %s -a %s -o %s/FIRST'%(anatomical_deskull_rpi, omat, out_first_dir))

                # flip back to anatomical orientation
                first_seg = os.path.join(out_first_dir, 'FIRST_all_fast_firstseg.nii.gz')
                first_seg_ail = os.path.join(out_first_dir, 'FIRST_all_fast_firstseg_AIL.nii.gz')
                os.system('fslswapdim %s AP IS LR %s' %(first_seg, first_seg_ail))

                # create subcortically corrected tissue masks and flip them back to correct orientation
                first_seg_ail_bin = os.path.join(out_first_dir, 'FIRST_all_fast_firstseg_AIL_BIN.nii.gz')
                gm_combined = os.path.join(out_seg_dir, 'TISSUE_CLASS_1_GM_OPTIMIZED.nii.gz')
                wm_combined = os.path.join(out_seg_dir, 'TISSUE_CLASS_2_WM_OPTIMIZED.nii.gz')
                cm_combined = os.path.join(out_seg_dir, 'TISSUE_CLASS_3_CSF_OPTIMIZED.nii.gz')

                os.system('fslmaths %s -bin %s' %(first_seg_ail, first_seg_ail_bin))
                os.system('fslmaths %s -add %s -bin %s' %(first_seg_ail_bin, gm_bin, gm_combined))
                os.system('fslmaths %s -sub %s -bin %s' %(wm_bin, first_seg_ail_bin, wm_combined))
                os.system('fslmaths %s -sub %s -bin %s' %(cm_bin,first_seg_ail_bin, cm_combined))

            print('done')
Exemplo n.º 20
0
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.

        Detects the SPM installation (standalone + MCR, or Matlab-based),
        resolves the tissue probability map for the detected version, then
        wires the full T1-volume pipeline:
        unzip -> NewSegment (unified segmentation) -> optional T1-to-MNI
        warp -> DARTEL template creation -> DARTEL-to-MNI normalization ->
        optional smoothing -> per-atlas statistics, exposing every
        intermediate product on ``self.output_node``.

        Raises RuntimeError if SPM cannot be located or its version is
        unsupported, EnvironmentError if SPMSTANDALONE_HOME is set without
        MCR_HOME.
        """

        import os
        import platform
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import clinica.pipelines.t1_volume_tissue_segmentation.t1_volume_tissue_segmentation_utils as seg_utils
        import clinica.pipelines.t1_volume_create_dartel.t1_volume_create_dartel_utils as dartel_utils
        import clinica.pipelines.t1_volume_dartel2mni.t1_volume_dartel2mni_utils as dartel2mni_utils
        from clinica.utils.io import unzip_nii

        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        # Prefer SPM standalone (run_spm12.sh + MCR) when both env vars are
        # set; otherwise fall back to a Matlab-based SPM install. The two
        # paths yield different `version` types: a str (standalone) or a
        # dict from spm.Info.getinfo() (Matlab SPM).
        if 'SPMSTANDALONE_HOME' in os.environ:
            if 'MCR_HOME' in os.environ:
                matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'],
                                          'run_spm12.sh') \
                             + ' ' + os.environ['MCR_HOME'] \
                             + ' script'
                spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
                version = spm.SPMCommand().version
            else:
                # NOTE(review): message contains typos ('environnement',
                # 'Althought') — runtime string left unchanged in this
                # documentation pass.
                raise EnvironmentError('MCR_HOME variable not in environnement. Althought, '
                                       + 'SPMSTANDALONE_HOME has been found')
        else:
            version = spm.Info.getinfo()

        # NOTE(review): if `version` is truthy but neither dict nor str,
        # `tissue_map` stays unbound and get_tissue_tuples() below raises
        # NameError; also the second isinstance check is not an `elif` —
        # confirm whether that case can occur in practice.
        if version:
            if isinstance(version, dict):
                spm_path = version['path']
                if version['name'] == 'SPM8':
                    print('You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                          + 'Please upgrade your SPM toolbox.')
                    tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
                elif version['name'] == 'SPM12':
                    tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
                else:
                    raise RuntimeError('SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.')
            if isinstance(version, str):
                if float(version) >= 12.7169:
                    # Standalone layout differs between macOS and Linux.
                    if platform.system() == 'Darwin':
                        tissue_map = os.path.join(str(spm_home), 'spm12.app/Contents/MacOS/spm12_mcr/spm12/spm12/tpm/TPM.nii')
                    else:
                        tissue_map = os.path.join(str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
                else:
                    raise RuntimeError('SPM standalone version not supported. Please upgrade SPM standalone.')
        else:
            raise RuntimeError('SPM could not be found. Please verify your SPM_HOME environment variable.')

        # Unzipping
        # ===============================
        unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_node', iterfield=['in_file'])

        # Unified Segmentation
        # ===============================
        new_segment = npe.MapNode(spm.NewSegment(),
                                  name='new_segment',
                                  iterfield=['channel_files'])

        # Forward only explicitly-set parameters; otherwise NewSegment
        # keeps its own interface defaults.
        if self.parameters['affine_regularization'] is not None:
            new_segment.inputs.affine_regularization = self.parameters['affine_regularization']
        if self.parameters['channel_info'] is not None:
            new_segment.inputs.channel_info = self.parameters['channel_info']
        if self.parameters['sampling_distance'] is not None:
            new_segment.inputs.sampling_distance = self.parameters['sampling_distance']
        if self.parameters['warping_regularization'] is not None:
            new_segment.inputs.warping_regularization = self.parameters['warping_regularization']

        # Check if we need to save the forward transformation for registering the T1 to the MNI space
        if self.parameters['save_t1_mni'] is not None and self.parameters['save_t1_mni']:
            if self.parameters['write_deformation_fields'] is not None:
                # Index 1 is the forward deformation field.
                self.parameters['write_deformation_fields'][1] = True
            else:
                self.parameters['write_deformation_fields'] = [False, True]

        if self.parameters['write_deformation_fields'] is not None:
            new_segment.inputs.write_deformation_fields = self.parameters['write_deformation_fields']

        # A user-supplied TPM overrides the one found with the SPM install.
        if self.parameters['tpm'] is not None:
            tissue_map = self.parameters['tpm']

        new_segment.inputs.tissues = seg_utils.get_tissue_tuples(tissue_map,
                                                                 self.parameters['tissue_classes'],
                                                                 self.parameters['dartel_tissues'],
                                                                 self.parameters['save_warped_unmodulated'],
                                                                 self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # ========================================================
        if self.parameters['save_t1_mni'] is not None and self.parameters['save_t1_mni']:

            t1_to_mni = npe.MapNode(seg_utils.ApplySegmentationDeformation(),
                                    name='t1_to_mni',
                                    iterfield=['deformation_field', 'in_files'])
            self.connect([
                (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
                (new_segment, t1_to_mni, [('forward_deformation_field', 'deformation_field')]),
                (t1_to_mni, self.output_node, [('out_files', 't1_mni')])
            ])

        # DARTEL template
        # ===============================
        dartel_template = npe.Node(spm.DARTEL(),
                                   name='dartel_template')

        if self.parameters['iteration_parameters'] is not None:
            dartel_template.inputs.iteration_parameters = self.parameters['iteration_parameters']
        if self.parameters['optimization_parameters'] is not None:
            dartel_template.inputs.optimization_parameters = self.parameters['optimization_parameters']
        if self.parameters['regularization_form'] is not None:
            dartel_template.inputs.regularization_form = self.parameters['regularization_form']

        # DARTEL2MNI Registration
        # =======================
        dartel2mni_node = npe.MapNode(spm.DARTELNorm2MNI(),
                                      name='dartel2MNI',
                                      iterfield=['apply_to_files', 'flowfield_files'])

        if self.parameters['bounding_box'] is not None:
            dartel2mni_node.inputs.bounding_box = self.parameters['bounding_box']
        if self.parameters['voxel_size'] is not None:
            dartel2mni_node.inputs.voxel_size = self.parameters['voxel_size']
        dartel2mni_node.inputs.modulate = self.parameters['modulation']
        # Smoothing (if any) is applied by the dedicated node below.
        dartel2mni_node.inputs.fwhm = 0

        # Smoothing
        # =========
        if self.parameters['fwhm'] is not None and len(self.parameters['fwhm']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['in_files'])

            # One smoothing run per requested FWHM, with a matching output
            # prefix; `synchronize` pairs the two iterable lists element-wise.
            smoothing_node.iterables = [('fwhm', [[x, x, x] for x in self.parameters['fwhm']]),
                                        ('out_prefix', ['fwhm-' + str(x) + 'mm_' for x in self.parameters['fwhm']])]
            smoothing_node.synchronize = True

            join_smoothing_node = npe.JoinNode(interface=nutil.Function(input_names=['smoothed_normalized_files'],
                                                                        output_names=['smoothed_normalized_files'],
                                                                        function=dartel2mni_utils.join_smoothed_files),
                                               joinsource='smoothing_node',
                                               joinfield='smoothed_normalized_files',
                                               name='join_smoothing_node')
            self.connect([
                (dartel2mni_node, smoothing_node, [('normalized_files', 'in_files')]),
                (smoothing_node, join_smoothing_node, [('smoothed_files', 'smoothed_normalized_files')]),
                (join_smoothing_node, self.output_node, [('smoothed_normalized_files', 'smoothed_normalized_files')])
            ])
        else:
            self.output_node.inputs.smoothed_normalized_files = []

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(nutil.Function(input_names=['in_image',
                                                                   'in_atlas_list'],
                                                      output_names=['atlas_statistics'],
                                                      function=dartel2mni_utils.atlas_statistics),
                                       name='atlas_stats_node',
                                       iterfield=['in_image'])
        atlas_stats_node.inputs.in_atlas_list = self.parameters['atlas_list']

        # Connection
        # ==========
        self.connect([
            (self.input_node, unzip_node, [('input_images', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (new_segment, self.output_node, [('bias_corrected_images', 'bias_corrected_images'),
                                             ('bias_field_images', 'bias_field_images'),
                                             ('dartel_input_images', 'dartel_input_images'),
                                             ('forward_deformation_field', 'forward_deformation_field'),
                                             ('inverse_deformation_field', 'inverse_deformation_field'),
                                             ('modulated_class_images', 'modulated_class_images'),
                                             ('native_class_images', 'native_class_images'),
                                             ('normalized_class_images', 'normalized_class_images'),
                                             ('transformation_mat', 'transformation_mat')]),
            (new_segment, dartel_template, [(('dartel_input_images', dartel_utils.get_class_images,
                                              self.parameters['dartel_tissues']), 'image_files')]),
            (dartel_template, self.output_node, [('dartel_flow_fields', 'dartel_flow_fields'),
                                                 ('final_template_file', 'final_template_file'),
                                                 ('template_files', 'template_files')]),
            (new_segment, dartel2mni_node, [(('native_class_images', seg_utils.group_nested_images_by_subject),
                                             'apply_to_files')]),
            (dartel_template, dartel2mni_node, [(('dartel_flow_fields', dartel2mni_utils.prepare_flowfields,
                                                  self.parameters['tissue_classes']), 'flowfield_files')]),
            (dartel_template, dartel2mni_node, [('final_template_file', 'template_file')]),
            (dartel2mni_node, self.output_node, [('normalized_files', 'normalized_files')]),
            (dartel2mni_node, atlas_stats_node, [(('normalized_files', dartel2mni_utils.select_gm_images),
                                                  'in_image')]),
            (atlas_stats_node, self.output_node, [('atlas_statistics', 'atlas_statistics')])
        ])
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import nipype.interfaces.io as nio
        import nipype.interfaces.spm as spm
        from ..t1_volume_tissue_segmentation import t1_volume_tissue_segmentation_utils as seg_utils
        from clinica.utils.filemanip import unzip_nii, zip_nii
        from clinica.utils.nipype import fix_join
        from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

        if spm_standalone_is_available():
            use_spm_standalone()

        # Get <subject_id> (e.g. sub-CLNC01_ses-M00) from input_node
        # and print begin message
        # =======================
        init_node = npe.Node(interface=nutil.Function(
            input_names=self.get_input_fields(),
            output_names=['subject_id'] + self.get_input_fields(),
            function=seg_utils.init_input_node),
                             name='0-InitNode')

        # Unzipping
        # =========
        unzip_node = npe.Node(nutil.Function(input_names=['in_file'],
                                             output_names=['out_file'],
                                             function=unzip_nii),
                              name='1-UnzipT1w')

        # Unified Segmentation
        # ====================
        new_segment = npe.Node(spm.NewSegment(), name='2-SpmSegmentation')
        new_segment.inputs.write_deformation_fields = [True, True]
        new_segment.inputs.tissues = seg_utils.get_tissue_tuples(
            self.parameters['tissue_probability_maps'],
            self.parameters['tissue_classes'],
            self.parameters['dartel_tissues'],
            self.parameters['save_warped_unmodulated'],
            self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # =====================================================
        t1_to_mni = npe.Node(seg_utils.ApplySegmentationDeformation(),
                             name='3-T1wToMni')

        # Print end message
        # =================
        print_end_message = npe.Node(interface=nutil.Function(
            input_names=['subject_id', 'final_file'],
            function=seg_utils.print_end_pipeline),
                                     name='WriteEndMessage')

        # Connection
        # ==========
        self.connect([
            (self.input_node, init_node, [('t1w', 't1w')]),
            (init_node, unzip_node, [('t1w', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (init_node, print_end_message, [('subject_id', 'subject_id')]),
            (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
            (new_segment, t1_to_mni, [('forward_deformation_field',
                                       'deformation_field')]),
            (new_segment, self.output_node,
             [('bias_corrected_images', 'bias_corrected_images'),
              ('bias_field_images', 'bias_field_images'),
              ('dartel_input_images', 'dartel_input_images'),
              ('forward_deformation_field', 'forward_deformation_field'),
              ('inverse_deformation_field', 'inverse_deformation_field'),
              ('modulated_class_images', 'modulated_class_images'),
              ('native_class_images', 'native_class_images'),
              ('normalized_class_images', 'normalized_class_images'),
              ('transformation_mat', 'transformation_mat')]),
            (t1_to_mni, self.output_node, [('out_files', 't1_mni')]),
            (self.output_node, print_end_message, [('t1_mni', 'final_file')]),
        ])

        # Find container path from t1w filename
        # =====================================
        container_path = npe.Node(nutil.Function(
            input_names=['t1w_filename'],
            output_names=['container'],
            function=seg_utils.t1w_container_from_filename),
                                  name='ContainerPath')

        # Writing CAPS
        # ============
        write_node = npe.Node(name='WriteCAPS', interface=nio.DataSink())
        write_node.inputs.base_directory = self.caps_directory
        write_node.inputs.parameterization = False
        write_node.inputs.regexp_substitutions = [
            (r'(.*)c1(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-graymatter\3'),
            (r'(.*)c2(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-whitematter\3'),
            (r'(.*)c3(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-csf\3'),
            (r'(.*)c4(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-bone\3'),
            (r'(.*)c5(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-softtissue\3'),
            (r'(.*)c6(sub-.*)(\.nii(\.gz)?)$', r'\1\2_segm-background\3'),
            (r'(.*)(/native_space/sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_probability\3'),
            (r'(.*)(/([a-z]+)_deformation_field/)i?y_(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\4_target-Ixi549Space_transformation-\3_deformation\5'
             ),
            (r'(.*)(/t1_mni/)w(sub-.*)_T1w(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_T1w\4'),
            (r'(.*)(/modulated_normalized/)mw(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_modulated-on_probability\4'
             ),
            (r'(.*)(/normalized/)w(sub-.*)(\.nii(\.gz)?)$',
             r'\1/normalized_space/\3_space-Ixi549Space_modulated-off_probability\4'
             ),
            (r'(.*/dartel_input/)r(sub-.*)(\.nii(\.gz)?)$',
             r'\1\2_dartelinput\3'),
            # Will remove trait_added empty folder
            (r'trait_added', r'')
        ]

        self.connect([
            (self.input_node, container_path, [('t1w', 't1w_filename')]),
            (container_path, write_node, [(('container', fix_join, ''),
                                           'container')]),
            (self.output_node, write_node,
             [(('native_class_images', seg_utils.zip_list_files, True),
               'native_space'),
              (('dartel_input_images', seg_utils.zip_list_files, True),
               'dartel_input')]),
            (self.output_node, write_node, [(('inverse_deformation_field',
                                              zip_nii, True),
                                             'inverse_deformation_field')]),
            (self.output_node, write_node, [(('forward_deformation_field',
                                              zip_nii, True),
                                             'forward_deformation_field')]),
            (self.output_node, write_node, [(('t1_mni', zip_nii, True),
                                             't1_mni')]),
        ])
        if self.parameters['save_warped_unmodulated']:
            self.connect([
                (self.output_node, write_node,
                 [(('normalized_class_images', seg_utils.zip_list_files, True),
                   'normalized')]),
            ])
        if self.parameters['save_warped_modulated']:
            self.connect([
                (self.output_node, write_node,
                 [(('modulated_class_images', seg_utils.zip_list_files, True),
                   'modulated_normalized')]),
            ])
# Exemplo n.º 22
# 0
print('hello world')

import os
import nipype.interfaces.spm as spm

# Tissue probability map (TPM) shipped with the local SPM12 install.
path_tissue = '/home/marian/Documents/spm12/tpm/TPM.nii'

# Instantiate SPM's unified-segmentation interface.
print('Importing NewSegment module...')
seg = spm.NewSegment()
print('... done')

# Subject to process.
str_path_subj = 'sub-02'

# Bias-corrected T1w/PD-division image of the subject.
str_path_filename = os.path.join(
    os.environ['parent_path'], 'data', 'shared_data', 'data_mprage',
    'derivatives', str_path_subj, 'unbiased',
    f'{str_path_subj}_T1wDivPD_unbiased.nii')
print(str_path_filename)
seg.inputs.channel_files = str_path_filename
# (bias regularisation, bias FWHM, (save bias-corrected, save bias field))
# — per nipype's spm.NewSegment interface; confirm against its docs.
seg.inputs.channel_info = (0.001, 60, (False, True))

# One tuple per tissue class:
# ((TPM file, class index), n Gaussians,
#  (write native, write DARTEL), (write unmodulated, write modulated)).
tissue1 = ((path_tissue, 1), 3, (True, False), (False, False))
tissue2 = ((path_tissue, 2), 2, (True, False), (False, False))
tissue3 = ((path_tissue, 3), 2, (True, False), (False, False))
tissue4 = ((path_tissue, 4), 3, (True, False), (False, False))
tissue5 = ((path_tissue, 5), 4, (True, False), (False, False))
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.

        Locates an SPM installation (MCR-based standalone takes precedence
        over a regular MATLAB toolbox), resolves the tissue probability map
        (TPM) path for the detected version, then wires the nipype graph:
        unzip -> NewSegment (-> optional warp of the T1 into MNI space)
        -> output node.

        Raises:
            EnvironmentError: if SPMSTANDALONE_HOME is set but MCR_HOME
                is not.
            RuntimeError: if SPM cannot be found or its version is
                unsupported.
        """

        import os
        import platform
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import clinica.pipelines.t1_volume_tissue_segmentation.t1_volume_tissue_segmentation_utils as utils
        from clinica.utils.io import unzip_nii

        # Point nipype's MATLAB/SPM wrappers at the local installation.
        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        # Standalone SPM (compiled, run through the MATLAB Compiler Runtime)
        # takes precedence over a regular MATLAB-based install.
        if 'SPMSTANDALONE_HOME' in os.environ:
            if 'MCR_HOME' in os.environ:
                matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'],
                                          'run_spm12.sh') \
                             + ' ' + os.environ['MCR_HOME'] \
                             + ' script'
                spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd,
                                              use_mcr=True)
                # Standalone reports its version as a string (handled as
                # the isinstance(version, str) branch below).
                version = spm.SPMCommand().version
            else:
                raise EnvironmentError(
                    'MCR_HOME variable not in environnement. Althought, ' +
                    'SPMSTANDALONE_HOME has been found')
        else:
            # Regular install: the dict branch below reads 'name' and 'path'.
            version = spm.Info.getinfo()

        # Resolve the TPM.nii location for the detected SPM flavour/version.
        # NOTE(review): if `version` is truthy but neither a dict nor a str,
        # `tissue_map` stays unbound and get_tissue_tuples() below raises a
        # NameError — confirm those are the only two types returned.
        if version:
            if isinstance(version, dict):
                spm_path = version['path']
                if version['name'] == 'SPM8':
                    print(
                        'You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                        + 'Please upgrade your SPM toolbox.')
                    tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
                elif version['name'] == 'SPM12':
                    tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.'
                    )
            if isinstance(version, str):
                # Standalone layout differs per OS and is only supported
                # from release 12.7169 onwards.
                if float(version) >= 12.7169:
                    if platform.system() == 'Darwin':
                        tissue_map = os.path.join(
                            str(spm_home),
                            'spm12.app/Contents/MacOS/spm12_mcr/spm12/spm12/tpm/TPM.nii'
                        )
                    else:
                        tissue_map = os.path.join(
                            str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM standalone version not supported. Please upgrade SPM standalone.'
                    )
        else:
            raise RuntimeError(
                'SPM could not be found. Please verify your SPM_HOME environment variable.'
            )

        # Unzipping
        # ===============================
        # One unzip per input image (MapNode iterates over 'in_file').
        unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_node',
                                 iterfield=['in_file'])

        # Unified Segmentation
        # ===============================
        new_segment = npe.MapNode(spm.NewSegment(),
                                  name='new_segment',
                                  iterfield=['channel_files'])

        # Forward only the segmentation parameters the user actually set;
        # unset (None) parameters keep the interface defaults.
        if self.parameters['affine_regularization'] is not None:
            new_segment.inputs.affine_regularization = self.parameters[
                'affine_regularization']
        if self.parameters['channel_info'] is not None:
            new_segment.inputs.channel_info = self.parameters['channel_info']
        if self.parameters['sampling_distance'] is not None:
            new_segment.inputs.sampling_distance = self.parameters[
                'sampling_distance']
        if self.parameters['warping_regularization'] is not None:
            new_segment.inputs.warping_regularization = self.parameters[
                'warping_regularization']

        # Check if we need to save the forward transformation for registering the T1 to the MNI space
        # (index 1 of write_deformation_fields is the forward field — it is
        # what feeds t1_to_mni below).
        if self.parameters['save_t1_mni'] is not None and self.parameters[
                'save_t1_mni']:
            if self.parameters['write_deformation_fields'] is not None:
                self.parameters['write_deformation_fields'][1] = True
            else:
                self.parameters['write_deformation_fields'] = [False, True]

        if self.parameters['write_deformation_fields'] is not None:
            new_segment.inputs.write_deformation_fields = self.parameters[
                'write_deformation_fields']

        # A user-supplied TPM overrides the one found in the SPM install.
        if self.parameters['tpm'] is not None:
            tissue_map = self.parameters['tpm']

        new_segment.inputs.tissues = utils.get_tissue_tuples(
            tissue_map, self.parameters['tissue_classes'],
            self.parameters['dartel_tissues'],
            self.parameters['save_warped_unmodulated'],
            self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # ========================================================
        if self.parameters['save_t1_mni'] is not None and self.parameters[
                'save_t1_mni']:

            t1_to_mni = npe.MapNode(
                utils.ApplySegmentationDeformation(),
                name='t1_to_mni',
                iterfield=['deformation_field', 'in_files'])
            self.connect([
                (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
                (new_segment, t1_to_mni, [('forward_deformation_field',
                                           'deformation_field')]),
                (t1_to_mni, self.output_node, [('out_files', 't1_mni')])
            ])

        # Connection
        # ==========
        self.connect([
            (self.input_node, unzip_node, [('input_images', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (new_segment, self.output_node,
             [('bias_corrected_images', 'bias_corrected_images'),
              ('bias_field_images', 'bias_field_images'),
              ('dartel_input_images', 'dartel_input_images'),
              ('forward_deformation_field', 'forward_deformation_field'),
              ('inverse_deformation_field', 'inverse_deformation_field'),
              ('modulated_class_images', 'modulated_class_images'),
              ('native_class_images', 'native_class_images'),
              ('normalized_class_images', 'normalized_class_images'),
              ('transformation_mat', 'transformation_mat')])
        ])