Example #1
def create_full_spm_subpipes(params_template,
                             params={},
                             name='full_spm_subpipes'):
    """
    """

    print("Full pipeline name: ", name)

    # Creating pipeline
    seg_pipe = pe.Workflow(name=name)

    # Creating input node
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['list_T1', 'indiv_params']),
        name='inputnode')

    # preprocessing
    data_preparation_pipe = create_short_preparation_pipe(
        params=parse_key(params, "short_preparation_pipe"))

    seg_pipe.connect(inputnode, 'list_T1', data_preparation_pipe,
                     'inputnode.list_T1')
    seg_pipe.connect(inputnode, 'list_T1', data_preparation_pipe,
                     'inputnode.list_T2')
    seg_pipe.connect(inputnode, 'indiv_params', data_preparation_pipe,
                     'inputnode.indiv_params')

    # Bias correction of cropped images
    debias = NodeParams(T1xT2BiasFieldCorrection(),
                        params=parse_key(params, "debias"),
                        name='debias')

    seg_pipe.connect(data_preparation_pipe, 'outputnode.preproc_T1', debias,
                     't1_file')
    seg_pipe.connect(data_preparation_pipe, 'outputnode.preproc_T1', debias,
                     't2_file')
    seg_pipe.connect(data_preparation_pipe, 'bet_crop.mask_file', debias, 'b')
    seg_pipe.connect(inputnode, ('indiv_params', parse_key, "debias"), debias,
                     'indiv_params')

    # Iterative registration to the INIA19 template
    reg = NodeParams(IterREGBET(), params=parse_key(params, "reg"), name='reg')
    reg.inputs.refb_file = params_template["template_brain"]
    seg_pipe.connect(debias, 't1_debiased_file', reg, 'inw_file')
    seg_pipe.connect(debias, 't1_debiased_brain_file', reg, 'inb_file')
    seg_pipe.connect(inputnode, ('indiv_params', parse_key, "reg"), reg,
                     'indiv_params')

    # Subject to template registration (ants)
    nonlin_reg = NodeParams(ants.RegistrationSynQuick(),
                            params=parse_key(params, "nonlin_reg"),
                            name='nonlin_reg')
    nonlin_reg.inputs.fixed_image = params_template["template_brain"]
    seg_pipe.connect(reg, "warp_file", nonlin_reg, "moving_image")

    # Transform the brain mask (fsl)
    transform_msk = NodeParams(fsl.ApplyXFM(),
                               params=parse_key(params, "transform_mask"),
                               name='transform_others')
    seg_pipe.connect(nonlin_reg, "out_matrix", transform_msk, "in_matrix_file")
    seg_pipe.connect(debias, "debiased_mask_file", transform_msk, "in_file")
    seg_pipe.connect(debias, "t1_debiased_file", transform_msk, "reference")

    # Compute brain mask using old_segment of SPM and postprocessing on
    # tissues' masks
    if "old_segment_pipe" in params.keys():

        old_segment_pipe = create_old_segment_pipe(params_template,
                                                   params=parse_key(
                                                       params,
                                                       "old_segment_pipe"))

        seg_pipe.connect(nonlin_reg, ('warped_image', gunzip),
                         old_segment_pipe, 'inputnode.T1')

        seg_pipe.connect(inputnode, 'indiv_params', old_segment_pipe,
                         'inputnode.indiv_params')

    return seg_pipe
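A minimal usage sketch for the factory above (not part of the original project): the `template_brain` key and the params section names are inferred from the `params_template` lookups and `parse_key` calls in the code, and the paths are placeholders.

# Hypothetical usage of create_full_spm_subpipes (keys/paths are assumptions)
params_template = {"template_brain": "/data/templates/inia19_brain.nii.gz"}
params = {"short_preparation_pipe": {}, "debias": {}, "reg": {}, "nonlin_reg": {}}

wf = create_full_spm_subpipes(params_template, params=params)
wf.base_dir = "/tmp/spm_subpipes_work"          # nipype working directory
wf.inputs.inputnode.list_T1 = ["/data/sub-01/anat/sub-01_T1w.nii.gz"]
wf.inputs.inputnode.indiv_params = {}           # per-subject overrides, empty here
wf.run(plugin="MultiProc", plugin_args={"n_procs": 4})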
Example #2
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline.
        """
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from clinica.utils.filemanip import get_filename_no_ext
        from nipype.interfaces import ants
        from .t1_linear_utils import crop_nifti

        image_id_node = npe.Node(
                interface=nutil.Function(
                    input_names=['filename'],
                    output_names=['image_id'],
                    function=get_filename_no_ext),
                name='ImageID'
                )

        # The core (processing) nodes
        # =====================================

        # 1. N4biascorrection by ANTS. It uses nipype interface.
        n4biascorrection = npe.Node(
                name='n4biascorrection',
                interface=ants.N4BiasFieldCorrection(
                    dimension=3,
                    save_bias=True,
                    bspline_fitting_distance=600
                    )
                )

        # 2. `RegistrationSynQuick` by *ANTS*. It uses nipype interface.
        ants_registration_node = npe.Node(
                name='antsRegistrationSynQuick',
                interface=ants.RegistrationSynQuick()
                )
        ants_registration_node.inputs.fixed_image = self.ref_template
        ants_registration_node.inputs.transform_type = 'a'
        ants_registration_node.inputs.dimension = 3

        # 3. Crop image (using nifti). It uses custom interface, from utils file

        cropnifti = npe.Node(
                name='cropnifti',
                interface=nutil.Function(
                    function=crop_nifti,
                    input_names=['input_img', 'ref_crop'],
                    output_names=['output_img', 'crop_template']
                    )
                )
        cropnifti.inputs.ref_crop = self.ref_crop

        # Connection
        # ==========
        self.connect([
            (self.input_node, image_id_node, [('t1w', 'filename')]),
            (self.input_node, n4biascorrection, [('t1w', 'input_image')]),
            (n4biascorrection, ants_registration_node, [('output_image', 'moving_image')]),
            (image_id_node, ants_registration_node, [('image_id', 'output_prefix')]),

            # Connect to DataSink
            (image_id_node, self.output_node, [('image_id', 'image_id')]),
            (ants_registration_node, self.output_node, [('out_matrix', 'affine_mat')]),
            (n4biascorrection, self.output_node, [('output_image', 'outfile_corr')]),
            (ants_registration_node, self.output_node, [('warped_image', 'outfile_reg')]),
            ])
        if self.parameters.get('crop_image'):
            self.connect([
                (ants_registration_node, cropnifti, [('warped_image', 'input_img')]),
                (cropnifti, self.output_node, [('output_img', 'outfile_crop')]),
                ])
Example #3
def preprocessing_t1w(bids_directory,
                      caps_directory,
                      tsv,
                      working_directory=None):
    """
     This preprocessing pipeline includes globally three steps:
     1) N4 bias correction (performed with ANTS).
     2) Linear registration to MNI (MNI icbm152 nlinear sym template)
        (performed with ANTS) - RegistrationSynQuick.
     3) Cropping the background (in order to save computational power).
     4) Histogram-based intensity normalization. This is a custom function
        performed by the binary ImageMath included with ANTS.

     Parameters
     ----------
     bids_directory: str
        Folder with BIDS structure.
     caps_directory: str
        Folder where CAPS structure will be stored.
     working_directory: str
        Folder containing a temporary space to save intermediate results.
    """

    from os.path import dirname, join, abspath, split, exists
    from os import pardir, makedirs
    from pathlib import Path
    from clinica.utils.inputs import check_bids_folder
    from clinica.utils.participant import get_subject_session_list
    from clinica.utils.filemanip import get_subject_id
    from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.input_files import T1W_NII
    from clinica.utils.check_dependency import check_ants
    from clinicadl.tools.inputs.input import fetch_file
    from clinicadl.tools.inputs.input import RemoteFileStructure
    import nipype.pipeline.engine as npe
    import nipype.interfaces.utility as nutil
    from nipype.interfaces import ants

    check_ants()
    check_bids_folder(bids_directory)
    input_dir = abspath(bids_directory)
    caps_directory = abspath(caps_directory)
    is_bids_dir = True
    base_dir = abspath(working_directory) if working_directory else None

    home = str(Path.home())
    cache_clinicadl = join(home, '.cache', 'clinicadl', 'ressources', 'masks')
    url_aramis = 'https://aramislab.paris.inria.fr/files/data/img_t1_linear/'
    FILE1 = RemoteFileStructure(
        filename='ref_cropped_template.nii.gz',
        url=url_aramis,
        checksum=
        '67e1e7861805a8fd35f7fcf2bdf9d2a39d7bcb2fd5a201016c4d2acdd715f5b3')
    FILE2 = RemoteFileStructure(
        filename='mni_icbm152_t1_tal_nlin_sym_09c.nii',
        url=url_aramis,
        checksum=
        '93359ab97c1c027376397612a9b6c30e95406c15bf8695bd4a8efcb2064eaa34')

    if not exists(cache_clinicadl):
        makedirs(cache_clinicadl)

    ref_template = join(cache_clinicadl, FILE2.filename)
    ref_crop = join(cache_clinicadl, FILE1.filename)

    if not exists(ref_template):
        try:
            ref_template = fetch_file(FILE2, cache_clinicadl)
        except IOError as err:
            print(
                'Unable to download required template (mni_icbm152) for processing:',
                err)

    if not exists(ref_crop):
        try:
            ref_crop = fetch_file(FILE1, cache_clinicadl)
        except IOError as err:
            print(
                'Unable to download required template (ref_crop) for processing:',
                err)

    sessions, subjects = get_subject_session_list(input_dir, tsv, is_bids_dir,
                                                  False, base_dir)

    # Use hash instead of parameters for iterables folder names
    # Otherwise path will be too long and generate OSError
    from nipype import config
    cfg = dict(execution={'parameterize_dirs': False})
    config.update_config(cfg)

    # Inputs from anat/ folder
    # ========================
    # T1w file:
    try:
        t1w_files = clinica_file_reader(subjects, sessions, bids_directory,
                                        T1W_NII)
    except ClinicaException as e:
        err = 'Clinica faced error(s) while trying to read files in your BIDS directory.\n' + str(
            e)
        raise ClinicaBIDSError(err)

    def get_input_fields():
        """"Specify the list of possible inputs of this pipelines.
        Returns:
        A list of (string) input fields name.
        """
        return ['t1w']

    read_node = npe.Node(
        name="ReadingFiles",
        iterables=[
            ('t1w', t1w_files),
        ],
        synchronize=True,
        interface=nutil.IdentityInterface(fields=get_input_fields()))

    image_id_node = npe.Node(interface=nutil.Function(
        input_names=['bids_or_caps_file'],
        output_names=['image_id'],
        function=get_subject_id),
                             name='ImageID')

    # The core (processing) nodes

    # 1. N4biascorrection by ANTS. It uses nipype interface.
    n4biascorrection = npe.Node(name='n4biascorrection',
                                interface=ants.N4BiasFieldCorrection(
                                    dimension=3,
                                    save_bias=True,
                                    bspline_fitting_distance=600))

    # 2. `RegistrationSynQuick` by *ANTS*. It uses nipype interface.
    ants_registration_node = npe.Node(name='antsRegistrationSynQuick',
                                      interface=ants.RegistrationSynQuick())
    ants_registration_node.inputs.fixed_image = ref_template
    ants_registration_node.inputs.transform_type = 'a'
    ants_registration_node.inputs.dimension = 3

    # 3. Crop image (using nifti). It uses custom interface, from utils file
    from .T1_linear_utils import crop_nifti

    cropnifti = npe.Node(name='cropnifti',
                         interface=nutil.Function(
                             function=crop_nifti,
                             input_names=['input_img', 'ref_crop'],
                             output_names=['output_img', 'crop_template']))
    cropnifti.inputs.ref_crop = ref_crop

    # ********* Deprecated ********** #
    # ** This step was not used in the final version ** #
    # 4. Histogram-based intensity normalization. This is a custom function
    #    performed by the binary `ImageMath` included with *ANTS*.

    #   from .T1_linear_utils import ants_histogram_intensity_normalization
    #
    #   # histogram-based intensity normalization
    #   intensitynorm = npe.Node(
    #           name='intensitynormalization',
    #           interface=nutil.Function(
    #               input_names=['image_dimension', 'crop_template', 'input_img'],
    #               output_names=['output_img'],
    #               function=ants_histogram_intensity_normalization
    #               )
    #           )
    #   intensitynorm.inputs.image_dimension = 3

    # DataSink and the output node

    from .T1_linear_utils import (container_from_filename, get_data_datasink)
    # Create node to write selected files into the CAPS
    from nipype.interfaces.io import DataSink

    get_ids = npe.Node(interface=nutil.Function(
        input_names=['image_id'],
        output_names=['image_id_out', 'subst_ls'],
        function=get_data_datasink),
                       name="GetIDs")

    # Find container path from t1w filename
    # =====================================
    container_path = npe.Node(nutil.Function(
        input_names=['bids_or_caps_filename'],
        output_names=['container'],
        function=container_from_filename),
                              name='ContainerPath')

    write_node = npe.Node(name="WriteCaps", interface=DataSink())
    write_node.inputs.base_directory = caps_directory
    write_node.inputs.parameterization = False

    # Connecting the workflow
    from clinica.utils.nipype import fix_join

    wf = npe.Workflow(name='t1_linear_dl', base_dir=working_directory)

    wf.connect([
        (read_node, image_id_node, [('t1w', 'bids_or_caps_file')]),
        (read_node, container_path, [('t1w', 'bids_or_caps_filename')]),
        (image_id_node, ants_registration_node, [('image_id', 'output_prefix')
                                                 ]),
        (read_node, n4biascorrection, [("t1w", "input_image")]),
        (n4biascorrection, ants_registration_node, [('output_image',
                                                     'moving_image')]),
        (ants_registration_node, cropnifti, [('warped_image', 'input_img')]),
        (ants_registration_node, write_node, [('out_matrix', '@affine_mat')]),
        # Connect to DataSink
        (container_path, write_node, [(('container', fix_join, 't1_linear'),
                                       'container')]),
        (image_id_node, get_ids, [('image_id', 'image_id')]),
        (get_ids, write_node, [('image_id_out', '@image_id')]),
        (get_ids, write_node, [('subst_ls', 'substitutions')]),
        # (get_ids, write_node, [('regexp_subst_ls', 'regexp_substitutions')]),
        (n4biascorrection, write_node, [('output_image', '@outfile_corr')]),
        (ants_registration_node, write_node, [('warped_image', '@outfile_reg')
                                              ]),
        (cropnifti, write_node, [('output_img', '@outfile_crop')]),
    ])

    return wf
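A minimal call sketch for the function above (paths are placeholders, not from the original snippet); the returned object is a plain nipype workflow and can be executed as usual.

# Hypothetical invocation of preprocessing_t1w (placeholder paths)
wf = preprocessing_t1w("/data/bids", "/data/caps", "/data/participants.tsv",
                       working_directory="/tmp/t1_linear_work")
wf.run(plugin="MultiProc", plugin_args={"n_procs": 4})  # standard nipype parallel run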
Example #4
    def run(self, do_skullstrip=True, n_ants_jobs=1, n_pipeline_jobs=1):
        """Run queued registration jobs.

        Args:
            do_skullstrip (bool, optional): whether to skullstrip images prior to registration. Defaults to True.
            n_ants_jobs (int, optional): number of parallel threads for ANTs registration. Defaults to 1.
            n_pipeline_jobs (int, optional): number of parallel processing jobs; this should be at least equal to n_ants_jobs. Defaults to 1.
        """
        if n_pipeline_jobs == 1:
            n_ants_jobs = 1

        if not os.path.exists(self.strOutputDir):
            os.makedirs(self.strOutputDir)
        strJobListPath = os.path.join(self.strOutputDir, 'joblist.csv')
        self.dfConfig.to_csv(strJobListPath)

        datanode = Node(utility.csv.CSVReader(
            in_file=os.path.abspath(strJobListPath), header=True),
                        name='0_datanode')

        augment = Workflow('augmentation',
                           base_dir=os.path.join(self.strOutputDir,
                                                 'working_dir'))

        reorientFunc = MapNode(image.Reorient(),
                               name='0_reorient_func',
                               iterfield=['in_file'])
        augment.connect(datanode, 'func', reorientFunc, 'in_file')
        reorientAnat = MapNode(image.Reorient(),
                               name='0_reorient_anat',
                               iterfield=['in_file'])
        augment.connect(datanode, 'anat', reorientAnat, 'in_file')
        reorientTargetFunc = MapNode(image.Reorient(),
                                     name='0_reorient_targetfunc',
                                     iterfield=['in_file'])
        augment.connect(datanode, 'target_func', reorientTargetFunc, 'in_file')
        reorientTargetAnat = MapNode(image.Reorient(),
                                     name='0_reorient_targetanat',
                                     iterfield=['in_file'])
        augment.connect(datanode, 'target_anat', reorientTargetAnat, 'in_file')

        meanFunc = MapNode(fsl.MeanImage(),
                           name='1_mean_func',
                           iterfield=['in_file'])
        augment.connect(reorientFunc, 'out_file', meanFunc, 'in_file')
        meanTargetFunc = MapNode(fsl.MeanImage(),
                                 name='1_mean_targetfunc',
                                 iterfield=['in_file'])
        augment.connect(reorientTargetFunc, 'out_file', meanTargetFunc,
                        'in_file')

        if do_skullstrip:
            # Skull strip the anatomical images with ROBEX
            skullstripSourceAnat = MapNode(Robex(),
                                           name='1_source_anat_skullstrip',
                                           iterfield=['in_file'])
            augment.connect(reorientAnat, 'out_file', skullstripSourceAnat,
                            'in_file')
            skullstripTargetAnat = MapNode(Robex(),
                                           name='1_target_anat_skullstrip',
                                           iterfield=['in_file'])
            augment.connect(reorientTargetAnat, 'out_file',
                            skullstripTargetAnat, 'in_file')

            # Skull strip the functional image with FSL BET and AFNI Automask
            skullstripSourceFunc = make_func_mask_workflow(
                base_dir=os.path.join(self.strOutputDir, 'working_dir'))
            augment.connect(meanFunc, 'out_file', skullstripSourceFunc,
                            'inputnode.mean_file')

        # First, perform a quick registration of the functional skull-stripped mean image to the anatomical
        # skull-stripped image. Use the SynQuick tool which does a rigid->affine->syn registration with some preset params
        func2Anat = MapNode(ants.RegistrationSynQuick(dimension=3,
                                                      num_threads=n_ants_jobs),
                            name='2_func2Anat',
                            mem_gb=16,
                            n_procs=n_ants_jobs,
                            iterfield=['fixed_image', 'moving_image'])

        if do_skullstrip:
            augment.connect(skullstripSourceAnat, 'out_file', func2Anat,
                            'fixed_image')
            augment.connect(skullstripSourceFunc, 'outputnode.masked_file',
                            func2Anat, 'moving_image')
        else:
            augment.connect(reorientAnat, 'out_file', func2Anat, 'fixed_image')
            augment.connect(reorientFunc, 'out_file', func2Anat,
                            'moving_image')

        # Now register the source anatomical image to the target anatomical image. Use a more precise registration for
        # this step. These parameters come from the antsRegistrationSyn script included in ANTS
        anat2Anat = MapNode(
            ants.Registration(metric=['MI', 'MI', 'CC'],
                              metric_weight=[1, 1, 1],
                              transforms=['Rigid', 'Affine', 'SyN'],
                              smoothing_sigmas=[[3, 2, 1, 0]] * 3,
                              shrink_factors=[[8, 4, 2, 1]] * 3,
                              dimension=3,
                              initial_moving_transform_com=1,
                              radius_or_number_of_bins=[32, 32, 4],
                              sampling_strategy=['Regular', 'Regular', None],
                              sampling_percentage=[0.25, 0.25, None],
                              use_histogram_matching=True,
                              collapse_output_transforms=True,
                              write_composite_transform=True,
                              transform_parameters=[(0.1, ), (0.1, ),
                                                    (0.1, 3, 0)],
                              number_of_iterations=[[1000, 500, 250, 100],
                                                    [1000, 500, 250, 100],
                                                    [100, 70, 50, 20]],
                              sigma_units=['vox'] * 3,
                              winsorize_upper_quantile=0.995,
                              winsorize_lower_quantile=0.005,
                              num_threads=n_ants_jobs,
                              verbose=False),
            name='3_anat2Anat',
            iterfield=['fixed_image', 'moving_image', 'output_warped_image'],
            mem_gb=16,
            n_procs=n_ants_jobs)

        if do_skullstrip:
            augment.connect(skullstripSourceAnat, 'out_file', anat2Anat,
                            'moving_image')
            augment.connect(skullstripTargetAnat, 'out_file', anat2Anat,
                            'fixed_image')
        else:
            augment.connect(reorientAnat, 'out_file', anat2Anat,
                            'moving_image')
            augment.connect(reorientTargetAnat, 'out_file', anat2Anat,
                            'fixed_image')
        augment.connect(datanode, 'output_anat', anat2Anat,
                        'output_warped_image')

        # Finally, use the func-to-anat transform, then the anat-to-anat transform on the source functional image
        concat = MapNode(utility.Merge(3),
                         name='4_concat_transforms',
                         iterfield=['in1', 'in2', 'in3'])
        # Ants applies transforms in reverse order. The first transform is the affine func-to-anat
        augment.connect(func2Anat, 'out_matrix', concat, 'in3')
        # then the nonlinear func-to-anat
        augment.connect(func2Anat, 'forward_warp_field', concat, 'in2')
        # and lastly the composite anat-to-anat
        augment.connect(anat2Anat, 'composite_transform', concat, 'in1')

        transform = MapNode(
            ants.ApplyTransforms(input_image_type=3,
                                 interpolation='BSpline',
                                 dimension=3,
                                 interpolation_parameters=(5, ),
                                 num_threads=n_ants_jobs),
            name='4_apply_transforms',
            iterfield=[
                'input_image', 'transforms', 'output_image', 'reference_image'
            ],
            mem_gb=16,
            n_procs=n_ants_jobs)
        augment.connect(concat, 'out', transform, 'transforms')
        augment.connect(reorientFunc, 'out_file', transform, 'input_image')
        augment.connect(meanTargetFunc, 'out_file', transform,
                        'reference_image')
        augment.connect(datanode, 'output_func', transform, 'output_image')

        if n_pipeline_jobs == 1:
            augment.run()
        else:
            augment.run(plugin='MultiProc',
                        plugin_args={'n_procs': n_pipeline_jobs})
Example #5
File: nodedefs.py  Project: p3proc/p3
    def __init__(self, settings):
        # call base constructor
        super().__init__(settings)

        # define input/output node
        self.set_input(['func', 'refimg', 'func_aligned'])
        self.set_output(['warp_fmc', 'refimg'])

        # define datasink substitutions
        self.set_subs([('_roi', '_reference'), ('_Warped_mean', '_moco'),
                       ('_Warped', '_realign')])

        # define regex substitutions
        self.set_resubs([(r'_avg_epi\d{1,3}', ''),
                         (r'_applyantsunwarp\d{1,3}', ''),
                         (r'_realign\d{1,3}', '')])

        # get magnitude and phase
        self.get_metadata = MapNode(Function(
            input_names=['epi_file', 'bids_dir'],
            output_names=[
                'magnitude', 'phasediff', 'TE', 'echospacing', 'ped'
            ],
            function=get_metadata),
                                    iterfield=['epi_file'],
                                    name='get_metadata')
        self.get_metadata.inputs.bids_dir = settings['bids_dir']

        # get skullstrip of magnitude image
        self.skullstrip_magnitude = MapNode(fsl.BET(robust=True,
                                                    output_type='NIFTI_GZ'),
                                            iterfield=['in_file'],
                                            name='skullstrip_magnitude')

        # erode skullstripped magnitude image (3x)
        self.erode_magnitude = []
        for n in range(3):
            self.erode_magnitude.append(
                MapNode(fsl.ErodeImage(output_type='NIFTI_GZ', ),
                        iterfield=['in_file'],
                        name='erode_magnitude{}'.format(n)))

        # create mask from eroded magnitude image
        self.create_mask = MapNode(fsl.maths.MathsCommand(
            args='-bin', output_type='NIFTI_GZ'),
                                   iterfield=['in_file'],
                                   name='create_mask')

        # calculate fieldmap image (rad/s)
        self.calculate_fieldmap = MapNode(
            Function(input_names=['phasediff', 'magnitude', 'TE'],
                     output_names=['out_file'],
                     function=fsl_prepare_fieldmap),
            iterfield=['phasediff', 'magnitude', 'TE'],
            name='calculate_fieldmap')

        # apply mask to fieldmap image
        self.apply_mask = MapNode(fsl.ApplyMask(output_type='NIFTI_GZ'),
                                  iterfield=['in_file', 'mask_file'],
                                  name='apply_mask')

        # unmask fieldmap image through interpolation
        self.unmask = MapNode(fsl.FUGUE(save_unmasked_fmap=True,
                                        output_type='NIFTI_GZ'),
                              iterfield=['fmap_in_file', 'mask_file'],
                              name='unmask')

        # average epi image
        self.avg_epi = MapNode(fsl.MeanImage(output_type='NIFTI_GZ'),
                               iterfield=['in_file'],
                               name='avg_epi')

        # skullstrip average epi image
        self.skullstrip_avg_epi = MapNode(fsl.BET(
            robust=True,
            output_type="NIFTI_GZ",
        ),
                                          iterfield=['in_file'],
                                          name='skullstrip_avg_epi')

        # register field map images to the averaged epi image
        self.register_magnitude = MapNode(fsl.FLIRT(output_type='NIFTI_GZ',
                                                    dof=6),
                                          iterfield=['in_file', 'reference'],
                                          name='register_magnitude')
        self.register_fieldmap = MapNode(
            fsl.FLIRT(output_type='NIFTI_GZ', apply_xfm=True),
            iterfield=['in_file', 'reference', 'in_matrix_file'],
            name='register_fieldmap')
        self.register_mask = MapNode(
            fsl.FLIRT(output_type='NIFTI_GZ',
                      apply_xfm=True,
                      interp='nearestneighbour'),
            iterfield=['in_file', 'reference', 'in_matrix_file'],
            name='register_mask')

        # unwarp EPIs using the fieldmap
        self.unwarp_epis = MapNode(fsl.FUGUE(save_shift=True,
                                             output_type='NIFTI_GZ'),
                                   iterfield=[
                                       'in_file', 'dwell_time', 'fmap_in_file',
                                       'mask_file', 'unwarp_direction'
                                   ],
                                   name='unwarp_epis')

        # Convert vsm to ANTS warp
        self.convertvsm2antswarp = MapNode(Function(
            input_names=['in_file', 'ped'],
            output_names=['out_file'],
            function=convertvsm2ANTSwarp),
                                           iterfield=['in_file', 'ped'],
                                           name='convertvsm2antswarp')

        # apply fmc ant warp
        self.applyantsunwarp = MapNode(
            ants.ApplyTransforms(out_postfix='_unwarped',
                                 num_threads=settings['num_threads']),
            iterfield=['input_image', 'reference_image', 'transforms'],
            name='applyantsunwarp')
        self.applyantsunwarp.n_procs = settings['num_threads']

        # get refimg transform
        self.get_refimg_transform = Node(Function(
            input_names=['transforms', 'run'],
            output_names=['transform'],
            function=lambda transforms, run: transforms[run]),
                                         name='get_refimg_transform')
        self.get_refimg_transform.inputs.run = settings['func_reference_run']

        # apply fmc ant warp to refimg
        self.applyantsunwarprefimg = Node(ants.ApplyTransforms(
            out_postfix='_unwarped', num_threads=settings['num_threads']),
                                          name='applyantsunwarprefimg')
        self.applyantsunwarprefimg.n_procs = settings['num_threads']

        # create the output name for the realignment
        self.create_prefix = MapNode(Function(input_names=['filename'],
                                              output_names=['basename'],
                                              function=get_prefix),
                                     iterfield=['filename'],
                                     name='create_prefix')

        # realign unwarped EPIs to the refimg
        self.realign = MapNode(ants.RegistrationSynQuick(
            transform_type='a', num_threads=settings['num_threads']),
                               iterfield=['moving_image', 'output_prefix'],
                               name='realign')
        self.realign.n_procs = settings['num_threads']

        # combine transforms
        self.combine_transforms = MapNode(
            Function(input_names=['avgepi', 'reference', 'unwarp', 'realign'],
                     output_names=['fmc_warp'],
                     function=combinetransforms),
            iterfield=['avgepi', 'reference', 'unwarp', 'realign'],
            name='combine_transforms')
        self.combine_transforms.n_procs = settings['num_threads']
Example #6
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from nipype.interfaces import ants

        from clinica.utils.filemanip import get_filename_no_ext

        from .t1_linear_utils import crop_nifti, print_end_pipeline

        image_id_node = npe.Node(
            interface=nutil.Function(
                input_names=["filename"],
                output_names=["image_id"],
                function=get_filename_no_ext,
            ),
            name="ImageID",
        )

        # The core (processing) nodes
        # =====================================

        # 1. N4biascorrection by ANTS. It uses nipype interface.
        n4biascorrection = npe.Node(
            name="n4biascorrection",
            interface=ants.N4BiasFieldCorrection(dimension=3,
                                                 save_bias=True,
                                                 bspline_fitting_distance=600),
        )

        # 2. `RegistrationSynQuick` by *ANTS*. It uses nipype interface.
        ants_registration_node = npe.Node(
            name="antsRegistrationSynQuick",
            interface=ants.RegistrationSynQuick())
        ants_registration_node.inputs.fixed_image = self.ref_template
        ants_registration_node.inputs.transform_type = "a"
        ants_registration_node.inputs.dimension = 3

        # 3. Crop image (using nifti). It uses custom interface, from utils file

        cropnifti = npe.Node(
            name="cropnifti",
            interface=nutil.Function(
                function=crop_nifti,
                input_names=["input_img", "ref_crop"],
                output_names=["output_img", "crop_template"],
            ),
        )
        cropnifti.inputs.ref_crop = self.ref_crop

        # 4. Print end message
        print_end_message = npe.Node(
            interface=nutil.Function(input_names=["t1w", "final_file"],
                                     function=print_end_pipeline),
            name="WriteEndMessage",
        )

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, image_id_node, [("t1w", "filename")]),
            (self.input_node, n4biascorrection, [("t1w", "input_image")]),
            (n4biascorrection, ants_registration_node, [("output_image",
                                                         "moving_image")]),
            (image_id_node, ants_registration_node, [("image_id",
                                                      "output_prefix")]),
            # Connect to DataSink
            (image_id_node, self.output_node, [("image_id", "image_id")]),
            (ants_registration_node, self.output_node, [("out_matrix",
                                                         "affine_mat")]),
            (n4biascorrection, self.output_node, [("output_image",
                                                   "outfile_corr")]),
            (ants_registration_node, self.output_node, [("warped_image",
                                                         "outfile_reg")]),
            (self.input_node, print_end_message, [("t1w", "t1w")]),
        ])
        if not (self.parameters.get("uncropped_image")):
            self.connect([
                (ants_registration_node, cropnifti, [("warped_image",
                                                      "input_img")]),
                (cropnifti, self.output_node, [("output_img", "outfile_crop")
                                               ]),
                (cropnifti, print_end_message, [("output_img", "final_file")]),
            ])
        else:
            self.connect([
                (ants_registration_node, print_end_message, [("warped_image",
                                                              "final_file")]),
            ])
Example #7
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""

        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        from nipype.interfaces import ants

        import clinica.pipelines.pet_linear.pet_linear_utils as utils

        # Utilitary nodes
        init_node = npe.Node(
            interface=nutil.Function(
                input_names=["pet"],
                output_names=["pet"],
                function=utils.init_input_node,
            ),
            name="initPipeline",
        )
        concatenate_node = npe.Node(
            interface=nutil.Function(
                input_names=["pet_to_t1w_tranform", "t1w_to_mni_tranform"],
                output_names=["transforms_list"],
                function=utils.concatenate_transforms,
            ),
            name="concatenateTransforms",
        )

        # The core (processing) nodes

        # 1. `RegistrationSynQuick` by *ANTS*. It uses nipype interface.
        ants_registration_node = npe.Node(
            name="antsRegistration", interface=ants.RegistrationSynQuick())
        ants_registration_node.inputs.dimension = 3
        ants_registration_node.inputs.transform_type = "r"

        # 2. `ApplyTransforms` by *ANTS*. It uses nipype interface. PET to MNI
        ants_applytransform_node = npe.Node(name="antsApplyTransformPET2MNI",
                                            interface=ants.ApplyTransforms())
        ants_applytransform_node.inputs.dimension = 3
        ants_applytransform_node.inputs.reference_image = self.ref_template

        # 3. Intensity normalization (SUVR): a non-linear T1w-to-MNI registration
        #    (ANTs) followed by a custom normalization function from the utils file
        ants_registration_nonlinear_node = npe.Node(
            name="antsRegistrationT1W2MNI", interface=ants.Registration())
        ants_registration_nonlinear_node.inputs.fixed_image = self.ref_template
        ants_registration_nonlinear_node.inputs.metric = ["MI"]
        ants_registration_nonlinear_node.inputs.metric_weight = [1.0]
        ants_registration_nonlinear_node.inputs.transforms = ["SyN"]
        ants_registration_nonlinear_node.inputs.transform_parameters = [(0.1,
                                                                         3, 0)]
        ants_registration_nonlinear_node.inputs.dimension = 3
        ants_registration_nonlinear_node.inputs.shrink_factors = [[8, 4, 2]]
        ants_registration_nonlinear_node.inputs.smoothing_sigmas = [[3, 2, 1]]
        ants_registration_nonlinear_node.inputs.sigma_units = ["vox"]
        ants_registration_nonlinear_node.inputs.number_of_iterations = [[
            200, 50, 10
        ]]
        ants_registration_nonlinear_node.inputs.convergence_threshold = [1e-05]
        ants_registration_nonlinear_node.inputs.convergence_window_size = [10]
        ants_registration_nonlinear_node.inputs.radius_or_number_of_bins = [32]
        ants_registration_nonlinear_node.inputs.winsorize_lower_quantile = 0.005
        ants_registration_nonlinear_node.inputs.winsorize_upper_quantile = 0.995
        ants_registration_nonlinear_node.inputs.collapse_output_transforms = True
        ants_registration_nonlinear_node.inputs.use_histogram_matching = False
        ants_registration_nonlinear_node.inputs.verbose = True

        ants_applytransform_nonlinear_node = npe.Node(
            name="antsApplyTransformNonLinear",
            interface=ants.ApplyTransforms())
        ants_applytransform_nonlinear_node.inputs.dimension = 3
        ants_applytransform_nonlinear_node.inputs.reference_image = self.ref_template

        normalize_intensity_node = npe.Node(
            name="intensityNormalization",
            interface=nutil.Function(
                function=utils.suvr_normalization,
                input_names=["input_img", "norm_img", "ref_mask"],
                output_names=["output_img"],
            ),
        )
        normalize_intensity_node.inputs.ref_mask = self.ref_mask

        # 4. Crop image (using nifti). It uses custom interface, from utils file
        crop_nifti_node = npe.Node(
            name="cropNifti",
            interface=nutil.Function(
                function=utils.crop_nifti,
                input_names=["input_img", "ref_crop"],
                output_names=["output_img"],
            ),
        )
        crop_nifti_node.inputs.ref_crop = self.ref_crop

        # 5. Print end message
        print_end_message = npe.Node(
            interface=nutil.Function(input_names=["pet", "final_file"],
                                     function=utils.print_end_pipeline),
            name="WriteEndMessage",
        )

        # 6. Optional node: compute PET image in T1w space
        ants_applytransform_optional_node = npe.Node(
            name="antsApplyTransformPET2T1w", interface=ants.ApplyTransforms())
        ants_applytransform_optional_node.inputs.dimension = 3

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, init_node, [("pet", "pet")]),
            # STEP 1
            (self.input_node, ants_registration_node, [("t1w", "fixed_image")]
             ),
            (init_node, ants_registration_node, [("pet", "moving_image")]),
            # STEP 2
            (ants_registration_node, concatenate_node,
             [("out_matrix", "pet_to_t1w_tranform")]),
            (self.input_node, concatenate_node, [("t1w_to_mni",
                                                  "t1w_to_mni_tranform")]),
            (self.input_node, ants_applytransform_node, [("pet", "input_image")
                                                         ]),
            (concatenate_node, ants_applytransform_node, [("transforms_list",
                                                           "transforms")]),
            # STEP 3
            (self.input_node, ants_registration_nonlinear_node,
             [("t1w", "moving_image")]),
            (ants_registration_nonlinear_node,
             ants_applytransform_nonlinear_node, [
                 ("reverse_forward_transforms", "transforms")
             ]),
            (ants_applytransform_node, ants_applytransform_nonlinear_node,
             [("output_image", "input_image")]),
            (ants_applytransform_node, normalize_intensity_node,
             [("output_image", "input_img")]),
            (ants_applytransform_nonlinear_node, normalize_intensity_node,
             [("output_image", "norm_img")]),
            # Connect to DataSink
            (ants_registration_node, self.output_node, [("out_matrix",
                                                         "affine_mat")]),
            (normalize_intensity_node, self.output_node, [("output_img",
                                                           "suvr_pet")]),
            (self.input_node, print_end_message, [("pet", "pet")]),
        ])
        # STEP 4
        if not (self.parameters.get("uncropped_image")):
            self.connect([
                (normalize_intensity_node, crop_nifti_node, [("output_img",
                                                              "input_img")]),
                (crop_nifti_node, self.output_node, [("output_img",
                                                      "outfile_crop")]),
                (crop_nifti_node, print_end_message, [("output_img",
                                                       "final_file")]),
            ])
        else:
            self.connect([
                (normalize_intensity_node, print_end_message,
                 [("output_img", "final_file")]),
            ])
        # STEP 6: Optional argument
        if self.parameters.get("save_PETinT1w"):
            self.connect([
                (self.input_node, ants_applytransform_optional_node,
                 [("pet", "input_image"), ("t1w", "reference_image")]),
                (ants_registration_node, ants_applytransform_optional_node,
                 [("out_matrix", "transforms")]),
                (ants_applytransform_optional_node, self.output_node,
                 [("output_image", "PETinT1w")]),
            ])