def run(self, n_pipeline_jobs=1):
        """Perform transformations.

        Args:
            n_pipeline_jobs (int, optional): number of parallel processing jobs. Defaults to 1.
        """        
        if not os.path.exists(self.strOutputDir):
            os.makedirs(self.strOutputDir)
        strJobListPath = os.path.join(self.strOutputDir, 'joblist.csv')
        self.dfConfig.to_csv(strJobListPath)

        datanode = Node(utility.csv.CSVReader(in_file=os.path.abspath(strJobListPath), header=True),
                        name='datanode')

        augment = Workflow('augmentation_affinereg', base_dir=os.path.join(self.strOutputDir, 'working_dir'))

        transformFunc = MapNode(fsl.ApplyXFM(interp='spline', apply_xfm=True), name='transform_func',
                                iterfield=['in_file', 'reference', 'in_matrix_file', 'out_file'])
        augment.connect(datanode, 'func', transformFunc, 'in_file')
        augment.connect(datanode, 'func', transformFunc, 'reference')
        augment.connect(datanode, 'affine', transformFunc, 'in_matrix_file')
        augment.connect(datanode, 'output_func', transformFunc, 'out_file')

        transformAnat = MapNode(fsl.ApplyXFM(interp='spline', apply_xfm=True), name='transform_anat',
                                iterfield=['in_file', 'reference', 'in_matrix_file', 'out_file'])
        augment.connect(datanode, 'anat', transformAnat, 'in_file')
        augment.connect(datanode, 'anat', transformAnat, 'reference')
        augment.connect(datanode, 'affine', transformAnat, 'in_matrix_file')
        augment.connect(datanode, 'output_anat', transformAnat, 'out_file')

        if n_pipeline_jobs == 1:
            augment.run()
        else:
            augment.run(plugin='MultiProc', plugin_args={'n_procs': n_pipeline_jobs})
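# Usage sketch (assumption, not from the original source): the joblist CSV read
# by the CSVReader above must expose the columns wired into the MapNodes, i.e.
# 'func', 'anat', 'affine', 'output_func' and 'output_anat'. Paths are placeholders.
import pandas as pd

dfConfig = pd.DataFrame({
    'func': ['/data/sub-01/func.nii.gz'],
    'anat': ['/data/sub-01/anat.nii.gz'],
    'affine': ['/data/sub-01/aug_000.mat'],
    'output_func': ['/data/sub-01/func_aug_000.nii.gz'],
    'output_anat': ['/data/sub-01/anat_aug_000.nii.gz'],
})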
Example #2
    def series_coreg_pipeline(self, **name_maps):

        pipeline = self.new_pipeline(
            'series_coreg',
            desc="Applies coregistration transform to DW series",
            citations=[],
            name_maps=name_maps)

        if self.provided('coreg_ref'):
            coreg_ref = 'coreg_ref'
        elif self.provided('coreg_ref_brain'):
            coreg_ref = 'coreg_ref_brain'
        else:
            raise BananaUsageError(
                "Cannot coregister DW series as reference ('coreg_ref' or "
                "'coreg_ref_brain') has not been provided to {}".format(self))

        # Apply co-registration transformation to DW series
        pipeline.add(
            'mask_transform',
            fsl.ApplyXFM(
                output_type='NIFTI_GZ',
                apply_xfm=True),
            inputs={
                'in_matrix_file': ('coreg_fsl_mat', text_matrix_format),
                'in_file': ('series_preproc', nifti_gz_format),
                'reference': (coreg_ref, nifti_gz_format)},
            outputs={
                'series_coreg': ('out_file', nifti_gz_format)},
            requirements=[fsl_req.v('5.0.10')],
            wall_time=10)

        return pipeline
Example #3
def array_to_mni_map(array, fname, data_path, mask, threshold=None, **kwargs):
    '''Transforms array to mni space and saves it as mni map
    IN:
        array       -       ndarray, array of statistics in group space to transform to mni space
        fname       -       string, name under which to save the mni map
        data_path   -       string, path to Forrest Gump directory
        mask        -       string, filename of the mask to unmask array.
        threshold   -       float, optional, threshold for mni map'''
    from nipype.interfaces import fsl
    import os
    import numpy as np  # needed for np.abs below
    from nilearn.masking import unmask
    if threshold is not None:
        array[np.abs(array) < threshold] = 0
    unmasked = unmask(array, mask)
    unmasked.to_filename(fname)
    flirt = fsl.ApplyXFM()
    flirt.inputs.in_file = fname
    flirt.inputs.out_file = fname
    flirt.inputs.padding_size = 0
    flirt.inputs.interp = 'nearestneighbour'
    flirt.inputs.reference = os.path.join(data_path, 'templates',
                                          'grpbold7Tp1', 'in_mni',
                                          'brain_12dof.nii.gz')
    flirt.inputs.in_matrix_file = os.path.join(data_path, 'templates',
                                               'grpbold7Tp1', 'xfm',
                                               'tmpl2mni_12dof.mat')
    flirt.run()
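# Usage sketch for array_to_mni_map (hypothetical paths; assumes the statistics
# array is already in group space and matches the supplied mask).
import numpy as np

scores = np.load('searchlight_scores.npy')
array_to_mni_map(scores, 'scores_mni.nii.gz', '/data/forrest_gump',
                 '/data/forrest_gump/templates/grpbold7Tp1/brain_mask.nii.gz',
                 threshold=0.05)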
Example #4
    def brain_coreg_pipeline(self, **name_maps):
        if self.branch('coreg_method', 'epireg'):
            pipeline = self.coreg_pipeline(
                name='brain_coreg',
                name_maps=dict(
                    input_map={
                        'mag_preproc': 'brain',
                        'coreg_ref': 'coreg_ref_brain'},
                    output_map={
                        'mag_coreg': 'brain_coreg'},
                    name_maps=name_maps))

            pipeline.add(
                'mask_transform',
                fsl.ApplyXFM(
                    output_type='NIFTI_GZ',
                    apply_xfm=True),
                inputs={
                    'in_matrix_file': (pipeline.node('epireg'), 'epi2str_mat'),
                    'in_file': ('brain_mask', nifti_gz_format),
                    'reference': ('coreg_ref_brain', nifti_gz_format)},
                outputs={
                    'brain_mask_coreg': ('out_file', nifti_gz_format)},
                requirements=[fsl_req.v('5.0.10')],
                wall_time=10)
        else:
            pipeline = super().brain_coreg_pipeline(**name_maps)

        return pipeline
Example #5
    def _run_interface(self, runtime):
        for in_file in self.inputs.in_files:
            ax = fsl.ApplyXFM(in_file=in_file,
                              in_matrix_file=self.inputs.xfm_file,
                              apply_xfm=True,
                              interp=self.inputs.interp,
                              reference=self.inputs.reference)
            ax.run()
        return runtime
Example #6
    def apply_affine(self, in_file, affine, out_file, ref):
        xfm = fsl.ApplyXFM()
        xfm.inputs.in_file = in_file
        xfm.inputs.reference = ref
        xfm.inputs.in_matrix_file = affine
        xfm.inputs.apply_xfm = True
        xfm.inputs.out_file = out_file
        xfm.inputs.out_matrix_file = out_file.replace("nii.gz", "mat")
        print("Applying linear transformation on atlas:")
        print(xfm.cmdline)
        xfm.run()
Example #7
    def resample_atlas(self, atlas_file: str, lowers, aff):
        applyxfm = fsl.ApplyXFM()
        applyxfm.inputs.in_file = atlas_file
        applyxfm.inputs.reference = lowers
        applyxfm.inputs.in_matrix_file = aff
        applyxfm.inputs.out_file = atlas_file.replace(".nii.gz", "_resampled.nii")
        applyxfm.inputs.apply_xfm = True
        applyxfm.inputs.output_type = "NIFTI"
        print(applyxfm.cmdline)
        applyxfm.run()
        return applyxfm.inputs.out_file
Example #8
def apply_xfm(in_file, xfm_file, ref_file):

    applyxfm = fsl.ApplyXFM()
    applyxfm.inputs.in_file = in_file
    applyxfm.inputs.in_matrix_file = xfm_file
    applyxfm.inputs.reference = ref_file
    applyxfm.inputs.apply_xfm = True

    output_file = applyxfm.run().outputs.out_file
    print(output_file)
    return output_file
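# Usage sketch (hypothetical file names): resample an image into the space of
# ref_file with a precomputed FLIRT matrix and get back the output path.
registered = apply_xfm('b0.nii.gz', 'b0_to_T1.mat', 'T1w.nii.gz')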
def apply_affine(in_file: Path, ref: Path, aff: Path, out_file: Path):
    """
    apply affine file to preform linear registration from one image to another
    Arguments:
        ref {Path} -- [reference image]
        aff {Path} -- [affine matrix file]
        out_file {Path} -- [output file]
        in_file {Path} -- [file to apply affine matrix on]
    """
    ax = fsl.ApplyXFM()
    ax.inputs.in_file = in_file
    ax.inputs.in_matrix_file = aff
    ax.inputs.out_file = out_file
    ax.inputs.reference = ref
    return ax
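# Note: apply_affine only configures the interface; the caller still has to
# invoke .run() on the returned object. Minimal sketch with hypothetical paths:
from pathlib import Path

ax = apply_affine(Path('moving.nii.gz'), Path('ref.nii.gz'),
                  Path('moving2ref.mat'), Path('moving_in_ref.nii.gz'))
ax.run()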
Example #10
def create_segments_2func_workflow(threshold=0.5,
                                   name='segments_2func_workflow'):
    segments_2func_workflow = Workflow(name=name)

    # Input Node
    inputspec = Node(
        utility.IdentityInterface(fields=['segments', 'premat', 'func_file']),
        name='inputspec')

    # Calculate inverse matrix of EPI to T1
    anat_2func_matrix = Node(fsl.ConvertXFM(invert_xfm=True),
                             name='anat_2func_matrix')

    # Transform segments to EPI space
    segments_2func_apply = MapNode(fsl.ApplyXFM(),
                                   iterfield=['in_file'],
                                   name='segments_2func_apply')

    # Threshold segments
    segments_threshold = MapNode(
        fsl.ImageMaths(op_string='-thr {0} -bin'.format(threshold)),
        iterfield=['in_file'],
        name='segments_threshold')

    # Output Node
    outputspec = Node(utility.IdentityInterface(
        fields=['segments_2func_files', 'anat_2func_matrix_file']),
                      name='outputspec')

    segments_2func_workflow.connect(inputspec, 'premat', anat_2func_matrix,
                                    'in_file')
    segments_2func_workflow.connect(inputspec, 'segments',
                                    segments_2func_apply, 'in_file')
    segments_2func_workflow.connect(inputspec, 'func_file',
                                    segments_2func_apply, 'reference')
    segments_2func_workflow.connect(anat_2func_matrix, 'out_file',
                                    segments_2func_apply, 'in_matrix_file')
    segments_2func_workflow.connect(segments_2func_apply, 'out_file',
                                    segments_threshold, 'in_file')
    segments_2func_workflow.connect(anat_2func_matrix, 'out_file', outputspec,
                                    'anat_2func_matrix_file')
    segments_2func_workflow.connect(segments_threshold, 'out_file', outputspec,
                                    'segments_2func_files')

    return segments_2func_workflow
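# Usage sketch (hypothetical paths): provide tissue segment maps, the
# func-to-anat FLIRT matrix (inverted inside the workflow) and the functional
# reference image, then run.
wf = create_segments_2func_workflow(threshold=0.5)
wf.inputs.inputspec.segments = ['csf.nii.gz', 'gm.nii.gz', 'wm.nii.gz']
wf.inputs.inputspec.premat = 'func2anat.mat'
wf.inputs.inputspec.func_file = 'mean_func.nii.gz'
wf.run()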
Example #11
def doApplyXFM(infile, inmat, ref, outfile, intertype,
               tag):  # Doing transformation using existing mat files
    '''
    Parameters
    ----------
    infile : str
        path containing the input image.
    inmat : str
        path containing the input transformation matrix.
    ref : str
        path containing the standard MNI_T1_1mm template.
    outfile : str
        path to save the output image.
    intertype : str
        string containing the interpolation type during transformation. e.g. spline, etc.
    tag : str
        tag to identify the image type, e.g. T1, T2, FLAIR, etc.

    Returns
    -------
    an image with input transformation applied
    '''
    print('doing transformation/cropping using transformation/cropping matrix',
          tag, infile)
    applyxfm = fsl.ApplyXFM()
    applyxfm.inputs.apply_xfm = True
    applyxfm.inputs.reference = ref
    applyxfm.inputs.in_file = infile
    applyxfm.inputs.out_file = outfile
    applyxfm.inputs.in_matrix_file = inmat
    applyxfm.inputs.out_matrix_file = inmat  # There will be no change in the in_matrix_file since it is applyxfm
    applyxfm.inputs.no_resample = True
    applyxfm.inputs.no_resample_blur = True
    applyxfm.inputs.interp = intertype
    applyxfm.run()
    print(tag, 'is transformed/cropped', outfile, '\n')
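# Usage sketch (hypothetical paths): warp a T1 image to MNI space with a
# precomputed FLIRT matrix and spline interpolation.
doApplyXFM('sub-01_T1.nii.gz', 'T1_to_MNI.mat', 'MNI152_T1_1mm.nii.gz',
           'sub-01_T1_MNI.nii.gz', 'spline', 'T1')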
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline."""
        import nipype.interfaces.ants as ants
        import nipype.interfaces.fsl as fsl
        import nipype.interfaces.mrtrix3 as mrtrix3
        import nipype.interfaces.utility as nutil
        import nipype.interfaces.utility as niu
        import nipype.pipeline.engine as npe
        from nipype.interfaces.fsl.epi import Eddy

        from clinica.utils.dwi import (
            compute_average_b0,
            generate_acq_file,
            generate_index_file,
        )

        from .dwi_preprocessing_using_phasediff_fmap_utils import (
            get_grad_fsl,
            init_input_node,
            print_end_pipeline,
        )
        from .dwi_preprocessing_using_phasediff_fmap_workflows import (
            prepare_phasediff_fmap, )

        # Step 0: Initialization
        # ======================
        # Initialize input parameters and print begin message
        init_node = npe.Node(
            interface=nutil.Function(
                input_names=self.get_input_fields(),
                output_names=[
                    "image_id",
                    "dwi",
                    "bvec",
                    "bval",
                    "total_readout_time",
                    "phase_encoding_direction",
                    "fmap_magnitude",
                    "fmap_phasediff",
                    "delta_echo_time",
                ],
                function=init_input_node,
            ),
            name="0-InitNode",
        )

        # Generate (bvec, bval) tuple for MRtrix interfaces
        get_grad_fsl = npe.Node(
            nutil.Function(
                input_names=["bval", "bvec"],
                output_names=["grad_fsl"],
                function=get_grad_fsl,
            ),
            name="0-GetFslGrad",
        )

        # Generate <image_id>_acq.txt for eddy
        gen_acq_txt = npe.Node(
            nutil.Function(
                input_names=[
                    "in_dwi",
                    "fsl_phase_encoding_direction",
                    "total_readout_time",
                    "image_id",
                ],
                output_names=["out_acq"],
                function=generate_acq_file,
            ),
            name="0-GenerateAcqFile",
        )

        # Generate <image_id>_index.txt for eddy
        gen_index_txt = npe.Node(
            nutil.Function(
                input_names=["in_bval", "low_bval", "image_id"],
                output_names=["out_index"],
                function=generate_index_file,
            ),
            name="0-GenerateIndexFile",
        )
        gen_index_txt.inputs.low_bval = self.parameters["low_bval"]

        # Step 1: Computation of the reference b0 (i.e. average b0 but with EPI distortions)
        # =======================================
        # Compute whole brain mask
        pre_mask_b0 = npe.Node(mrtrix3.BrainMask(), name="1a-PreMaskB0")
        pre_mask_b0.inputs.out_file = (
            "brainmask.nii.gz"  # By default, a .mif file is generated
        )

        # Run eddy without calibrated fmap
        pre_eddy = npe.Node(name="1b-PreEddy", interface=Eddy())
        pre_eddy.inputs.repol = True
        pre_eddy.inputs.use_cuda = self.parameters["use_cuda"]
        pre_eddy.inputs.initrand = self.parameters["initrand"]

        # Compute the reference b0
        compute_ref_b0 = npe.Node(
            niu.Function(
                input_names=["in_dwi", "in_bval"],
                output_names=["out_b0_average"],
                function=compute_average_b0,
            ),
            name="1c-ComputeReferenceB0",
        )
        compute_ref_b0.inputs.low_bval = self.parameters["low_bval"]

        # Compute brain mask from reference b0
        mask_ref_b0 = npe.Node(fsl.BET(mask=True, robust=True),
                               name="1d-MaskReferenceB0")

        # Step 2: Calibrate and register FMap
        # ===================================
        # Bias field correction of the magnitude image
        bias_mag_fmap = npe.Node(ants.N4BiasFieldCorrection(dimension=3),
                                 name="2a-N4MagnitudeFmap")
        # Brain extraction of the magnitude image
        bet_mag_fmap = npe.Node(fsl.BET(frac=0.4, mask=True),
                                name="2b-BetN4MagnitudeFmap")

        # Calibrate FMap
        calibrate_fmap = prepare_phasediff_fmap(name="2c-CalibrateFMap")

        # Register the BET magnitude fmap onto the BET b0
        bet_mag_fmap2b0 = npe.Node(interface=fsl.FLIRT(),
                                   name="2d-RegistrationBetMagToB0")
        bet_mag_fmap2b0.inputs.dof = 6
        bet_mag_fmap2b0.inputs.output_type = "NIFTI_GZ"

        # Apply the transformation on the calibrated fmap
        fmap2b0 = npe.Node(interface=fsl.ApplyXFM(), name="2e-1-FMapToB0")
        fmap2b0.inputs.output_type = "NIFTI_GZ"

        # Apply the transformation on the magnitude image
        mag_fmap2b0 = fmap2b0.clone("2e-2-MagFMapToB0")

        # Smooth the registered (calibrated) fmap
        smoothing = npe.Node(interface=fsl.maths.IsotropicSmooth(),
                             name="2f-Smoothing")
        smoothing.inputs.sigma = 4.0

        # Step 3: Run FSL eddy
        # ====================
        eddy = pre_eddy.clone("3-Eddy")

        # Step 4: Bias correction
        # =======================
        # Use implementation detailed in (Jeurissen et al., 2014)
        bias = npe.Node(mrtrix3.DWIBiasCorrect(use_ants=True),
                        name="4-RemoveBias")

        # Step 5: Final brainmask
        # =======================
        # Compute average b0 on corrected dataset (for brain mask extraction)
        compute_avg_b0 = compute_ref_b0.clone("5a-ComputeB0Average")

        # Compute b0 mask on corrected avg b0
        mask_avg_b0 = mask_ref_b0.clone("5b-MaskB0")

        # Print end message
        print_end_message = npe.Node(
            interface=nutil.Function(input_names=["image_id", "final_file"],
                                     function=print_end_pipeline),
            name="99-WriteEndMessage",
        )

        # Connection
        # ==========
        # fmt: off
        self.connect([
            # Step 0: Initialization
            # ======================
            # Initialize input parameters and print begin message
            (self.input_node, init_node, [("dwi", "dwi"), ("bvec", "bvec"),
                                          ("bval", "bval"),
                                          ("dwi_json", "dwi_json"),
                                          ("fmap_magnitude", "fmap_magnitude"),
                                          ("fmap_phasediff", "fmap_phasediff"),
                                          ("fmap_phasediff_json",
                                           "fmap_phasediff_json")]),
            # Generate (bvec, bval) tuple for MRtrix interfaces
            (init_node, get_grad_fsl, [("bval", "bval"), ("bvec", "bvec")]),
            # Generate <image_id>_acq.txt for eddy
            (init_node, gen_acq_txt,
             [("dwi", "in_dwi"), ("total_readout_time", "total_readout_time"),
              ("phase_encoding_direction", "fsl_phase_encoding_direction"),
              ("image_id", "image_id")]),
            # Generate <image_id>_index.txt for eddy
            (init_node, gen_index_txt, [("bval", "in_bval"),
                                        ("image_id", "image_id")]),

            # Step 1: Computation of the reference b0 (i.e. average b0 but with EPI distortions)
            # =======================================
            # Compute whole brain mask
            (get_grad_fsl, pre_mask_b0, [("grad_fsl", "grad_fsl")]),
            (init_node, pre_mask_b0, [("dwi", "in_file")]),
            # Run eddy without calibrated fmap
            (init_node, pre_eddy, [("dwi", "in_file"), ("bval", "in_bval"),
                                   ("bvec", "in_bvec"),
                                   ("image_id", "out_base")]),
            (gen_acq_txt, pre_eddy, [("out_acq", "in_acqp")]),
            (gen_index_txt, pre_eddy, [("out_index", "in_index")]),
            (pre_mask_b0, pre_eddy, [("out_file", "in_mask")]),
            # Compute the reference b0
            (init_node, compute_ref_b0, [("bval", "in_bval")]),
            (pre_eddy, compute_ref_b0, [("out_corrected", "in_dwi")]),
            # Compute brain mask from reference b0
            (compute_ref_b0, mask_ref_b0, [("out_b0_average", "in_file")]),

            # Step 2: Calibrate and register FMap
            # ===================================
            # Bias field correction of the magnitude image
            (init_node, bias_mag_fmap, [("fmap_magnitude", "input_image")]),
            # Brain extraction of the magnitude image
            (bias_mag_fmap, bet_mag_fmap, [("output_image", "in_file")]),
            # Calibration of the FMap
            (bet_mag_fmap, calibrate_fmap,
             [("mask_file", "input_node.fmap_mask"),
              ("out_file", "input_node.fmap_magnitude")]),
            (init_node, calibrate_fmap,
             [("fmap_phasediff", "input_node.fmap_phasediff"),
              ("delta_echo_time", "input_node.delta_echo_time")]),
            # Register the BET magnitude fmap onto the BET b0
            (bet_mag_fmap, bet_mag_fmap2b0, [("out_file", "in_file")]),
            (mask_ref_b0, bet_mag_fmap2b0, [("out_file", "reference")]),
            # Apply the transformation on the magnitude image
            (bet_mag_fmap2b0, mag_fmap2b0, [("out_matrix_file",
                                             "in_matrix_file")]),
            (bias_mag_fmap, mag_fmap2b0, [("output_image", "in_file")]),
            (mask_ref_b0, mag_fmap2b0, [("out_file", "reference")]),
            # Apply the transformation on the calibrated fmap
            (bet_mag_fmap2b0, fmap2b0, [("out_matrix_file", "in_matrix_file")]
             ),
            (calibrate_fmap, fmap2b0, [("output_node.calibrated_fmap",
                                        "in_file")]),
            (mask_ref_b0, fmap2b0, [("out_file", "reference")]),
            # Smooth the registered (calibrated) fmap
            (fmap2b0, smoothing, [("out_file", "in_file")]),

            # Step 3: Run FSL eddy
            # ====================
            (init_node, eddy, [("dwi", "in_file"), ("bval", "in_bval"),
                               ("bvec", "in_bvec"), ("image_id", "out_base")]),
            (gen_acq_txt, eddy, [("out_acq", "in_acqp")]),
            (gen_index_txt, eddy, [("out_index", "in_index")]),
            (smoothing, eddy, [("out_file", "field")]),
            (pre_mask_b0, eddy, [("out_file", "in_mask")]),

            # Step 4: Bias correction
            # =======================
            (init_node, bias, [("bval", "in_bval")]),
            (eddy, bias, [("out_rotated_bvecs", "in_bvec"),
                          ("out_corrected", "in_file")]),
            # Step 5: Final brainmask
            # =======================
            # Compute average b0 on corrected dataset (for brain mask extraction)
            (init_node, compute_avg_b0, [("bval", "in_bval")]),
            (bias, compute_avg_b0, [("out_file", "in_dwi")]),
            # Compute b0 mask on corrected avg b0
            (compute_avg_b0, mask_avg_b0, [("out_b0_average", "in_file")]),

            # Print end message
            (init_node, print_end_message, [("image_id", "image_id")]),
            (mask_avg_b0, print_end_message, [("mask_file", "final_file")]),

            # Output node
            (init_node, self.output_node, [("bval", "preproc_bval")]),
            (eddy, self.output_node, [("out_rotated_bvecs", "preproc_bvec")]),
            (bias, self.output_node, [("out_file", "preproc_dwi")]),
            (mask_avg_b0, self.output_node, [("mask_file", "b0_mask")]),
            (bet_mag_fmap2b0, self.output_node, [("out_file",
                                                  "magnitude_on_b0")]),
            (fmap2b0, self.output_node, [("out_file", "calibrated_fmap_on_b0")
                                         ]),
            (smoothing, self.output_node, [("out_file", "smoothed_fmap_on_b0")
                                           ]),
        ])
Example #13
def build_correlation_wf(Registration=True,
                         use_Ankita_Function=False,
                         name='pearsonCorrcalc'):
    corr_wf = Workflow(name=name)
    if Registration:
        inputnode = Node(interface=util.IdentityInterface(fields=[
            'in_files', 'atlas_files', 'func2std', 'reference', 'mask_file'
        ]),
                         name='inputspec')
        outputnode = Node(
            interface=util.IdentityInterface(fields=['pearsonCorr_files']),
            name='outputspec')

        if use_Ankita_Function:
            coff_matrix = MapNode(util.Function(
                function=pearson_corr_Ankita,
                input_names=['in_file', 'atlas_file'],
                output_names=['coff_matrix_file']),
                                  iterfield=['in_file', 'atlas_file'],
                                  name='coff_matrix')
            transform_corr = MapNode(interface=fsl.ApplyXFM(interp='spline'),
                                     iterfield=['in_file', 'in_matrix_file'],
                                     name='transform_corr')
            maskCorrFile = MapNode(interface=fsl.ImageMaths(suffix='_masked',
                                                            op_string='-mas'),
                                   iterfield=['in_file'],
                                   name='maskWarpFile')
            make_npy_from_Corr = MapNode(util.Function(
                function=make_npy_from_CorrFile,
                input_names=['Corr_file', 'mask_file'],
                output_names=['coff_matrix_file']),
                                         iterfield=['Corr_file'],
                                         name='coff_matrix_in_npy')

        else:
            coff_matrix = MapNode(util.Function(
                function=pearsonr_with_roi_mean_w_reg,
                input_names=['in_file', 'atlas_file'],
                output_names=['coff_matrix_file']),
                                  iterfield=['in_file', 'atlas_file'],
                                  name='coff_matrix')
            transform_corr = MapNode(interface=fsl.ApplyXFM(interp='spline'),
                                     iterfield=['in_file', 'in_matrix_file'],
                                     name='transform_corr')
            maskCorrFile = MapNode(interface=fsl.ImageMaths(suffix='_masked',
                                                            op_string='-mas'),
                                   iterfield=['in_file'],
                                   name='maskWarpFile')
            make_npy_from_Corr = MapNode(util.Function(
                function=make_npy_from_CorrFile,
                input_names=['Corr_file', 'mask_file'],
                output_names=['coff_matrix_file']),
                                         iterfield=['Corr_file'],
                                         name='coff_matrix_in_npy')
        datasink = Node(interface=DataSink(), name='datasink')

        corr_wf.connect(inputnode, 'in_files', coff_matrix, 'in_file')
        corr_wf.connect(inputnode, 'atlas_files', coff_matrix, 'atlas_file')
        corr_wf.connect(coff_matrix, 'coff_matrix_file', transform_corr,
                        'in_file')
        corr_wf.connect(inputnode, 'func2std', transform_corr,
                        'in_matrix_file')
        corr_wf.connect(inputnode, 'reference', transform_corr, 'reference')
        corr_wf.connect(transform_corr, 'out_file', maskCorrFile, 'in_file')
        corr_wf.connect(inputnode, 'mask_file', maskCorrFile, 'in_file2')

        corr_wf.connect(maskCorrFile, 'out_file', make_npy_from_Corr,
                        'Corr_file')
        corr_wf.connect(inputnode, 'mask_file', make_npy_from_Corr,
                        'mask_file')
        corr_wf.connect(make_npy_from_Corr, 'coff_matrix_file', outputnode,
                        'pearsonCorr_files')
        corr_wf.connect(outputnode, 'pearsonCorr_files', datasink, 'out_file')

    else:

        inputnode = Node(interface=util.IdentityInterface(
            fields=['in_files', 'atlas_file', 'mask_file']),
                         name='inputspec')
        outputnode = Node(interface=util.IdentityInterface(
            fields=['pearsonCorr_files', 'pearsonCorr_files_in_nii']),
                          name='outputspec')
        if use_Ankita_Function:
            coff_matrix = MapNode(util.Function(
                function=pearson_corr_Ankita,
                input_names=['in_file', 'atlas_file'],
                output_names=['coff_matrix_file']),
                                  iterfield=['in_file'],
                                  name='coff_matrix')
            maskCorrFile = MapNode(interface=fsl.ImageMaths(suffix='_masked',
                                                            op_string='-mas'),
                                   iterfield=['in_file'],
                                   name='maskWarpFile')
            make_npy_from_Corr = MapNode(util.Function(
                function=make_npy_from_CorrFile,
                input_names=['Corr_file', 'mask_file'],
                output_names=['coff_matrix_file']),
                                         iterfield=['Corr_file'],
                                         name='coff_matrix_in_npy')
            datasink = Node(interface=DataSink(), name='datasink')

            corr_wf.connect(inputnode, 'in_files', coff_matrix, 'in_file')
            corr_wf.connect(inputnode, 'atlas_file', coff_matrix, 'atlas_file')
            corr_wf.connect(coff_matrix, 'coff_matrix_file', maskCorrFile,
                            'in_file')
            corr_wf.connect(inputnode, 'mask_file', maskCorrFile, 'in_file2')

            corr_wf.connect(maskCorrFile, 'out_file', make_npy_from_Corr,
                            'Corr_file')
            corr_wf.connect(inputnode, 'mask_file', make_npy_from_Corr,
                            'mask_file')
            corr_wf.connect(make_npy_from_Corr, 'coff_matrix_file', outputnode,
                            'pearsonCorr_files')
            corr_wf.connect(outputnode, 'pearsonCorr_files', datasink,
                            'out_file')
        else:
            coff_matrix = MapNode(util.Function(
                function=pearsonr_with_roi_mean,
                input_names=['in_file', 'atlas_file', 'mask_file'],
                output_names=['coff_matrix_file', 'coff_matrix_file_in_nii']),
                                  iterfield=['in_file'],
                                  name='coff_matrix')
            datasink = Node(interface=DataSink(), name='datasink')
            # selectfile = MapNode(interface=util.Select(index=[0]), iterfield = ['inlist'],name='select')
            corr_wf.connect(inputnode, 'in_files', coff_matrix, 'in_file')
            corr_wf.connect(inputnode, 'atlas_file', coff_matrix, 'atlas_file')
            corr_wf.connect(inputnode, 'mask_file', coff_matrix, 'mask_file')

            corr_wf.connect(coff_matrix, 'coff_matrix_file', outputnode,
                            'pearsonCorr_files')
            corr_wf.connect(coff_matrix, 'coff_matrix_file_in_nii', outputnode,
                            'pearsonCorr_files_in_nii')
            corr_wf.connect(outputnode, 'pearsonCorr_files', datasink,
                            'out_file')
        # coff_matrix = MapNode(util.Function(function=pearsonr_with_roi_mean_w_reg,
        #                             input_names=['in_file','atlas_file'],
        #                             output_names=['coff_matrix_file']),
        #                   iterfield=['in_file'],
        #                   name = 'coff_matrix')
        # maskCorrFile = MapNode(interface=fsl.ImageMaths(suffix='_masked',
        #                                        op_string='-mas'),
        #               iterfield=['in_file'],
        #               name = 'maskWarpFile')
        # make_npy_from_Corr = MapNode(util.Function(function=make_npy_from_CorrFile,
        #                             input_names=['Corr_file','mask_file'],
        #                             output_names=['coff_matrix_file']),
        #                   iterfield=['Corr_file'],
        #                   name = 'coff_matrix_in_npy')
        # datasink = Node(interface=DataSink(), name='datasink')

        # corr_wf.connect(inputnode, 'in_files', coff_matrix, 'in_file')
        # corr_wf.connect(inputnode, 'atlas_file', coff_matrix, 'atlas_file')
        # corr_wf.connect(coff_matrix,'coff_matrix_file', maskCorrFile, 'in_file')
        # corr_wf.connect(inputnode, 'mask_file', maskCorrFile, 'in_file2')

        # corr_wf.connect(maskCorrFile,'out_file', make_npy_from_Corr, 'Corr_file')
        # corr_wf.connect(inputnode,'mask_file', make_npy_from_Corr, 'mask_file')
        # corr_wf.connect(make_npy_from_Corr, 'coff_matrix_file', outputnode, 'pearsonCorr_files')
        # corr_wf.connect(outputnode, 'pearsonCorr_files', datasink, 'out_file')

    return corr_wf
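# Usage sketch (hypothetical paths): without registration only in_files,
# atlas_file and mask_file are needed on the input node.
corr_wf = build_correlation_wf(Registration=False)
corr_wf.inputs.inputspec.in_files = ['sub-01_func_preproc.nii.gz']
corr_wf.inputs.inputspec.atlas_file = 'atlas_in_func_space.nii.gz'
corr_wf.inputs.inputspec.mask_file = 'func_brain_mask.nii.gz'
corr_wf.run()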
Example #14
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline.

        Notes:
            - If `FSLOUTPUTTYPE` environment variable is not set, `nipype` takes
            NIFTI by default.

        Todo:
            - [x] Detect space automatically.
            - [ ] Allow for custom parcellations (See TODOs in utils).

        """
        import nipype.interfaces.freesurfer as fs
        import nipype.interfaces.fsl as fsl
        import nipype.interfaces.mrtrix3 as mrtrix3
        import nipype.interfaces.utility as niu
        import nipype.pipeline.engine as npe
        from nipype.interfaces.mrtrix3.tracking import Tractography
        from nipype.interfaces.mrtrix.preprocess import MRTransform

        import clinica.pipelines.dwi_connectome.dwi_connectome_utils as utils
        from clinica.lib.nipype.interfaces.mrtrix3.reconst import EstimateFOD
        from clinica.utils.exceptions import ClinicaCAPSError
        from clinica.utils.mri_registration import (
            convert_flirt_transformation_to_mrtrix_transformation,
        )

        # Nodes
        # =====
        # B0 Extraction (only if space=b0)
        # -------------
        split_node = npe.Node(name="Reg-0-DWI-B0Extraction", interface=fsl.Split())
        split_node.inputs.output_type = "NIFTI_GZ"
        split_node.inputs.dimension = "t"
        select_node = npe.Node(name="Reg-0-DWI-B0Selection", interface=niu.Select())
        select_node.inputs.index = 0

        # B0 Brain Extraction (only if space=b0)
        # -------------------
        mask_node = npe.Node(name="Reg-0-DWI-BrainMasking", interface=fsl.ApplyMask())
        mask_node.inputs.output_type = "NIFTI_GZ"

        # T1-to-B0 Registration (only if space=b0)
        # ---------------------
        t12b0_reg_node = npe.Node(
            name="Reg-1-T12B0Registration",
            interface=fsl.FLIRT(
                dof=6,
                interp="spline",
                cost="normmi",
                cost_func="normmi",
            ),
        )
        t12b0_reg_node.inputs.output_type = "NIFTI_GZ"

        # MGZ File Conversion (only if space=b0)
        # -------------------
        t1_brain_conv_node = npe.Node(
            name="Reg-0-T1-T1BrainConvertion", interface=fs.MRIConvert()
        )
        wm_mask_conv_node = npe.Node(
            name="Reg-0-T1-WMMaskConvertion", interface=fs.MRIConvert()
        )

        # WM Transformation (only if space=b0)
        # -----------------
        wm_transform_node = npe.Node(
            name="Reg-2-WMTransformation", interface=fsl.ApplyXFM()
        )
        wm_transform_node.inputs.apply_xfm = True

        # Nodes Generation
        # ----------------
        label_convert_node = npe.MapNode(
            name="0-LabelsConversion",
            iterfield=["in_file", "in_config", "in_lut", "out_file"],
            interface=mrtrix3.LabelConvert(),
        )
        label_convert_node.inputs.in_config = utils.get_conversion_luts()
        label_convert_node.inputs.in_lut = utils.get_luts()

        # FSL flirt matrix to MRtrix matrix Conversion (only if space=b0)
        # --------------------------------------------
        fsl2mrtrix_conv_node = npe.Node(
            name="Reg-2-FSL2MrtrixConversion",
            interface=niu.Function(
                input_names=[
                    "in_source_image",
                    "in_reference_image",
                    "in_flirt_matrix",
                    "name_output_matrix",
                ],
                output_names=["out_mrtrix_matrix"],
                function=convert_flirt_transformation_to_mrtrix_transformation,
            ),
        )

        # Parc. Transformation (only if space=b0)
        # --------------------
        parc_transform_node = npe.MapNode(
            name="Reg-2-ParcTransformation",
            iterfield=["in_files", "out_filename"],
            interface=MRTransform(),
        )

        # Response Estimation
        # -------------------
        resp_estim_node = npe.Node(
            name="1a-ResponseEstimation", interface=mrtrix3.ResponseSD()
        )
        resp_estim_node.inputs.algorithm = "tournier"

        # FOD Estimation
        # --------------
        fod_estim_node = npe.Node(name="1b-FODEstimation", interface=EstimateFOD())
        fod_estim_node.inputs.algorithm = "csd"

        # Tracts Generation
        # -----------------
        tck_gen_node = npe.Node(name="2-TractsGeneration", interface=Tractography())
        tck_gen_node.inputs.select = self.parameters["n_tracks"]
        tck_gen_node.inputs.algorithm = "iFOD2"

        # Connectome Generation
        # ---------------------
        # only the parcellation and output filename should be iterable, the tck
        # file stays the same.
        conn_gen_node = npe.MapNode(
            name="3-ConnectomeGeneration",
            iterfield=["in_parc", "out_file"],
            interface=mrtrix3.BuildConnectome(),
        )

        # Print begin message
        # -------------------
        print_begin_message = npe.MapNode(
            interface=niu.Function(
                input_names=["in_bids_or_caps_file"],
                function=utils.print_begin_pipeline,
            ),
            iterfield="in_bids_or_caps_file",
            name="WriteBeginMessage",
        )

        # Print end message
        # -----------------
        print_end_message = npe.MapNode(
            interface=niu.Function(
                input_names=["in_bids_or_caps_file", "final_file"],
                function=utils.print_end_pipeline,
            ),
            iterfield=["in_bids_or_caps_file"],
            name="WriteEndMessage",
        )

        # CAPS File names Generation
        # --------------------------
        caps_filenames_node = npe.Node(
            name="CAPSFilenamesGeneration",
            interface=niu.Function(
                input_names="dwi_file",
                output_names=self.get_output_fields(),
                function=utils.get_caps_filenames,
            ),
        )

        # Connections
        # ===========
        # Computation of the diffusion model, tractography & connectome
        # -------------------------------------------------------------
        # fmt: off
        self.connect(
            [
                (self.input_node, print_begin_message, [("dwi_file", "in_bids_or_caps_file")]),
                (self.input_node, caps_filenames_node, [("dwi_file", "dwi_file")]),
                # Response Estimation
                (self.input_node, resp_estim_node, [("dwi_file", "in_file")]),  # Preproc. DWI
                (self.input_node, resp_estim_node, [("dwi_brainmask_file", "in_mask")]),  # B0 brain mask
                (self.input_node, resp_estim_node, [("grad_fsl", "grad_fsl")]),  # bvecs and bvals
                (caps_filenames_node, resp_estim_node, [("response", "wm_file")]),  # output response filename
                # FOD Estimation
                (self.input_node, fod_estim_node, [("dwi_file", "in_file")]),  # Preproc. DWI
                (resp_estim_node, fod_estim_node, [("wm_file", "wm_txt")]),  # Response (txt file)
                (self.input_node, fod_estim_node, [("dwi_brainmask_file", "mask_file")]),  # B0 brain mask
                (self.input_node, fod_estim_node, [("grad_fsl", "grad_fsl")]),  # T1-to-B0 matrix file
                (caps_filenames_node, fod_estim_node, [("fod", "wm_odf")]),  # output odf filename
                # Tracts Generation
                (fod_estim_node, tck_gen_node, [("wm_odf", "in_file")]),  # ODF file
                (caps_filenames_node, tck_gen_node, [("tracts", "out_file")]),  # output tck filename
                # Label Conversion
                (self.input_node, label_convert_node, [("atlas_files", "in_file")]),  # atlas image files
                (caps_filenames_node, label_convert_node, [("nodes", "out_file")]),  # converted atlas image filenames
                # Connectomes Generation
                (tck_gen_node, conn_gen_node, [("out_file", "in_file")]),
                (caps_filenames_node, conn_gen_node, [("connectomes", "out_file")]),
            ]
        )
        # Registration T1-DWI (only if space=b0)
        # -------------------
        if self.parameters["dwi_space"] == "b0":
            self.connect(
                [
                    # MGZ Files Conversion
                    (self.input_node, t1_brain_conv_node, [("t1_brain_file", "in_file")]),
                    (self.input_node, wm_mask_conv_node, [("wm_mask_file", "in_file")]),
                    # B0 Extraction
                    (self.input_node, split_node, [("dwi_file", "in_file")]),
                    (split_node, select_node, [("out_files", "inlist")]),
                    # Masking
                    (select_node, mask_node, [("out", "in_file")]),  # B0
                    (self.input_node, mask_node, [("dwi_brainmask_file", "mask_file")]),  # Brain mask
                    # T1-to-B0 Registration
                    (t1_brain_conv_node, t12b0_reg_node, [("out_file", "in_file")]),  # Brain
                    (mask_node, t12b0_reg_node, [("out_file", "reference")]),  # B0 brain-masked
                    # WM Transformation
                    (wm_mask_conv_node, wm_transform_node, [("out_file", "in_file")]),  # WM mask
                    (mask_node, wm_transform_node, [("out_file", "reference")]),  # B0 brain-masked
                    (t12b0_reg_node, wm_transform_node, [("out_matrix_file", "in_matrix_file")]),  # T1-to-B0 matrix file
                    # FSL flirt matrix to MRtrix matrix Conversion
                    (t1_brain_conv_node, fsl2mrtrix_conv_node, [("out_file", "in_source_image")]),
                    (mask_node, fsl2mrtrix_conv_node, [("out_file", "in_reference_image")]),
                    (t12b0_reg_node, fsl2mrtrix_conv_node, [("out_matrix_file", "in_flirt_matrix")]),
                    # Apply registration without resampling on parcellations
                    (label_convert_node, parc_transform_node, [("out_file", "in_files")]),
                    (fsl2mrtrix_conv_node, parc_transform_node, [("out_mrtrix_matrix", "linear_transform")]),
                    (caps_filenames_node, parc_transform_node, [("nodes", "out_filename")]),
                ]
            )
        # Special care for Parcellation & WM mask
        # ---------------------------------------
        if self.parameters["dwi_space"] == "b0":
            self.connect(
                [
                    (wm_transform_node, tck_gen_node, [("out_file", "seed_image")]),
                    (parc_transform_node, conn_gen_node, [("out_file", "in_parc")]),
                    (parc_transform_node, self.output_node, [("out_file", "nodes")]),
                ]
            )
        elif self.parameters["dwi_space"] == "T1w":
            self.connect(
                [
                    (self.input_node, tck_gen_node, [("wm_mask_file", "seed_image")]),
                    (label_convert_node, conn_gen_node, [("out_file", "in_parc")]),
                    (label_convert_node, self.output_node, [("out_file", "nodes")]),
                ]
            )
        else:
            raise ClinicaCAPSError(
                "Bad preprocessed DWI space. Please check your CAPS folder."
            )
        # Outputs
        # -------
        self.connect(
            [
                (resp_estim_node, self.output_node, [("wm_file", "response")]),
                (fod_estim_node, self.output_node, [("wm_odf", "fod")]),
                (tck_gen_node, self.output_node, [("out_file", "tracts")]),
                (conn_gen_node, self.output_node, [("out_file", "connectomes")]),
                (self.input_node, print_end_message, [("dwi_file", "in_bids_or_caps_file")]),
                (conn_gen_node, print_end_message, [("out_file", "final_file")]),
            ]
        )
Example #15
def dwi_flirt(name='DWICoregistration', excl_nodiff=False, flirt_param={}):
    """
    Generates a workflow for linear registration of dwi volumes
    """
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['reference', 'in_file', 'ref_mask', 'in_xfms', 'in_bval']),
                        name='inputnode')

    initmat = pe.Node(niu.Function(
        input_names=['in_bval', 'in_xfms', 'excl_nodiff'],
        output_names=['init_xfms'],
        function=_checkinitxfm),
                      name='InitXforms')
    initmat.inputs.excl_nodiff = excl_nodiff
    dilate = pe.Node(fsl.maths.MathsCommand(nan2zeros=True,
                                            args='-kernel sphere 5 -dilM'),
                     name='MskDilate')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias')
    enhb0 = pe.Node(niu.Function(
        input_names=['in_file', 'in_mask', 'clip_limit'],
        output_names=['out_file'],
        function=enhance),
                    name='B0Equalize')
    enhb0.inputs.clip_limit = 0.015
    enhdw = pe.MapNode(niu.Function(input_names=['in_file', 'in_mask'],
                                    output_names=['out_file'],
                                    function=enhance),
                       name='DWEqualize',
                       iterfield=['in_file'])
    flirt = pe.MapNode(fsl.FLIRT(**flirt_param),
                       name='CoRegistration',
                       iterfield=['in_file', 'in_matrix_file'])
    apply_xfms = pe.MapNode(fsl.ApplyXFM(apply_xfm=True,
                                         interp='spline',
                                         bgvalue=0),
                            name='ApplyXFMs',
                            iterfield=['in_file', 'in_matrix_file'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0),
                       iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'out_xfms']),
        name='outputnode')
    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, split, [('in_file', 'in_file')]),
                (inputnode, dilate, [('ref_mask', 'in_file')]),
                (inputnode, enhb0, [('ref_mask', 'in_mask')]),
                (inputnode, initmat, [('in_xfms', 'in_xfms'),
                                      ('in_bval', 'in_bval')]),
                (inputnode, n4, [('reference', 'input_image'),
                                 ('ref_mask', 'mask_image')]),
                (dilate, flirt, [('out_file', 'ref_weight'),
                                 ('out_file', 'in_weight')]),
                (n4, enhb0, [('output_image', 'in_file')]),
                (split, enhdw, [('out_files', 'in_file')]),
                (split, apply_xfms, [('out_files', 'in_file')]),
                (dilate, enhdw, [('out_file', 'in_mask')]),
                (enhb0, flirt, [('out_file', 'reference')]),
                (enhb0, apply_xfms, [('out_file', 'reference')]),
                (enhdw, flirt, [('out_file', 'in_file')]),
                (initmat, flirt, [('init_xfms', 'in_matrix_file')]),
                (flirt, apply_xfms, [('out_matrix_file', 'in_matrix_file')]),
                (apply_xfms, thres, [('out_file', 'in_file')]),
                (thres, merge, [('out_file', 'in_files')]),
                (merge, outputnode, [('merged_file', 'out_file')]),
                (flirt, outputnode, [('out_matrix_file', 'out_xfms')])])
    return wf
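# Usage sketch (hypothetical paths): volume-wise co-registration of a DWI series
# to its b0 reference; flirt_param is forwarded to fsl.FLIRT, and in_xfms can
# optionally seed per-volume initial matrices.
hmc = dwi_flirt(name='HeadMotionCorrection', flirt_param={'dof': 6, 'cost': 'mutualinfo'})
hmc.inputs.inputnode.in_file = 'dwi.nii.gz'
hmc.inputs.inputnode.reference = 'b0_ref.nii.gz'
hmc.inputs.inputnode.ref_mask = 'b0_brain_mask.nii.gz'
hmc.inputs.inputnode.in_bval = 'dwi.bval'
hmc.run()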
def susceptibility_distortion_correction_using_t1(
        name='susceptibility_distortion_correction_using_t1'):
    """
    Susceptibility distortion correction using the T1w image.

    This workflow allows to correct for echo-planar induced susceptibility
    artifacts without fieldmap (e.g. ADNI Database) by elastically register
    DWIs to their respective baseline T1-weighted structural scans using an
    inverse consistent registration algorithm with a mutual information cost
    function (SyN algorithm).

    Args:
        name (Optional[str]): Name of the workflow.

    Inputnode:
        in_t1 (str): T1w image.
        in_dwi (str): DWI dataset

    Outputnode:
        out_dwi (str): Corrected DWI dataset
        out_warp (str): Out warp allowing DWI to T1 registration and
            susceptibilty induced artifacts correction
        out_b0_to_t1_rigid_body_matrix (str): B0 to T1 image FLIRT rigid body
            FSL coregistration matrix
        out_t1_to_b0_rigid_body_matrix (str): T1 to B0 image FLIRT rigid body
            FSL coregistration matrix
        out_t1_coregistered_to_b0 (str): T1 image rigid body coregistered to
            the B0 image
        out_b0_to_t1_syn_deformation_field (str): B0 to T1 image ANTs SyN
            ITK warp
        out_b0_to_t1_affine_matrix (str): B0 to T1 image ANTs affine ITK
            coregistration matrix

    References:
      .. Nir et al. (Neurobiology of Aging 2015): Connectivity network measures
        predict volumetric atrophy in mild cognitive impairment

      .. Leow et al. (IEEE Trans Med Imaging 2007): Statistical Properties of
        Jacobian Maps and the Realization of Unbiased Large Deformation
        Nonlinear Image Registration


    Returns:
        The workflow

    Example:
        >>> epi = susceptibility_distortion_correction_using_t1()
        >>> epi.inputs.inputnode.in_dwi = 'dwi.nii'
        >>> epi.inputs.inputnode.in_t1 = 'T1w.nii'
        >>> epi.run() # doctest: +SKIP
    """
    import nipype
    from distutils.version import LooseVersion
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.fsl as fsl
    import clinica.pipelines.dwi_preprocessing_using_t1.dwi_preprocessing_using_t1_utils as utils

    def expend_matrix_list(in_matrix, in_bvec):
        import numpy as np

        bvecs = np.loadtxt(in_bvec).T
        out_matrix_list = [in_matrix]

        out_matrix_list = out_matrix_list * len(bvecs)

        return out_matrix_list

    def rotate_bvecs(in_bvec, in_matrix):
        """
        Rotates the input bvec file accordingly with a list of matrices.
        .. note:: the input affine matrix transforms points in the destination
          image to their corresponding coordinates in the original image.
          Therefore, this matrix should be inverted first, as we want to know
          the target position of :math:`\\vec{r}`.
        """
        import os
        import numpy as np

        name, fext = os.path.splitext(os.path.basename(in_bvec))
        if fext == '.gz':
            name, _ = os.path.splitext(name)
        out_file = os.path.abspath('%s_rotated.bvec' % name)
        # bvec files store one gradient direction per column, hence the transpose
        bvecs = np.loadtxt(in_bvec).T
        new_bvecs = []

        if len(bvecs) != len(in_matrix):
            raise RuntimeError(('Number of b-vectors (%d) and rotation '
                                'matrices (%d) should match.') %
                               (len(bvecs), len(in_matrix)))

        for bvec, mat in zip(bvecs, in_matrix):
            if np.all(bvec == 0.0):
                new_bvecs.append(bvec)
            else:
                invrot = np.linalg.inv(np.loadtxt(mat))[:3, :3]
                newbvec = invrot.dot(bvec)
                new_bvecs.append((newbvec / np.linalg.norm(newbvec)))

        np.savetxt(out_file, np.array(new_bvecs).T, fmt='%0.15f')
        return out_file

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_t1', 'in_dwi', 'in_bvec']),
        name='inputnode')

    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    pick_ref = pe.Node(niu.Select(), name='Pick_b0')
    pick_ref.inputs.index = [0]

    flirt_b0_to_t1 = pe.Node(interface=fsl.FLIRT(dof=6),
                            name='flirt_b0_to_t1')
    flirt_b0_to_t1.inputs.interp = "spline"
    flirt_b0_to_t1.inputs.cost = 'normmi'
    flirt_b0_to_t1.inputs.cost_func = 'normmi'

    # Older nipype releases only provide fsl.ApplyXfm; compare versions numerically
    if LooseVersion(nipype.__version__) < LooseVersion('0.13.0'):
        apply_xfm = pe.Node(interface=fsl.ApplyXfm(),
                            name='apply_xfm')
    else:
        apply_xfm = pe.Node(interface=fsl.ApplyXFM(),
                            name='apply_xfm')
    apply_xfm.inputs.apply_xfm = True

    expend_matrix = pe.Node(
        interface=niu.Function(input_names=['in_matrix', 'in_bvec'],
                               output_names=['out_matrix_list'],
                               function=expend_matrix_list),
        name='expend_matrix')

    rot_bvec = pe.Node(niu.Function(input_names=['in_matrix', 'in_bvec'],
                                    output_names=['out_file'],
                                    function=rotate_bvecs),
                       name='Rotate_Bvec')

    ants_registration_syn_quick = pe.Node(interface=niu.Function(
        input_names=['fix_image', 'moving_image'],
        output_names=['image_warped', 'affine_matrix',
                      'warp', 'inverse_warped', 'inverse_warp'],
        function=utils.ants_registration_syn_quick),
        name='ants_registration_syn_quick')

    merge_transform = pe.Node(niu.Merge(2), name='MergeTransforms')

    combine_warp = pe.Node(interface=niu.Function(
        input_names=['in_file', 'transforms_list', 'reference'],
        output_names=['out_warp'],
        function=utils.ants_combine_transform), name='combine_warp')

    coeffs = pe.Node(fsl.WarpUtils(out_format='spline'), name='CoeffComp')

    fsl_transf = pe.Node(fsl.WarpUtils(out_format='field'),
                         name='fsl_transf')

    warp_epi = pe.Node(fsl.ConvertWarp(), name='warp_epi')

    apply_warp = pe.MapNode(interface=fsl.ApplyWarp(),
                            iterfield=['in_file'], name='apply_warp')
    apply_warp.inputs.interp = 'spline'

    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')

    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['dwi_to_t1_coregistration_matrix',
                'itk_dwi_t1_coregistration_matrix',
                'epi_correction_deformation_field',
                'epi_correction_affine_transform',
                'merge_epi_transform', 'out_dwi', 'out_warp',
                'out_bvec']), name='outputnode')

    wf = pe.Workflow(name=name)

    wf.connect([
        (inputnode, split, [('in_dwi', 'in_file')]),  # noqa

        (split, pick_ref, [('out_files', 'inlist')]),  # noqa

        (pick_ref,  flirt_b0_to_t1, [('out',     'in_file')]),  # noqa
        (inputnode, flirt_b0_to_t1, [('in_t1', 'reference')]),  # noqa

        (flirt_b0_to_t1, expend_matrix, [('out_matrix_file', 'in_matrix')]),  # noqa
        (inputnode,      expend_matrix, [('in_bvec',           'in_bvec')]),  # noqa

        (inputnode,     rot_bvec, [('in_bvec', 'in_bvec')]),  # noqa
        (expend_matrix, rot_bvec, [('out_matrix_list', 'in_matrix')]),  # noqa
        
        (inputnode,                   ants_registration_syn_quick, [('in_t1',       'fix_image')]),  # noqa
        (flirt_b0_to_t1,              ants_registration_syn_quick, [('out_file', 'moving_image')]),  # noqa

        (ants_registration_syn_quick, merge_transform, [('affine_matrix', 'in2'),  # noqa
                                                        ('warp',          'in1')]),  # noqa

        (flirt_b0_to_t1,  combine_warp, [('out_file',    'in_file')]),  # noqa
        (merge_transform, combine_warp, [('out', 'transforms_list')]),  # noqa
        (inputnode,       combine_warp, [('in_t1',     'reference')]),  # noqa

        (inputnode,    coeffs, [('in_t1',  'reference')]),  # noqa
        (combine_warp, coeffs, [('out_warp', 'in_file')]),  # noqa

        (coeffs,    fsl_transf, [('out_file', 'in_file')]),  # noqa
        (inputnode, fsl_transf, [('in_t1',  'reference')]),  # noqa

        (inputnode,      warp_epi, [('in_t1',        'reference')]),  # noqa
        (flirt_b0_to_t1, warp_epi, [('out_matrix_file', 'premat')]),  # noqa
        (fsl_transf,     warp_epi, [('out_file',         'warp1')]),  # noqa

        (warp_epi,  apply_warp, [('out_file', 'field_file')]),  # noqa
        (split,     apply_warp, [('out_files',   'in_file')]),  # noqa
        (inputnode, apply_warp, [('in_t1',      'ref_file')]),  # noqa

        (apply_warp, thres, [('out_file', 'in_file')]),  # noqa

        (thres, merge, [('out_file', 'in_files')]),  # noqa
        # Outputnode
        (merge,                       outputnode, [('merged_file',                             'out_dwi')]),  # noqa
        (flirt_b0_to_t1,              outputnode, [('out_matrix_file', 'dwi_to_t1_coregistration_matrix')]),  # noqa
        (ants_registration_syn_quick, outputnode, [('warp',             'epi_correction_deformation_field'),  # noqa
                                                   ('affine_matrix',   'epi_correction_affine_transform')]),  # noqa
        (warp_epi,                    outputnode, [('out_file',                               'out_warp')]),  # noqa
        (rot_bvec,                    outputnode, [('out_file',                               'out_bvec')]),  # noqa
    ])
    return wf
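# Hedged standalone sketch (not part of the source above): warp_epi and
# apply_warp boil down to composing the rigid b0->T1 matrix with the
# non-linear EPI-correction field via fsl.ConvertWarp, then resampling each
# DWI volume with fsl.ApplyWarp. Paths are placeholders and must point to
# existing files, since nipype validates inputs when they are set.
from nipype.interfaces import fsl

convwarp = fsl.ConvertWarp(reference='sub-01_T1w.nii.gz',
                           premat='b0_to_T1w.mat',
                           warp1='epi_correction_field.nii.gz',
                           out_file='full_warp.nii.gz')
convwarp.run()

applywarp = fsl.ApplyWarp(in_file='dwi_vol0000.nii.gz',
                          ref_file='sub-01_T1w.nii.gz',
                          field_file='full_warp.nii.gz',
                          interp='spline',
                          out_file='dwi_vol0000_corrected.nii.gz')
applywarp.run()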
Example #17
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline.

        Notes:
            - If the `FSLOUTPUTTYPE` environment variable is not set, `nipype`
            defaults to NIFTI.

        Todo:
            - [x] Detect space automatically.
            - [ ] Allow for custom parcellations (See TODOs in utils).

        """
        import nipype.interfaces.utility as niu
        import nipype.pipeline.engine as npe
        import nipype.interfaces.fsl as fsl
        import nipype.interfaces.freesurfer as fs
        import nipype.interfaces.mrtrix3 as mrtrix3
        from clinica.lib.nipype.interfaces.mrtrix.preprocess import MRTransform
        from clinica.lib.nipype.interfaces.mrtrix3.reconst import EstimateFOD
        from clinica.lib.nipype.interfaces.mrtrix3.tracking import Tractography
        from clinica.utils.exceptions import ClinicaException, ClinicaCAPSError
        from clinica.utils.stream import cprint
        import clinica.pipelines.dwi_connectome.dwi_connectome_utils as utils
        from clinica.utils.mri_registration import convert_flirt_transformation_to_mrtrix_transformation

        # cprint('Building the pipeline...')

        # Nodes
        # =====

        # B0 Extraction (only if space=b0)
        # -------------
        split_node = npe.Node(name="Reg-0-DWI-B0Extraction",
                              interface=fsl.Split())
        split_node.inputs.output_type = "NIFTI_GZ"
        split_node.inputs.dimension = 't'
        select_node = npe.Node(name="Reg-0-DWI-B0Selection",
                               interface=niu.Select())
        select_node.inputs.index = 0

        # B0 Brain Extraction (only if space=b0)
        # -------------------
        mask_node = npe.Node(name="Reg-0-DWI-BrainMasking",
                             interface=fsl.ApplyMask())
        mask_node.inputs.output_type = "NIFTI_GZ"

        # T1-to-B0 Registration (only if space=b0)
        # ---------------------
        t12b0_reg_node = npe.Node(name="Reg-1-T12B0Registration",
                                  interface=fsl.FLIRT(
                                      dof=6,
                                      interp='spline',
                                      cost='normmi',
                                      cost_func='normmi',
                                  ))
        t12b0_reg_node.inputs.output_type = "NIFTI_GZ"

        # MGZ File Conversion (only if space=b0)
        # -------------------
        t1_brain_conv_node = npe.Node(name="Reg-0-T1-T1BrainConvertion",
                                      interface=fs.MRIConvert())
        wm_mask_conv_node = npe.Node(name="Reg-0-T1-WMMaskConvertion",
                                     interface=fs.MRIConvert())

        # WM Transformation (only if space=b0)
        # -----------------
        wm_transform_node = npe.Node(name="Reg-2-WMTransformation",
                                     interface=fsl.ApplyXFM())
        wm_transform_node.inputs.apply_xfm = True

        # Nodes Generation
        # ----------------
        label_convert_node = npe.MapNode(
            name="0-LabelsConversion",
            iterfield=['in_file', 'in_config', 'in_lut', 'out_file'],
            interface=mrtrix3.LabelConvert())
        label_convert_node.inputs.in_config = utils.get_conversion_luts()
        label_convert_node.inputs.in_lut = utils.get_luts()

        # FSL flirt matrix to MRtrix matrix Conversion (only if space=b0)
        # --------------------------------------------
        fsl2mrtrix_conv_node = npe.Node(
            name='Reg-2-FSL2MrtrixConversion',
            interface=niu.Function(
                input_names=[
                    'in_source_image', 'in_reference_image', 'in_flirt_matrix',
                    'name_output_matrix'
                ],
                output_names=['out_mrtrix_matrix'],
                function=convert_flirt_transformation_to_mrtrix_transformation)
        )

        # Parc. Transformation (only if space=b0)
        # --------------------
        parc_transform_node = npe.MapNode(
            name="Reg-2-ParcTransformation",
            iterfield=["in_files", "out_filename"],
            interface=MRTransform())

        # Response Estimation
        # -------------------
        resp_estim_node = npe.Node(name="1a-ResponseEstimation",
                                   interface=mrtrix3.ResponseSD())
        resp_estim_node.inputs.algorithm = 'tournier'

        # FOD Estimation
        # --------------
        fod_estim_node = npe.Node(name="1b-FODEstimation",
                                  interface=EstimateFOD())
        fod_estim_node.inputs.algorithm = 'csd'

        # Tracts Generation
        # -----------------
        tck_gen_node = npe.Node(name="2-TractsGeneration",
                                interface=Tractography())
        tck_gen_node.inputs.n_tracks = self.parameters['n_tracks']
        tck_gen_node.inputs.algorithm = 'iFOD2'

        # BUG: Info package does not exist
        # from nipype.interfaces.mrtrix3.base import Info
        # from distutils.version import LooseVersion
        #
        # if Info.looseversion() >= LooseVersion("3.0"):
        #     tck_gen_node.inputs.select = self.parameters['n_tracks']
        # elif Info.looseversion() <= LooseVersion("0.4"):
        #     tck_gen_node.inputs.n_tracks = self.parameters['n_tracks']
        # else:
        #     from clinica.utils.exceptions import ClinicaException
        #     raise ClinicaException("Your MRtrix version is not supported.")

        # Connectome Generation
        # ---------------------
        # only the parcellation and output filename should be iterable, the tck
        # file stays the same.
        conn_gen_node = npe.MapNode(name="3-ConnectomeGeneration",
                                    iterfield=['in_parc', 'out_file'],
                                    interface=mrtrix3.BuildConnectome())

        # Print begin message
        # -------------------
        print_begin_message = npe.MapNode(interface=niu.Function(
            input_names=['in_bids_or_caps_file'],
            function=utils.print_begin_pipeline),
                                          iterfield='in_bids_or_caps_file',
                                          name='WriteBeginMessage')

        # Print end message
        # -----------------
        print_end_message = npe.MapNode(interface=niu.Function(
            input_names=['in_bids_or_caps_file', 'final_file'],
            function=utils.print_end_pipeline),
                                        iterfield=['in_bids_or_caps_file'],
                                        name='WriteEndMessage')

        # CAPS File names Generation
        # --------------------------
        caps_filenames_node = npe.Node(
            name='CAPSFilenamesGeneration',
            interface=niu.Function(input_names='dwi_file',
                                   output_names=self.get_output_fields(),
                                   function=utils.get_caps_filenames))

        # Connections
        # ===========
        # Computation of the diffusion model, tractography & connectome
        # -------------------------------------------------------------
        self.connect([
            (self.input_node, print_begin_message,
             [('dwi_file', 'in_bids_or_caps_file')]),  # noqa
            (self.input_node, caps_filenames_node, [('dwi_file', 'dwi_file')]),
            # Response Estimation
            (self.input_node, resp_estim_node, [('dwi_file', 'in_file')]
             ),  # Preproc. DWI # noqa
            (self.input_node, resp_estim_node,
             [('dwi_brainmask_file', 'in_mask')]),  # B0 brain mask # noqa
            (self.input_node, resp_estim_node, [('grad_fsl', 'grad_fsl')
                                                ]),  # bvecs and bvals # noqa
            (caps_filenames_node, resp_estim_node,
             [('response', 'wm_file')]),  # output response filename # noqa
            # FOD Estimation
            (self.input_node, fod_estim_node, [('dwi_file', 'in_file')]
             ),  # Preproc. DWI # noqa
            (resp_estim_node, fod_estim_node,
             [('wm_file', 'wm_txt')]),  # Response (txt file) # noqa
            (self.input_node, fod_estim_node,
             [('dwi_brainmask_file', 'mask_file')]),  # B0 brain mask # noqa
            (self.input_node, fod_estim_node,
             [('grad_fsl', 'grad_fsl')]),  # bvecs and bvals # noqa
            (caps_filenames_node, fod_estim_node,
             [('fod', 'wm_odf')]),  # output odf filename # noqa
            # Tracts Generation
            (fod_estim_node, tck_gen_node, [('wm_odf', 'in_file')]
             ),  # ODF file # noqa
            (caps_filenames_node, tck_gen_node,
             [('tracts', 'out_file')]),  # output tck filename # noqa
            # Label Conversion
            (self.input_node, label_convert_node, [('atlas_files', 'in_file')]
             ),  # atlas image files # noqa
            (caps_filenames_node, label_convert_node, [
                ('nodes', 'out_file')
            ]),  # converted atlas image filenames # noqa
            # Connectomes Generation
            (tck_gen_node, conn_gen_node, [('out_file', 'in_file')]),  # noqa
            (caps_filenames_node, conn_gen_node, [('connectomes', 'out_file')
                                                  ]),  # noqa
        ])
        # Registration T1-DWI (only if space=b0)
        # -------------------
        if self.parameters['dwi_space'] == 'b0':
            self.connect([
                # MGZ Files Conversion
                (self.input_node, t1_brain_conv_node, [('t1_brain_file',
                                                        'in_file')]),  # noqa
                (self.input_node, wm_mask_conv_node, [('wm_mask_file',
                                                       'in_file')]),  # noqa
                # B0 Extraction
                (self.input_node, split_node, [('dwi_file', 'in_file')]
                 ),  # noqa
                (split_node, select_node, [('out_files', 'inlist')]),  # noqa
                # Masking
                (select_node, mask_node, [('out', 'in_file')]),  # B0 # noqa
                (self.input_node, mask_node,
                 [('dwi_brainmask_file', 'mask_file')]),  # Brain mask # noqa
                # T1-to-B0 Registration
                (t1_brain_conv_node, t12b0_reg_node, [('out_file', 'in_file')]
                 ),  # Brain # noqa
                (mask_node, t12b0_reg_node, [('out_file', 'reference')
                                             ]),  # B0 brain-masked # noqa
                # WM Transformation
                (wm_mask_conv_node, wm_transform_node,
                 [('out_file', 'in_file')]),  # Brain mask # noqa
                (mask_node, wm_transform_node, [('out_file', 'reference')
                                                ]),  # B0 brain-masked # noqa
                (t12b0_reg_node, wm_transform_node, [
                    ('out_matrix_file', 'in_matrix_file')
                ]),  # T1-to-B0 matrix file # noqa
                # FSL flirt matrix to MRtrix matrix Conversion
                (t1_brain_conv_node, fsl2mrtrix_conv_node,
                 [('out_file', 'in_source_image')]),  # noqa
                (mask_node, fsl2mrtrix_conv_node,
                 [('out_file', 'in_reference_image')]),  # noqa
                (t12b0_reg_node, fsl2mrtrix_conv_node,
                 [('out_matrix_file', 'in_flirt_matrix')]),  # noqa
                # Apply registration without resampling on parcellations
                (label_convert_node, parc_transform_node,
                 [('out_file', 'in_files')]),  # noqa
                (fsl2mrtrix_conv_node, parc_transform_node,
                 [('out_mrtrix_matrix', 'linear_transform')]),  # noqa
                (caps_filenames_node, parc_transform_node,
                 [('nodes', 'out_filename')]),  # noqa
            ])
        # Special care for Parcellation & WM mask
        # ---------------------------------------
        if self.parameters['dwi_space'] == 'b0':
            self.connect([
                (wm_transform_node, tck_gen_node, [('out_file', 'seed_image')
                                                   ]),  # noqa
                (parc_transform_node, conn_gen_node, [('out_file', 'in_parc')
                                                      ]),  # noqa
                (parc_transform_node, self.output_node, [('out_file', 'nodes')
                                                         ]),  # noqa
            ])
        elif self.parameters['dwi_space'] == 'T1w':
            self.connect([
                (self.input_node, tck_gen_node, [('wm_mask_file', 'seed_image')
                                                 ]),  # noqa
                (label_convert_node, conn_gen_node, [('out_file', 'in_parc')
                                                     ]),  # noqa
                (label_convert_node, self.output_node, [('out_file', 'nodes')
                                                        ]),  # noqa
            ])
        else:
            raise ClinicaCAPSError(
                'Bad preprocessed DWI space. Please check your CAPS '
                'folder.')
        # Outputs
        # -------
        self.connect([
            (resp_estim_node, self.output_node, [('wm_file', 'response')]),
            (fod_estim_node, self.output_node, [('wm_odf', 'fod')]),
            (tck_gen_node, self.output_node, [('out_file', 'tracts')]),
            (conn_gen_node, self.output_node, [('out_file', 'connectomes')]),
            (self.input_node, print_end_message, [('dwi_file',
                                                   'in_bids_or_caps_file')]),
            (conn_gen_node, print_end_message, [('out_file', 'final_file')]),
        ])
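# Hedged standalone sketch (not from Clinica itself): the T1-to-b0 branch above
# is a rigid FLIRT registration whose matrix is then re-used by ApplyXFM on the
# WM mask. File names are placeholders and must exist, since nipype validates
# inputs when they are set.
from nipype.interfaces import fsl

flirt = fsl.FLIRT(in_file='t1_brain.nii.gz', reference='b0_brain.nii.gz',
                  dof=6, cost='normmi', cost_func='normmi', interp='spline',
                  out_matrix_file='t1_to_b0.mat', output_type='NIFTI_GZ')
flirt.run()

applyxfm = fsl.ApplyXFM(in_file='wm_mask.nii.gz', reference='b0_brain.nii.gz',
                        in_matrix_file='t1_to_b0.mat', apply_xfm=True,
                        out_file='wm_mask_in_b0.nii.gz')
applyxfm.run()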
Example #18
def create_native_old_segment_pipe(params_template,
                                   params={},
                                   name="native_old_segment_pipe"):
    """
    Description: Extract the brain using the tissue masks output by SPM's
        old_segment function:

        - Segment the T1 using the given priors;
        - Register the GM, WM and CSF maps back to native space (ApplyXFM with
          the inverse transform);
        - Threshold the GM, WM and CSF maps.

    Params:

    - segment (see `Segment <https://nipype.readthedocs.io/en/0.12.1/\
    interfaces/generated/nipype.interfaces.spm.preprocess.html#segment>`_)
    - threshold_gm, threshold_wm, threshold_csf (see `Threshold \
    <https://nipype.readthedocs.io/en/0.12.1/interfaces/generated/nipype.\
    interfaces.fsl.maths.html#threshold>`_ for arguments) - also available \
    as :ref:`indiv_params <indiv_params>`


    Inputs:

        inputnode:
            T1: T1 file name

            native_T1: T1 file in native space (reference for registration)

            inv_transfo_file: template-to-native transformation matrix

            indiv_params: dict with individual parameters for some nodes

        arguments:
            params_template: dictionary of template tissue priors (GM, WM, CSF)

            params: dictionary of node sub-parameters (from a json file)

            name: pipeline name (default = "native_old_segment_pipe")

    Outputs:

        threshold_gm, threshold_wm, threshold_csf.out_file:
            resp. grey matter, white matter and csf maps in native space,
            after thresholding

    """
    # creating pipeline
    seg_pipe = pe.Workflow(name=name)

    # Creating inputnode
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['T1', 'indiv_params', "native_T1", "inv_transfo_file"]),
                        name='inputnode')

    assert set_spm(), \
        "Error, SPM was not found, cannot run SPM old segment pipeline"

    unzip = pe.Node(interface=niu.Function(input_names=['zipped_file'],
                                           output_names=["unzipped_file"],
                                           function=gunzip),
                    name="unzip")

    seg_pipe.connect(inputnode, 'T1', unzip, 'zipped_file')

    # Segment in to 6 tissues
    segment = NodeParams(spm.Segment(),
                         params=parse_key(params, "segment"),
                         name="old_segment")

    segment.inputs.tissue_prob_maps = [
        params_template["template_gm"], params_template["template_wm"],
        params_template["template_csf"]
    ]

    seg_pipe.connect(unzip, 'unzipped_file', segment, 'data')

    # gm
    register_gm_to_nat = pe.Node(fsl.ApplyXFM(), name="register_gm_to_nat")
    register_gm_to_nat.inputs.output_type = "NIFTI_GZ"  # for SPM segment

    seg_pipe.connect(segment, 'native_gm_image', register_gm_to_nat, 'in_file')

    seg_pipe.connect(inputnode, 'native_T1', register_gm_to_nat, 'reference')

    seg_pipe.connect(inputnode, 'inv_transfo_file', register_gm_to_nat,
                     "in_matrix_file")

    # wm
    register_wm_to_nat = pe.Node(fsl.ApplyXFM(), name="register_wm_to_nat")
    register_wm_to_nat.inputs.output_type = "NIFTI_GZ"  # for SPM segment

    seg_pipe.connect(segment, 'native_wm_image', register_wm_to_nat, 'in_file')

    seg_pipe.connect(inputnode, 'native_T1', register_wm_to_nat, 'reference')

    seg_pipe.connect(inputnode, 'inv_transfo_file', register_wm_to_nat,
                     "in_matrix_file")

    # csf
    register_csf_to_nat = pe.Node(fsl.ApplyXFM(), name="register_csf_to_nat")
    register_csf_to_nat.inputs.output_type = "NIFTI_GZ"  # for SPM segment

    seg_pipe.connect(segment, 'native_csf_image', register_csf_to_nat,
                     'in_file')

    seg_pipe.connect(inputnode, 'native_T1', register_csf_to_nat, 'reference')

    seg_pipe.connect(inputnode, 'inv_transfo_file', register_csf_to_nat,
                     "in_matrix_file")

    # threshold_gm
    threshold_gm = NodeParams(fsl.Threshold(),
                              params=parse_key(params, "threshold_gm"),
                              name="threshold_gm")

    seg_pipe.connect(register_gm_to_nat, 'out_file', threshold_gm, 'in_file')

    seg_pipe.connect(inputnode, ('indiv_params', parse_key, "threshold_gm"),
                     threshold_gm, "indiv_params")

    # threshold_wm
    threshold_wm = NodeParams(fsl.Threshold(),
                              params=parse_key(params, "threshold_wm"),
                              name="threshold_wm")

    seg_pipe.connect(register_wm_to_nat, 'out_file', threshold_wm, 'in_file')

    seg_pipe.connect(inputnode, ('indiv_params', parse_key, "threshold_wm"),
                     threshold_wm, "indiv_params")

    # threshold_csf
    threshold_csf = NodeParams(fsl.Threshold(),
                               params=parse_key(params, "threshold_csf"),
                               name="threshold_csf")

    seg_pipe.connect(register_csf_to_nat, 'out_file', threshold_csf, 'in_file')

    seg_pipe.connect(inputnode, ('indiv_params', parse_key, "threshold_csf"),
                     threshold_csf, "indiv_params")

    # outputnode
    outputnode = pe.Node(niu.IdentityInterface(
        fields=["threshold_gm", "threshold_wm", "threshold_csf"]),
                         name='outputnode')

    seg_pipe.connect(threshold_gm, 'out_file', outputnode, 'threshold_gm')
    seg_pipe.connect(threshold_wm, 'out_file', outputnode, 'threshold_wm')
    seg_pipe.connect(threshold_csf, 'out_file', outputnode, 'threshold_csf')

    return seg_pipe
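# Hedged usage sketch: the sub-parameter dicts consumed via parse_key()
# usually come from a JSON file keyed by node name; the keys below mirror the
# nodes in this pipeline, while the values (thresholds, template paths) are
# placeholders to adapt. Building the pipeline also requires SPM/Matlab to be
# configured (set_spm() is asserted above), so the call is left commented.
example_params_template = {
    "template_gm": "/templates/gm_prior.nii.gz",
    "template_wm": "/templates/wm_prior.nii.gz",
    "template_csf": "/templates/csf_prior.nii.gz",
}

example_params = {
    "threshold_gm": {"thresh": 0.5},
    "threshold_wm": {"thresh": 0.5},
    "threshold_csf": {"thresh": 0.5},
}

# seg_pipe_demo = create_native_old_segment_pipe(example_params_template,
#                                                params=example_params)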
Example #19
def create_compcor_workflow(name='compcor'):
    """ Creates A/T compcor workflow. """

    input_node = pe.Node(interface=IdentityInterface(fields=[
        'in_file', 'fast_files', 'highres2epi_mat', 'n_comp_tcompcor',
        'n_comp_acompcor', 'output_directory', 'sub_id'
    ]),
                         name='inputspec')

    output_node = pe.Node(interface=IdentityInterface(
        fields=['tcompcor_file', 'acompcor_file', 'epi_mask']),
                          name='outputspec')

    extract_task = pe.MapNode(interface=Extract_task,
                              iterfield=['in_file'],
                              name='extract_task')

    rename_acompcor = pe.MapNode(interface=Rename(
        format_string='task-%(task)s_acompcor.tsv', keepext=True),
                                 iterfield=['task', 'in_file'],
                                 name='rename_acompcor')

    datasink = pe.Node(DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    average_func = pe.MapNode(interface=fsl.maths.MeanImage(dimension='T'),
                              name='average_func',
                              iterfield=['in_file'])

    epi_mask = pe.MapNode(interface=fsl.BET(frac=.3,
                                            mask=True,
                                            no_output=True,
                                            robust=True),
                          iterfield=['in_file'],
                          name='epi_mask')

    wm2epi = pe.MapNode(fsl.ApplyXFM(interp='nearestneighbour'),
                        iterfield=['reference'],
                        name='wm2epi')

    csf2epi = pe.MapNode(fsl.ApplyXFM(interp='nearestneighbour'),
                         iterfield=['reference'],
                         name='csf2epi')

    erode_csf = pe.MapNode(interface=Erode_mask,
                           name='erode_csf',
                           iterfield=['epi_mask', 'in_file'])
    erode_csf.inputs.erosion_mm = 0
    erode_csf.inputs.epi_mask_erosion_mm = 30

    erode_wm = pe.MapNode(interface=Erode_mask,
                          name='erode_wm',
                          iterfield=['epi_mask', 'in_file'])

    erode_wm.inputs.erosion_mm = 6
    erode_wm.inputs.epi_mask_erosion_mm = 10

    merge_wm_and_csf_masks = pe.MapNode(Merge(2),
                                        name='merge_wm_and_csf_masks',
                                        iterfield=['in1', 'in2'])

    # This should be fit on the 30mm eroded mask from CSF
    tcompcor = pe.MapNode(TCompCor(components_file='tcompcor_comps.txt'),
                          iterfield=['realigned_file', 'mask_files'],
                          name='tcompcor')

    # WM + CSF mask
    acompcor = pe.MapNode(ACompCor(components_file='acompcor_comps.txt',
                                   merge_method='union'),
                          iterfield=['realigned_file', 'mask_files'],
                          name='acompcor')

    compcor_wf = pe.Workflow(name=name)
    compcor_wf.connect(input_node, 'in_file', extract_task, 'in_file')
    compcor_wf.connect(extract_task, 'task_name', rename_acompcor, 'task')
    compcor_wf.connect(acompcor, 'components_file', rename_acompcor, 'in_file')

    compcor_wf.connect(input_node, 'sub_id', datasink, 'container')
    compcor_wf.connect(input_node, 'output_directory', datasink,
                       'base_directory')

    compcor_wf.connect(input_node, ('fast_files', pick_wm), wm2epi, 'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', wm2epi, 'reference')
    compcor_wf.connect(input_node, 'highres2epi_mat', wm2epi, 'in_matrix_file')

    compcor_wf.connect(input_node, ('fast_files', pick_csf), csf2epi,
                       'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', csf2epi, 'reference')
    compcor_wf.connect(input_node, 'highres2epi_mat', csf2epi,
                       'in_matrix_file')

    compcor_wf.connect(input_node, 'n_comp_tcompcor', tcompcor,
                       'num_components')
    compcor_wf.connect(input_node, 'n_comp_acompcor', acompcor,
                       'num_components')

    compcor_wf.connect(input_node, 'in_file', average_func, 'in_file')
    compcor_wf.connect(average_func, 'out_file', epi_mask, 'in_file')
    compcor_wf.connect(epi_mask, 'mask_file', erode_csf, 'epi_mask')
    compcor_wf.connect(epi_mask, 'mask_file', erode_wm, 'epi_mask')

    compcor_wf.connect(wm2epi, 'out_file', erode_wm, 'in_file')
    compcor_wf.connect(csf2epi, 'out_file', erode_csf, 'in_file')

    compcor_wf.connect(erode_wm, 'roi_eroded', merge_wm_and_csf_masks, 'in1')
    compcor_wf.connect(erode_csf, 'roi_eroded', merge_wm_and_csf_masks, 'in2')
    compcor_wf.connect(merge_wm_and_csf_masks, 'out', acompcor, 'mask_files')

    compcor_wf.connect(input_node, 'in_file', acompcor, 'realigned_file')
    compcor_wf.connect(input_node, 'in_file', tcompcor, 'realigned_file')
    compcor_wf.connect(erode_csf, 'epi_mask_eroded', tcompcor, 'mask_files')

    #compcor_wf.connect(tcompcor, 'components_file', output_node, 'tcompcor_file')
    #compcor_wf.connect(acompcor, 'components_file', output_node, 'acompcor_file')
    compcor_wf.connect(epi_mask, 'mask_file', output_node, 'epi_mask')

    compcor_wf.connect(rename_acompcor, 'out_file', datasink, 'acompcor_file')

    #compcor_wf.connect(tcompcor, 'components_file', combine_files, 'tcomp')
    #compcor_wf.connect(acompcor, 'components_file', combine_files, 'acomp')
    #compcor_wf.connect(combine_files, 'out_file', datasink, 'confounds')

    return compcor_wf
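# Hedged usage sketch: wiring the workflow's inputspec fields and running it.
# Paths, subject id and component counts are placeholders; in_file and
# fast_files are lists because the inner nodes are MapNodes.
if __name__ == '__main__':
    wf = create_compcor_workflow(name='compcor')
    wf.base_dir = '/scratch/compcor_work'
    wf.inputs.inputspec.in_file = ['/data/sub-01_task-rest_bold.nii.gz']
    wf.inputs.inputspec.fast_files = ['/data/sub-01_T1w_pve_0.nii.gz',
                                      '/data/sub-01_T1w_pve_1.nii.gz',
                                      '/data/sub-01_T1w_pve_2.nii.gz']
    wf.inputs.inputspec.highres2epi_mat = '/data/highres2example_func.mat'
    wf.inputs.inputspec.n_comp_tcompcor = 6
    wf.inputs.inputspec.n_comp_acompcor = 6
    wf.inputs.inputspec.output_directory = '/data/derivatives'
    wf.inputs.inputspec.sub_id = 'sub-01'
    wf.run()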
Example #20
def get_prep_flair_wf(name="prep_flair", omp_nthreads=1):
    wf = Workflow(name=name)

    inputnode = Node(niu.IdentityInterface(fields=[
        "t1w", "t1w_brain", "t1w_brainmask", "t1w_to_MNI_xfm", "flair_file",
        "vent_mask", "wm_mask", "distancemap", "perivent_mask",
        "perivent_mask", "deepWM_mask"
    ]),
                     name='inputnode')

    # t1w -> flair
    flair = Node(fsl.Reorient2Std(), name="flair")
    wf.connect(inputnode, "flair_file", flair, "in_file")

    flair_biascorr = Node(ants.N4BiasFieldCorrection(save_bias=False,
                                                     num_threads=omp_nthreads),
                          name="flair_biascorr")
    wf.connect(flair, "out_file", flair_biascorr, "input_image")

    flirt_t1w_to_flair = Node(fsl.FLIRT(dof=6), name="flirt_t1w_to_flair")
    wf.connect(inputnode, "t1w", flirt_t1w_to_flair, "in_file")
    wf.connect(flair_biascorr, "output_image", flirt_t1w_to_flair, "reference")

    # bring t1w data to flair space
    t1w_brain_flairSp = Node(fsl.ApplyXFM(), name="t1w_brain_flairSp")
    wf.connect(inputnode, "t1w_brain", t1w_brain_flairSp, "in_file")
    wf.connect(flirt_t1w_to_flair, "out_matrix_file", t1w_brain_flairSp,
               "in_matrix_file")
    wf.connect(flair_biascorr, "output_image", t1w_brain_flairSp, "reference")

    brainmask_flairSp = Node(fsl.ApplyXFM(interp="nearestneighbour"),
                             name="brainmask_flairSp")
    wf.connect(inputnode, "t1w_brainmask", brainmask_flairSp, "in_file")
    wf.connect(flirt_t1w_to_flair, "out_matrix_file", brainmask_flairSp,
               "in_matrix_file")
    wf.connect(flair_biascorr, "output_image", brainmask_flairSp, "reference")

    wm_mask_flairSp = Node(fsl.ApplyXFM(interp="nearestneighbour"),
                           name="wm_mask_flairSp")
    wf.connect(inputnode, "wm_mask", wm_mask_flairSp, "in_file")
    wf.connect(flirt_t1w_to_flair, "out_matrix_file", wm_mask_flairSp,
               "in_matrix_file")
    wf.connect(flair_biascorr, "output_image", wm_mask_flairSp, "reference")

    vent_mask_flairSp = Node(fsl.ApplyXFM(interp="nearestneighbour"),
                             name="vent_mask_flairSp")
    wf.connect(inputnode, "vent_mask", vent_mask_flairSp, "in_file")
    wf.connect(flirt_t1w_to_flair, "out_matrix_file", vent_mask_flairSp,
               "in_matrix_file")
    wf.connect(flair_biascorr, "output_image", vent_mask_flairSp, "reference")

    # since there might be some misalignment between the (nn-resampled) brain mask and the distancemap,
    # some distance values can fall outside the FLAIR-space brain mask --> re-threshold to get rid of them;
    # also, the distancemap was created with a dilated brainmask
    distancemap_flairSp_init = Node(fsl.ApplyXFM(),
                                    name="distancemap_flairSp_init")
    wf.connect(inputnode, "distancemap", distancemap_flairSp_init, "in_file")
    wf.connect(flirt_t1w_to_flair, "out_matrix_file", distancemap_flairSp_init,
               "in_matrix_file")
    wf.connect(flair_biascorr, "output_image", distancemap_flairSp_init,
               "reference")

    distancemap_flairSp = Node(fsl.ApplyMask(), name="distancemap_flairSp")
    wf.connect(distancemap_flairSp_init, "out_file", distancemap_flairSp,
               "in_file")
    wf.connect(brainmask_flairSp, "out_file", distancemap_flairSp, "mask_file")

    perivent_mask_flairSp = Node(fsl.ApplyXFM(interp="nearestneighbour"),
                                 name="perivent_mask_flairSp")
    wf.connect(inputnode, "perivent_mask", perivent_mask_flairSp, "in_file")
    wf.connect(flirt_t1w_to_flair, "out_matrix_file", perivent_mask_flairSp,
               "in_matrix_file")
    wf.connect(flair_biascorr, "output_image", perivent_mask_flairSp,
               "reference")

    deepWM_mask_flairSp = Node(fsl.ApplyXFM(interp="nearestneighbour"),
                               name="deepWM_mask_flairSp")
    wf.connect(inputnode, "deepWM_mask", deepWM_mask_flairSp, "in_file")
    wf.connect(flirt_t1w_to_flair, "out_matrix_file", deepWM_mask_flairSp,
               "in_matrix_file")
    wf.connect(flair_biascorr, "output_image", deepWM_mask_flairSp,
               "reference")

    # MNI
    flair_to_t1w = Node(fsl.ConvertXFM(invert_xfm=True), name="flair_to_t1w")
    wf.connect(flirt_t1w_to_flair, "out_matrix_file", flair_to_t1w, "in_file")

    flair_to_mni = Node(fsl.ConvertXFM(concat_xfm=True), name="flair_to_mni")
    wf.connect(flair_to_t1w, "out_file", flair_to_mni, "in_file")
    wf.connect(inputnode, "t1w_to_MNI_xfm", flair_to_mni, "in_file2")

    flair_mniSp = Node(fsl.ApplyXFM(), name="flair_mniSp")
    wf.connect(flair_biascorr, "output_image", flair_mniSp, "in_file")
    wf.connect(flair_to_mni, "out_file", flair_mniSp, "in_matrix_file")
    flair_mniSp.inputs.reference = fsl.Info.standard_image(
        "MNI152_T1_1mm.nii.gz")

    #
    outputnode = Node(niu.IdentityInterface(fields=[
        "flair_biascorr", "t1w", "t1w_brain", "brain_mask", "brainmask",
        "wm_mask", "vent_mask", "distancemap", "perivent_mask", "deepWM_mask",
        "distancemap", "t1w_to_flair", "flair_mniSp", "flair_to_mni"
    ]),
                      name='outputnode')

    wf.connect(flair_biascorr, "output_image", outputnode, "flair_biascorr")

    wf.connect(t1w_brain_flairSp, "out_file", outputnode, "t1w_brain")
    wf.connect(brainmask_flairSp, "out_file", outputnode, "brainmask")
    wf.connect(wm_mask_flairSp, "out_file", outputnode, "wm_mask")
    wf.connect(vent_mask_flairSp, "out_file", outputnode, "vent_mask")

    wf.connect(distancemap_flairSp, "out_file", outputnode, "distancemap")
    wf.connect(perivent_mask_flairSp, "out_file", outputnode, "perivent_mask")
    wf.connect(deepWM_mask_flairSp, "out_file", outputnode, "deepWM_mask")

    wf.connect(flirt_t1w_to_flair, "out_matrix_file", outputnode,
               "t1w_to_flair")

    wf.connect(flair_mniSp, "out_file", outputnode, "flair_mniSp")
    wf.connect(flair_to_mni, "out_file", outputnode, "flair_to_mni")

    return wf
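# Hedged standalone sketch: the FLAIR-to-MNI chain above is two ConvertXFM
# calls (invert t1w->FLAIR, then concatenate with t1w->MNI) followed by
# ApplyXFM onto the MNI template. File names are placeholders and must exist,
# since nipype validates inputs when they are set.
from nipype.interfaces import fsl

invert = fsl.ConvertXFM(in_file='t1w_to_flair.mat', invert_xfm=True,
                        out_file='flair_to_t1w.mat')
invert.run()

concat = fsl.ConvertXFM(in_file='flair_to_t1w.mat', in_file2='t1w_to_MNI.mat',
                        concat_xfm=True, out_file='flair_to_MNI.mat')
concat.run()

to_mni = fsl.ApplyXFM(in_file='flair_biascorr.nii.gz',
                      in_matrix_file='flair_to_MNI.mat',
                      reference=fsl.Info.standard_image('MNI152_T1_1mm.nii.gz'),
                      apply_xfm=True, out_file='flair_in_MNI.nii.gz')
to_mni.run()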
Example #21
def create_short_preparation_MD_pipe(params, name="short_preparation_MD_pipe"):
    """Description: apply transfo on MD (no reorient so far)

    Processing steps:

    - init coreg b0mean on SS_T2
    - coreg b0mean on T2 using bbr and native_wm
    - apply coreg transfo on MD

    Params:

    Inputs:

        inputnode:

            orig_T2:
                T2 files (from BIDSDataGrabber)

            SS_T2:
                After Skull strip

            MD:
                MD file

            b0mean:
                B0 mean file

            indiv_params (opt):
                dict with individuals parameters for some nodes

        arguments:

            params:
                dictionary of node sub-parameters (from a json file)

            name:
                pipeline name (default = "short_preparation_MD_pipe")

    Outputs:

        outputnode:

            coreg_MD:
                preprocessed MD file with init

            coreg_better_MD:
                preprocessed MD file with init and flirt

    """

    # creating pipeline
    data_preparation_pipe = pe.Workflow(name=name)

    # Creating input node
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['orig_T2', 'SS_T2', 'MD', 'b0mean', 'native_wm_mask']),
                        name='inputnode')

    # init_align_b0mean_on_T2
    init_align_b0mean_on_T2 = pe.Node(fsl.FLIRT(),
                                      name="init_align_b0mean_on_T2")
    init_align_b0mean_on_T2.inputs.dof = 6

    data_preparation_pipe.connect(inputnode, 'orig_T2',
                                  init_align_b0mean_on_T2, 'reference')
    data_preparation_pipe.connect(inputnode, 'b0mean', init_align_b0mean_on_T2,
                                  'in_file')

    # align_b0mean_on_T2
    align_b0mean_on_T2 = pe.Node(fsl.FLIRT(), name="align_b0mean_on_T2")
    align_b0mean_on_T2.inputs.dof = 6
    align_b0mean_on_T2.inputs.cost = "bbr"

    data_preparation_pipe.connect(inputnode, 'SS_T2', align_b0mean_on_T2,
                                  'reference')
    data_preparation_pipe.connect(inputnode, 'b0mean', align_b0mean_on_T2,
                                  'in_file')
    data_preparation_pipe.connect(inputnode, 'native_wm_mask',
                                  align_b0mean_on_T2, 'wm_seg')
    data_preparation_pipe.connect(init_align_b0mean_on_T2, 'out_matrix_file',
                                  align_b0mean_on_T2, 'in_matrix_file')

    # Apply transfo computed on b0 on MD (init)
    align_MD_on_T2_with_b0 = pe.Node(fsl.ApplyXFM(),
                                     name="align_MD_on_T2_with_b0")

    data_preparation_pipe.connect(inputnode, 'SS_T2', align_MD_on_T2_with_b0,
                                  'reference')
    data_preparation_pipe.connect(inputnode, 'MD', align_MD_on_T2_with_b0,
                                  'in_file')
    data_preparation_pipe.connect(init_align_b0mean_on_T2, 'out_matrix_file',
                                  align_MD_on_T2_with_b0, 'in_matrix_file')

    # Apply transfo computed on b0 on MD (second flirt, BBR with WM mask)
    align_better_MD_on_T2_with_b0 = pe.Node(
        fsl.ApplyXFM(), name="align_better_MD_on_T2_with_b0")

    data_preparation_pipe.connect(inputnode, 'SS_T2',
                                  align_better_MD_on_T2_with_b0, 'reference')
    data_preparation_pipe.connect(inputnode, 'MD',
                                  align_better_MD_on_T2_with_b0, 'in_file')
    data_preparation_pipe.connect(align_b0mean_on_T2, 'out_matrix_file',
                                  align_better_MD_on_T2_with_b0,
                                  'in_matrix_file')

    # Creating output node
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['coreg_MD', 'coreg_better_MD']),
        name='outputnode')

    data_preparation_pipe.connect(align_MD_on_T2_with_b0, 'out_file',
                                  outputnode, 'coreg_MD')

    data_preparation_pipe.connect(align_better_MD_on_T2_with_b0, 'out_file',
                                  outputnode, 'coreg_better_MD')

    return data_preparation_pipe
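# Hedged standalone sketch: the BBR refinement above corresponds to a FLIRT
# call with cost='bbr', a white-matter segmentation, an initial matrix and
# (typically) FSL's bbr.sch schedule. Paths are placeholders and must exist;
# the schedule location assumes a standard FSL installation.
import os
from nipype.interfaces import fsl

bbr = fsl.FLIRT(in_file='b0mean.nii.gz', reference='SS_T2.nii.gz',
                dof=6, cost='bbr', wm_seg='native_wm_mask.nii.gz',
                in_matrix_file='init_b0_to_T2.mat',
                schedule=os.path.join(os.environ['FSLDIR'],
                                      'etc', 'flirtsch', 'bbr.sch'),
                out_matrix_file='b0_to_T2_bbr.mat')
bbr.run()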
Example #22
def create_full_spm_subpipes(params_template,
                             params={},
                             name='full_spm_subpipes'):
    """
    """

    print("Full pipeline name: ", name)

    # Creating pipeline
    seg_pipe = pe.Workflow(name=name)

    # Creating input node
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['list_T1', 'indiv_params']),
        name='inputnode')

    # preprocessing
    data_preparation_pipe = create_short_preparation_pipe(
        params=parse_key(params, "short_preparation_pipe"))

    seg_pipe.connect(inputnode, 'list_T1', data_preparation_pipe,
                     'inputnode.list_T1')
    seg_pipe.connect(inputnode, 'list_T1', data_preparation_pipe,
                     'inputnode.list_T2')
    seg_pipe.connect(inputnode, 'indiv_params', data_preparation_pipe,
                     'inputnode.indiv_params')

    # Bias correction of cropped images
    debias = NodeParams(T1xT2BiasFieldCorrection(),
                        params=parse_key(params, "debias"),
                        name='debias')

    seg_pipe.connect(data_preparation_pipe, 'outputnode.preproc_T1', debias,
                     't1_file')
    seg_pipe.connect(data_preparation_pipe, 'outputnode.preproc_T1', debias,
                     't2_file')
    seg_pipe.connect(data_preparation_pipe, 'bet_crop.mask_file', debias, 'b')
    seg_pipe.connect(inputnode, ('indiv_params', parse_key, "debias"), debias,
                     'indiv_params')

    # Iterative registration to the INIA19 template
    reg = NodeParams(IterREGBET(), params=parse_key(params, "reg"), name='reg')
    reg.inputs.refb_file = params_template["template_brain"]
    seg_pipe.connect(debias, 't1_debiased_file', reg, 'inw_file')
    seg_pipe.connect(debias, 't1_debiased_brain_file', reg, 'inb_file')
    seg_pipe.connect(inputnode, ('indiv_params', parse_key, "reg"), reg,
                     'indiv_params')

    # Subject to _template (ants)
    nonlin_reg = NodeParams(ants.RegistrationSynQuick(),
                            params=parse_key(params, "nonlin_reg"),
                            name='nonlin_reg')
    nonlin_reg.inputs.fixed_image = params_template["template_brain"]
    seg_pipe.connect(reg, "warp_file", nonlin_reg, "moving_image")

    # Transform T1 (fsl)
    transform_msk = NodeParams(fsl.ApplyXFM(),
                               params=parse_key(params, "transform_mask"),
                               name='transform_others')
    seg_pipe.connect(nonlin_reg, "out_matrix", transform_msk, "in_matrix_file")
    seg_pipe.connect(debias, "debiased_mask_file", transform_msk, "in_file")
    seg_pipe.connect(debias, "t1_debiased_file", transform_msk, "reference")

    # Compute brain mask using old_segment of SPM and postprocessing on
    # tissues' masks
    if "old_segment_pipe" in params.keys():

        old_segment_pipe = create_old_segment_pipe(params_template,
                                                   params=parse_key(
                                                       params,
                                                       "old_segment_pipe"))

        seg_pipe.connect(nonlin_reg, ('warped_image', gunzip),
                         old_segment_pipe, 'inputnode.T1')

        seg_pipe.connect(inputnode, 'indiv_params', old_segment_pipe,
                         'inputnode.indiv_params')

    return seg_pipe
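# Hedged side sketch (not from the source above): nipype's connect() accepts a
# (source_output, callable) tuple, as used with ('warped_image', gunzip); the
# callable is applied to the output value before it reaches the destination
# input. A minimal self-contained demo of that idiom:
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

def add_suffix(value):
    """Toy modifier applied on the wire between the two nodes."""
    return value + '_checked'

demo_wf = pe.Workflow(name='connect_with_callable_demo')
src = pe.Node(niu.IdentityInterface(fields=['out_string']), name='src')
src.inputs.out_string = 'sub-01'
dst = pe.Node(niu.IdentityInterface(fields=['in_string']), name='dst')
demo_wf.connect(src, ('out_string', add_suffix), dst, 'in_string')
# demo_wf.run() would propagate 'sub-01_checked' into dst.in_string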
Example #23
def create_indnet_workflow(hp_cutoff=100,
                           smoothing=5,
                           smm_threshold=0.5,
                           binarise_threshold=0.5,
                           melodic_seed=None,
                           aggr_aroma=False,
                           name="indnet"):

    indnet = Workflow(name=name)

    # Input node
    inputspec = Node(utility.IdentityInterface(
        fields=['anat_file', 'func_file', 'templates', 'networks']),
                     name='inputspec')

    # T1 skullstrip
    anat_bet = Node(fsl.BET(), name="anat_bet")

    # EPI preprocessing
    func_realignsmooth = create_featreg_preproc(highpass=False,
                                                whichvol='first',
                                                name='func_realignsmooth')
    func_realignsmooth.inputs.inputspec.fwhm = smoothing

    # Transform EPI to MNI space
    func_2mni = create_reg_workflow(name='func_2mni')
    func_2mni.inputs.inputspec.target_image = fsl.Info.standard_image(
        'MNI152_T1_2mm.nii.gz')
    func_2mni.inputs.inputspec.target_image_brain = fsl.Info.standard_image(
        'MNI152_T1_2mm_brain.nii.gz')
    func_2mni.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    # Segmentation of T1
    anat_segmentation = Node(fsl.FAST(output_biascorrected=True),
                             name='anat_segmentation')

    # Transfrom segments to EPI space
    segments_2func = create_segments_2func_workflow(
        threshold=binarise_threshold, name='segments_2func')

    # Transform templates to EPI space
    templates_2func = create_templates_2func_workflow(
        threshold=binarise_threshold, name='templates_2func')

    # Mask network templates with GM
    gm_mask_templates = MapNode(fsl.ImageMaths(op_string='-mul'),
                                iterfield=['in_file2'],
                                name='gm_mask_templates')

    # Mask for ICA-AROMA and statistics
    func_brainmask = Node(fsl.BET(frac=0.3,
                                  mask=True,
                                  no_output=True,
                                  robust=True),
                          name='func_brainmask')

    # Melodic ICA
    if melodic_seed is not None:
        func_melodic = Node(fsl.MELODIC(args='--seed={}'.format(melodic_seed),
                                        out_stats=True),
                            name='func_melodic')

    # ICA-AROMA
    func_aroma = Node(fsl.ICA_AROMA(), name='func_aroma')
    if aggr_aroma:
        func_aroma.inputs.denoise_type = 'aggr'

    else:
        func_aroma.inputs.denoise_type = 'nonaggr'

    # Highpass filter ICA results
    func_highpass = create_highpass_filter(cutoff=hp_cutoff,
                                           name='func_highpass')

    # Calculate mean CSF sgnal
    csf_meansignal = Node(fsl.ImageMeants(), name='csf_meansignal')

    # Calculate mean WM signal
    wm_meansignal = Node(fsl.ImageMeants(), name='wm_meansignal')

    # Calculate mean non-brain signal
    nonbrain_meansignal = create_nonbrain_meansignal(
        name='nonbrain_meansignal')

    # Calculate first Eigenvariates
    firsteigenvariates = MapNode(fsl.ImageMeants(show_all=True, eig=True),
                                 iterfield=['mask'],
                                 name='firsteigenvariates')

    # Combine first eigenvariates and wm/csf/non-brain signals
    regressors = Node(utility.Merge(4), name='regressors')

    # z-transform regressors
    ztransform = MapNode(Ztransform(),
                         iterfield=['in_file'],
                         name='ztransform')

    # Create design matrix
    designmatrix = Node(DesignMatrix(), name='designmatrix')

    # Create contrasts
    contrasts = Node(Contrasts(), name='contrasts')

    # GLM
    glm = Node(fsl.GLM(), name='glm')
    glm.inputs.out_z_name = 'z_stats.nii.gz'
    glm.inputs.demean = True

    # Split z-maps
    zmaps = Node(fsl.Split(), name='zmaps')
    zmaps.inputs.dimension = 't'

    # Spatial Mixture Modelling
    smm = MapNode(fsl.SMM(), iterfield=['spatial_data_file'], name='smm')

    # Transform probability maps to native (anat) space
    actmaps_2anat = MapNode(fsl.ApplyXFM(),
                            iterfield=['in_file'],
                            name='actmaps_2anat')

    # Transform probability maps to MNI space
    actmaps_2mni = MapNode(fsl.ApplyWarp(),
                           iterfield=['in_file'],
                           name='actmaps_2mni')
    actmaps_2mni.inputs.ref_file = fsl.Info.standard_image(
        'MNI152_T1_2mm.nii.gz')

    # Create network masks in native (func) space
    network_masks_func = create_network_masks_workflow(
        name='network_masks_func', smm_threshold=smm_threshold)

    # Create network masks in native (anat) space
    network_masks_anat = create_network_masks_workflow(
        name='network_masks_anat', smm_threshold=smm_threshold)

    # Create network masks in MNI space
    network_masks_mni = create_network_masks_workflow(
        name='network_masks_mni', smm_threshold=smm_threshold)

    # Output node
    outputspec = Node(utility.IdentityInterface(fields=[
        'network_masks_func_main', 'network_masks_func_exclusive',
        'network_masks_anat_main', 'network_masks_anat_exclusive',
        'network_masks_mni_main', 'network_masks_mni_exclusive',
        'preprocessed_func_file', 'preprocessed_anat_file',
        'motion_parameters', 'func2anat_transform', 'anat2mni_transform'
    ]),
                      name='outputspec')

    # Helper functions
    def get_first_item(x):
        try:
            return x[0]
        except (IndexError, TypeError):
            return x

    def get_second_item(x):
        return x[1]

    def get_third_item(x):
        return x[2]

    def get_components(x):
        return [y['components'] for y in x]

    # Connect the nodes

    # anat_bet
    indnet.connect(inputspec, 'anat_file', anat_bet, 'in_file')

    # func_realignsmooth
    indnet.connect(inputspec, 'func_file', func_realignsmooth,
                   'inputspec.func')

    # func_2mni
    indnet.connect(func_realignsmooth,
                   ('outputspec.smoothed_files', get_first_item), func_2mni,
                   'inputspec.source_files')
    indnet.connect(inputspec, 'anat_file', func_2mni,
                   'inputspec.anatomical_image')
    indnet.connect(func_realignsmooth, 'outputspec.reference', func_2mni,
                   'inputspec.mean_image')

    # anat_segmentation
    indnet.connect(anat_bet, 'out_file', anat_segmentation, 'in_files')

    # segments_2func
    indnet.connect(anat_segmentation, 'partial_volume_files', segments_2func,
                   'inputspec.segments')
    indnet.connect(func_2mni, 'outputspec.func2anat_transform', segments_2func,
                   'inputspec.premat')
    indnet.connect(func_realignsmooth, 'outputspec.mean', segments_2func,
                   'inputspec.func_file')

    # templates_2func
    indnet.connect(func_realignsmooth, 'outputspec.mean', templates_2func,
                   'inputspec.func_file')
    indnet.connect(func_2mni, 'outputspec.func2anat_transform',
                   templates_2func, 'inputspec.premat')
    indnet.connect(func_2mni, 'outputspec.anat2target_transform',
                   templates_2func, 'inputspec.warp')
    indnet.connect(inputspec, 'templates', templates_2func,
                   'inputspec.templates')

    # gm_mask_templates
    indnet.connect(segments_2func,
                   ('outputspec.segments_2func_files', get_second_item),
                   gm_mask_templates, 'in_file')
    indnet.connect(templates_2func, 'outputspec.templates_2func_files',
                   gm_mask_templates, 'in_file2')

    # func_brainmask
    indnet.connect(func_realignsmooth, 'outputspec.mean', func_brainmask,
                   'in_file')

    # func_melodic
    if melodic_seed is not None:
        indnet.connect(func_realignsmooth,
                       ('outputspec.smoothed_files', get_first_item),
                       func_melodic, 'in_files')
        indnet.connect(func_brainmask, 'mask_file', func_melodic, 'mask')

    # func_aroma
    indnet.connect(func_realignsmooth,
                   ('outputspec.smoothed_files', get_first_item), func_aroma,
                   'in_file')
    indnet.connect(func_2mni, 'outputspec.func2anat_transform', func_aroma,
                   'mat_file')
    indnet.connect(func_2mni, 'outputspec.anat2target_transform', func_aroma,
                   'fnirt_warp_file')
    indnet.connect(func_realignsmooth,
                   ('outputspec.motion_parameters', get_first_item),
                   func_aroma, 'motion_parameters')
    indnet.connect(func_brainmask, 'mask_file', func_aroma, 'mask')
    if melodic_seed is not None:
        indnet.connect(func_melodic, 'out_dir', func_aroma, 'melodic_dir')

    # func_highpass
    if aggr_aroma:
        indnet.connect(func_aroma, 'aggr_denoised_file', func_highpass,
                       'inputspec.in_file')
    else:
        indnet.connect(func_aroma, 'nonaggr_denoised_file', func_highpass,
                       'inputspec.in_file')

    # csf_meansignal
    indnet.connect(segments_2func,
                   ('outputspec.segments_2func_files', get_first_item),
                   csf_meansignal, 'mask')
    indnet.connect(func_highpass, 'outputspec.filtered_file', csf_meansignal,
                   'in_file')

    # wm_meansignal
    indnet.connect(segments_2func,
                   ('outputspec.segments_2func_files', get_third_item),
                   wm_meansignal, 'mask')
    indnet.connect(func_highpass, 'outputspec.filtered_file', wm_meansignal,
                   'in_file')

    # nonbrain_meansignal
    indnet.connect(inputspec, 'func_file', nonbrain_meansignal,
                   'inputspec.func_file')

    # firsteigenvariates
    indnet.connect(gm_mask_templates, 'out_file', firsteigenvariates, 'mask')
    indnet.connect(func_highpass, 'outputspec.filtered_file',
                   firsteigenvariates, 'in_file')

    # regressors
    indnet.connect(firsteigenvariates, 'out_file', regressors, 'in1')
    indnet.connect(wm_meansignal, 'out_file', regressors, 'in2')
    indnet.connect(csf_meansignal, 'out_file', regressors, 'in3')
    indnet.connect(nonbrain_meansignal, 'outputspec.nonbrain_regressor',
                   regressors, 'in4')

    # ztransform
    indnet.connect(regressors, 'out', ztransform, 'in_file')

    # designmatrix
    indnet.connect(ztransform, 'out_file', designmatrix, 'in_files')

    # contrasts
    indnet.connect(inputspec, ('networks', get_components), contrasts,
                   'in_list')
    indnet.connect(designmatrix, 'out_file', contrasts, 'design')

    # glm
    indnet.connect(designmatrix, 'out_file', glm, 'design')
    indnet.connect(contrasts, 'out_file', glm, 'contrasts')
    indnet.connect(func_brainmask, 'mask_file', glm, 'mask')
    indnet.connect(func_highpass, 'outputspec.filtered_file', glm, 'in_file')

    # zmaps
    indnet.connect(glm, 'out_z', zmaps, 'in_file')

    # smm
    indnet.connect(zmaps, 'out_files', smm, 'spatial_data_file')
    indnet.connect(func_brainmask, 'mask_file', smm, 'mask')

    # actmaps_2anat
    indnet.connect(smm, 'activation_p_map', actmaps_2anat, 'in_file')
    indnet.connect(func_2mni, 'outputspec.func2anat_transform', actmaps_2anat,
                   'in_matrix_file')
    indnet.connect(anat_bet, 'out_file', actmaps_2anat, 'reference')

    # actmaps_2mni
    indnet.connect(smm, 'activation_p_map', actmaps_2mni, 'in_file')
    indnet.connect(templates_2func, 'outputspec.func_2mni_warp', actmaps_2mni,
                   'field_file')

    # network_masks_func
    indnet.connect(smm, 'activation_p_map', network_masks_func,
                   'inputspec.actmaps')
    indnet.connect(inputspec, 'networks', network_masks_func,
                   'inputspec.networks')

    # network_masks_anat
    indnet.connect(actmaps_2anat, 'out_file', network_masks_anat,
                   'inputspec.actmaps')
    indnet.connect(inputspec, 'networks', network_masks_anat,
                   'inputspec.networks')

    # network_masks_mni
    indnet.connect(actmaps_2mni, 'out_file', network_masks_mni,
                   'inputspec.actmaps')
    indnet.connect(inputspec, 'networks', network_masks_mni,
                   'inputspec.networks')

    # output node
    indnet.connect(network_masks_func, 'outputspec.main_masks', outputspec,
                   'network_masks_func_main')
    indnet.connect(network_masks_func, 'outputspec.exclusive_masks',
                   outputspec, 'network_masks_func_exclusive')
    indnet.connect(network_masks_anat, 'outputspec.main_masks', outputspec,
                   'network_masks_anat_main')
    indnet.connect(network_masks_anat, 'outputspec.exclusive_masks',
                   outputspec, 'network_masks_anat_exclusive')
    indnet.connect(network_masks_mni, 'outputspec.main_masks', outputspec,
                   'network_masks_mni_main')
    indnet.connect(network_masks_mni, 'outputspec.exclusive_masks', outputspec,
                   'network_masks_mni_exclusive')
    indnet.connect(func_highpass, 'outputspec.filtered_file', outputspec,
                   'preprocessed_func_file')
    indnet.connect(anat_segmentation, 'restored_image', outputspec,
                   'preprocessed_anat_file')
    indnet.connect(func_realignsmooth,
                   ('outputspec.motion_parameters', get_first_item),
                   outputspec, 'motion_parameters')
    indnet.connect(func_2mni, 'outputspec.func2anat_transform', outputspec,
                   'func2anat_transform')
    indnet.connect(func_2mni, 'outputspec.anat2target_transform', outputspec,
                   'anat2mni_transform')

    return indnet
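# Hedged standalone sketch: actmaps_2anat above is a MapNode, i.e. one
# fsl.ApplyXFM run per element of 'in_file' while the matrix and reference
# stay fixed. The pattern in isolation (placeholder paths, which must exist
# because nipype validates inputs when they are set):
import nipype.pipeline.engine as pe
from nipype.interfaces import fsl

actmaps_demo = pe.MapNode(fsl.ApplyXFM(apply_xfm=True),
                          iterfield=['in_file'],
                          name='actmaps_demo')
actmaps_demo.inputs.in_file = ['net1_pmap.nii.gz', 'net2_pmap.nii.gz']
actmaps_demo.inputs.in_matrix_file = 'func2anat.mat'
actmaps_demo.inputs.reference = 'anat_brain.nii.gz'
# actmaps_demo.run() produces one registered map per input file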
Example #24
def create_iterative_register_pipe(template_file,
                                   template_brain_file,
                                   template_mask_file,
                                   gm_prob_file,
                                   wm_prob_file,
                                   csf_prob_file,
                                   n_iter,
                                   name="register_pipe"):
    """
    Registration of template (NMT or other) according to Regis:
    - The iterative FLIRT is between NMT_SS and the subject's anat after a quick
    skull-stripping, but the anat is more and more refined to correspond to the
    brain
    - there is also a FNIRT done once, for comparison of the quality of the
    template on the subject's brain

    Not used anymore: corresponds to the IterREGBET provided by Regis in bash
    and wrapped as the IterREGBET node in nodes/register.py

    #TODO: test if gives the same results as IterREGBET
    """
    register_pipe = pe.Workflow(name=name)

    # creating inputnode
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['anat_file_BET', 'anat_file']),
        name='inputnode')

    # register node
    register = pe.Node(niu.Function(
        input_names=[
            "anat_file", "anat_file_BET", "template_brain_file",
            "template_mask_file", 'n_iter'
        ],
        output_names=["anat_file_brain", "template_to_anat_file"],
        function=interative_flirt),
                       name="register")

    register.inputs.template_brain_file = template_brain_file
    register.inputs.template_mask_file = template_mask_file
    register.inputs.n_iter = n_iter

    register_pipe.connect(inputnode, 'anat_file', register, 'anat_file')
    register_pipe.connect(inputnode, 'anat_file_BET', register,
                          "anat_file_BET")

    # apply transfo over the 3 tissues:
    # gm
    register_gm = pe.Node(fsl.ApplyXFM(), name="register_gm")

    register_gm.inputs.in_file = gm_prob_file
    register_gm.inputs.apply_xfm = True
    register_gm.inputs.interp = "nearestneighbour"
    register_gm.inputs.output_type = "NIFTI"  # for SPM segment

    register_pipe.connect(register, 'anat_file_brain', register_gm,
                          'reference')
    register_pipe.connect(register, 'template_to_anat_file', register_gm,
                          "in_matrix_file")

    # wm
    register_wm = pe.Node(fsl.ApplyXFM(), name="register_wm")

    register_wm.inputs.in_file = wm_prob_file
    register_wm.inputs.apply_xfm = True
    register_wm.inputs.interp = "nearestneighbour"
    register_wm.inputs.output_type = "NIFTI"  # for SPM segment

    register_pipe.connect(register, 'anat_file_brain', register_wm,
                          'reference')
    register_pipe.connect(register, 'template_to_anat_file', register_wm,
                          "in_matrix_file")

    # csf
    register_csf = pe.Node(fsl.ApplyXFM(), name="register_csf")

    register_csf.inputs.in_file = csf_prob_file
    register_csf.inputs.apply_xfm = True
    register_csf.inputs.interp = "nearestneighbour"
    register_csf.inputs.output_type = "NIFTI"  # for SPM segment

    register_pipe.connect(register, 'anat_file_brain', register_csf,
                          'reference')
    register_pipe.connect(register, 'template_to_anat_file', register_csf,
                          "in_matrix_file")

    def return_list(file1, file2, file3):
        return [file1, file2, file3]

    # merge 3 outputs to a list (...)
    merge_3_files = pe.Node(niu.Function(
        input_names=["file1", "file2", "file3"],
        output_names=["list3files"],
        function=return_list),
                            name="merge_3_files")

    register_pipe.connect(register_gm, 'out_file', merge_3_files, "file1")
    register_pipe.connect(register_wm, 'out_file', merge_3_files, "file2")
    register_pipe.connect(register_csf, 'out_file', merge_3_files, "file3")

    # same, but with non-linear registration
    # non-linear registration between anat and head
    # (does FNIRT work directly on the head?
    # I thought FLIRT required a skull-stripped brain -- is it different?)

    nl_register = pe.Node(fsl.FNIRT(), name="nl_register")
    nl_register.inputs.in_file = template_file

    register_pipe.connect(inputnode, 'anat_file', nl_register, 'ref_file')
    register_pipe.connect(register, 'template_to_anat_file', nl_register,
                          'affine_file')

    # apply non linear warp to NMT_SS
    nl_apply = pe.Node(fsl.ApplyWarp(), name="nl_apply")
    nl_apply.inputs.in_file = template_brain_file
    register_pipe.connect(inputnode, 'anat_file', nl_apply, 'ref_file')
    register_pipe.connect(nl_register, 'fieldcoeff_file', nl_apply,
                          'field_file')  # iout from fnirt

    return register_pipe
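A minimal usage sketch of the workflow returned above (the working directory and subject file names below are hypothetical placeholders, and `register_pipe` stands for the returned workflow):

register_pipe.base_dir = "/tmp/register_working_dir"  # hypothetical path
register_pipe.get_node("inputnode").inputs.anat_file = "sub-01_T1w.nii.gz"  # hypothetical
register_pipe.get_node("inputnode").inputs.anat_file_BET = "sub-01_T1w_brain.nii.gz"  # hypothetical
register_pipe.run()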
Example #25
def create_native_iter_reg_pipe(params_template, params={},
                                name="native_iter_reg_pipe"):
    """
    Description: Register the template to the anat with IterREGBET and \
    apply the transformation to the gm, wm and csf priors

    Processing steps:

    - IterREGBET (reg) computes the transformation between the subject T1 \
    and the template T1, also using the skull-stripped versions of both
    - apply the inverse transformation (template -> native) using ApplyXFM \
    (register_csf_to_nat, register_gm_to_nat, register_wm_to_nat)

    Params:

        - reg (see :class:`IterREGBET <macapype.nodes.register.IterREGBET>`) \
    - also available as :ref:`indiv_params <indiv_params>`

    Inputs:

        inputnode:

            t1_debiased_file:
                debiased T1 file name

            t1_debiased_brain_file:
                debiased skull-stripped T1 file name


        arguments:

            params_template:
                dictionary of info about template

            params:
                dictionary of node sub-parameters (from a json file)

            name:
                pipeline name (default = "native_iter_reg_pipe")

    Outputs:

        register_csf_to_nat.out_file:
            csf template tissue in subject space

        register_gm_to_nat.out_file:
            grey matter template tissue in subject space

        register_wm_to_nat.out_file:
            white matter template tissue in subject space
    """

    native_iter_reg_pipe = pe.Workflow(name=name)

    # creating inputnode
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['t1_debiased_file',
                                      't1_debiased_brain_file',
                                      'indiv_params']),
        name='inputnode')

    # Iterative registration to the INIA19 template
    reg = NodeParams(IterREGBET(),
                     params=parse_key(params, "reg"),
                     name='reg')

    reg.inputs.refb_file = params_template["template_brain"]

    native_iter_reg_pipe.connect(inputnode, 't1_debiased_file',
                                 reg, 'inw_file')
    native_iter_reg_pipe.connect(inputnode, 't1_debiased_brain_file',
                                 reg, 'inb_file')

    native_iter_reg_pipe.connect(
        inputnode, ('indiv_params', parse_key, "reg"),
        reg, 'indiv_params')

    # apply inv_transfo over the 3 tissues:
    # gm
    register_gm_to_nat = pe.Node(fsl.ApplyXFM(), name="register_gm_to_nat")
    register_gm_to_nat.inputs.in_file = params_template["template_gm"]
    register_gm_to_nat.inputs.output_type = "NIFTI_GZ"  # for SPM segment

    native_iter_reg_pipe.connect(inputnode, 't1_debiased_file',
                                 register_gm_to_nat, 'reference')
    native_iter_reg_pipe.connect(reg, 'inv_transfo_file',
                                 register_gm_to_nat, "in_matrix_file")

    # wm
    register_wm_to_nat = pe.Node(fsl.ApplyXFM(), name="register_wm_to_nat")
    register_wm_to_nat.inputs.in_file = params_template["template_wm"]
    register_wm_to_nat.inputs.output_type = "NIFTI_GZ"  # for SPM segment

    native_iter_reg_pipe.connect(inputnode, 't1_debiased_file',
                                 register_wm_to_nat, 'reference')
    native_iter_reg_pipe.connect(reg, 'inv_transfo_file',
                                 register_wm_to_nat, "in_matrix_file")

    # csf
    register_csf_to_nat = pe.Node(fsl.ApplyXFM(), name="register_csf_to_nat")
    register_csf_to_nat.inputs.in_file = params_template["template_csf"]
    register_csf_to_nat.inputs.output_type = "NIFTI_GZ"  # for SPM segment

    native_iter_reg_pipe.connect(inputnode, 't1_debiased_file',
                                 register_csf_to_nat, 'reference')
    native_iter_reg_pipe.connect(reg, 'inv_transfo_file',
                                 register_csf_to_nat, "in_matrix_file")

    return native_iter_reg_pipe
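A minimal usage sketch, assuming INIA19-style template files and debiased subject images (all file names are hypothetical placeholders):

params_template = {
    "template_brain": "inia19-t1-brain.nii.gz",  # hypothetical path
    "template_gm": "inia19-prob_gm.nii.gz",      # hypothetical path
    "template_wm": "inia19-prob_wm.nii.gz",      # hypothetical path
    "template_csf": "inia19-prob_csf.nii.gz",    # hypothetical path
}

native_reg = create_native_iter_reg_pipe(params_template, params={})
native_reg.get_node("inputnode").inputs.t1_debiased_file = "sub-01_T1w_debiased.nii.gz"  # hypothetical
native_reg.get_node("inputnode").inputs.t1_debiased_brain_file = "sub-01_T1w_debiased_brain.nii.gz"  # hypothetical
native_reg.get_node("inputnode").inputs.indiv_params = {}
native_reg.run()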
Example #26
reorientlabel = Node(interface=fsl.Reorient2Std(output_type='NIFTI'),
                     name="reorientlabel")

# MRI coregistration, rigid, with mutual information
pet_to_mri = Node(interface=fsl.FLIRT(cost='mutualinfo',
                                      dof=6,
                                      searchr_x=[-30, 30],
                                      searchr_y=[-30, 30],
                                      searchr_z=[-30, 30],
                                      coarse_search=15,
                                      fine_search=6),
                  name="pet_to_mri")

invertTransform = Node(interface=fsl.ConvertXFM(invert_xfm=True),
                       name="invertTransform")
mri_to_pet = Node(interface=fsl.ApplyXFM(apply_xfm=True), name="mri_to_pet")
labels_to_pet = Node(interface=fsl.ApplyXFM(apply_xfm=True,
                                            interp='nearestneighbour'),
                     name="labels_to_pet")

# this is not working as intended -- it simply binarizes the image. 8/24/2018
av1451_threshold_binarized = Node(interface=fsl.Threshold(
    nan2zeros=True, thresh=args.signal_threshold, args=' -bin'),
                                  name='av1451_threshold_binarized')
petmask = Node(interface=fsl.ImageMaths(op_string=' -mul ', suffix='_mul'),
               name='petmask')
labels_masked = Node(interface=fsl.ImageMaths(op_string=' -mul ',
                                              suffix='_mul'),
                     name='labels_masked')

coreg_qc = Node(interface=coreg_snapshots(), name="coreg_qc")
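The nodes above are defined but not connected; a hedged sketch of how the coregistration part might be wired into a workflow (the in_file/reference inputs of pet_to_mri, mri_to_pet and labels_to_pet would still have to come from a datasource not shown here, and Workflow is assumed to be imported from nipype):

coreg_wf = Workflow(name="pet_mri_coreg")
coreg_wf.connect([
    # the PET -> MRI FLIRT matrix is inverted to bring MRI and labels into PET space
    (pet_to_mri, invertTransform, [("out_matrix_file", "in_file")]),
    (invertTransform, mri_to_pet, [("out_file", "in_matrix_file")]),
    (invertTransform, labels_to_pet, [("out_file", "in_matrix_file")]),
])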
Example #27
import os
from glob import glob

import nibabel as nib
from nilearn.image import resample_img
import nipype.interfaces.fsl as fsl


def main():
    pet_coreg_files = sorted(
        glob(
            "/data1/llevitis/DIAN/dian-pet_coregistration_suvr_space-t1w/*.nii.gz"
        ))
    dian_bids_dir = "/home/users/llevitis/ace_mount/ace_home/DIAN/Nifti/"
    dian_pup_dir = "/home/users/llevitis/ace_mount/ace_home/DIAN_PUP_output/"
    for pcf in pet_coreg_files:
        sub = pcf.split("/")[-1].split("_")[0].split("-")[1]
        raw_t1w_v00_file = glob(
            os.path.join(dian_bids_dir, "sub-" + sub, "ses-v00", "anat/",
                         "*_T1w.nii.gz"))[0]
        print(raw_t1w_v00_file)
        pup_t1w_file = glob(
            os.path.join(dian_pup_dir, "sub-" + sub, "ses-v00",
                         "*ses-v00_T1w.nii.gz"))
        if len(pup_t1w_file) == 1:
            print(sub)
            pup_t1w_file = pup_t1w_file[0]
            pup_fs_file = glob(
                os.path.join(dian_pup_dir, "sub-" + sub, "ses-v00",
                             "*_parcellation-DKT-esm-regions*.nii.gz"))[0]

            pup_t1w_file_flipped = pup_t1w_file.replace(
                "_T1w.nii.gz", "_T1w_flipped.nii.gz")
            pup_fs_file_flipped = pup_fs_file.replace(
                "_space-T1w.nii.gz", "_space-T1w_flipped.nii.gz")

            print(pup_fs_file_flipped)

            flt_out_file = pup_t1w_file.replace("_T1w.nii.gz",
                                                "_T1w_space-orig-T1w.nii.gz")

            # flip the pup images since they're for whatever reason oriented the opposite way
            # just registering them without flipping them doesn't do a perfect job

            pup_t1w_img = nib.load(pup_t1w_file)
            pup_t1w_aff = pup_t1w_img.affine
            pup_t1w_aff[0, 0] *= -1
            pup_t1w_img = resample_img(pup_t1w_img,
                                       target_affine=pup_t1w_aff,
                                       interpolation='nearest')
            nib.save(pup_t1w_img, pup_t1w_file_flipped)

            pup_fs_img = nib.load(pup_fs_file)
            pup_fs_aff = pup_fs_img.affine
            pup_fs_aff[0, 0] *= -1
            pup_fs_img = resample_img(pup_fs_img,
                                      target_affine=pup_fs_aff,
                                      interpolation='nearest')
            nib.save(pup_fs_img, pup_fs_file_flipped)

            pup_fs_file_resampled = pup_fs_file.replace(
                "_space-T1w.nii.gz", "_space-orig_T1w.nii.gz")

            if not os.path.exists(pup_fs_file_resampled):
                flt = fsl.FLIRT(bins=256)
                flt.inputs.in_file = pup_t1w_file_flipped
                flt.inputs.reference = raw_t1w_v00_file
                flt.inputs.output_type = "NIFTI_GZ"
                flt.inputs.out_file = flt_out_file
                flt.inputs.out_matrix_file = pup_t1w_file.replace(
                    "_T1w.nii.gz", "_space-orig_T1w_xfm.mat")
                flt.inputs.cost = "corratio"
                flt.inputs.searchr_x = [-90, 90]
                flt.inputs.searchr_y = [-90, 90]
                flt.inputs.searchr_z = [-90, 90]
                flt.inputs.dof = 6
                flt.inputs.interp = "trilinear"
                print(flt.cmdline)
                res = flt.run()

                # apply the transform
                applyxfm = fsl.ApplyXFM()
                applyxfm.inputs.in_file = pup_fs_file_flipped
                applyxfm.inputs.reference = raw_t1w_v00_file
                applyxfm.inputs.out_file = pup_fs_file.replace(
                    "_space-T1w.nii.gz", "_space-orig_T1w.nii.gz")
                applyxfm.inputs.in_matrix_file = pup_t1w_file.replace(
                    "_T1w.nii.gz", "_space-orig_T1w_xfm.mat")
                applyxfm.inputs.apply_xfm = True
                applyxfm.inputs.interp = "nearestneighbour"
                print(applyxfm.cmdline)
                res = applyxfm.run()
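To run this as a standalone script, a standard entry-point guard would follow (a minimal sketch; not part of the original excerpt):

if __name__ == "__main__":
    main()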
Example #28
def interative_flirt(anat_file, anat_file_BET, template_brain_file,
                     template_mask_file, n_iter):
    """
    This function, from Regis's script, iteratively builds a better
    skull-stripped version of the subject's anat. An already computed
    skull-stripped version is needed to initialize the procedure
    (anat_file_BET).

    The algorithm works as follows:
    1) FLIRT the skull-stripped template to the skull-stripped subject's brain
    2) apply the transformation to the template's mask to obtain the mask in
    subject space
    3) mask the original anat with the computed mask to obtain a new
    skull-stripped subject's brain; use this new brain for the next
    iteration.
    """

    import os
    import nipype.interfaces.fsl as fsl

    from nipype.utils.filemanip import split_filename as split_f

    path_t, fname_t, ext_t = split_f(template_brain_file)
    template_to_anat_file = os.path.abspath(fname_t + "_to_anat.xfm")

    path_a, fname_a, ext_a = split_f(anat_file)

    anat_file_brain_mask = os.path.abspath(fname_a + "_brain_mask.nii.gz")
    anat_file_brain = os.path.abspath(fname_a + "_brain.nii")

    flirt_ref_file = anat_file_BET

    for i in range(n_iter):

        print('Iter flirt {}'.format(i))

        # first step = FLIRT: template brain to anat bet
        # -> transfo matrix (linear) between the two
        flirt = fsl.FLIRT()
        flirt.inputs.in_file = template_brain_file
        flirt.inputs.reference = flirt_ref_file

        flirt.inputs.out_matrix_file = template_to_anat_file
        flirt.inputs.cost = "normcorr"

        flirt.run()

        # second step = apply transfo to template's mask
        # -> brain_mask in subject's space
        print('Iter apply_flirt {}'.format(i))
        apply_flirt = fsl.ApplyXFM()
        apply_flirt.inputs.in_file = template_mask_file
        apply_flirt.inputs.reference = anat_file_BET
        apply_flirt.inputs.in_matrix_file = template_to_anat_file
        apply_flirt.inputs.apply_xfm = True
        apply_flirt.inputs.interp = "nearestneighbour"
        apply_flirt.inputs.out_file = anat_file_brain_mask
        apply_flirt.run()

        # third step = use the mask in subject's space to mask the anat and
        # build a skull-stripped version of the subject's brain
        # -> better skull-stripped version
        print('Iter apply_mask {}'.format(i))
        # apply_mask = fsl.ApplyMask()  # TODO: check whether this would be more appropriate
        apply_mask = fsl.BinaryMaths()
        apply_mask.inputs.in_file = anat_file
        apply_mask.inputs.operation = 'mul'
        apply_mask.inputs.operand_file = anat_file_brain_mask
        apply_mask.inputs.out_file = anat_file_brain
        apply_mask.inputs.output_type = "NIFTI"
        apply_mask.run()

        flirt_ref_file = anat_file_brain

    return anat_file_brain, template_to_anat_file
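A hedged usage sketch of this helper (all file names are hypothetical placeholders, and FSL must be installed and on the PATH for the FLIRT/ApplyXFM/BinaryMaths calls to run):

anat_brain, template_to_anat = interative_flirt(
    anat_file="sub-01_T1w.nii.gz",                 # hypothetical
    anat_file_BET="sub-01_T1w_brain_init.nii.gz",  # hypothetical
    template_brain_file="NMT_SS.nii.gz",           # hypothetical
    template_mask_file="NMT_SS_brainmask.nii.gz",  # hypothetical
    n_iter=3)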