Example #1
def _run_interface(self, runtime):
    mni_template = os.path.join(os.environ['FSLDIR'], 'data', 'standard',
                                'MNI152_T1_2mm.nii.gz')
    mni_template_mask = os.path.join(os.environ['FSLDIR'], 'data',
                                     'standard',
                                     'MNI152_T1_2mm_brain_mask.nii.gz')
    in_file = self.inputs.in_file
    # Affine registration of the MNI template to the input image
    mni2input = niftyreg.RegAladin()
    mni2input.inputs.verbosity_off_flag = True
    mni2input.inputs.ref_file = in_file
    mni2input.inputs.flo_file = mni_template
    mni2input_res = mni2input.run()
    # The brain mask is propagated with nearest-neighbour interpolation
    mask_resample = niftyreg.RegResample(inter_val='NN')
    if self.inputs.use_nrr:
        # Optional non-rigid refinement of the affine alignment
        mni2input_nrr = niftyreg.RegF3D()
        mni2input_nrr.inputs.verbosity_off_flag = True
        mni2input_nrr.inputs.ref_file = in_file
        mni2input_nrr.inputs.flo_file = mni_template
        mni2input_nrr.inputs.aff_file = mni2input_res.outputs.aff_file
        mni2input_nrr.inputs.vel_flag = True
        mni2input_nrr_res = mni2input_nrr.run()
        mask_resample.inputs.trans_file = mni2input_nrr_res.outputs.cpp_file
    else:
        mask_resample.inputs.trans_file = mni2input_res.outputs.aff_file
    mask_resample.inputs.ref_file = in_file
    mask_resample.inputs.flo_file = mni_template_mask
    mask_resample_res = mask_resample.run()
    # Fill any holes in the propagated brain mask
    fill_mask = niftyseg.UnaryMaths(operation='fill')
    fill_mask.inputs.in_file = mask_resample_res.outputs.out_file
    fill_mask.run()
    return runtime
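For context, a minimal sketch of the custom nipype interface this `_run_interface` method could belong to; the class name and input spec are assumptions, not part of the original snippet:

import os

from nipype.interfaces import niftyreg, niftyseg
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
                                    File, TraitedSpec, traits)


class MNIBrainMaskInputSpec(BaseInterfaceInputSpec):
    # Hypothetical spec covering the fields read by _run_interface
    in_file = File(exists=True, mandatory=True, desc='target image')
    use_nrr = traits.Bool(False, usedefault=True,
                          desc='refine the affine with a non-rigid step')


class MNIBrainMask(BaseInterface):
    # Hypothetical wrapper class; the _run_interface shown above would be
    # defined here, together with an output spec if outputs are needed
    input_spec = MNIBrainMaskInputSpec
    output_spec = TraitedSpec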
Example #2
def register_free_form(ref_path,
                       flo_path,
                       init_trsf_path,
                       trsf_path=None,
                       res_path=None,
                       ref_mask_path=None,
                       flo_mask_path=None,
                       bending_energy=const.BENDING_ENERGY_DEFAULT):
    cleanup = []
    if trsf_path is None:
        trsf_path = get_temp_path('.nii.gz')
        cleanup.append(trsf_path)
    if res_path is None:
        res_path = get_temp_path('.nii.gz')
        cleanup.append(res_path)

    reg_ff = niftyreg.RegF3D()
    reg_ff.inputs.ref_file = str(ref_path)
    reg_ff.inputs.flo_file = str(flo_path)
    reg_ff.inputs.cpp_file = str(trsf_path)
    reg_ff.inputs.res_file = str(res_path)
    reg_ff.inputs.be_val = bending_energy

    if ref_mask_path is not None:
        reg_ff.inputs.rmask_file = str(ref_mask_path)
    if flo_mask_path is not None:
        reg_ff.inputs.fmask_file = str(flo_mask_path)
    if init_trsf_path is not None:
        reg_ff.inputs.aff_file = str(init_trsf_path)
    ensure_dir(res_path)
    ensure_dir(trsf_path)
    reg_ff.run()
    for path in cleanup:
        path.unlink()
    return reg_ff
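A short usage sketch with placeholder paths. Because the function only deletes outputs it created itself, passing explicit `trsf_path` and `res_path` keeps the control-point grid and the resampled image on disk; the bending energy value is an illustrative assumption:

from pathlib import Path

reg_ff = register_free_form(ref_path=Path('ref.nii.gz'),
                            flo_path=Path('flo.nii.gz'),
                            init_trsf_path=Path('init_affine.txt'),
                            trsf_path=Path('out/cpp.nii.gz'),
                            res_path=Path('out/resampled.nii.gz'),
                            bending_energy=0.005)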
Example #3
def create_gif_propagation_workflow(in_file,
                                    in_db_file,
                                    output_dir,
                                    in_mask_file=None,
                                    name='gif_propagation',
                                    use_lncc=False):
    """create_niftyseg_gif_propagation_pipeline.
    @param in_file            input target file
    @param in_db_file         input database xml file for the GIF algorithm
    @param output_dir         output directory
    @param in_mask_file       optional input mask for the target T1 file
    @param name               optional name of the pipeline
    """

    # Extract the basename of the input file
    subject_id = split_filename(os.path.basename(in_file))[1]
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    input_node = pe.Node(interface=niu.IdentityInterface(
        fields=['input_file', 'mask_file', 'database_file']),
                         name='input_node')
    input_node.inputs.input_file = in_file
    input_node.inputs.database_file = in_db_file
    input_node.inputs.mask_file = in_mask_file

    # Extract the database information
    extract_db_info = pe.Node(interface=niu.Function(
        input_names=['in_db_file'],
        output_names=['out_templates', 'group_mask'],
        function=extract_db_info_function),
                              name='extract_db_info')
    workflow.connect(input_node, 'database_file', extract_db_info,
                     'in_db_file')

    # Affine registration - All images in the database are registered to the input image
    affine_registration = pe.MapNode(interface=niftyreg.RegAladin(),
                                     iterfield='flo_file',
                                     name='affine_registration')
    workflow.connect(input_node, 'input_file', affine_registration, 'ref_file')
    workflow.connect(extract_db_info, 'out_templates', affine_registration,
                     'flo_file')

    # Extract a robust affine registration if applicable
    robust_affine = pe.Node(interface=niftyreg.RegAverage(),
                            name='robust_affine')
    workflow.connect(affine_registration, 'aff_file', robust_affine,
                     'avg_lts_files')

    # Create a mask by propagating the group mask if none was provided
    propagate_mask = None
    if in_mask_file is None:
        propagate_mask = pe.Node(interface=niftyreg.RegResample(inter_val='NN',
                                                                pad_val=0),
                                 name='propagate_mask')
        workflow.connect(input_node, 'input_file', propagate_mask, 'ref_file')
        workflow.connect(extract_db_info, 'group_mask', propagate_mask,
                         'flo_file')
        workflow.connect(robust_affine, 'out_file', propagate_mask,
                         'trans_file')

    # Initial bias correction of the input image
    bias_correction = pe.Node(interface=N4BiasCorrection(in_downsampling=2),
                              name='bias_correction')
    workflow.connect(input_node, 'input_file', bias_correction, 'in_file')
    if in_mask_file is None:
        workflow.connect(propagate_mask, 'out_file', bias_correction,
                         'mask_file')
    else:
        workflow.connect(input_node, 'mask_file', bias_correction, 'mask_file')

    # Non-linear registration
    non_linear_registration = pe.MapNode(interface=niftyreg.RegF3D(ln_val=4),
                                         iterfield='flo_file',
                                         name='non_linear_registration')
    workflow.connect(bias_correction, 'out_file', non_linear_registration,
                     'ref_file')
    workflow.connect(extract_db_info, 'out_templates', non_linear_registration,
                     'flo_file')
    workflow.connect(robust_affine, 'out_file', non_linear_registration,
                     'aff_file')
    if in_mask_file is None:
        workflow.connect(propagate_mask, 'out_file', non_linear_registration,
                         'rmask_file')
    else:
        workflow.connect(input_node, 'mask_file', non_linear_registration,
                         'rmask_file')
    if use_lncc:
        non_linear_registration.inputs.lncc_val = -5

    # Save all the images where required
    registration_sink = pe.Node(interface=niu.Function(
        input_names=['templates', 'aff_files', 'cpp_files', 'in_dir'],
        output_names=['out_dir'],
        function=registration_sink_function),
                                name='registration_sink')
    registration_sink.inputs.in_dir = output_dir
    workflow.connect(extract_db_info, 'out_templates', registration_sink,
                     'templates')
    workflow.connect(affine_registration, 'aff_file', registration_sink,
                     'aff_files')
    workflow.connect(non_linear_registration, 'cpp_file', registration_sink,
                     'cpp_files')

    # Run GIF
    gif = pe.Node(interface=Gif(database_file=in_db_file), name='gif')
    gif.inputs.omp_core_val = 8
    workflow.connect(registration_sink, 'out_dir', gif, 'cpp_dir')
    workflow.connect(bias_correction, 'out_file', gif, 'in_file')

    if in_mask_file is None:
        workflow.connect(propagate_mask, 'out_file', gif, 'mask_file')
    else:
        workflow.connect(input_node, 'mask_file', gif, 'mask_file')

    # Rename and redirect the output
    output_merger = pe.Node(interface=niu.Merge(numinputs=7),
                            name='output_merger')
    workflow.connect(gif, 'parc_file', output_merger, 'in1')
    workflow.connect(gif, 'prior_file', output_merger, 'in2')
    workflow.connect(gif, 'tiv_file', output_merger, 'in3')
    workflow.connect(gif, 'seg_file', output_merger, 'in4')
    workflow.connect(gif, 'brain_file', output_merger, 'in5')
    workflow.connect(gif, 'bias_file', output_merger, 'in6')
    workflow.connect(gif, 'volume_file', output_merger, 'in7')
    renamer = pe.MapNode(interface=niu.Rename(format_string=subject_id +
                                              "_%(type)s",
                                              keep_ext=True),
                         iterfield=['in_file', 'type'],
                         name='renamer')
    renamer.inputs.type = [
        'labels', 'prior', 'tiv', 'seg', 'brain', 'bias_corrected', 'volumes'
    ]
    workflow.connect(output_merger, 'out', renamer, 'in_file')

    return workflow
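A minimal sketch of building and running this GIF propagation workflow (paths are placeholders):

wf = create_gif_propagation_workflow('subject_t1.nii.gz',
                                     'gif_database.xml',
                                     '/tmp/gif_output',
                                     use_lncc=True)
wf.base_dir = '/tmp/gif_work'
wf.run(plugin='MultiProc')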
Example #4
def create_tensor_groupwise_and_feature_extraction_workflow(input_tensor_fields, output_dir,
                                                            rig_iteration=3, aff_iteration=3, nrr_iteration=6,
                                                            biomarkers=['fa', 'tr', 'ad', 'rd']):

    subject_ids = [split_filename(os.path.basename(f))[1] for f in input_tensor_fields]

    pipeline_name = 'dti_wm_regional_analysis'
    workflow = create_dtitk_groupwise_workflow(in_files=input_tensor_fields,
                                               name=pipeline_name,
                                               rig_iteration=rig_iteration,
                                               aff_iteration=aff_iteration,
                                               nrr_iteration=nrr_iteration)
    workflow.base_output_dir = pipeline_name
    workflow.base_dir = output_dir

    groupwise_fa = pe.Node(interface=dtitk.TVtool(operation='fa'), name='groupwise_fa')
    workflow.connect(workflow.get_node('output_node'), 'out_template', groupwise_fa, 'in_file')

    aff_jhu_to_groupwise = pe.Node(interface=niftyreg.RegAladin(flo_file=jhu_atlas_fa), name='aff_jhu_to_groupwise')
    workflow.connect(groupwise_fa, 'out_file', aff_jhu_to_groupwise, 'ref_file')

    nrr_jhu_to_groupwise = pe.Node(interface=niftyreg.RegF3D(vel_flag=True, lncc_val=-5, maxit_val=150, be_val=0.025,
                                                             flo_file=jhu_atlas_fa), name='nrr_jhu_to_groupwise')
    workflow.connect(groupwise_fa, 'out_file', nrr_jhu_to_groupwise, 'ref_file')
    workflow.connect(aff_jhu_to_groupwise, 'aff_file', nrr_jhu_to_groupwise, 'aff_file')

    resample_labels = pe.Node(interface=niftyreg.RegResample(inter_val='NN', flo_file=jhu_atlas_labels),
                              name='resample_labels')
    workflow.connect(groupwise_fa, 'out_file', resample_labels, 'ref_file')
    workflow.connect(nrr_jhu_to_groupwise, 'cpp_file', resample_labels, 'trans_file')

    iterator = pe.Node(interface=niu.IdentityInterface(fields=['biomarker']), name='iterator')
    iterator.iterables = ('biomarker', biomarkers)

    tvtool = pe.MapNode(interface=dtitk.TVtool(), name='tvtool', iterfield=['in_file'])
    workflow.connect(workflow.get_node('output_node'), 'out_res', tvtool, 'in_file')
    workflow.connect(iterator, 'biomarker', tvtool, 'operation')

    stats_extractor = pe.MapNode(interface=niu.Function(input_names=['in_file', 'roi_file'],
                                                        output_names=['out_file'],
                                                        function=extract_statistics_extended_function),
                                 name='stats_extractor', iterfield=['in_file'])
    workflow.connect(resample_labels, 'out_file', stats_extractor, 'roi_file')
    workflow.connect(tvtool, 'out_file', stats_extractor, 'in_file')

    tensors_renamer = pe.MapNode(interface=niu.Rename(format_string='%(subject_id)s_tensors', keep_ext=True),
                                 name='tensors_renamer', iterfield=['in_file', 'subject_id'])
    workflow.connect(workflow.get_node('output_node'), 'out_res', tensors_renamer, 'in_file')
    tensors_renamer.inputs.subject_id = subject_ids

    maps_renamer = pe.MapNode(interface=niu.Rename(format_string='%(subject_id)s_%(biomarker)s', keep_ext=True),
                              name='maps_renamer', iterfield=['in_file', 'subject_id'])
    workflow.connect(tvtool, 'out_file', maps_renamer, 'in_file')
    workflow.connect(iterator, 'biomarker', maps_renamer, 'biomarker')
    maps_renamer.inputs.subject_id = subject_ids

    stats_renamer = pe.MapNode(interface=niu.Rename(format_string='%(subject_id)s_%(biomarker)s.csv'),
                               name='stats_renamer', iterfield=['in_file', 'subject_id'])
    workflow.connect(stats_extractor, 'out_file', stats_renamer, 'in_file')
    workflow.connect(iterator, 'biomarker', stats_renamer, 'biomarker')
    stats_renamer.inputs.subject_id = subject_ids

    groupwise_outputs = ['fa', 'labels', 'tensors']
    gw_outputs_merger = pe.Node(interface=niu.Merge(numinputs=len(groupwise_outputs)), name='gw_outputs_merger')
    workflow.connect(groupwise_fa, 'out_file', gw_outputs_merger, 'in1')
    workflow.connect(resample_labels, 'out_file', gw_outputs_merger, 'in2')
    workflow.connect(workflow.get_node('output_node'), 'out_template', gw_outputs_merger, 'in3')

    groupwise_renamer = pe.MapNode(interface=niu.Rename(format_string='groupwise_%(type)s', keep_ext=True),
                                   name='groupwise_renamer', iterfield=['in_file', 'type'])
    workflow.connect(gw_outputs_merger, 'out', groupwise_renamer, 'in_file')
    groupwise_renamer.inputs.type = groupwise_outputs

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False),
                 name='data_sink')
    ds.inputs.base_directory = os.path.abspath(output_dir)

    workflow.connect(maps_renamer, 'out_file', ds, 'biomarkers.@maps')
    workflow.connect(stats_renamer, 'out_file', ds, 'biomarkers.@stats')
    workflow.connect(tensors_renamer, 'out_file', ds, 'tensors')
    workflow.connect(groupwise_renamer, 'out_file', ds, '@outputs')

    return workflow
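A minimal sketch of the group-wise DTI analysis factory in use (tensor files are placeholders; `jhu_atlas_fa` and `jhu_atlas_labels` must exist as module-level globals, as the function body assumes):

tensor_fields = ['sub-01_tensors.nii.gz', 'sub-02_tensors.nii.gz']
wf = create_tensor_groupwise_and_feature_extraction_workflow(
    tensor_fields,
    output_dir='/tmp/dti_analysis',
    biomarkers=['fa', 'tr'])
wf.run(plugin='MultiProc')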
Example #5
# NOTE: the top of this script was truncated in the source. The imports, the
# workflow name and the '-r/--ref' argument below are reconstructed from the
# surrounding code (args.ref, args.flo, pe/niu/niftyreg usage) and are
# assumptions rather than the original lines.
import argparse
import os

import nipype.interfaces.niftyreg as niftyreg
import nipype.interfaces.utility as niu
import nipype.pipeline.engine as pe

name = 'nonlinear_registration'  # assumed; the original value is not shown

parser = argparse.ArgumentParser(description='RegF3D non-linear registration')
parser.add_argument('-r',
                    '--ref',
                    dest='ref',
                    metavar='ref',
                    help='Reference Image',
                    required=True)
parser.add_argument('-f',
                    '--floating',
                    dest='flo',
                    metavar='flo',
                    help='Floating Image',
                    required=True)

args = parser.parse_args()

workflow = pe.Workflow(name=name)
workflow.base_output_dir = name
workflow.base_dir = name

directory = os.getcwd()

node = pe.Node(interface=niftyreg.RegF3D(), name='regf3d')
node.inputs.vel_flag = True

output_node = pe.Node(interface=niu.IdentityInterface(
    fields=['res_file', 'cpp_file', 'invcpp_file']),
                      name='output_node')
workflow.connect(node, 'res_file', output_node, 'res_file')
workflow.connect(node, 'cpp_file', output_node, 'cpp_file')
workflow.connect(node, 'invcpp_file', output_node, 'invcpp_file')

node.inputs.ref_file = os.path.abspath(args.ref)
node.inputs.flo_file = os.path.abspath(args.flo)

workflow.run()
Example #6
def create_steps_propagation_pipeline(name='steps_propagation',
                                      aligned_templates=False):

    workflow = pe.Workflow(name=name)

    # Create an input node
    input_node = pe.Node(
        interface=niu.IdentityInterface(
            fields=['in_file',
                    'database_file']),
        name='input_node')

    extract_db_info = pe.Node(interface=niu.Function(input_names=['in_db_file'], output_names=['input_template_images',
                                                                                               'input_template_labels'],
                                                     function=extract_db_info_function),
                              name='extract_db_info')
    workflow.connect(input_node, 'database_file', extract_db_info, 'in_db_file')

    # All the template images are affinely registered to the target image
    current_aladin = pe.MapNode(interface=niftyreg.RegAladin(verbosity_off_flag=True),
                                name='aladin',
                                iterfield=['flo_file'])
    workflow.connect(input_node, 'in_file', current_aladin, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_images', current_aladin, 'flo_file')

    # Compute a robust least-trimmed-squares (LTS) affine average if required
    current_robust_affine = None
    if aligned_templates is True:
        current_robust_affine = pe.Node(interface=niftyreg.RegAverage(), name='robust_affine')
        workflow.connect(current_aladin, 'aff_file', current_robust_affine, 'avg_lts_files')
        current_aff_prop = pe.MapNode(interface=niftyreg.RegResample(verbosity_off_flag=True, inter_val='NN'),
                                      name='resample_aff',
                                      iterfield=['flo_file'])
        workflow.connect(current_robust_affine, 'out_file', current_aff_prop, 'trans_file')
    else:
        current_aff_prop = pe.MapNode(interface=niftyreg.RegResample(verbosity_off_flag=True, inter_val='NN'),
                                      name='resample_aff',
                                      iterfield=['flo_file',
                                                 'trans_file'])
        workflow.connect(current_aladin, 'aff_file', current_aff_prop, 'trans_file')
    workflow.connect(input_node, 'in_file', current_aff_prop, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_labels', current_aff_prop, 'flo_file')

    # Merge all the affinely propagated parcellations into one 4D image
    current_aff_prop_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_aff_prop')
    workflow.connect(current_aff_prop, 'out_file', current_aff_prop_merge, 'in_files')

    # Combine all the propagated parcellations into a single image
    current_aff_prop_max = pe.Node(interface=MaxImage(dimension='T'), name='max_aff')
    workflow.connect(current_aff_prop_merge, 'merged_file', current_aff_prop_max, 'in_file')

    # Binarise the obtained mask
    current_aff_prop_bin = pe.Node(interface=niftyseg.UnaryMaths(operation='bin'), name='bin_aff')
    workflow.connect(current_aff_prop_max, 'out_file', current_aff_prop_bin, 'in_file')

    # Dilate the obtained mask
    current_aff_prop_dil = pe.Node(interface=niftyseg.BinaryMathsInteger(operation='dil', operand_value=10),
                                   name='dil_aff')
    workflow.connect(current_aff_prop_bin, 'out_file', current_aff_prop_dil, 'in_file')

    # Fill the obtained mask
    current_aff_prop_fill = pe.Node(interface=niftyseg.UnaryMaths(operation='fill'), name='fill_aff')
    workflow.connect(current_aff_prop_dil, 'out_file', current_aff_prop_fill, 'in_file')

    # Crop the target image to speed up the process
    current_crop_target = pe.Node(interface=CropImage(), name='crop_target')
    workflow.connect(input_node, 'in_file', current_crop_target, 'in_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_target, 'mask_file')

    # Crop the mask image to speed up the process
    current_crop_mask = pe.Node(interface=CropImage(), name='crop_mask')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_mask, 'in_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_mask, 'mask_file')

    # Perform all the non-linear registrations
    if aligned_templates is True:
        current_f3d = pe.MapNode(interface=niftyreg.RegF3D(sx_val=-2.5, be_val=0.01, verbosity_off_flag=True),
                                 name='f3d',
                                 iterfield=['flo_file'])
        workflow.connect(current_robust_affine, 'out_file', current_f3d, 'aff_file')
    else:
        current_f3d = pe.MapNode(interface=niftyreg.RegF3D(),
                                 name='f3d',
                                 iterfield=['flo_file',
                                            'aff_file'])
        workflow.connect(current_aladin, 'aff_file', current_f3d, 'aff_file')
    workflow.connect(current_crop_target, 'out_file', current_f3d, 'ref_file')
    workflow.connect(current_crop_mask, 'out_file', current_f3d, 'rmask_file')
    workflow.connect(extract_db_info, 'input_template_images', current_f3d, 'flo_file')

    # Merge all the non-linearly warped images into one 4D image
    current_f3d_temp_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_f3d_temp')
    workflow.connect(current_f3d, 'res_file', current_f3d_temp_merge, 'in_files')

    # Propagate the template labels using the non-linear transformations
    current_f3d_prop = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN', verbosity_off_flag=True),
                                  name='f3d_prop',
                                  iterfield=['flo_file',
                                             'trans_file'])
    workflow.connect(current_crop_target, 'out_file', current_f3d_prop, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_labels', current_f3d_prop, 'flo_file')
    workflow.connect(current_f3d, 'cpp_file', current_f3d_prop, 'trans_file')

    # Merge all the non-linearly warped labels into one 4D image
    current_f3d_prop_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_f3d_prop')
    workflow.connect(current_f3d_prop, 'out_file', current_f3d_prop_merge, 'in_files')

    # Extract the consensus parcellation using STEPS
    current_fusion = pe.Node(interface=niftyseg.STEPS(template_num=15, kernel_size=1.5, mrf_value=0.15),
                             name='fusion')
    workflow.connect(current_crop_target, 'out_file', current_fusion, 'in_file')
    workflow.connect(current_f3d_temp_merge, 'merged_file', current_fusion, 'warped_img_file')
    workflow.connect(current_f3d_prop_merge, 'merged_file', current_fusion, 'warped_seg_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_fusion, 'mask_file')

    # Resample the obtained consensus parcellation into the original image space
    current_prop_orig_res = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN', verbosity_off_flag=True),
                                       name='prop_orig_res',
                                       iterfield=['flo_file'])
    workflow.connect(input_node, 'in_file', current_prop_orig_res, 'ref_file')
    workflow.connect(current_fusion, 'out_file', current_prop_orig_res, 'flo_file')

    # Connect the output to the output node
    output_node = pe.Node(
        interface=niu.IdentityInterface(
            fields=['parcellated_file']),
        name='output_node')
    workflow.connect(current_prop_orig_res, 'out_file', output_node, 'parcellated_file')

    return workflow
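A short sketch of driving the STEPS propagation pipeline; the target image and template database paths are placeholders:

wf = create_steps_propagation_pipeline(aligned_templates=True)
wf.base_dir = '/tmp/steps_work'
wf.inputs.input_node.in_file = 'target_t1.nii.gz'
wf.inputs.input_node.database_file = 'template_database.xml'
wf.run()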
Example #7
def create_nonlinear_gw_step(name="nonlinear_gw_niftyreg",
                             demean=True,
                             nonlinear_options_hash=None,
                             initial_affines=False,
                             use_mask=False,
                             verbose=False):
    """
    Creates a workflow that performs non-linear co-registrations of a set of
    images using RegF3D, producing a non-linear average image and a set of
    cpp transformations linking each of the floating images to the average.

    Inputs::

        inputspec.in_files - The input files to be registered
        inputspec.ref_file - The initial reference image that the input files
                              are registered to
        inputspec.rmask_file - Mask of the reference image
        inputspec.in_trans_files - Initial transformation files (affine or
                                    cpps)

    Outputs::

        outputspec.average_image - The average image
        outputspec.cpp_files - The bspline transformation files

    Optional arguments::

        nonlinear_options_hash - An options dictionary containing a list of
                                 parameters for RegF3D that take the same
                                 form as given in the interface (default None)
        initial_affines - Selects whether to iterate over initial affine
                          images, which we generally won't have (default False)

    Example
    -------
    >>> from nipype.workflows.smri.niftyreg import create_nonlinear_gw_step
    >>> nlc = create_nonlinear_gw_step('nonlinear_coreg')  # doctest: +SKIP
    >>> nlc.inputs.inputspec.in_files = [
    ...     'file1.nii.gz', 'file2.nii.gz']  # doctest: +SKIP
    >>> nlc.inputs.inputspec.ref_file = ['ref.nii.gz']  # doctest: +SKIP
    >>> nlc.run()  # doctest: +SKIP

    """

    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    # We need to create an input node for the workflow
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_files',
                'ref_file',
                'rmask_file',
                'input_aff_files']),
        name='inputspec')

    if nonlinear_options_hash is None:
        nonlinear_options_hash = dict()

    # non-rigidly register each of the images to the average
    # flo_file can take a list of files
    # Need to be able to iterate over input affine files, but what about the
    # cases where we have no input affine files?
    # Passing empty strings is not valid (they are not filenames), and
    # undefined fields cannot be iterated over.
    # The current simple solution, as this is not generally required, is to
    # use a flag which specifies whether to iterate
    if initial_affines:
        nonlin_reg = pe.MapNode(interface=niftyreg.RegF3D(
            **nonlinear_options_hash), name="nonlin_reg",
            iterfield=['flo_file', 'aff_file'])
    else:
        nonlin_reg = pe.MapNode(interface=niftyreg.RegF3D(
            **nonlinear_options_hash), name="nonlin_reg",
            iterfield=['flo_file'])

    if verbose is False:
        nonlin_reg.inputs.verbosity_off_flag = True

    # Average the images
    ave_ims = pe.Node(interface=niftyreg.RegAverage(), name="ave_ims")

    # The non-linear average image and the cpp transformation files are
    # returned via an output node.
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['average_image',
                'trans_files']),
        name='outputspec')

    # Connect the inputs to the nonlin_reg node, which is split over in_files
    workflow.connect([
        (inputnode, nonlin_reg, [('in_files', 'flo_file')]),
        (inputnode, nonlin_reg, [('ref_file', 'ref_file')])
                     ])

    if use_mask:
        workflow.connect(inputnode, 'rmask_file', nonlin_reg, 'rmask_file')

    # If we have initial affine transforms, we need to connect them in
    if initial_affines:
        workflow.connect(inputnode, 'input_aff_files', nonlin_reg, 'aff_file')

    if demean:
        if 'vel_flag' in list(nonlinear_options_hash.keys()) and \
           nonlinear_options_hash['vel_flag'] is True and \
           initial_affines:
            workflow.connect(
                inputnode, 'ref_file', ave_ims, 'demean3_ref_file')
        else:
            workflow.connect(
                inputnode, 'ref_file', ave_ims, 'demean2_ref_file')
        workflow.connect(nonlin_reg, 'avg_output', ave_ims, 'warp_files')
    else:
        workflow.connect(nonlin_reg, 'res_file', ave_ims, 'avg_files')

    # Connect up the output node
    workflow.connect([
        (nonlin_reg, outputnode, [('cpp_file', 'trans_files')]),
        (ave_ims, outputnode, [('out_file', 'average_image')])
                     ])

    return workflow
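The docstring example above leaves `nonlinear_options_hash` empty; a minimal sketch of passing RegF3D parameters through it (the values are illustrative assumptions, not recommendations):

opts = {'vel_flag': True, 'be_val': 0.025, 'ln_val': 4}
nlc = create_nonlinear_gw_step(name='nonlinear_coreg',
                               demean=True,
                               nonlinear_options_hash=opts)
nlc.inputs.inputspec.in_files = ['file1.nii.gz', 'file2.nii.gz']
nlc.inputs.inputspec.ref_file = 'ref.nii.gz'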
Example #8
def preprocessing_input_pipeline(name='preprocessing_inputs_pipeline',
                                 number_of_affine_iterations=7,
                                 ref_file=mni_template,
                                 ref_mask=mni_template_mask):

    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    input_node = pe.Node(interface=niu.IdentityInterface(
        fields=['in_file', 'in_images', 'in_affines']),
                         name='input_node')
    '''
    *****************************************************************************
    First step: Crop the inputs using a brain mask dilated by 10 voxels
    *****************************************************************************
    '''
    register_mni_to_image = pe.Node(interface=niftyreg.RegAladin(),
                                    name='register_mni_to_image')
    register_mni_to_image.inputs.flo_file = mni_template
    resample_mni_mask_to_image = pe.Node(interface=niftyreg.RegResample(),
                                         name='resample_mni_mask_to_image')
    resample_mni_mask_to_image.inputs.inter_val = 'NN'
    resample_mni_mask_to_image.inputs.flo_file = mni_template_mask

    dilate_image_mask = pe.Node(interface=niftyseg.BinaryMaths(),
                                name='dilate_image_mask')
    dilate_image_mask.inputs.operation = 'dil'
    dilate_image_mask.inputs.operand_value = 10

    crop_image_with_mask = pe.Node(interface=niftk.CropImage(),
                                   name='crop_image_with_mask')

    resample_image_mask_to_cropped_image = pe.Node(
        interface=niftyreg.RegResample(),
        name='resample_image_mask_to_cropped_image')
    resample_image_mask_to_cropped_image.inputs.inter_val = 'NN'
    resample_image_mask_to_cropped_image.inputs.flo_file = mni_template_mask

    bias_correction = pe.Node(interface=niftk.N4BiasCorrection(),
                              name='bias_correction')
    bias_correction.inputs.in_downsampling = 2
    '''
    *****************************************************************************
    Second step: Calculate the composed input affine transformations
    *****************************************************************************
    '''
    register_mni_to_cropped_image = pe.Node(
        interface=niftyreg.RegAladin(), name='register_mni_to_cropped_image')
    register_mni_to_cropped_image.inputs.ref_file = mni_template

    # A single affine is inverted, so a plain Node is enough (iterfield is
    # only meaningful on a MapNode)
    invert_affine_transformations = pe.Node(
        niftyreg.RegTransform(),
        name='invert_affine_transformations')
    compose_affine_transformations = pe.MapNode(
        niftyreg.RegTransform(),
        name='compose_affine_transformations',
        iterfield=['comp_input2'])
    '''
    *****************************************************************************
    Third step: Non-linear registration of all pairs
    *****************************************************************************
    '''
    nonlinear_registration = pe.MapNode(interface=niftyreg.RegF3D(),
                                        name='nonlinear_registration',
                                        iterfield=['flo_file', 'aff_file'])
    nonlinear_registration.inputs.vel_flag = True
    nonlinear_registration.inputs.lncc_val = -5
    nonlinear_registration.inputs.maxit_val = 150
    nonlinear_registration.inputs.be_val = 0.025
    '''
    *****************************************************************************
    First step: Crop the inputs using a brain mask dilated by 10 voxels
    *****************************************************************************
    '''
    workflow.connect(input_node, 'in_file', register_mni_to_image, 'ref_file')
    workflow.connect(input_node, 'in_file', resample_mni_mask_to_image,
                     'ref_file')
    workflow.connect(register_mni_to_image, 'aff_file',
                     resample_mni_mask_to_image, 'aff_file')
    workflow.connect(resample_mni_mask_to_image, 'res_file', dilate_image_mask,
                     'in_file')
    workflow.connect(input_node, 'in_file', crop_image_with_mask, 'in_file')
    workflow.connect(dilate_image_mask, 'out_file', crop_image_with_mask,
                     'mask_file')
    workflow.connect(crop_image_with_mask, 'out_file',
                     resample_image_mask_to_cropped_image, 'ref_file')
    workflow.connect(register_mni_to_image, 'aff_file',
                     resample_image_mask_to_cropped_image, 'aff_file')
    workflow.connect(crop_image_with_mask, 'out_file', bias_correction,
                     'in_file')
    workflow.connect(resample_image_mask_to_cropped_image, 'res_file',
                     bias_correction, 'mask_file')
    '''
    *****************************************************************************
    Second step: Calculate the composed input affine transformations
    *****************************************************************************
    '''
    workflow.connect(bias_correction, 'out_file',
                     register_mni_to_cropped_image, 'flo_file')
    workflow.connect(register_mni_to_cropped_image, 'aff_file',
                     invert_affine_transformations, 'inv_aff_input')
    workflow.connect(invert_affine_transformations, 'out_file',
                     compose_affine_transformations, 'comp_input')
    workflow.connect(input_node, 'in_affines', compose_affine_transformations,
                     'comp_input2')
    '''
    *****************************************************************************
    Third step: Non-linear registration of all pairs
    *****************************************************************************
    '''

    workflow.connect(bias_correction, 'out_file', nonlinear_registration,
                     'ref_file')
    workflow.connect(input_node, 'in_images', nonlinear_registration,
                     'flo_file')
    workflow.connect(compose_affine_transformations, 'out_file',
                     nonlinear_registration, 'aff_file')
    '''
    *****************************************************************************
    Connect the outputs
    *****************************************************************************
    '''
    output_node = pe.Node(interface=niu.IdentityInterface(
        fields=['out_file', 'out_mask', 'out_aff', 'out_cpps', 'out_invcpps']),
                          name='output_node')
    workflow.connect(bias_correction, 'out_file', output_node, 'out_file')
    workflow.connect(resample_image_mask_to_cropped_image, 'res_file',
                     output_node, 'out_mask')
    workflow.connect(register_mni_to_cropped_image, 'aff_file', output_node,
                     'out_aff')
    workflow.connect(nonlinear_registration, 'cpp_file', output_node,
                     'out_cpps')
    workflow.connect(nonlinear_registration, 'invcpp_file', output_node,
                     'out_invcpps')

    return workflow
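A minimal sketch of wiring the preprocessing pipeline's input node (file names are placeholders; `mni_template` and `mni_template_mask` are module-level globals assumed by the defaults):

wf = preprocessing_input_pipeline(name='preproc')
wf.base_dir = '/tmp/preproc_work'
wf.inputs.input_node.in_file = 'subject_t1.nii.gz'
wf.inputs.input_node.in_images = ['other1.nii.gz', 'other2.nii.gz']
wf.inputs.input_node.in_affines = ['other1_aff.txt', 'other2_aff.txt']
wf.run()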
Example #9
def create_core_dwi_processing_pipeline():
    """

    :return:
    """
    # Pipeline Nodes
    # Inputs params
    inputnode = pe.Node(
        utility.IdentityInterface(
            fields=[
                "diffusion_volume",
                "t1_volume",
                "nb_tracks",
                "min_length",
                "max_length",
            ],
            mandatory_inputs=False,
        ),
        name="inputnode",
    )
    # Processing steps
    preprocessing = create_preprocessing_pipeline()
    # tensor and derived metrics (FA)
    tensor = create_tensor_pipeline()
    # t1 brain extraction
    bet = pe.Node(fsl.preprocess.BET(robust=True), name="bet")
    # tissue classification (T1 volume)
    tissue_classif = create_tissue_classification_node()
    # rigid registration between diffusion and structural space
    # rigid_registration = create_rigid_registration_node()
    # fa (dwi space) upsampling to 1mm to ease rigid registration
    resample_fa = pe.Node(fsl.preprocess.FLIRT(apply_isoxfm=1),
                          name="resample_fa")
    # apply rigid transformation
    # applyxfm = pe.Node(fsl.preprocess.ApplyXFM(), name="applyxfm")
    # inverse rigid transformation
    # invxfm = pe.Node(fsl.utils.ConvertXFM(invert_xfm=True), name="invxfm")
    # non-rigid registration with RegF3D (T1 to upsampled-FA space)
    reg_f3d = pe.Node(niftyreg.RegF3D(), name="reg_f3d")
    # Multi shell multi tissue spherical deconvolution
    csd = create_spherical_deconvolution_pipeline()
    # Whole brain anatomically constrained probabilistic tractogram
    tractogram_pipeline = create_tractogram_generation_pipeline()
    # Outputs params
    outputnode = pe.Node(
        utility.IdentityInterface(
            fields=[
                "corrected_diffusion_volume",
                "wm_fod",
                "tractogram",
                "diffusion_to_t1_transform",
            ],
            mandatory_inputs=False,
        ),
        name="outputnode",
    )
    # mandatory steps of the diffusion pipeline (for the sake of modularity)
    core_pipeline = pe.Workflow(name="core_dwi_processing_pipeline")
    core_pipeline.connect(inputnode, "diffusion_volume", preprocessing,
                          "inputnode.diffusion_volume")
    core_pipeline.connect(
        preprocessing,
        "outputnode.corrected_diffusion_volume",
        tensor,
        "inputnode.diffusion_volume",
    )
    core_pipeline.connect(preprocessing, "outputnode.mask", tensor,
                          "inputnode.mask")
    core_pipeline.connect(
        preprocessing,
        "outputnode.corrected_diffusion_volume",
        csd,
        "inputnode.diffusion_volume",
    )
    # Upsample FA to 1mm which is roughly the T1 resolution
    core_pipeline.connect(tensor, "outputnode.fa", resample_fa, "in_file")
    core_pipeline.connect(tensor, "outputnode.fa", resample_fa, "reference")
    # Register the brain-masked T1 volume to the upsampled FA (T1 --> FA
    # directly); note the source port must be FLIRT's output, not its input
    core_pipeline.connect(inputnode, "t1_volume", bet, "in_file")
    core_pipeline.connect(bet, "out_file", reg_f3d, "flo_file")
    core_pipeline.connect(resample_fa, "out_file", reg_f3d, "ref_file")
    core_pipeline.connect(reg_f3d, "res_file", tissue_classif, "in_file")

    core_pipeline.connect(preprocessing, "outputnode.mask", csd,
                          "inputnode.mask")
    core_pipeline.connect(tissue_classif, "out_file", csd,
                          "inputnode.5tt_file")
    core_pipeline.connect(csd, "outputnode.wm_fod", tractogram_pipeline,
                          "inputnode.wm_fod")
    core_pipeline.connect(inputnode, "nb_tracks", tractogram_pipeline,
                          "inputnode.nb_tracks")
    core_pipeline.connect(inputnode, "min_length", tractogram_pipeline,
                          "inputnode.min_length")
    core_pipeline.connect(inputnode, "max_length", tractogram_pipeline,
                          "inputnode.max_length")
    core_pipeline.connect(tissue_classif, "out_file", tractogram_pipeline,
                          "inputnode.act_file")
    core_pipeline.connect(preprocessing, "outputnode.mask",
                          tractogram_pipeline, "inputnode.mask")

    core_pipeline.connect(tractogram_pipeline, "outputnode.tractogram",
                          outputnode, "tractogram")
    core_pipeline.connect(csd, "outputnode.wm_fod", outputnode, "wm_fod")
    core_pipeline.connect(
        preprocessing,
        "outputnode.corrected_diffusion_volume",
        outputnode,
        "corrected_diffusion_volume",
    )

    return core_pipeline
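A closing sketch of running the core DWI pipeline, assuming placeholder inputs and illustrative tracking parameters:

wf = create_core_dwi_processing_pipeline()
wf.base_dir = '/tmp/dwi_work'
wf.inputs.inputnode.diffusion_volume = 'dwi.nii.gz'
wf.inputs.inputnode.t1_volume = 't1.nii.gz'
wf.inputs.inputnode.nb_tracks = 1000000
wf.inputs.inputnode.min_length = 10
wf.inputs.inputnode.max_length = 250
wf.run(plugin='MultiProc')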