Example #1
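All of the snippets on this page are scraped without their import blocks. Based on the names they use, they appear to assume a preamble along these lines (an assumption; the exact aliases are not shown in the source):

import os

import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as nio
import nipype.interfaces.niftyreg as niftyreg
import nipype.interfaces.niftyseg as niftyseg
import nipype.interfaces.utility as niu
import nipype.pipeline.engine as pe

Names such as Gif, N4BiasCorrection, dtitk, Pct2Dcm, MergeLabels, Image2VtkMesh, CropImage, MaxImage, HistoMRVariables and niftk are project-specific (NifTK/niftypipe-style) interfaces and helpers assumed importable from the surrounding package. The snippets also come from different projects and interface versions, so some output names differ between examples (e.g. res_file versus out_file on RegResample).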
def _run_interface(self, runtime):
    mni_template = os.path.join(os.environ['FSLDIR'], 'data', 'standard',
                                'MNI152_T1_2mm.nii.gz')
    mni_template_mask = os.path.join(os.environ['FSLDIR'], 'data',
                                     'standard',
                                     'MNI152_T1_2mm_brain_mask.nii.gz')
    in_file = self.inputs.in_file
    mni2input = niftyreg.RegAladin()
    mni2input.inputs.verbosity_off_flag = True
    mni2input.inputs.ref_file = in_file
    mni2input.inputs.flo_file = mni_template
    mni2input_res = mni2input.run()
    mask_resample = niftyreg.RegResample(inter_val='NN')
    if self.inputs.use_nrr:
        mni2input_nrr = niftyreg.RegF3D()
        mni2input_nrr.inputs.verbosity_off_flag = True
        mni2input_nrr.inputs.ref_file = in_file
        mni2input_nrr.inputs.flo_file = mni_template
        mni2input_nrr.inputs.aff_file = mni2input_res.outputs.aff_file
        mni2input_nrr.inputs.vel_flag = True
        mni2input_nrr_res = mni2input_nrr.run()
        mask_resample.inputs.trans_file = mni2input_nrr_res.outputs.cpp_file
    else:
        mask_resample.inputs.trans_file = mni2input_res.outputs.aff_file
    mask_resample.inputs.ref_file = in_file
    mask_resample.inputs.flo_file = mni_template_mask
    mask_resample_res = mask_resample.run()
    fill_mask = niftyseg.UnaryMaths(operation='fill')
    fill_mask.inputs.in_file = mask_resample_res.outputs.out_file
    fill_mask.run()
    return runtime
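Example #1 is a bare _run_interface method; in nipype it belongs inside a BaseInterface subclass. A minimal enclosing class is sketched below; the class and spec names are hypothetical, only in_file and use_nrr can be inferred from the method body, and the output spec is not shown in the source:

from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
                                    File, TraitedSpec, traits)


class MniBrainMaskInputSpec(BaseInterfaceInputSpec):
    in_file = File(exists=True, mandatory=True, desc='target image')
    use_nrr = traits.Bool(False, usedefault=True,
                          desc='refine the affine with a non-rigid step')


class MniBrainMask(BaseInterface):
    input_spec = MniBrainMaskInputSpec
    output_spec = TraitedSpec  # placeholder; the real output spec is not shown

    # the _run_interface(self, runtime) method above goes here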
Example #2
def create_workflow(name='simple_workflow'):
    input_node = pe.Node(
        interface=niu.IdentityInterface(fields=['ref_file', 'flo_file']),
        name='input_node')
    aladin = pe.MapNode(interface=niftyreg.RegAladin(),
                        name='aladin',
                        iterfield=['flo_file'])
    resample = pe.MapNode(interface=niftyreg.RegResample(),
                          name='resample',
                          iterfield=['flo_file', 'aff_file'])
    output_node = pe.Node(
        interface=niu.IdentityInterface(fields=['res_file', 'aff_file']),
        name='output_node')
    w = pe.Workflow(name=name)
    w.base_output_dir = name
    w.connect(input_node, 'ref_file', aladin, 'ref_file')
    w.connect(input_node, 'flo_file', aladin, 'flo_file')
    w.connect(aladin, 'aff_file', resample, 'aff_file')
    w.connect(input_node, 'ref_file', resample, 'ref_file')
    w.connect(input_node, 'flo_file', resample, 'flo_file')
    w.connect(resample, 'res_file', output_node, 'res_file')
    w.connect(aladin, 'aff_file', output_node, 'aff_file')
    return w
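A usage sketch for Example #2 (file names hypothetical); because aladin and resample are MapNodes iterating over flo_file, the floating input is a list:

w = create_workflow(name='aladin_batch')
w.inputs.input_node.ref_file = 'ref.nii.gz'
w.inputs.input_node.flo_file = ['flo1.nii.gz', 'flo2.nii.gz']
w.run()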
Example #3
def register(ref_path,
             flo_path,
             trsf_path=None,
             res_path=None,
             ref_mask_path=None,
             flo_mask_path=None,
             init_trsf_path=None,
             rigid_only=False,
             affine_directly=False):
    cleanup = []
    if trsf_path is None:
        trsf_path = get_temp_path('.txt')
        cleanup.append(trsf_path)
    if res_path is None:
        res_path = get_temp_path('.nii.gz')
        cleanup.append(res_path)

    aladin = niftyreg.RegAladin()
    aladin.inputs.ref_file = str(ref_path)
    aladin.inputs.flo_file = str(flo_path)
    aladin.inputs.aff_file = str(trsf_path)
    aladin.inputs.res_file = str(res_path)
    aladin.inputs.aff_direct_flag = affine_directly
    aladin.inputs.rig_only_flag = rigid_only

    if ref_mask_path is not None:
        aladin.inputs.rmask_file = str(ref_mask_path)
    if flo_mask_path is not None:
        aladin.inputs.fmask_file = str(flo_mask_path)
    if init_trsf_path is not None:
        aladin.inputs.in_aff_file = str(init_trsf_path)
    ensure_dir(res_path)
    ensure_dir(trsf_path)
    aladin.run()
    for path in cleanup:
        path.unlink()
    return aladin
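When no explicit paths are given, register() writes the affine and the resampled image to temporary files and deletes them after the run, so pass trsf_path/res_path to keep the results. A usage sketch (paths hypothetical; get_temp_path and ensure_dir are helpers assumed to be defined elsewhere in the module, and the paths must be pathlib.Path objects since cleanup calls .unlink()):

from pathlib import Path

aladin = register(Path('ref.nii.gz'), Path('flo.nii.gz'),
                  trsf_path=Path('out/flo_to_ref.txt'),
                  res_path=Path('out/flo_on_ref.nii.gz'),
                  rigid_only=True)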
Example #4
def create_tensor_groupwise_and_feature_extraction_workflow(input_tensor_fields, output_dir,
                                                            rig_iteration=3, aff_iteration=3, nrr_iteration=6,
                                                            biomarkers=['fa', 'tr', 'ad', 'rd']):

    subject_ids = [split_filename(os.path.basename(f))[1] for f in input_tensor_fields]

    pipeline_name = 'dti_wm_regional_analysis'
    workflow = create_dtitk_groupwise_workflow(in_files=input_tensor_fields,
                                               name=pipeline_name,
                                               rig_iteration=rig_iteration,
                                               aff_iteration=aff_iteration,
                                               nrr_iteration=nrr_iteration)
    workflow.base_output_dir = pipeline_name
    workflow.base_dir = output_dir

    groupwise_fa = pe.Node(interface=dtitk.TVtool(operation='fa'), name='groupwise_fa')
    workflow.connect(workflow.get_node('output_node'), 'out_template', groupwise_fa, 'in_file')

    aff_jhu_to_groupwise = pe.Node(interface=niftyreg.RegAladin(flo_file=jhu_atlas_fa), name='aff_jhu_to_groupwise')
    workflow.connect(groupwise_fa, 'out_file', aff_jhu_to_groupwise, 'ref_file')

    nrr_jhu_to_groupwise = pe.Node(interface=niftyreg.RegF3D(vel_flag=True, lncc_val=-5, maxit_val=150, be_val=0.025,
                                                             flo_file=jhu_atlas_fa), name='nrr_jhu_to_groupwise')
    workflow.connect(groupwise_fa, 'out_file', nrr_jhu_to_groupwise, 'ref_file')
    workflow.connect(aff_jhu_to_groupwise, 'aff_file', nrr_jhu_to_groupwise, 'aff_file')

    resample_labels = pe.Node(interface=niftyreg.RegResample(inter_val='NN', flo_file=jhu_atlas_labels),
                              name='resample_labels')
    workflow.connect(groupwise_fa, 'out_file', resample_labels, 'ref_file')
    workflow.connect(nrr_jhu_to_groupwise, 'cpp_file', resample_labels, 'trans_file')

    iterator = pe.Node(interface=niu.IdentityInterface(fields=['biomarker']), name='iterator')
    iterator.iterables = ('biomarker', biomarkers)

    tvtool = pe.MapNode(interface=dtitk.TVtool(), name='tvtool', iterfield=['in_file'])
    workflow.connect(workflow.get_node('output_node'), 'out_res', tvtool, 'in_file')
    workflow.connect(iterator, 'biomarker', tvtool, 'operation')

    stats_extractor = pe.MapNode(interface=niu.Function(input_names=['in_file', 'roi_file'],
                                                        output_names=['out_file'],
                                                        function=extract_statistics_extended_function),
                                 name='stats_extractor', iterfield=['in_file'])
    workflow.connect(resample_labels, 'out_file', stats_extractor, 'roi_file')
    workflow.connect(tvtool, 'out_file', stats_extractor, 'in_file')

    tensors_renamer = pe.MapNode(interface=niu.Rename(format_string='%(subject_id)s_tensors', keep_ext=True),
                                 name='tensors_renamer', iterfield=['in_file', 'subject_id'])
    workflow.connect(workflow.get_node('output_node'), 'out_res', tensors_renamer, 'in_file')
    tensors_renamer.inputs.subject_id = subject_ids

    maps_renamer = pe.MapNode(interface=niu.Rename(format_string='%(subject_id)s_%(biomarker)s', keep_ext=True),
                              name='maps_renamer', iterfield=['in_file', 'subject_id'])
    workflow.connect(tvtool, 'out_file', maps_renamer, 'in_file')
    workflow.connect(iterator, 'biomarker', maps_renamer, 'biomarker')
    maps_renamer.inputs.subject_id = subject_ids

    stats_renamer = pe.MapNode(interface=niu.Rename(format_string='%(subject_id)s_%(biomarker)s.csv'),
                               name='stats_renamer', iterfield=['in_file', 'subject_id'])
    workflow.connect(stats_extractor, 'out_file', stats_renamer, 'in_file')
    workflow.connect(iterator, 'biomarker', stats_renamer, 'biomarker')
    stats_renamer.inputs.subject_id = subject_ids

    groupwise_outputs = ['fa', 'labels', 'tensors']
    gw_outputs_merger = pe.Node(interface=niu.Merge(numinputs=len(groupwise_outputs)), name='gw_outputs_merger')
    workflow.connect(groupwise_fa, 'out_file', gw_outputs_merger, 'in1')
    workflow.connect(resample_labels, 'out_file', gw_outputs_merger, 'in2')
    workflow.connect(workflow.get_node('output_node'), 'out_template', gw_outputs_merger, 'in3')

    groupwise_renamer = pe.MapNode(interface=niu.Rename(format_string='groupwise_%(type)s', keep_ext=True),
                                   name='groupwise_renamer', iterfield=['in_file', 'type'])
    workflow.connect(gw_outputs_merger, 'out', groupwise_renamer, 'in_file')
    groupwise_renamer.inputs.type = groupwise_outputs

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False),
                 name='data_sink')
    ds.inputs.base_directory = os.path.abspath(output_dir)

    workflow.connect(maps_renamer, 'out_file', ds, 'biomarkers.@maps')
    workflow.connect(stats_renamer, 'out_file', ds, 'biomarkers.@stats')
    workflow.connect(tensors_renamer, 'out_file', ds, 'tensors')
    workflow.connect(groupwise_renamer, 'out_file', ds, '@outputs')

    return workflow
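This workflow references several module-level names that the snippet does not define: jhu_atlas_fa, jhu_atlas_labels, create_dtitk_groupwise_workflow and extract_statistics_extended_function. A plausible definition for the atlas paths, assuming the JHU atlas shipped with FSL (an assumption; the real module may point elsewhere):

# Hypothetical atlas locations; the source module defines these elsewhere.
jhu_dir = os.path.join(os.environ['FSLDIR'], 'data', 'atlases', 'JHU')
jhu_atlas_fa = os.path.join(jhu_dir, 'JHU-ICBM-FA-1mm.nii.gz')
jhu_atlas_labels = os.path.join(jhu_dir, 'JHU-ICBM-labels-1mm.nii.gz')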
Example #5
def create_gif_propagation_workflow(in_file,
                                    in_db_file,
                                    output_dir,
                                    in_mask_file=None,
                                    name='gif_propagation',
                                    use_lncc=False):
    """create_niftyseg_gif_propagation_pipeline.
    @param in_file            input target file
    @param in_db_file         input database xml file for the GIF algorithm
    @param output_dir         output directory
    @param in_mask_file       optional input mask for the target T1 file
    @param name               optional name of the pipeline
    """

    # Extract the basename of the input file
    subject_id = split_filename(os.path.basename(in_file))[1]
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    input_node = pe.Node(interface=niu.IdentityInterface(
        fields=['input_file', 'mask_file', 'database_file']),
                         name='input_node')
    input_node.inputs.input_file = in_file
    input_node.inputs.database_file = in_db_file
    input_node.inputs.mask_file = in_mask_file

    # Extract the database information
    extract_db_info = pe.Node(interface=niu.Function(
        input_names=['in_db_file'],
        output_names=['out_templates', 'group_mask'],
        function=extract_db_info_function),
                              name='extract_db_info')
    workflow.connect(input_node, 'database_file', extract_db_info,
                     'in_db_file')

    # Affine registration - All images in the database are registered to the input image
    affine_registration = pe.MapNode(interface=niftyreg.RegAladin(),
                                     iterfield='flo_file',
                                     name='affine_registration')
    workflow.connect(input_node, 'input_file', affine_registration, 'ref_file')
    workflow.connect(extract_db_info, 'out_templates', affine_registration,
                     'flo_file')

    # Extract a robust affine registration if applicable
    robust_affine = pe.Node(interface=niftyreg.RegAverage(),
                            name='robust_affine')
    workflow.connect(affine_registration, 'aff_file', robust_affine,
                     'avg_lts_files')

    # A mask is created
    propagate_mask = None
    if in_mask_file is None:
        propagate_mask = pe.Node(interface=niftyreg.RegResample(inter_val='NN',
                                                                pad_val=0),
                                 name='propagate_mask')
        workflow.connect(input_node, 'input_file', propagate_mask, 'ref_file')
        workflow.connect(extract_db_info, 'group_mask', propagate_mask,
                         'flo_file')
        workflow.connect(robust_affine, 'out_file', propagate_mask,
                         'trans_file')

    # Initial Bias correction of the input image
    bias_correction = pe.Node(interface=N4BiasCorrection(in_downsampling=2),
                              name='bias_correction')
    workflow.connect(input_node, 'input_file', bias_correction, 'in_file')
    if in_mask_file is None:
        workflow.connect(propagate_mask, 'out_file', bias_correction,
                         'mask_file')
    else:
        workflow.connect(input_node, 'mask_file', bias_correction, 'mask_file')

    # Non linear registration
    non_linear_registration = pe.MapNode(interface=niftyreg.RegF3D(ln_val=4),
                                         iterfield='flo_file',
                                         name='non_linear_registration')
    workflow.connect(bias_correction, 'out_file', non_linear_registration,
                     'ref_file')
    workflow.connect(extract_db_info, 'out_templates', non_linear_registration,
                     'flo_file')
    workflow.connect(robust_affine, 'out_file', non_linear_registration,
                     'aff_file')
    if in_mask_file is None:
        workflow.connect(propagate_mask, 'out_file', non_linear_registration,
                         'rmask_file')
    else:
        workflow.connect(input_node, 'mask_file', non_linear_registration,
                         'rmask_file')
    if use_lncc:
        non_linear_registration.inputs.lncc_val = -5

    # Save all the images where required
    registration_sink = pe.Node(interface=niu.Function(
        input_names=['templates', 'aff_files', 'cpp_files', 'in_dir'],
        output_names=['out_dir'],
        function=registration_sink_function),
                                name='registration_sink')
    registration_sink.inputs.in_dir = output_dir
    workflow.connect(extract_db_info, 'out_templates', registration_sink,
                     'templates')
    workflow.connect(affine_registration, 'aff_file', registration_sink,
                     'aff_files')
    workflow.connect(non_linear_registration, 'cpp_file', registration_sink,
                     'cpp_files')

    # Run GIF
    gif = pe.Node(interface=Gif(database_file=in_db_file), name='gif')
    gif.inputs.omp_core_val = 8
    workflow.connect(registration_sink, 'out_dir', gif, 'cpp_dir')
    workflow.connect(bias_correction, 'out_file', gif, 'in_file')

    if in_mask_file is None:
        workflow.connect(propagate_mask, 'out_file', gif, 'mask_file')
    else:
        workflow.connect(input_node, 'mask_file', gif, 'mask_file')

    # Rename and redirect the output
    output_merger = pe.Node(interface=niu.Merge(numinputs=7),
                            name='output_merger')
    workflow.connect(gif, 'parc_file', output_merger, 'in1')
    workflow.connect(gif, 'prior_file', output_merger, 'in2')
    workflow.connect(gif, 'tiv_file', output_merger, 'in3')
    workflow.connect(gif, 'seg_file', output_merger, 'in4')
    workflow.connect(gif, 'brain_file', output_merger, 'in5')
    workflow.connect(gif, 'bias_file', output_merger, 'in6')
    workflow.connect(gif, 'volume_file', output_merger, 'in7')
    renamer = pe.MapNode(interface=niu.Rename(format_string=subject_id +
                                              "_%(type)s",
                                              keep_ext=True),
                         iterfield=['in_file', 'type'],
                         name='renamer')
    renamer.inputs.type = [
        'labels', 'prior', 'tiv', 'seg', 'brain', 'bias_corrected', 'volumes'
    ]
    workflow.connect(output_merger, 'out', renamer, 'in_file')

    return workflow
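A usage sketch for the propagation workflow (paths hypothetical):

wf = create_gif_propagation_workflow('sub01_T1.nii.gz',
                                     'gif_database.xml',
                                     '/data/gif_output',
                                     use_lncc=True)
wf.run(plugin='MultiProc')  # or wf.run() for serial execution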
Example #6
def create_gif_pseudoct_workflow(in_ute_echo2_file,
                                 in_ute_umap_dir,
                                 in_db_file,
                                 cpp_dir,
                                 in_t1_file=None,
                                 in_t2_file=None,
                                 in_mask_file=None,
                                 in_nac_pet_dir=None,
                                 name='gif_pseudoct'):
    """create_niftyseg_gif_propagation_pipeline.
    @param in_ute_echo2_file  input UTE echo file
    @param in_ute_umap_dir    input UTE umap file
    @param in_db_file         input database xml file for the GIF algorithm
    @param cpp_dir            cpp directory
    @param in_t1_file         input T1 target file
    @param in_t2_file         input T2 target file
    @param in_mask_file       optional input mask for the target T1 file
    @param name               optional name of the pipeline
    """

    in_file = in_t1_file if in_t1_file else in_t2_file
    subject_id = split_filename(os.path.basename(in_file))[1]

    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    gif = pe.Node(interface=Gif(database_file=in_db_file,
                                cpp_dir=cpp_dir,
                                lncc_ker=3,
                                regNMI=True,
                                regBE=0.01),
                  name='gif')
    if in_mask_file:
        gif.inputs.mask_file = in_mask_file

    # Create full-image masks so that the bias correction covers the whole image
    t1_full_mask = pe.Node(interface=niu.Function(input_names=['in_file'],
                                                  output_names=['out_file'],
                                                  function=create_full_mask),
                           name='t1_full_mask')
    t1_full_mask.inputs.in_file = in_t1_file
    t2_full_mask = pe.Node(interface=niu.Function(input_names=['in_file'],
                                                  output_names=['out_file'],
                                                  function=create_full_mask),
                           name='t2_full_mask')
    t2_full_mask.inputs.in_file = in_t2_file

    # Create a bias correction node for the T1 image
    bias_correction_t1 = pe.Node(interface=N4BiasCorrection(),
                                 name='bias_correction_t1')
    if in_t1_file:
        bias_correction_t1.inputs.in_file = in_t1_file

    # Create a bias correction node for the T2 image, boosted with more
    # iterations and a tighter convergence threshold
    bias_correction_t2 = pe.Node(interface=N4BiasCorrection(
        in_maxiter=300, in_convergence=0.0001),
                                 name='bias_correction_t2')
    if in_t2_file:
        bias_correction_t2.inputs.in_file = in_t2_file

    # Only connect the mask nodes if the corresponding input image exists
    if in_t1_file:
        workflow.connect(t1_full_mask, 'out_file', bias_correction_t1,
                         'mask_file')
    if in_t2_file:
        workflow.connect(t2_full_mask, 'out_file', bias_correction_t2,
                         'mask_file')

    if in_t1_file and in_t2_file:
        affine_mr_target = pe.Node(interface=niftyreg.RegAladin(maxit_val=10),
                                   name='affine_mr_target')
        workflow.connect(bias_correction_t1, 'out_file', affine_mr_target,
                         'ref_file')
        workflow.connect(bias_correction_t2, 'out_file', affine_mr_target,
                         'flo_file')
        resample_mr_target = pe.Node(
            interface=niftyreg.RegResample(pad_val=float('nan')),
            name='resample_MR_target')
        workflow.connect(bias_correction_t1, 'out_file', resample_mr_target,
                         'ref_file')
        workflow.connect(bias_correction_t2, 'out_file', resample_mr_target,
                         'flo_file')
        lister = pe.Node(interface=niu.Merge(2), name='lister')
        merger = pe.Node(interface=fsl.Merge(dimension='t',
                                             output_type='NIFTI_GZ'),
                         name='fsl_merge')
        workflow.connect(affine_mr_target, 'aff_file', resample_mr_target,
                         'trans_file')
        workflow.connect(bias_correction_t1, 'out_file', lister, 'in1')
        workflow.connect(resample_mr_target, 'out_file', lister, 'in2')
        workflow.connect(lister, 'out', merger, 'in_files')
        workflow.connect(merger, 'merged_file', gif, 'in_file')
    else:
        if in_t1_file:
            workflow.connect(bias_correction_t1, 'out_file', gif, 'in_file')
        if in_t2_file:
            workflow.connect(bias_correction_t2, 'out_file', gif, 'in_file')

    pct_hu_to_umap = pe.Node(interface=niu.Function(
        input_names=['pCT_file', 'structural_mri_file', 'ute_echo2_file'],
        output_names=['pct_umap_file'],
        function=convert_pct_hu_to_umap),
                             name='pCT_HU_to_umap')
    pct_hu_to_umap.inputs.structural_mri_file = in_file
    pct_hu_to_umap.inputs.ute_echo2_file = in_ute_echo2_file
    workflow.connect(gif, 'synth_file', pct_hu_to_umap, 'pCT_file')

    pct2dcm_pct_umap = pe.Node(interface=Pct2Dcm(in_umap_name='pCT_umap'),
                               name='pct2dcm_pct_umap')
    workflow.connect(pct_hu_to_umap, 'pct_umap_file', pct2dcm_pct_umap,
                     'in_umap_file')
    pct2dcm_pct_umap.inputs.in_ute_umap_dir = os.path.abspath(in_ute_umap_dir)

    merger_output_number = 2

    pct2dcm_ute_umap_end = None
    pct2dcm_pct_umap_end = None
    if in_nac_pet_dir:

        ute_umap_dcm2nii = pe.Node(
            interface=Dcm2nii(source_dir=in_ute_umap_dir),
            name='ute_umap_dcm2nii')
        first_item_selector = pe.Node(interface=niu.Select(index=0),
                                      name='first_item_selector')
        workflow.connect(ute_umap_dcm2nii, 'converted_files',
                         first_item_selector, 'inlist')

        nac_extractor = pe.Node(interface=niu.Function(
            input_names=['dicom_folder'],
            output_names=['nifti_file'],
            function=extract_nac_pet),
                                name='nac_extractor')
        nac_extractor.inputs.dicom_folder = in_nac_pet_dir

        ute_to_nac_registration = pe.Node(
            interface=niftyreg.RegAladin(rig_only_flag=True),
            name='ute_to_nac_registration')
        workflow.connect(nac_extractor, 'nifti_file', ute_to_nac_registration,
                         'ref_file')
        ute_to_nac_registration.inputs.flo_file = in_ute_echo2_file

        ute_resample = pe.Node(interface=niftyreg.RegResample(),
                               name='ute_resample')
        workflow.connect(first_item_selector, 'out', ute_resample, 'ref_file')
        workflow.connect(first_item_selector, 'out', ute_resample, 'flo_file')
        workflow.connect(ute_to_nac_registration, 'aff_file', ute_resample,
                         'aff_file')

        pct2dcm_ute_umap_end = pe.Node(
            interface=Pct2Dcm(in_umap_name='UTE_umap_end'),
            name='pct2dcm_ute_umap_end')
        workflow.connect(ute_resample, 'res_file', pct2dcm_ute_umap_end,
                         'in_umap_file')
        pct2dcm_ute_umap_end.inputs.in_ute_umap_dir = os.path.abspath(
            in_ute_umap_dir)

        pct_resample = pe.Node(interface=niftyreg.RegResample(),
                               name='pct_resample')
        workflow.connect(pct_hu_to_umap, 'pct_umap_file', pct_resample,
                         'ref_file')
        workflow.connect(pct_hu_to_umap, 'pct_umap_file', pct_resample,
                         'flo_file')
        workflow.connect(ute_to_nac_registration, 'aff_file', pct_resample,
                         'aff_file')

        pct2dcm_pct_umap_end = pe.Node(
            interface=Pct2Dcm(in_umap_name='pCT_umap_end'),
            name='pct2dcm_pct_umap_end')
        workflow.connect(pct_resample, 'res_file', pct2dcm_pct_umap_end,
                         'in_umap_file')
        pct2dcm_pct_umap_end.inputs.in_ute_umap_dir = os.path.abspath(
            in_ute_umap_dir)

        merger_output_number = 4

    # merge output
    output_merger = pe.Node(
        interface=niu.Merge(numinputs=merger_output_number),
        name='output_merger')
    workflow.connect(gif, 'synth_file', output_merger, 'in1')
    workflow.connect(pct2dcm_pct_umap, 'output_file', output_merger, 'in2')

    renamer = pe.MapNode(interface=niu.Rename(format_string=subject_id +
                                              "_%(type)s",
                                              keep_ext=True),
                         iterfield=['in_file', 'type'],
                         name='renamer')
    if in_nac_pet_dir:
        workflow.connect(pct2dcm_ute_umap_end, 'output_file', output_merger,
                         'in3')
        workflow.connect(pct2dcm_pct_umap_end, 'output_file', output_merger,
                         'in4')
        renamer.inputs.type = ['synth', 'pct', 'ute_end', 'pct_end']
    else:
        renamer.inputs.type = ['synth', 'pct']
    workflow.connect(output_merger, 'out', renamer, 'in_file')

    return workflow
Example #7
def create_n4_bias_correction_workflow(input_images,
                                       output_dir,
                                       input_masks=None,
                                       name='n4_bias_correction'):

    subject_ids = [
        split_filename(os.path.basename(f))[1] for f in input_images
    ]

    # Create a workflow to process the images
    workflow = pe.Workflow(name=name)
    workflow.base_dir = output_dir
    workflow.base_output_dir = name
    # Define the input and output node
    input_node = pe.Node(interface=niu.IdentityInterface(
        fields=['in_files', 'mask_files'], mandatory_inputs=False),
                         name='input_node')
    output_node = pe.Node(interface=niu.IdentityInterface(
        fields=['out_img_files', 'out_bias_files', 'out_mask_files']),
                          name='output_node')

    input_node.inputs.in_files = input_images
    if input_masks is not None:
        input_node.inputs.mask_files = input_masks

    thresholder = pe.MapNode(interface=fsl.Threshold(),
                             name='thresholder',
                             iterfield=['in_file'])
    thresholder.inputs.thresh = 0

    # Finding masks to use for bias correction:
    bias_correction = pe.MapNode(interface=N4BiasCorrection(),
                                 name='bias_correction',
                                 iterfield=['in_file', 'mask_file'])
    bias_correction.inputs.in_downsampling = 2
    bias_correction.inputs.in_maxiter = 200
    bias_correction.inputs.in_convergence = 0.0002
    bias_correction.inputs.in_fwhm = 0.05

    renamer = pe.MapNode(
        interface=niu.Rename(format_string="%(subject_id)s_corrected.nii.gz"),
        name='renamer',
        iterfield=['in_file', 'subject_id'])
    renamer.inputs.subject_id = subject_ids
    mask_renamer = pe.MapNode(interface=niu.Rename(
        format_string="%(subject_id)s_corrected_mask.nii.gz"),
                              name='mask_renamer',
                              iterfield=['in_file', 'subject_id'])
    mask_renamer.inputs.subject_id = subject_ids

    if input_masks is None:
        mni_to_input = pe.MapNode(interface=niftyreg.RegAladin(),
                                  name='mni_to_input',
                                  iterfield=['ref_file'])
        mni_to_input.inputs.flo_file = mni_template
        mask_resample = pe.MapNode(interface=niftyreg.RegResample(),
                                   name='mask_resample',
                                   iterfield=['ref_file', 'aff_file'])
        mask_resample.inputs.inter_val = 'NN'
        mask_resample.inputs.flo_file = mni_template_mask
        mask_eroder = pe.MapNode(interface=niftyseg.BinaryMathsInteger(),
                                 name='mask_eroder',
                                 iterfield=['in_file'])
        mask_eroder.inputs.operation = 'ero'
        mask_eroder.inputs.operand_value = 3
        workflow.connect(input_node, 'in_files', mni_to_input, 'ref_file')
        workflow.connect(input_node, 'in_files', mask_resample, 'ref_file')
        workflow.connect(mni_to_input, 'aff_file', mask_resample, 'aff_file')
        workflow.connect(mask_resample, 'out_file', mask_eroder, 'in_file')
        workflow.connect(mask_eroder, 'out_file', bias_correction, 'mask_file')
        workflow.connect(mask_eroder, 'out_file', mask_renamer, 'in_file')
    else:
        workflow.connect(input_node, 'mask_files', bias_correction,
                         'mask_file')
        workflow.connect(input_node, 'mask_files', mask_renamer, 'in_file')

    workflow.connect(input_node, 'in_files', thresholder, 'in_file')
    workflow.connect(thresholder, 'out_file', bias_correction, 'in_file')

    # Gather the processed images
    workflow.connect(bias_correction, 'out_file', renamer, 'in_file')
    workflow.connect(renamer, 'out_file', output_node, 'out_img_files')
    workflow.connect(bias_correction, 'out_biasfield_file', output_node,
                     'out_bias_files')
    workflow.connect(mask_renamer, 'out_file', output_node, 'out_mask_files')

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='data_sink')
    ds.inputs.base_directory = output_dir
    workflow.connect(output_node, 'out_img_files', ds, '@img')
    workflow.connect(output_node, 'out_mask_files', ds, '@mask')

    return workflow
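As in Example #1, mni_template and mni_template_mask are module-level globals, presumably the MNI152 template and brain mask shipped with FSL. A usage sketch (file names hypothetical):

wf = create_n4_bias_correction_workflow(['s1_T1.nii.gz', 's2_T1.nii.gz'],
                                        '/data/n4_output')
wf.run()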
Example #8
def create_binary_to_meshes(label,
                            name='gw_binary_to_meshes',
                            reduction_rate=0.3,
                            operand_value=1):
    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    # Create the input node
    input_node = pe.Node(niu.IdentityInterface(fields=[
        'input_images', 'input_parcellations', 'input_reference',
        'trans_files', 'ref_file'
    ]),
                         name='input_node')

    # Create the output node
    output_node = pe.Node(niu.IdentityInterface(fields=['output_meshes']),
                          name='output_node')

    # Extract the relevant label from the GIF parcellation
    extract_label = pe.MapNode(interface=MergeLabels(),
                               iterfield=['in_file'],
                               name='extract_label')
    extract_label.inputs.roi_list = label
    workflow.connect(input_node, 'input_parcellations', extract_label,
                     'in_file')

    # Remove spurious segmentation components: erosion.
    erode_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='ero', operand_value=operand_value),
                                iterfield=['in_file'],
                                name='erode_binaries')
    workflow.connect(extract_label, 'out_file', erode_binaries, 'in_file')

    # Remove spurious segmentation components: dilation.
    dilate_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=operand_value),
                                 iterfield=['in_file'],
                                 name='dilate_binaries')
    workflow.connect(erode_binaries, 'out_file', dilate_binaries, 'in_file')

    # Apply the relevant transformations to the roi
    apply_affine = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN'),
                              iterfield=['flo_file', 'trans_file'],
                              name='apply_affine')
    workflow.connect(input_node, 'trans_files', apply_affine, 'trans_file')
    workflow.connect(input_node, 'ref_file', apply_affine, 'ref_file')
    workflow.connect(dilate_binaries, 'out_file', apply_affine, 'flo_file')

    # Compute the large ROI that corresponds to the union of all warped labels
    extract_union_roi = pe.Node(interface=niftyreg.RegAverage(),
                                name='extract_union_roi')
    workflow.connect(apply_affine, 'out_file', extract_union_roi, 'avg_files')

    # Binarise the average ROI
    binarise_roi = pe.Node(interface=niftyseg.UnaryMaths(operation='bin'),
                           name='binarise_roi')
    workflow.connect(extract_union_roi, 'out_file', binarise_roi, 'in_file')

    # Dilate the binarised union ROI
    dilate_roi = pe.Node(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=4),
                         name='dilate_roi')
    workflow.connect(binarise_roi, 'out_file', dilate_roi, 'in_file')

    # Rigidly refine the alignment of each image, initialised from the
    # affine transformations
    apply_rigid_refinement = pe.MapNode(interface=niftyreg.RegAladin(
        rig_only_flag=True, ln_val=1),
                                        iterfield=['flo_file', 'in_aff_file'],
                                        name='apply_rigid_refinement')
    workflow.connect(input_node, 'input_images', apply_rigid_refinement,
                     'flo_file')
    workflow.connect(input_node, 'ref_file', apply_rigid_refinement,
                     'ref_file')
    workflow.connect(input_node, 'trans_files', apply_rigid_refinement,
                     'in_aff_file')
    workflow.connect(dilate_roi, 'out_file', apply_rigid_refinement,
                     'rmask_file')

    # Resample the dilated binary masks using the refined rigid transformations
    final_resampling = pe.MapNode(
        interface=niftyreg.RegResample(inter_val='NN'),
        iterfield=['flo_file', 'trans_file'],
        name='final_resampling')
    workflow.connect(apply_rigid_refinement, 'aff_file', final_resampling,
                     'trans_file')
    workflow.connect(input_node, 'ref_file', final_resampling, 'ref_file')
    workflow.connect(dilate_binaries, 'out_file', final_resampling, 'flo_file')

    # Extract the mesh corresponding to the label
    extract_mesh = pe.MapNode(
        interface=Image2VtkMesh(in_reductionRate=reduction_rate),
        iterfield=['in_file'],
        name='extract_mesh')
    workflow.connect(final_resampling, 'out_file', extract_mesh, 'in_file')
    # workflow.connect(apply_rigid_refinement, 'aff_file', extract_mesh, 'matrix_file')

    # Create a rename for the average image
    groupwise_renamer = pe.Node(interface=niu.Rename(format_string='atlas',
                                                     keep_ext=True),
                                name='groupwise_renamer')
    workflow.connect(input_node, 'ref_file', groupwise_renamer, 'in_file')

    workflow.connect(extract_mesh, 'out_file', output_node, 'output_meshes')
    return workflow
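create_binary_to_meshes takes no image arguments; all data is wired through input_node. A usage sketch with hypothetical inputs (trans_files are the per-subject affines mapping into the reference space):

wf = create_binary_to_meshes(label=[17])  # label id list, hypothetical
wf.inputs.input_node.input_images = ['s1_T1.nii.gz', 's2_T1.nii.gz']
wf.inputs.input_node.input_parcellations = ['s1_parc.nii.gz', 's2_parc.nii.gz']
wf.inputs.input_node.trans_files = ['s1_aff.txt', 's2_aff.txt']
wf.inputs.input_node.ref_file = 'groupwise_average.nii.gz'
wf.run()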
Example #9
def create_image_to_mesh_workflow(input_images,
                                  input_parcellations,
                                  input_label_id,
                                  result_dir,
                                  rigid_iteration=3,
                                  affine_iteration=3,
                                  reduction_rate=0.1,
                                  name='registrations_init'):
    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = result_dir
    workflow.base_output_dir = name

    # Create a sub-workflow for groupwise registration
    groupwise = create_atlas(itr_rigid=rigid_iteration,
                             itr_affine=affine_iteration,
                             itr_non_lin=0,
                             name='groupwise')
    groupwise.inputs.input_node.in_files = input_images
    groupwise.inputs.input_node.ref_file = input_images[0]

    # Extract the relevant label from the GIF parcellation
    extract_label = pe.MapNode(interface=MergeLabels(),
                               iterfield=['in_file'],
                               name='extract_label')
    extract_label.iterables = ("roi_list", [[l] for l in input_label_id])
    extract_label.inputs.in_file = input_parcellations

    # Remove spurious segmentation components: erosion.
    erode_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='ero', operand_value=1),
                                iterfield=['in_file'],
                                name='erode_binaries')
    workflow.connect(extract_label, 'out_file', erode_binaries, 'in_file')

    # Remove spurious segmentation components: dilation.
    dilate_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=1),
                                 iterfield=['in_file'],
                                 name='dilate_binaries')
    workflow.connect(erode_binaries, 'out_file', dilate_binaries, 'in_file')

    # Apply the relevant transformations to the roi
    apply_affine = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN'),
                              iterfield=['flo_file', 'trans_file'],
                              name='apply_affine')
    workflow.connect(groupwise, 'output_node.trans_files', apply_affine,
                     'trans_file')
    workflow.connect(groupwise, 'output_node.average_image', apply_affine,
                     'ref_file')
    workflow.connect(dilate_binaries, 'out_file', apply_affine, 'flo_file')

    # Compute the large ROI that corresponds to the union of all warped labels
    extract_union_roi = pe.Node(interface=niftyreg.RegAverage(),
                                name='extract_union_roi')
    workflow.connect(apply_affine, 'res_file', extract_union_roi, 'in_files')

    # Binarise the average ROI
    binarise_roi = pe.Node(interface=niftyseg.UnaryMaths(operation='bin'),
                           name='binarise_roi')
    workflow.connect(extract_union_roi, 'out_file', binarise_roi, 'in_file')

    # Dilate the binarised union ROI
    dilate_roi = pe.Node(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=5),
                         name='dilate_roi')
    workflow.connect(binarise_roi, 'out_file', dilate_roi, 'in_file')

    # Rigidly refine the alignment of each image, initialised from the
    # affine transformations
    apply_rigid_refinement = pe.MapNode(interface=niftyreg.RegAladin(
        rig_only_flag=True, ln_val=1),
                                        iterfield=['flo_file', 'in_aff_file'],
                                        name='apply_rigid_refinement')
    apply_rigid_refinement.inputs.flo_file = input_images
    workflow.connect(groupwise, 'output_node.average_image',
                     apply_rigid_refinement, 'ref_file')
    workflow.connect(groupwise, 'output_node.trans_files',
                     apply_rigid_refinement, 'in_aff_file')
    workflow.connect(dilate_roi, 'out_file', apply_rigid_refinement,
                     'rmask_file')

    # Resample the dilated binary masks using the refined rigid transformations
    final_resampling = pe.MapNode(
        interface=niftyreg.RegResample(inter_val='NN'),
        iterfield=['flo_file', 'trans_file'],
        name='final_resampling')
    workflow.connect(apply_rigid_refinement, 'aff_file', final_resampling,
                     'trans_file')
    workflow.connect(groupwise, 'output_node.average_image', final_resampling,
                     'ref_file')
    workflow.connect(dilate_binaries, 'out_file', final_resampling, 'flo_file')

    # Extract the mesh corresponding to the label
    extract_mesh = pe.MapNode(
        interface=Image2VtkMesh(in_reductionRate=reduction_rate),
        iterfield=['in_file'],
        name='extract_mesh')
    workflow.connect(final_resampling, 'res_file', extract_mesh, 'in_file')
    # workflow.connect(apply_rigid_refinement, 'aff_file', extract_mesh, 'matrix_file')

    # Create a rename for the average image
    groupwise_renamer = pe.Node(interface=niu.Rename(format_string='atlas',
                                                     keep_ext=True),
                                name='groupwise_renamer')
    workflow.connect(groupwise, 'output_node.average_image', groupwise_renamer,
                     'in_file')

    # Create a datasink
    ds = pe.Node(nio.DataSink(parameterization=False), name='ds')
    ds.inputs.base_directory = result_dir
    workflow.connect(groupwise_renamer, 'out_file', ds, '@avg')
    workflow.connect(apply_rigid_refinement, 'res_file', ds, '@raf_mask')
    workflow.connect(extract_union_roi, 'out_file', ds, '@union_mask')
    workflow.connect(dilate_roi, 'out_file', ds, '@dilate_mask')
    workflow.connect(extract_mesh, 'out_file', ds, 'mesh_vtk')

    return workflow
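Unlike Example #8, this variant receives its data directly as arguments, runs its own groupwise registration, and iterates extract_label over each label id. A usage sketch (paths hypothetical):

wf = create_image_to_mesh_workflow(
    input_images=['s1_T1.nii.gz', 's2_T1.nii.gz'],
    input_parcellations=['s1_parc.nii.gz', 's2_parc.nii.gz'],
    input_label_id=[17, 53],
    result_dir='/data/mesh_output')
wf.run()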
Example #10
import argparse
import os

import nipype.interfaces.niftyreg as niftyreg
import nipype.interfaces.utility as niu
import nipype.pipeline.engine as pe

# The scraped snippet begins mid-call; the parser head below is a
# reconstruction (the '-r'/'--reference' flags are inferred from the
# parallel '--floating' argument and from dest='ref').
parser = argparse.ArgumentParser(description='RegAladin registration')
parser.add_argument('-r',
                    '--reference',
                    dest='ref',
                    metavar='ref',
                    help='Reference Image',
                    required=True)
parser.add_argument('-f',
                    '--floating',
                    dest='flo',
                    metavar='flo',
                    help='Floating Image',
                    required=True)

args = parser.parse_args()

name = 'aladin_standalone'  # assumed; the snippet never defines `name`
workflow = pe.Workflow(name=name)
workflow.base_output_dir = name
workflow.base_dir = name

directory = os.getcwd()

node = pe.Node(interface=niftyreg.RegAladin(), name='aladin')
output_node = pe.Node(
    interface=niu.IdentityInterface(fields=['res_file', 'aff_file']),
    name='output_node')
workflow.connect(node, 'aff_file', output_node, 'aff_file')
workflow.connect(node, 'res_file', output_node, 'res_file')

node.inputs.ref_file = os.path.abspath(args.ref)
node.inputs.flo_file = os.path.abspath(args.flo)

workflow.run()
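With the reconstructed head above, the script would be invoked as, for example, python aladin_script.py -r ref.nii.gz -f flo.nii.gz (script and file names hypothetical).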
Example #11
def create_steps_propagation_pipeline(name='steps_propagation',
                                      aligned_templates=False):

    workflow = pe.Workflow(name=name)

    # Create an input node
    input_node = pe.Node(
        interface=niu.IdentityInterface(
            fields=['in_file',
                    'database_file']),
        name='input_node')

    extract_db_info = pe.Node(interface=niu.Function(input_names=['in_db_file'], output_names=['input_template_images',
                                                                                               'input_template_labels'],
                                                     function=extract_db_info_function),
                              name='extract_db_info')
    workflow.connect(input_node, 'database_file', extract_db_info, 'in_db_file')

    # All the template images are affinely registered to the target image
    current_aladin = pe.MapNode(interface=niftyreg.RegAladin(verbosity_off_flag=True),
                                name='aladin',
                                iterfield=['flo_file'])
    workflow.connect(input_node, 'in_file', current_aladin, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_images', current_aladin, 'flo_file')

    # Compute a robust LTS (least trimmed squares) average affine if required
    current_robust_affine = None
    if aligned_templates is True:
        current_robust_affine = pe.Node(interface=niftyreg.RegAverage(), name='robust_affine')
        workflow.connect(current_aladin, 'aff_file', current_robust_affine, 'avg_lts_files')
        current_aff_prop = pe.MapNode(interface=niftyreg.RegResample(verbosity_off_flag=True, inter_val='NN'),
                                      name='resample_aff',
                                      iterfield=['flo_file'])
        workflow.connect(current_robust_affine, 'out_file', current_aff_prop, 'trans_file')
    else:
        current_aff_prop = pe.MapNode(interface=niftyreg.RegResample(verbosity_off_flag=True, inter_val='NN'),
                                      name='resample_aff',
                                      iterfield=['flo_file',
                                                 'trans_file'])
        workflow.connect(current_aladin, 'aff_file', current_aff_prop, 'trans_file')
    workflow.connect(input_node, 'in_file', current_aff_prop, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_labels', current_aff_prop, 'flo_file')

    # Merge all the affine parcellation into one 4D
    current_aff_prop_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_aff_prop')
    workflow.connect(current_aff_prop, 'out_file', current_aff_prop_merge, 'in_files')

    # Combine all the propagated parcellation into a single image
    current_aff_prop_max = pe.Node(interface=MaxImage(dimension='T'), name='max_aff')
    workflow.connect(current_aff_prop_merge, 'merged_file', current_aff_prop_max, 'in_file')

    # Binarise the obtained mask
    current_aff_prop_bin = pe.Node(interface=niftyseg.UnaryMaths(operation='bin'), name='bin_aff')
    workflow.connect(current_aff_prop_max, 'out_file', current_aff_prop_bin, 'in_file')

    # Dilate the obtained mask
    current_aff_prop_dil = pe.Node(interface=niftyseg.BinaryMathsInteger(operation='dil', operand_value=10),
                                   name='dil_aff')
    workflow.connect(current_aff_prop_bin, 'out_file', current_aff_prop_dil, 'in_file')

    # Fill the obtained mask
    current_aff_prop_fill = pe.Node(interface=niftyseg.UnaryMaths(operation='fill'), name='fill_aff')
    workflow.connect(current_aff_prop_dil, 'out_file', current_aff_prop_fill, 'in_file')

    # Crop the target image to speed up the process
    current_crop_target = pe.Node(interface=CropImage(), name='crop_target')
    workflow.connect(input_node, 'in_file', current_crop_target, 'in_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_target, 'mask_file')

    # Crop the mask image to speed up the process
    current_crop_mask = pe.Node(interface=CropImage(), name='crop_mask')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_mask, 'in_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_mask, 'mask_file')

    # Perform all the non-linear registration
    if aligned_templates is True:
        current_f3d = pe.MapNode(interface=niftyreg.RegF3D(sx_val=-2.5, be_val=0.01, verbosity_off_flag=True),
                                 name='f3d',
                                 iterfield=['flo_file'])
        workflow.connect(current_robust_affine, 'out_file', current_f3d, 'aff_file')
    else:
        current_f3d = pe.MapNode(interface=niftyreg.RegF3D(),
                                 name='f3d',
                                 iterfield=['flo_file',
                                            'aff_file'])
        workflow.connect(current_aladin, 'aff_file', current_f3d, 'aff_file')
    workflow.connect(current_crop_target, 'out_file', current_f3d, 'ref_file')
    workflow.connect(current_crop_mask, 'out_file', current_f3d, 'rmask_file')
    workflow.connect(extract_db_info, 'input_template_images', current_f3d, 'flo_file')

    # Merge all the non-linear warped images into one 4D
    current_f3d_temp_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_f3d_temp')
    workflow.connect(current_f3d, 'res_file', current_f3d_temp_merge, 'in_files')

    # Propagate the obtained mask
    current_f3d_prop = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN', verbosity_off_flag=True),
                                  name='f3d_prop',
                                  iterfield=['flo_file',
                                             'trans_file'])
    workflow.connect(current_crop_target, 'out_file', current_f3d_prop, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_labels', current_f3d_prop, 'flo_file')
    workflow.connect(current_f3d, 'cpp_file', current_f3d_prop, 'trans_file')

    # Merge all the non-linear warped labels into one 4D
    current_f3d_prop_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_f3d_prop')
    workflow.connect(current_f3d_prop, 'out_file', current_f3d_prop_merge, 'in_files')

    # Extract the consensus parcellation using STEPS
    current_fusion = pe.Node(interface=niftyseg.STEPS(template_num=15, kernel_size=1.5, mrf_value=0.15),
                             name='fusion')
    workflow.connect(current_crop_target, 'out_file', current_fusion, 'in_file')
    workflow.connect(current_f3d_temp_merge, 'merged_file', current_fusion, 'warped_img_file')
    workflow.connect(current_f3d_prop_merge, 'merged_file', current_fusion, 'warped_seg_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_fusion, 'mask_file')

    # Resample the obtained consensus label into the original image space
    current_prop_orig_res = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN', verbosity_off_flag=True),
                                       name='prop_orig_res',
                                       iterfield=['flo_file'])
    workflow.connect(input_node, 'in_file', current_prop_orig_res, 'ref_file')
    workflow.connect(current_fusion, 'out_file', current_prop_orig_res, 'flo_file')

    # Connect the output to the output node
    output_node = pe.Node(
        interface=niu.IdentityInterface(
            fields=['parcellated_file']),
        name='output_node')
    workflow.connect(current_prop_orig_res, 'out_file', output_node, 'parcellated_file')

    return workflow
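A usage sketch (paths hypothetical); with aligned_templates=True a single robust average affine from RegAverage is reused for every template:

wf = create_steps_propagation_pipeline(aligned_templates=True)
wf.inputs.input_node.in_file = 'target_T1.nii.gz'
wf.inputs.input_node.database_file = 'template_db.xml'
wf.base_dir = '/data/steps_output'
wf.run()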
Example #12
def main():
    # Initialise the pipeline variables and the argument parsing
    mri_histo_value_var = HistoMRVariables()
    # Parse the input arguments
    input_variables = mri_histo_value_var.parser.parse_args()

    # Create the output folder if it does not exist
    if not os.path.exists(os.path.abspath(input_variables.output_folder)):
        os.mkdir(os.path.abspath(input_variables.output_folder))

    # Create the workflow
    name = 'mri_histo_rigid'
    workflow = pe.Workflow(name=name)
    workflow.base_dir = os.path.abspath(input_variables.output_folder)
    workflow.base_output_dir = name

    # Create the input node interface
    input_node = pe.Node(interface=niu.IdentityInterface(fields=[
        'input_histo', 'input_histo_mask', 'input_mri', 'input_mri_mask',
        'resolution', 'sim_measure'
    ]),
                         name='input_node')
    input_node.inputs.input_histo = os.path.abspath(
        input_variables.input_histo)
    input_node.inputs.input_histo_mask = os.path.abspath(
        input_variables.input_histo_mask)
    input_node.inputs.input_mri = os.path.abspath(input_variables.input_mri)
    input_node.inputs.input_mri_mask = os.path.abspath(
        input_variables.input_mri_mask)
    input_node.inputs.resolution = float(input_variables.resolution)
    input_node.inputs.sim_measure = input_variables.sim_measure

    # Alter the input image headers
    alter_mri_header = pe.Node(interface=niu.Function(
        function=initialise_headers,
        input_names=['in_file'],
        output_names=['out_file']),
                               name='alter_mri_header')
    workflow.connect(input_node, 'input_mri', alter_mri_header, 'in_file')
    alter_histo_header = pe.Node(interface=niu.Function(
        function=initialise_headers,
        input_names=['in_file'],
        output_names=['out_file']),
                                 name='alter_histo_header')
    workflow.connect(input_node, 'input_histo', alter_histo_header, 'in_file')

    alter_histo_mask_header = pe.Node(interface=niu.Function(
        function=initialise_headers,
        input_names=['in_file'],
        output_names=['out_file']),
                                      name='alter_histo_mask_header')

    alter_mri_mask_header = pe.Node(interface=niu.Function(
        function=initialise_headers,
        input_names=['in_file'],
        output_names=['out_file']),
                                    name='alter_mri_mask_header')

    # Resample to isotropic images
    iso_mri = pe.Node(interface=niftyreg.RegTools(), name='iso_mri')
    iso_mri.inputs.chg_res_val = (input_node.inputs.resolution,
                                  input_node.inputs.resolution,
                                  input_node.inputs.resolution)
    workflow.connect(alter_mri_header, 'out_file', iso_mri, 'in_file')
    iso_histo = pe.Node(interface=niftyreg.RegTools(), name='iso_histo')
    iso_histo.inputs.chg_res_val = (input_node.inputs.resolution,
                                    input_node.inputs.resolution,
                                    input_node.inputs.resolution)
    workflow.connect(alter_histo_header, 'out_file', iso_histo, 'in_file')

    iso_histo_mask = pe.Node(interface=niftyreg.RegTools(),
                             name='iso_histo_mask')
    iso_histo_mask.inputs.chg_res_val = (input_node.inputs.resolution,
                                         input_node.inputs.resolution,
                                         input_node.inputs.resolution)
    iso_mri_mask = pe.Node(interface=niftyreg.RegTools(), name='iso_mri_mask')
    iso_mri_mask.inputs.chg_res_val = (input_node.inputs.resolution,
                                       input_node.inputs.resolution,
                                       input_node.inputs.resolution)

    # Generate matrices
    create_matrices = pe.Node(interface=niu.Function(
        function=generate_matrices,
        input_names=['histo_file', 'mri_file'],
        output_names=['matrix_files']),
                              name='create_matrices')
    workflow.connect(iso_histo, 'out_file', create_matrices, 'histo_file')
    workflow.connect(iso_mri, 'out_file', create_matrices, 'mri_file')

    # Run all the registrations
    rigid = pe.MapNode(interface=niftyreg.RegAladin(),
                       iterfield=['in_aff_file'],
                       name='rigid')
    rigid.inputs.rig_only_flag = True
    rigid.inputs.verbosity_off_flag = True
    if input_variables.nosym:
        rigid.inputs.nosym_flag = True
    workflow.connect(create_matrices, 'matrix_files', rigid, 'in_aff_file')
    workflow.connect(iso_histo, 'out_file', rigid, 'ref_file')
    workflow.connect(iso_mri, 'out_file', rigid, 'flo_file')
    if input_variables.input_histo_mask is not None:
        workflow.connect(input_node, 'input_histo_mask',
                         alter_histo_mask_header, 'in_file')
        workflow.connect(alter_histo_mask_header, 'out_file', iso_histo_mask,
                         'in_file')
        workflow.connect(iso_histo_mask, 'out_file', rigid, 'rmask_file')
    if input_variables.input_mri_mask is not None:
        workflow.connect(input_node, 'input_mri_mask', alter_mri_mask_header,
                         'in_file')
        workflow.connect(alter_mri_mask_header, 'out_file', iso_mri_mask,
                         'in_file')
        workflow.connect(iso_mri_mask, 'out_file', rigid, 'fmask_file')

    # Run all the similarity measures
    similarity = pe.MapNode(interface=niftyreg.RegMeasure(),
                            iterfield=['flo_file'],
                            name='similarity')
    workflow.connect(input_node, 'sim_measure', similarity, 'measure_type')
    workflow.connect(iso_histo, 'out_file', similarity, 'ref_file')
    workflow.connect(rigid, 'res_file', similarity, 'flo_file')

    # Display similarity measures
    disp_sim = pe.Node(interface=niu.Function(
        function=generate_plotsim,
        input_names=['measures', 'in_mat', 'out_mat'],
        output_names=['out_file']),
                       name='disp_sim')
    workflow.connect(similarity, 'out_file', disp_sim, 'measures')
    workflow.connect(create_matrices, 'matrix_files', disp_sim, 'in_mat')
    workflow.connect(rigid, 'aff_file', disp_sim, 'out_mat')

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='data_sink')
    ds.inputs.base_directory = workflow.base_dir
    workflow.connect(disp_sim, 'out_file', ds, '@plot')
    workflow.connect(alter_histo_header, 'out_file', ds, '@histo')
    workflow.connect(alter_mri_header, 'out_file', ds, '@mri')

    # output the graph if required
    if input_variables.graph is True:
        niftk.generate_graph(workflow=workflow)
        return

    # Run the workflow
    qsub_args = '-l h_rt=01:00:00 -l tmem=1.9G -l h_vmem=1.9G -l vf=1.9G -l s_stack=10240 -j y -b y -S /bin/csh -V'
    niftk.run_workflow(workflow=workflow, qsubargs=qsub_args)
Example #13
def create_linear_gw_step(name="linear_gw_niftyreg",
                          demean=True,
                          linear_options_hash=None,
                          use_mask=False,
                          verbose=False):
    """
    Creates a workflow that performs linear co-registration of a set of images
    using RegAladin, producing an average image and a set of affine
    transformation matrices linking each of the floating images to the average.

    Inputs::

        inputspec.in_files - The input files to be registered
        inputspec.ref_file - The initial reference image that the input files
                              are registered to
        inputspec.rmask_file - Mask of the reference image

    Outputs::

        outputspec.average_image - The average image
        outputspec.aff_files - The affine transformation files

    Optional arguments::

        linear_options_hash - An options dictionary containing a list of
                              parameters for RegAladin that take the same
                              form as given in the interface (default None)
        demean - Selects whether to demean the transformation matrices when
                 performing the averaging (default True)
        use_mask - Selects whether to connect the reference mask to the
                   registrations (default False)
        verbose - Selects whether the registrations print verbose output
                  (default False)

    Example
    -------

    >>> from nipype.workflows.smri.niftyreg import create_linear_gw_step
    >>> lgw = create_linear_gw_step('my_linear_coreg')  # doctest: +SKIP
    >>> lgw.inputs.inputspec.in_files = [
    ...     'file1.nii.gz', 'file2.nii.gz']  # doctest: +SKIP
    >>> lgw.inputs.inputspec.ref_file = ['ref.nii.gz']  # doctest: +SKIP
    >>> lgw.run()  # doctest: +SKIP

    """
    # Create the sub workflow
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    # We need to create an input node for the workflow
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_files', 'ref_file', 'rmask_file']),
        name='inputspec')

    if linear_options_hash is None:
        linear_options_hash = dict()

    # Rigidly register each of the images to the average
    lin_reg = pe.MapNode(interface=niftyreg.RegAladin(**linear_options_hash),
                         name="lin_reg", iterfield=['flo_file'])

    if verbose is False:
        lin_reg.inputs.verbosity_off_flag = True

    # Average the images
    ave_ims = pe.Node(interface=niftyreg.RegAverage(), name="ave_ims")

    # We have a new average image and the affine
    # transformations, which are returned as an output node.
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['average_image', 'trans_files']), name='outputspec')

    # Connect the inputs to the lin_reg node
    workflow.connect([
        (inputnode, lin_reg, [('ref_file', 'ref_file')]),
        (inputnode, lin_reg, [('in_files', 'flo_file')])
    ])
    if use_mask:
        workflow.connect(inputnode, 'rmask_file', lin_reg, 'rmask_file')

    if demean:
        # RegAladin's avg_output pairs each affine with its floating image,
        # which is the form expected by reg_average's demeaning mode
        workflow.connect([
            (inputnode, ave_ims, [('ref_file', 'demean1_ref_file')]),
            (lin_reg, ave_ims, [('avg_output', 'warp_files')])
        ])
    else:
        workflow.connect(lin_reg, 'res_file', ave_ims, 'avg_files')

    # Connect up the output node
    workflow.connect([
        (lin_reg, outputnode, [('aff_file', 'trans_files')]),
        (ave_ims, outputnode, [('out_file', 'average_image')])
    ])

    return workflow
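
# A minimal usage sketch (hypothetical file names): the keys of
# linear_options_hash mirror the RegAladin interface inputs, so e.g.
# rig_only_flag restricts each pairwise registration to a rigid transform.
lin_gw = create_linear_gw_step(name='lin_gw',
                               linear_options_hash={'rig_only_flag': True})
lin_gw.inputs.inputspec.in_files = ['im1.nii.gz', 'im2.nii.gz']
lin_gw.inputs.inputspec.ref_file = ['ref.nii.gz']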
Example #14
def create_cross_sectional_tbss_pipeline(in_files,
                                         output_dir,
                                         name='cross_sectional_tbss',
                                         skeleton_threshold=0.2,
                                         design_mat=None,
                                         design_con=None):
    workflow = pe.Workflow(name=name)
    workflow.base_dir = output_dir
    workflow.base_output_dir = name

    # Create the dtitk groupwise registration workflow
    groupwise_dtitk = create_dtitk_groupwise_workflow(in_files=in_files,
                                                      name="dtitk_groupwise",
                                                      rig_iteration=3,
                                                      aff_iteration=3,
                                                      nrr_iteration=6)

    # Create the average FA map
    mean_fa = pe.Node(interface=dtitk.TVtool(), name="mean_fa")
    workflow.connect(groupwise_dtitk, 'output_node.out_template', mean_fa,
                     'in_file')
    mean_fa.inputs.operation = 'fa'

    # Register the FMRIB58_FA_1mm.nii.gz atlas to the mean FA map
    reg_atlas = pe.Node(interface=niftyreg.RegAladin(), name='reg_atlas')
    workflow.connect(mean_fa, 'out_file', reg_atlas, 'ref_file')
    reg_atlas.inputs.flo_file = os.path.join(os.environ['FSLDIR'], 'data',
                                             'standard',
                                             'FMRIB58_FA_1mm.nii.gz')

    # Apply the transformation to the lower cingulum image
    war_atlas = pe.Node(interface=niftyreg.RegResample(), name='war_atlas')
    workflow.connect(mean_fa, 'out_file', war_atlas, 'ref_file')
    war_atlas.inputs.flo_file = os.path.join(os.environ['FSLDIR'], 'data',
                                             'standard',
                                             'LowerCingulum_1mm.nii.gz')
    workflow.connect(reg_atlas, 'aff_file', war_atlas, 'trans_file')
    war_atlas.inputs.inter_val = 'LIN'

    # Threshold the propagated lower cingulum
    thr_atlas = pe.Node(interface=niftyseg.BinaryMaths(), name='thr_atlas')
    workflow.connect(war_atlas, 'out_file', thr_atlas, 'in_file')
    thr_atlas.inputs.operation = 'thr'
    thr_atlas.inputs.operand_value = 0.5

    # Binarise the propagated lower cingulum
    bin_atlas = pe.Node(interface=niftyseg.UnaryMaths(), name='bin_atlas')
    workflow.connect(thr_atlas, 'out_file', bin_atlas, 'in_file')
    bin_atlas.inputs.operation = 'bin'

    # Create all the individual FA maps
    individual_fa = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_fa",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_fa,
                     'in_file')
    individual_fa.inputs.operation = 'fa'

    # Create all the individual MD maps (TVtool's 'tr' output is the tensor
    # trace, i.e. 3 x MD)
    individual_md = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_md",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_md,
                     'in_file')
    individual_md.inputs.operation = 'tr'

    # Create all the individual RD maps
    individual_rd = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_rd",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_rd,
                     'in_file')
    individual_rd.inputs.operation = 'rd'

    # Create all the individual AD maps
    individual_ad = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_ad",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_ad,
                     'in_file')
    individual_ad.inputs.operation = 'ad'

    # Combine all the warped FA images into a 4D image
    merged_4d_fa = pe.Node(interface=fsl.Merge(), name='merged_4d_fa')
    merged_4d_fa.inputs.dimension = 't'
    workflow.connect(individual_fa, 'out_file', merged_4d_fa, 'in_files')

    # Combine all the warped MD images into a 4D image
    merged_4d_md = pe.Node(interface=fsl.Merge(), name='merged_4d_md')
    merged_4d_md.inputs.dimension = 't'
    workflow.connect(individual_md, 'out_file', merged_4d_md, 'in_files')

    # Combine all the warped RD images into a 4D image
    merged_4d_rd = pe.Node(interface=fsl.Merge(), name='merged_4d_rd')
    merged_4d_rd.inputs.dimension = 't'
    workflow.connect(individual_rd, 'out_file', merged_4d_rd, 'in_files')

    # Combine all the warped AD images into a 4D image
    merged_4d_ad = pe.Node(interface=fsl.Merge(), name='merged_4d_ad')
    merged_4d_ad.inputs.dimension = 't'
    workflow.connect(individual_ad, 'out_file', merged_4d_ad, 'in_files')

    # Threshold the 4D FA image at 0 (set negative values to zero)
    merged_4d_fa_thresholded = pe.Node(interface=niftyseg.BinaryMaths(),
                                       name='merged_4d_fa_thresholded')
    merged_4d_fa_thresholded.inputs.operation = 'thr'
    merged_4d_fa_thresholded.inputs.operand_value = 0
    workflow.connect(merged_4d_fa, 'merged_file', merged_4d_fa_thresholded,
                     'in_file')

    # Take the voxelwise minimum over time of the 4D FA image
    minimal_value_across_all_fa = pe.Node(interface=niftyseg.UnaryMaths(),
                                          name='minimal_value_across_all_fa')
    minimal_value_across_all_fa.inputs.operation = 'tmin'
    workflow.connect(merged_4d_fa_thresholded, 'out_file',
                     minimal_value_across_all_fa, 'in_file')

    # Create the mask image
    fa_mask = pe.Node(interface=niftyseg.UnaryMaths(), name='fa_mask')
    fa_mask.inputs.operation = 'bin'
    fa_mask.inputs.output_datatype = 'char'
    workflow.connect(minimal_value_across_all_fa, 'out_file', fa_mask,
                     'in_file')

    # Mask the mean FA image
    masked_mean_fa = pe.Node(interface=fsl.ApplyMask(), name='masked_mean_fa')
    workflow.connect(mean_fa, 'out_file', masked_mean_fa, 'in_file')
    workflow.connect(fa_mask, 'out_file', masked_mean_fa, 'mask_file')

    # Create the skeleton image
    skeleton = pe.Node(interface=fsl.TractSkeleton(), name='skeleton')
    skeleton.inputs.skeleton_file = True
    workflow.connect(masked_mean_fa, 'out_file', skeleton, 'in_file')

    # Threshold the skeleton image
    thresholded_skeleton = pe.Node(interface=niftyseg.BinaryMaths(),
                                   name='thresholded_skeleton')
    thresholded_skeleton.inputs.operation = 'thr'
    thresholded_skeleton.inputs.operand_value = skeleton_threshold
    workflow.connect(skeleton, 'skeleton_file', thresholded_skeleton,
                     'in_file')

    # Binarise the skeleton image
    binarised_skeleton = pe.Node(interface=niftyseg.UnaryMaths(),
                                 name='binarised_skeleton')
    binarised_skeleton.inputs.operation = 'bin'
    workflow.connect(thresholded_skeleton, 'out_file', binarised_skeleton,
                     'in_file')

    # Create the skeleton distance map input following the TBSS convention:
    # (1 - fa_mask) + binarised skeleton, then run distancemap on it
    invert_mask1 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask1')
    invert_mask1.inputs.operation = 'mul'
    invert_mask1.inputs.operand_value = -1
    workflow.connect(fa_mask, 'out_file', invert_mask1, 'in_file')
    invert_mask2 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask2')
    invert_mask2.inputs.operation = 'add'
    invert_mask2.inputs.operand_value = 1
    workflow.connect(invert_mask1, 'out_file', invert_mask2, 'in_file')
    invert_mask3 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask3')
    invert_mask3.inputs.operation = 'add'
    workflow.connect(invert_mask2, 'out_file', invert_mask3, 'in_file')
    workflow.connect(binarised_skeleton, 'out_file', invert_mask3,
                     'operand_file')
    distance_map = pe.Node(interface=fsl.DistanceMap(), name='distance_map')
    workflow.connect(invert_mask3, 'out_file', distance_map, 'in_file')

    # Project the FA values onto the skeleton
    all_fa_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_fa_projected')
    all_fa_projected.inputs.threshold = skeleton_threshold
    all_fa_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_fa_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_fa_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_fa_projected,
                     'data_file')
    workflow.connect(bin_atlas, 'out_file', all_fa_projected,
                     'search_mask_file')

    # Project the MD values onto the skeleton
    all_md_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_md_projected')
    all_md_projected.inputs.threshold = skeleton_threshold
    all_md_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_md_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_md_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_md_projected,
                     'data_file')
    workflow.connect(merged_4d_md, 'merged_file', all_md_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_md_projected,
                     'search_mask_file')

    # Project the RD values onto the skeleton
    all_rd_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_rd_projected')
    all_rd_projected.inputs.threshold = skeleton_threshold
    all_rd_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_rd_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_rd_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_rd_projected,
                     'data_file')
    workflow.connect(merged_4d_rd, 'merged_file', all_rd_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_rd_projected,
                     'search_mask_file')

    # Project the AD values onto the skeleton
    all_ad_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_ad_projected')
    all_ad_projected.inputs.threshold = skeleton_threshold
    all_ad_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_ad_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_ad_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_ad_projected,
                     'data_file')
    workflow.connect(merged_4d_ad, 'merged_file', all_ad_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_ad_projected,
                     'search_mask_file')

    # Create an output node
    output_node = pe.Node(interface=niu.IdentityInterface(fields=[
        'mean_fa', 'all_fa_skeletonised', 'all_md_skeletonised',
        'all_rd_skeletonised', 'all_ad_skeletonised', 'skeleton',
        'skeleton_bin', 't_contrast_raw_stat', 't_contrast_uncorrected_pvalue',
        't_contrast_corrected_pvalue'
    ]),
                          name='output_node')

    # Connect the workflow to the output node
    workflow.connect(masked_mean_fa, 'out_file', output_node, 'mean_fa')
    workflow.connect(all_fa_projected, 'projected_data', output_node,
                     'all_fa_skeletonised')
    workflow.connect(all_md_projected, 'projected_data', output_node,
                     'all_md_skeletonised')
    workflow.connect(all_rd_projected, 'projected_data', output_node,
                     'all_rd_skeletonised')
    workflow.connect(all_ad_projected, 'projected_data', output_node,
                     'all_ad_skeletonised')
    workflow.connect(skeleton, 'skeleton_file', output_node, 'skeleton')
    workflow.connect(binarised_skeleton, 'out_file', output_node,
                     'skeleton_bin')

    # Run randomise if required and connect its output to the output node
    if design_mat is not None and design_con is not None:
        randomise = pe.Node(interface=fsl.Randomise(), name='randomise')
        randomise.inputs.base_name = 'stats_tbss'
        randomise.inputs.tfce2D = True
        randomise.inputs.num_perm = 5000
        workflow.connect(all_fa_projected, 'projected_data', randomise,
                         'in_file')
        randomise.inputs.design_mat = design_mat
        randomise.inputs.design_con = design_con
        workflow.connect(binarised_skeleton, 'out_file', randomise, 'mask')

        workflow.connect(randomise, 'tstat_files', output_node,
                         't_contrast_raw_stat')
        workflow.connect(randomise, 't_p_files', output_node,
                         't_contrast_uncorrected_pvalue')
        workflow.connect(randomise, 't_corrected_p_files', output_node,
                         't_contrast_corrected_pvalue')

    # Create nodes to rename the outputs
    mean_fa_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa', keep_ext=True),
                              name='mean_fa_renamer')
    workflow.connect(output_node, 'mean_fa', mean_fa_renamer, 'in_file')

    mean_sk_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa_skeleton', keep_ext=True),
                              name='mean_sk_renamer')
    workflow.connect(output_node, 'skeleton', mean_sk_renamer, 'in_file')

    bin_ske_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa_skeleton_mask', keep_ext=True),
                              name='bin_ske_renamer')
    workflow.connect(output_node, 'skeleton_bin', bin_ske_renamer, 'in_file')

    fa_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_fa_skeletonised', keep_ext=True),
                              name='fa_skel_renamer')
    workflow.connect(output_node, 'all_fa_skeletonised', fa_skel_renamer,
                     'in_file')
    md_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_md_skeletonised', keep_ext=True),
                              name='md_skel_renamer')
    workflow.connect(output_node, 'all_md_skeletonised', md_skel_renamer,
                     'in_file')
    rd_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_rd_skeletonised', keep_ext=True),
                              name='rd_skel_renamer')
    workflow.connect(output_node, 'all_rd_skeletonised', rd_skel_renamer,
                     'in_file')
    ad_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_ad_skeletonised', keep_ext=True),
                              name='ad_skel_renamer')
    workflow.connect(output_node, 'all_ad_skeletonised', ad_skel_renamer,
                     'in_file')

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='data_sink')
    ds.inputs.base_directory = os.path.abspath(output_dir)

    # Connect the data sink
    workflow.connect(mean_fa_renamer, 'out_file', ds, '@mean_fa')
    workflow.connect(mean_sk_renamer, 'out_file', ds, '@skel_fa')
    workflow.connect(bin_ske_renamer, 'out_file', ds, '@bin_skel_fa')
    workflow.connect(fa_skel_renamer, 'out_file', ds, '@all_fa')
    workflow.connect(md_skel_renamer, 'out_file', ds, '@all_md')
    workflow.connect(rd_skel_renamer, 'out_file', ds, '@all_rd')
    workflow.connect(ad_skel_renamer, 'out_file', ds, '@all_ad')

    if design_mat is not None and design_con is not None:
        workflow.connect(output_node, 't_contrast_raw_stat', ds,
                         '@t_contrast_raw_stat')
        workflow.connect(output_node, 't_contrast_uncorrected_pvalue', ds,
                         '@t_contrast_uncorrected_pvalue')
        workflow.connect(output_node, 't_contrast_corrected_pvalue', ds,
                         '@t_contrast_corrected_pvalue')

    return workflow
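
# A minimal usage sketch (hypothetical paths); design_mat and design_con
# are only needed when voxelwise statistics via randomise are wanted.
import glob
tbss_wf = create_cross_sectional_tbss_pipeline(
    in_files=sorted(glob.glob('/data/dtitk/*_tensor.nii.gz')),
    output_dir='/data/tbss_results',
    skeleton_threshold=0.2,
    design_mat='/data/design.mat',
    design_con='/data/design.con')
tbss_wf.run(plugin='MultiProc')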
Example #15
def preprocessing_input_pipeline(name='preprocessing_inputs_pipeline',
                                 number_of_affine_iterations=7,
                                 ref_file=mni_template,
                                 ref_mask=mni_template_mask):

    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    input_node = pe.Node(interface=niu.IdentityInterface(
        fields=['in_file', 'in_images', 'in_affines']),
                         name='input_node')
    '''
    *****************************************************************************
    First step: Crop the inputs to a 10-voxel margin around the skull
    *****************************************************************************
    '''
    register_mni_to_image = pe.Node(interface=niftyreg.RegAladin(),
                                    name='register_mni_to_image')
    register_mni_to_image.inputs.flo_file = ref_file
    resample_mni_mask_to_image = pe.Node(interface=niftyreg.RegResample(),
                                         name='resample_mni_mask_to_image')
    resample_mni_mask_to_image.inputs.inter_val = 'NN'
    resample_mni_mask_to_image.inputs.flo_file = ref_mask

    dilate_image_mask = pe.Node(interface=niftyseg.BinaryMaths(),
                                name='dilate_image_mask')
    dilate_image_mask.inputs.operation = 'dil'
    dilate_image_mask.inputs.operand_value = 10

    crop_image_with_mask = pe.Node(interface=niftk.CropImage(),
                                   name='crop_image_with_mask')

    resample_image_mask_to_cropped_image = pe.Node(
        interface=niftyreg.RegResample(),
        name='resample_image_mask_to_cropped_image')
    resample_image_mask_to_cropped_image.inputs.inter_val = 'NN'
    resample_image_mask_to_cropped_image.inputs.flo_file = ref_mask

    bias_correction = pe.Node(interface=niftk.N4BiasCorrection(),
                              name='bias_correction')
    bias_correction.inputs.in_downsampling = 2
    '''
    *****************************************************************************
    Second step: Calculate the cumulative input affine transformations
    *****************************************************************************
    '''
    register_mni_to_cropped_image = pe.Node(
        interface=niftyreg.RegAladin(), name='register_mni_to_cropped_image')
    register_mni_to_cropped_image.inputs.ref_file = ref_file

    invert_affine_transformations = pe.Node(
        niftyreg.RegTransform(),
        name='invert_affine_transformations')
    compose_affine_transformations = pe.MapNode(
        niftyreg.RegTransform(),
        name='compose_affine_transformations',
        iterfield=['comp_input2'])
    '''
    *****************************************************************************
    Third step: Non-linear registration of all pairs
    *****************************************************************************
    '''
    nonlinear_registration = pe.MapNode(interface=niftyreg.RegF3D(),
                                        name='nonlinear_registration',
                                        iterfield=['flo_file', 'aff_file'])
    nonlinear_registration.inputs.vel_flag = True
    nonlinear_registration.inputs.lncc_val = -5
    nonlinear_registration.inputs.maxit_val = 150
    nonlinear_registration.inputs.be_val = 0.025
    '''
    *****************************************************************************
    First step: Crop the inputs to a 10-voxel margin around the skull
    *****************************************************************************
    '''
    workflow.connect(input_node, 'in_file', register_mni_to_image, 'ref_file')
    workflow.connect(input_node, 'in_file', resample_mni_mask_to_image,
                     'ref_file')
    workflow.connect(register_mni_to_image, 'aff_file',
                     resample_mni_mask_to_image, 'aff_file')
    workflow.connect(resample_mni_mask_to_image, 'res_file', dilate_image_mask,
                     'in_file')
    workflow.connect(input_node, 'in_file', crop_image_with_mask, 'in_file')
    workflow.connect(dilate_image_mask, 'out_file', crop_image_with_mask,
                     'mask_file')
    workflow.connect(crop_image_with_mask, 'out_file',
                     resample_image_mask_to_cropped_image, 'ref_file')
    workflow.connect(register_mni_to_image, 'aff_file',
                     resample_image_mask_to_cropped_image, 'aff_file')
    workflow.connect(crop_image_with_mask, 'out_file', bias_correction,
                     'in_file')
    workflow.connect(resample_image_mask_to_cropped_image, 'res_file',
                     bias_correction, 'mask_file')
    '''
    *****************************************************************************
    Second step: Calculate the cumulative input affine transformations
    *****************************************************************************
    '''
    workflow.connect(bias_correction, 'out_file',
                     register_mni_to_cropped_image, 'flo_file')
    workflow.connect(register_mni_to_cropped_image, 'aff_file',
                     invert_affine_transformations, 'inv_aff_input')
    workflow.connect(invert_affine_transformations, 'out_file',
                     compose_affine_transformations, 'comp_input')
    workflow.connect(input_node, 'in_affines', compose_affine_transformations,
                     'comp_input2')
    '''
    *****************************************************************************
    Third step: Non-linear registration of all pairs
    *****************************************************************************
    '''

    workflow.connect(bias_correction, 'out_file', nonlinear_registration,
                     'ref_file')
    workflow.connect(input_node, 'in_images', nonlinear_registration,
                     'flo_file')
    workflow.connect(compose_affine_transformations, 'out_file',
                     nonlinear_registration, 'aff_file')
    '''
    *****************************************************************************
    Connect the outputs
    *****************************************************************************
    '''
    output_node = pe.Node(interface=niu.IdentityInterface(
        fields=['out_file', 'out_mask', 'out_aff', 'out_cpps', 'out_invcpps']),
                          name='output_node')
    workflow.connect(bias_correction, 'out_file', output_node, 'out_file')
    workflow.connect(resample_image_mask_to_cropped_image, 'res_file',
                     output_node, 'out_mask')
    workflow.connect(register_mni_to_cropped_image, 'aff_file', output_node,
                     'out_aff')
    workflow.connect(nonlinear_registration, 'cpp_file', output_node,
                     'out_cpps')
    workflow.connect(nonlinear_registration, 'invcpp_file', output_node,
                     'out_invcpps')

    return workflow
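
# A minimal usage sketch (hypothetical inputs): in_images and in_affines
# are matched element-wise, since they feed MapNodes that iterate together.
preproc = preprocessing_input_pipeline(name='preprocessing_inputs')
preproc.inputs.input_node.in_file = 'target_t1.nii.gz'
preproc.inputs.input_node.in_images = ['flo1.nii.gz', 'flo2.nii.gz']
preproc.inputs.input_node.in_affines = ['flo1_aff.txt', 'flo2_aff.txt']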
Example #16
    ]]
    dg.inputs.user = args.username
    dg.inputs.pwd = args.password
    dg.inputs.server = args.server
    dg.inputs.project = args.project
    dg.inputs.subject = subject

    dcm2nii = pe.Node(interface=mricron.Dcm2nii(), name='dcm2nii')
    dcm2nii.inputs.args = '-d n'
    dcm2nii.inputs.gzip_output = True
    dcm2nii.inputs.anonymize = False
    dcm2nii.inputs.reorient = True
    dcm2nii.inputs.reorient_and_crop = False
    # e.g. /project/ADNI/subjects/0002/experiments/*/assessors/BET_MASK/resources/NIFTI

    mni_to_input = pe.Node(interface=niftyreg.RegAladin(), name='mni_to_input')
    mni_to_input.inputs.flo_file = mni_template

    mask_resample = pe.Node(interface=niftyreg.RegResample(),
                            name='mask_resample')
    mask_resample.inputs.inter_val = 'NN'
    mask_resample.inputs.flo_file = mni_template_mask

    dsx = pe.Node(interface=nio.XNATSink(), name='dsx')
    dsx.inputs.user = args.username
    dsx.inputs.pwd = args.password
    dsx.inputs.server = args.server
    dsx.inputs.project_id = args.project
    dsx.inputs.subject_id = subject
    dsx.inputs.experiment_id = first_mr_experiment.label()
    dsx.inputs.assessor_id = 'BRAIN_MASK'
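
# This snippet is truncated; a hypothetical wiring of the mask propagation,
# mirroring the pattern of the other examples (the XNAT grabber and sink
# connections are omitted, and a niu.Select node may be needed if dcm2nii
# returns several converted series):
wf = pe.Workflow(name='brain_mask_workflow')
wf.connect(dcm2nii, 'converted_files', mni_to_input, 'ref_file')
wf.connect(dcm2nii, 'converted_files', mask_resample, 'ref_file')
wf.connect(mni_to_input, 'aff_file', mask_resample, 'trans_file')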
Example #17
def create_compute_suvr_pipeline(input_pet,
                                 input_mri,
                                 input_par,
                                 erode_ref,
                                 output_dir,
                                 name='compute_suvr',
                                 norm_region='cereb'):
    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = output_dir
    workflow.base_output_dir = name

    # Merge all the parcellations into a single binary image
    merge_roi = pe.MapNode(interface=niftyseg.UnaryMaths(),
                           name='merge_roi',
                           iterfield=['in_file'])
    merge_roi.inputs.in_file = input_par
    merge_roi.inputs.operation = 'bin'
    dilation = pe.MapNode(interface=niftyseg.BinaryMathsInteger(),
                          name='dilation',
                          iterfield=['in_file'])
    workflow.connect(merge_roi, 'out_file', dilation, 'in_file')
    dilation.inputs.operation = 'dil'
    dilation.inputs.operand_value = 5
    # Generate a mask for the PET image
    mask_pet = create_mask_from_functional()
    mask_pet.inputs.input_node.in_files = input_pet

    # The structural image is first registered to the PET image
    rigid_reg = pe.MapNode(interface=niftyreg.RegAladin(),
                           name='rigid_reg',
                           iterfield=['ref_file',
                                      'flo_file',
                                      'rmask_file',
                                      'fmask_file'])
    rigid_reg.inputs.rig_only_flag = True
    rigid_reg.inputs.verbosity_off_flag = True
    rigid_reg.inputs.v_val = 80
    rigid_reg.inputs.nosym_flag = False
    rigid_reg.inputs.ref_file = input_pet
    rigid_reg.inputs.flo_file = input_mri
    workflow.connect(mask_pet, 'output_node.mask_files', rigid_reg, 'rmask_file')
    workflow.connect(dilation, 'out_file', rigid_reg, 'fmask_file')
    # Propagate the ROIs into the pet space
    resampling = pe.MapNode(interface=niftyreg.RegResample(),
                            name='resampling',
                            iterfield=['ref_file', 'flo_file', 'trans_file'])
    resampling.inputs.inter_val = 'NN'
    resampling.inputs.verbosity_off_flag = True
    resampling.inputs.ref_file = input_pet
    resampling.inputs.flo_file = input_par
    workflow.connect(rigid_reg, 'aff_file', resampling, 'trans_file')
    # The PET image is normalised
    normalisation_workflow = create_regional_normalisation_pipeline(
        erode_ref=erode_ref)
    normalisation_workflow.inputs.input_node.input_files = input_pet
    workflow.connect(resampling, 'out_file',
                     normalisation_workflow, 'input_node.input_rois')
    if norm_region == 'pons':
        roi_indices = [35]
    elif norm_region == 'gm_cereb':
        roi_indices = [39, 40, 72, 73, 74]
    elif norm_region == 'wm_subcort':
        roi_indices = [45, 46]
    else:  # full cerebellum
        roi_indices = [39, 40, 41, 42, 72, 73, 74]
    normalisation_workflow.inputs.input_node.label_indices = roi_indices
    # The regional uptake values are computed
    regional_average_workflow = create_regional_average_pipeline(
        output_dir=output_dir, neuromorphometrics=True)
    workflow.connect(normalisation_workflow, 'output_node.out_files',
                     regional_average_workflow, 'input_node.in_files')
    workflow.connect(resampling, 'out_file',
                     regional_average_workflow, 'input_node.in_rois')
    # Create an output node
    output_node = pe.Node(
        interface=niu.IdentityInterface(
            fields=['norm_files',
                    'suvr_files',
                    'tran_files',
                    'out_par_files']),
        name='output_node')
    workflow.connect(normalisation_workflow, 'output_node.out_files',
                     output_node, 'norm_files')
    workflow.connect(regional_average_workflow, 'output_node.out_files',
                     output_node, 'suvr_files')
    workflow.connect(rigid_reg, 'aff_file', output_node, 'tran_files')
    workflow.connect(resampling, 'out_file', output_node, 'out_par_files')

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='data_sink')
    ds.inputs.base_directory = output_dir
    workflow.connect(output_node, 'norm_files', ds, '@norm_files')
    workflow.connect(output_node, 'tran_files', ds, '@tran_files')

    # Return the created workflow
    return workflow
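
# A minimal usage sketch (hypothetical paths; erode_ref is forwarded to the
# normalisation pipeline and assumed boolean here). The list inputs are
# matched element-wise across subjects.
suvr_wf = create_compute_suvr_pipeline(
    input_pet=['sub1_pet.nii.gz'],
    input_mri=['sub1_t1.nii.gz'],
    input_par=['sub1_parcellation.nii.gz'],
    erode_ref=True,
    output_dir='/data/suvr_results',
    norm_region='gm_cereb')
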
def create_asl_processing_workflow(in_inversion_recovery_file,
                                   in_asl_file,
                                   output_dir,
                                   in_t1_file=None,
                                   name='asl_processing_workflow'):

    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    subject_id = split_filename(os.path.basename(in_asl_file))[1]

    ir_splitter = pe.Node(interface=fsl.Split(
        dimension='t',
        out_base_name='out_',
        in_file=in_inversion_recovery_file),
                          name='ir_splitter')
    ir_selector = pe.Node(interface=niu.Select(index=[0, 2, 4]),
                          name='ir_selector')
    workflow.connect(ir_splitter, 'out_files', ir_selector, 'inlist')
    ir_merger = pe.Node(interface=fsl.Merge(dimension='t'), name='ir_merger')
    workflow.connect(ir_selector, 'out', ir_merger, 'in_files')
    fitqt1 = pe.Node(interface=niftyfit.FitQt1(TIs=[4, 2, 1], SR=True),
                     name='fitqt1')
    workflow.connect(ir_merger, 'merged_file', fitqt1, 'source_file')
    extract_ir_0 = pe.Node(interface=niftyseg.BinaryMathsInteger(
        operation='tp', operand_value=0, in_file=in_inversion_recovery_file),
                           name='extract_ir_0')
    ir_thresholder = pe.Node(interface=fsl.Threshold(thresh=250),
                             name='ir_thresholder')
    workflow.connect(extract_ir_0, 'out_file', ir_thresholder, 'in_file')
    create_mask = pe.Node(interface=fsl.UnaryMaths(operation='bin'),
                          name='create_mask')
    workflow.connect(ir_thresholder, 'out_file', create_mask, 'in_file')

    model_fitting = pe.Node(niftyfit.FitAsl(source_file=in_asl_file,
                                            pcasl=True,
                                            PLD=1800,
                                            LDD=1800,
                                            eff=0.614,
                                            mul=0.1),
                            name='model_fitting')
    workflow.connect(fitqt1, 'm0map', model_fitting, 'm0map')
    workflow.connect(create_mask, 'out_file', model_fitting, 'mask')

    # Rigidly register the ASL data (floating) to the T1 image (reference);
    # only used when in_t1_file is provided
    t1_to_asl_registration = pe.Node(niftyreg.RegAladin(rig_only_flag=True),
                                     name='t1_to_asl_registration')
    m0_resampling = pe.Node(niftyreg.RegResample(inter_val='LIN'),
                            name='m0_resampling')
    mc_resampling = pe.Node(niftyreg.RegResample(inter_val='LIN'),
                            name='mc_resampling')
    t1_resampling = pe.Node(niftyreg.RegResample(inter_val='LIN'),
                            name='t1_resampling')
    cbf_resampling = pe.Node(niftyreg.RegResample(inter_val='LIN'),
                             name='cbf_resampling')

    if in_t1_file:
        t1_to_asl_registration.inputs.flo_file = in_asl_file
        t1_to_asl_registration.inputs.ref_file = in_t1_file
        m0_resampling.inputs.ref_file = in_t1_file
        mc_resampling.inputs.ref_file = in_t1_file
        t1_resampling.inputs.ref_file = in_t1_file
        cbf_resampling.inputs.ref_file = in_t1_file
        workflow.connect(fitqt1, 'm0map', m0_resampling, 'flo_file')
        workflow.connect(fitqt1, 'mcmap', mc_resampling, 'flo_file')
        workflow.connect(fitqt1, 't1map', t1_resampling, 'flo_file')
        workflow.connect(model_fitting, 'cbf_file', cbf_resampling, 'flo_file')
        workflow.connect(t1_to_asl_registration, 'aff_file', m0_resampling,
                         'trans_file')
        workflow.connect(t1_to_asl_registration, 'aff_file', mc_resampling,
                         'trans_file')
        workflow.connect(t1_to_asl_registration, 'aff_file', t1_resampling,
                         'trans_file')
        workflow.connect(t1_to_asl_registration, 'aff_file', cbf_resampling,
                         'trans_file')

    maskrenamer = pe.Node(interface=niu.Rename(format_string=subject_id +
                                               '_mask',
                                               keep_ext=True),
                          name='maskrenamer')
    m0renamer = pe.Node(interface=niu.Rename(format_string=subject_id +
                                             '_m0map',
                                             keep_ext=True),
                        name='m0renamer')
    mcrenamer = pe.Node(interface=niu.Rename(format_string=subject_id +
                                             '_mcmap',
                                             keep_ext=True),
                        name='mcrenamer')
    t1renamer = pe.Node(interface=niu.Rename(format_string=subject_id +
                                             '_t1map',
                                             keep_ext=True),
                        name='t1renamer')
    workflow.connect(create_mask, 'out_file', maskrenamer, 'in_file')
    if in_t1_file:
        workflow.connect(m0_resampling, 'out_file', m0renamer, 'in_file')
        workflow.connect(mc_resampling, 'out_file', mcrenamer, 'in_file')
        workflow.connect(t1_resampling, 'out_file', t1renamer, 'in_file')
    else:
        workflow.connect(fitqt1, 'm0map', m0renamer, 'in_file')
        workflow.connect(fitqt1, 'mcmap', mcrenamer, 'in_file')
        workflow.connect(fitqt1, 't1map', t1renamer, 'in_file')

    ds = pe.Node(nio.DataSink(parameterization=False,
                              base_directory=output_dir),
                 name='ds')
    workflow.connect(maskrenamer, 'out_file', ds, '@mask_file')
    workflow.connect(m0renamer, 'out_file', ds, '@m0_file')
    workflow.connect(mcrenamer, 'out_file', ds, '@mc_file')
    workflow.connect(t1renamer, 'out_file', ds, '@t1_file')
    if in_t1_file:
        workflow.connect(cbf_resampling, 'out_file', ds, '@cbf_file')
    else:
        workflow.connect(model_fitting, 'cbf_file', ds, '@cbf_file')
    workflow.connect(model_fitting, 'error_file', ds, '@err_file')

    return workflow
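
# A minimal usage sketch (hypothetical paths); in_t1_file is optional and,
# when given, the fitted maps are additionally resampled into T1 space.
asl_wf = create_asl_processing_workflow(
    in_inversion_recovery_file='sub1_ir.nii.gz',
    in_asl_file='sub1_asl.nii.gz',
    output_dir='/data/asl_results',
    in_t1_file='sub1_t1.nii.gz')
asl_wf.run()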