Example #1
def _run_interface(self, runtime):
    # Paths to the MNI template and its brain mask shipped with FSL
    mni_template = os.path.join(os.environ['FSLDIR'], 'data', 'standard',
                                'MNI152_T1_2mm.nii.gz')
    mni_template_mask = os.path.join(os.environ['FSLDIR'], 'data',
                                     'standard',
                                     'MNI152_T1_2mm_brain_mask.nii.gz')
    in_file = self.inputs.in_file
    # Affine registration of the MNI template to the input image
    mni2input = niftyreg.RegAladin()
    mni2input.inputs.verbosity_off_flag = True
    mni2input.inputs.ref_file = in_file
    mni2input.inputs.flo_file = mni_template
    mni2input_res = mni2input.run()
    # Nearest-neighbour resampling to propagate the binary mask
    mask_resample = niftyreg.RegResample(inter_val='NN')
    if self.inputs.use_nrr:
        # Optional non-rigid refinement, initialised with the affine result
        mni2input_nrr = niftyreg.RegF3D()
        mni2input_nrr.inputs.verbosity_off_flag = True
        mni2input_nrr.inputs.ref_file = in_file
        mni2input_nrr.inputs.flo_file = mni_template
        mni2input_nrr.inputs.aff_file = mni2input_res.outputs.aff_file
        mni2input_nrr.inputs.vel_flag = True
        mni2input_nrr_res = mni2input_nrr.run()
        mask_resample.inputs.trans_file = mni2input_nrr_res.outputs.cpp_file
    else:
        mask_resample.inputs.trans_file = mni2input_res.outputs.aff_file
    mask_resample.inputs.ref_file = in_file
    mask_resample.inputs.flo_file = mni_template_mask
    mask_resample_res = mask_resample.run()
    # Fill any holes in the propagated brain mask
    fill_mask = niftyseg.UnaryMaths(operation='fill')
    fill_mask.inputs.in_file = mask_resample_res.outputs.out_file
    fill_mask.run()
    return runtime
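
This _run_interface method is meant to live on a nipype interface class, so the snippet alone is not importable. Below is a minimal enclosing skeleton; the class name MNIBrainMask and the exact input spec are assumptions inferred from the attributes the method reads (in_file, use_nrr):

import os
from nipype.interfaces import niftyreg, niftyseg
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
                                    File, traits)

class MNIBrainMaskInputSpec(BaseInterfaceInputSpec):
    # Hypothetical trait names matching the self.inputs.* accesses above
    in_file = File(exists=True, mandatory=True)
    use_nrr = traits.Bool(False, usedefault=True)

class MNIBrainMask(BaseInterface):
    input_spec = MNIBrainMaskInputSpec
    # paste the _run_interface method shown above into this class
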
Example #2
def create_mask_from_functional():
    workflow = pe.Workflow(name='mask_func')
    workflow.base_dir = os.getcwd()
    workflow.base_output_dir = 'mask_func'
    # Create all the required nodes
    input_node = pe.Node(interface=niu.IdentityInterface(fields=['in_files']),
                         name='input_node')
    otsu_filter = pe.MapNode(interface=niftyseg.UnaryMaths(),
                             name='otsu_filter',
                             iterfield=['in_file'])
    erosion_filter = pe.MapNode(interface=niftyseg.BinaryMathsInteger(),
                                name='erosion_filter',
                                iterfield=['in_file'])
    lconcomp_filter = pe.MapNode(interface=niftyseg.UnaryMaths(),
                                 name='lconcomp_filter',
                                 iterfield=['in_file'])
    dilation_filter = pe.MapNode(interface=niftyseg.BinaryMathsInteger(),
                                 name='dilation_filter',
                                 iterfield=['in_file'])
    fill_filter = pe.MapNode(interface=niftyseg.UnaryMaths(),
                             name='fill_filter',
                             iterfield=['in_file'])
    output_node = pe.Node(interface=niu.IdentityInterface(fields=['mask_files']),
                          name='output_node')
    # Define the node options
    otsu_filter.inputs.operation = 'otsu'
    erosion_filter.inputs.operation = 'ero'
    erosion_filter.inputs.operand_value = 1
    lconcomp_filter.inputs.operation = 'lconcomp'
    dilation_filter.inputs.operation = 'dil'
    dilation_filter.inputs.operand_value = 5
    fill_filter.inputs.operation = 'fill'
    fill_filter.inputs.output_datatype = 'char'
    # Create the connections
    workflow.connect(input_node, 'in_files', otsu_filter, 'in_file')
    workflow.connect(otsu_filter, 'out_file', erosion_filter, 'in_file')
    workflow.connect(erosion_filter, 'out_file', lconcomp_filter, 'in_file')
    workflow.connect(lconcomp_filter, 'out_file', dilation_filter, 'in_file')
    workflow.connect(dilation_filter, 'out_file', fill_filter, 'in_file')
    workflow.connect(fill_filter, 'out_file', output_node, 'mask_files')

    return workflow
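
A minimal usage sketch, assuming nipype and the NiftySeg command-line tools are installed; the input file name is a placeholder. The otsu/erode/lconcomp/dilate/fill chain turns each intensity image into a single filled brain mask:

wf = create_mask_from_functional()
wf.inputs.input_node.in_files = ['func.nii.gz']  # placeholder path
wf.run()
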
Example #3
def create_compute_suvr_pipeline(input_pet,
                                 input_mri,
                                 input_par,
                                 erode_ref,
                                 output_dir,
                                 name='compute_suvr',
                                 norm_region='cereb'):
    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = output_dir
    workflow.base_output_dir = name

    # Merge all the parcellation labels into a binary image
    merge_roi = pe.MapNode(interface=niftyseg.UnaryMaths(),
                           name='merge_roi',
                           iterfield=['in_file'])
    merge_roi.inputs.in_file = input_par
    merge_roi.inputs.operation = 'bin'
    dilation = pe.MapNode(interface=niftyseg.BinaryMathsInteger(),
                          name='dilation',
                          iterfield=['in_file'])
    workflow.connect(merge_roi, 'out_file', dilation, 'in_file')
    dilation.inputs.operation = 'dil'
    dilation.inputs.operand_value = 5
    # Generate a mask for the PET image
    mask_pet = create_mask_from_functional()
    mask_pet.inputs.input_node.in_files = input_pet

    # The structural image is first registered to the PET image
    rigid_reg = pe.MapNode(interface=niftyreg.RegAladin(),
                           name='rigid_reg',
                           iterfield=['ref_file',
                                      'flo_file',
                                      'rmask_file',
                                      'fmask_file'])
    rigid_reg.inputs.rig_only_flag = True
    rigid_reg.inputs.verbosity_off_flag = True
    rigid_reg.inputs.v_val = 80
    rigid_reg.inputs.nosym_flag = False
    rigid_reg.inputs.ref_file = input_pet
    rigid_reg.inputs.flo_file = input_mri
    workflow.connect(mask_pet, 'output_node.mask_files', rigid_reg, 'rmask_file')
    workflow.connect(dilation, 'out_file', rigid_reg, 'fmask_file')
    # Propagate the ROIs into the pet space
    resampling = pe.MapNode(interface=niftyreg.RegResample(),
                            name='resampling',
                            iterfield=['ref_file', 'flo_file', 'trans_file'])
    resampling.inputs.inter_val = 'NN'
    resampling.inputs.verbosity_off_flag = True
    resampling.inputs.ref_file = input_pet
    resampling.inputs.flo_file = input_par
    workflow.connect(rigid_reg, 'aff_file', resampling, 'trans_file')
    # The PET image is normalised
    normalisation_workflow = create_regional_normalisation_pipeline(erode_ref=erode_ref)
    normalisation_workflow.inputs.input_node.input_files = input_pet
    workflow.connect(resampling, 'out_file', normalisation_workflow, 'input_node.input_rois')
    if norm_region == 'pons':
        roi_indices = [35]
    elif norm_region == 'gm_cereb':
        roi_indices = [39, 40, 72, 73, 74]
    elif norm_region == 'wm_subcort':
        roi_indices = [45, 46]
    else:  # full cerebellum
        roi_indices = [39, 40, 41, 42, 72, 73, 74]
    normalisation_workflow.inputs.input_node.label_indices = roi_indices
    # The regional uptake values are computed
    regional_average_workflow = create_regional_average_pipeline(output_dir=output_dir, neuromorphometrics=True)
    workflow.connect(normalisation_workflow, 'output_node.out_files',
                     regional_average_workflow, 'input_node.in_files')
    workflow.connect(resampling, 'out_file',
                     regional_average_workflow, 'input_node.in_rois')
    # Create an output node
    output_node = pe.Node(
        interface=niu.IdentityInterface(
            fields=['norm_files',
                    'suvr_files',
                    'tran_files',
                    'out_par_files']),
        name='output_node')
    workflow.connect(normalisation_workflow, 'output_node.out_files',
                     output_node, 'norm_files')
    workflow.connect(regional_average_workflow, 'output_node.out_files',
                     output_node, 'suvr_files')
    workflow.connect(rigid_reg, 'aff_file', output_node, 'tran_files')
    workflow.connect(resampling, 'out_file', output_node, 'out_par_files')

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='data_sink')
    ds.inputs.base_directory = output_dir
    workflow.connect(output_node, 'norm_files', ds, '@norm_files')
    workflow.connect(output_node, 'tran_files', ds, '@tran_files')

    # Return the created workflow
    return workflow
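
A hedged usage sketch with placeholder file lists; create_regional_normalisation_pipeline and create_regional_average_pipeline are defined elsewhere in the original module, and the semantics of erode_ref belong to them:

pet_files = ['sub-01_pet.nii.gz']            # placeholders
mri_files = ['sub-01_t1.nii.gz']
par_files = ['sub-01_parcellation.nii.gz']
wf = create_compute_suvr_pipeline(pet_files, mri_files, par_files,
                                  erode_ref=False, output_dir='/tmp/suvr',
                                  norm_region='gm_cereb')
wf.run(plugin='MultiProc')
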
Example #4
def create_binary_to_meshes(label,
                            name='gw_binary_to_meshes',
                            reduction_rate=0.3,
                            operand_value=1):
    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    # Create the input node
    input_node = pe.Node(niu.IdentityInterface(fields=[
        'input_images', 'input_parcellations', 'input_reference',
        'trans_files', 'ref_file'
    ]),
                         name='input_node')

    # Create the output node
    output_node = pe.Node(niu.IdentityInterface(fields=['output_meshes']),
                          name='output_node')

    # Extract the relevant label from the GIF parcellation
    extract_label = pe.MapNode(interface=MergeLabels(),
                               iterfield=['in_file'],
                               name='extract_label')
    extract_label.inputs.roi_list = label
    workflow.connect(input_node, 'input_parcellations', extract_label,
                     'in_file')

    # Remove spurious segmentation components: erosion.
    erode_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='ero', operand_value=operand_value),
                                iterfield=['in_file'],
                                name='erode_binaries')
    workflow.connect(extract_label, 'out_file', erode_binaries, 'in_file')

    # Remove spurious segmentation components: dilation.
    dilate_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=operand_value),
                                 iterfield=['in_file'],
                                 name='dilate_binaries')
    workflow.connect(erode_binaries, 'out_file', dilate_binaries, 'in_file')

    # Apply the relevant transformations to the ROIs
    apply_affine = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN'),
                              iterfield=['flo_file', 'trans_file'],
                              name='apply_affine')
    workflow.connect(input_node, 'trans_files', apply_affine, 'trans_file')
    workflow.connect(input_node, 'ref_file', apply_affine, 'ref_file')
    workflow.connect(dilate_binaries, 'out_file', apply_affine, 'flo_file')

    # Compute the large ROI that corresponds to the union of all warped labels
    extract_union_roi = pe.Node(interface=niftyreg.RegAverage(),
                                name='extract_union_roi')
    workflow.connect(apply_affine, 'out_file', extract_union_roi, 'avg_files')

    # Binarise the average ROI
    binarise_roi = pe.Node(interface=niftyseg.UnaryMaths(operation='bin'),
                           name='binarise_roi')
    workflow.connect(extract_union_roi, 'out_file', binarise_roi, 'in_file')

    # Dilate the binarised union ROI
    dilate_roi = pe.Node(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=4),
                         name='dilate_roi')
    workflow.connect(binarise_roi, 'out_file', dilate_roi, 'in_file')

    # Refine the alignment of each image with a rigid registration step
    apply_rigid_refinement = pe.MapNode(interface=niftyreg.RegAladin(
        rig_only_flag=True, ln_val=1),
                                        iterfield=['flo_file', 'in_aff_file'],
                                        name='apply_rigid_refinement')
    workflow.connect(input_node, 'input_images', apply_rigid_refinement,
                     'flo_file')
    workflow.connect(input_node, 'ref_file', apply_rigid_refinement,
                     'ref_file')
    workflow.connect(input_node, 'trans_files', apply_rigid_refinement,
                     'in_aff_file')
    workflow.connect(dilate_roi, 'out_file', apply_rigid_refinement,
                     'rmask_file')

    # Resample the binary masks with the refined rigid transformations
    final_resampling = pe.MapNode(
        interface=niftyreg.RegResample(inter_val='NN'),
        iterfield=['flo_file', 'trans_file'],
        name='final_resampling')
    workflow.connect(apply_rigid_refinement, 'aff_file', final_resampling,
                     'trans_file')
    workflow.connect(input_node, 'ref_file', final_resampling, 'ref_file')
    workflow.connect(dilate_binaries, 'out_file', final_resampling, 'flo_file')

    # Extract the mesh corresponding to the label
    extract_mesh = pe.MapNode(
        interface=Image2VtkMesh(in_reductionRate=reduction_rate),
        iterfield=['in_file'],
        name='extract_mesh')
    workflow.connect(final_resampling, 'out_file', extract_mesh, 'in_file')
    # workflow.connect(apply_rigid_refinement, 'aff_file', extract_mesh, 'matrix_file')

    # Create a renamer for the average image
    groupwise_renamer = pe.Node(interface=niu.Rename(format_string='atlas',
                                                     keep_ext=True),
                                name='groupwise_renamer')
    workflow.connect(input_node, 'ref_file', groupwise_renamer, 'in_file')

    workflow.connect(extract_mesh, 'out_file', output_node, 'output_meshes')
    return workflow
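
A usage sketch for this mesh-extraction factory, assuming MergeLabels and Image2VtkMesh (imported in the original module) are available; all file names and the label index are placeholders:

wf = create_binary_to_meshes(label=[5])
wf.inputs.input_node.input_images = ['s1_t1.nii.gz', 's2_t1.nii.gz']
wf.inputs.input_node.input_parcellations = ['s1_par.nii.gz', 's2_par.nii.gz']
wf.inputs.input_node.trans_files = ['s1_aff.txt', 's2_aff.txt']
wf.inputs.input_node.ref_file = 'groupwise_average.nii.gz'
wf.run()
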
Example #5
def create_steps_propagation_pipeline(name='steps_propagation',
                                      aligned_templates=False):

    workflow = pe.Workflow(name=name)

    # Create an input node
    input_node = pe.Node(
        interface=niu.IdentityInterface(
            fields=['in_file',
                    'database_file']),
        name='input_node')

    extract_db_info = pe.Node(
        interface=niu.Function(
            input_names=['in_db_file'],
            output_names=['input_template_images', 'input_template_labels'],
            function=extract_db_info_function),
        name='extract_db_info')
    workflow.connect(input_node, 'database_file', extract_db_info, 'in_db_file')

    # All the template images are affinely registered to the target image
    current_aladin = pe.MapNode(interface=niftyreg.RegAladin(verbosity_off_flag=True),
                                name='aladin',
                                iterfield=['flo_file'])
    workflow.connect(input_node, 'in_file', current_aladin, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_images', current_aladin, 'flo_file')

    # Compute a robust (LTS) average of the affine transformations if required
    current_robust_affine = None
    if aligned_templates is True:
        current_robust_affine = pe.Node(interface=niftyreg.RegAverage(), name='robust_affine')
        workflow.connect(current_aladin, 'aff_file', current_robust_affine, 'avg_lts_files')
        current_aff_prop = pe.MapNode(interface=niftyreg.RegResample(verbosity_off_flag=True, inter_val='NN'),
                                      name='resample_aff',
                                      iterfield=['flo_file'])
        workflow.connect(current_robust_affine, 'out_file', current_aff_prop, 'trans_file')
    else:
        current_aff_prop = pe.MapNode(interface=niftyreg.RegResample(verbosity_off_flag=True, inter_val='NN'),
                                      name='resample_aff',
                                      iterfield=['flo_file',
                                                 'trans_file'])
        workflow.connect(current_aladin, 'aff_file', current_aff_prop, 'trans_file')
    workflow.connect(input_node, 'in_file', current_aff_prop, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_labels', current_aff_prop, 'flo_file')

    # Merge all the affinely propagated parcellations into one 4D image
    current_aff_prop_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_aff_prop')
    workflow.connect(current_aff_prop, 'out_file', current_aff_prop_merge, 'in_files')

    # Combine all the propagated parcellations into a single image
    current_aff_prop_max = pe.Node(interface=MaxImage(dimension='T'), name='max_aff')
    workflow.connect(current_aff_prop_merge, 'merged_file', current_aff_prop_max, 'in_file')

    # Binarise the obtained mask
    current_aff_prop_bin = pe.Node(interface=niftyseg.UnaryMaths(operation='bin'), name='bin_aff')
    workflow.connect(current_aff_prop_max, 'out_file', current_aff_prop_bin, 'in_file')

    # Dilate the obtained mask
    current_aff_prop_dil = pe.Node(interface=niftyseg.BinaryMathsInteger(operation='dil', operand_value=10),
                                   name='dil_aff')
    workflow.connect(current_aff_prop_bin, 'out_file', current_aff_prop_dil, 'in_file')

    # Fill the obtained mask
    current_aff_prop_fill = pe.Node(interface=niftyseg.UnaryMaths(operation='fill'), name='fill_aff')
    workflow.connect(current_aff_prop_dil, 'out_file', current_aff_prop_fill, 'in_file')

    # Crop the target image to speed up the process
    current_crop_target = pe.Node(interface=CropImage(), name='crop_target')
    workflow.connect(input_node, 'in_file', current_crop_target, 'in_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_target, 'mask_file')

    # Crop the mask image to speed up the process
    current_crop_mask = pe.Node(interface=CropImage(), name='crop_mask')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_mask, 'in_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_crop_mask, 'mask_file')

    # Perform all the non-linear registrations
    if aligned_templates is True:
        current_f3d = pe.MapNode(interface=niftyreg.RegF3D(sx_val=-2.5, be_val=0.01, verbosity_off_flag=True),
                                 name='f3d',
                                 iterfield=['flo_file'])
        workflow.connect(current_robust_affine, 'out_file', current_f3d, 'aff_file')
    else:
        current_f3d = pe.MapNode(interface=niftyreg.RegF3D(),
                                 name='f3d',
                                 iterfield=['flo_file',
                                            'aff_file'])
        workflow.connect(current_aladin, 'aff_file', current_f3d, 'aff_file')
    workflow.connect(current_crop_target, 'out_file', current_f3d, 'ref_file')
    workflow.connect(current_crop_mask, 'out_file', current_f3d, 'rmask_file')
    workflow.connect(extract_db_info, 'input_template_images', current_f3d, 'flo_file')

    # Merge all the non-linearly warped images into one 4D image
    current_f3d_temp_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_f3d_temp')
    workflow.connect(current_f3d, 'res_file', current_f3d_temp_merge, 'in_files')

    # Propagate the template labels with the non-linear transformations
    current_f3d_prop = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN', verbosity_off_flag=True),
                                  name='f3d_prop',
                                  iterfield=['flo_file',
                                             'trans_file'])
    workflow.connect(current_crop_target, 'out_file', current_f3d_prop, 'ref_file')
    workflow.connect(extract_db_info, 'input_template_labels', current_f3d_prop, 'flo_file')
    workflow.connect(current_f3d, 'cpp_file', current_f3d_prop, 'trans_file')

    # Merge all the non-linearly warped labels into one 4D image
    current_f3d_prop_merge = pe.Node(interface=fsl.Merge(dimension='t'), name='merge_f3d_prop')
    workflow.connect(current_f3d_prop, 'out_file', current_f3d_prop_merge, 'in_files')

    # Extract the consensus parcellation using STEPS
    current_fusion = pe.Node(interface=niftyseg.STEPS(template_num=15, kernel_size=1.5, mrf_value=0.15),
                             name='fusion')
    workflow.connect(current_crop_target, 'out_file', current_fusion, 'in_file')
    workflow.connect(current_f3d_temp_merge, 'merged_file', current_fusion, 'warped_img_file')
    workflow.connect(current_f3d_prop_merge, 'merged_file', current_fusion, 'warped_seg_file')
    workflow.connect(current_aff_prop_fill, 'out_file', current_fusion, 'mask_file')

    # Resample the obtained consensus label into the original image space
    current_prop_orig_res = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN', verbosity_off_flag=True),
                                       name='prop_orig_res',
                                       iterfield=['flo_file'])
    workflow.connect(input_node, 'in_file', current_prop_orig_res, 'ref_file')
    workflow.connect(current_fusion, 'out_file', current_prop_orig_res, 'flo_file')

    # Connect the output to the output node
    output_node = pe.Node(
        interface=niu.IdentityInterface(
            fields=['parcellated_file']),
        name='output_node')
    workflow.connect(current_prop_orig_res, 'out_file', output_node, 'parcellated_file')

    return workflow
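
A usage sketch, assuming the helpers referenced above (extract_db_info_function, MaxImage, CropImage and the niftyseg STEPS interface) are importable from the original module; file names are placeholders, and the database format is whatever extract_db_info_function expects:

wf = create_steps_propagation_pipeline(aligned_templates=False)
wf.inputs.input_node.in_file = 'target_t1.nii.gz'     # placeholder
wf.inputs.input_node.database_file = 'templates.csv'  # placeholder
wf.run(plugin='MultiProc')
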
Example #6
def create_cross_sectional_tbss_pipeline(in_files,
                                         output_dir,
                                         name='cross_sectional_tbss',
                                         skeleton_threshold=0.2,
                                         design_mat=None,
                                         design_con=None):
    workflow = pe.Workflow(name=name)
    workflow.base_dir = output_dir
    workflow.base_output_dir = name

    # Create the dtitk groupwise registration workflow
    groupwise_dtitk = create_dtitk_groupwise_workflow(in_files=in_files,
                                                      name="dtitk_groupwise",
                                                      rig_iteration=3,
                                                      aff_iteration=3,
                                                      nrr_iteration=6)

    # Create the average FA map
    mean_fa = pe.Node(interface=dtitk.TVtool(), name="mean_fa")
    workflow.connect(groupwise_dtitk, 'output_node.out_template', mean_fa,
                     'in_file')
    mean_fa.inputs.operation = 'fa'

    # Register the FMRIB58_FA_1mm.nii.gz atlas to the mean FA map
    reg_atlas = pe.Node(interface=niftyreg.RegAladin(), name='reg_atlas')
    workflow.connect(mean_fa, 'out_file', reg_atlas, 'ref_file')
    reg_atlas.inputs.flo_file = os.path.join(os.environ['FSLDIR'], 'data',
                                             'standard',
                                             'FMRIB58_FA_1mm.nii.gz')

    # Apply the transformation to the lower cingulum image
    war_atlas = pe.Node(interface=niftyreg.RegResample(), name='war_atlas')
    workflow.connect(mean_fa, 'out_file', war_atlas, 'ref_file')
    war_atlas.inputs.flo_file = os.path.join(os.environ['FSLDIR'], 'data',
                                             'standard',
                                             'LowerCingulum_1mm.nii.gz')
    workflow.connect(reg_atlas, 'aff_file', war_atlas, 'trans_file')
    war_atlas.inputs.inter_val = 'LIN'

    # Threshold the propagated lower cingulum
    thr_atlas = pe.Node(interface=niftyseg.BinaryMaths(), name='thr_atlas')
    workflow.connect(war_atlas, 'out_file', thr_atlas, 'in_file')
    thr_atlas.inputs.operation = 'thr'
    thr_atlas.inputs.operand_value = 0.5

    # Binarise the propagated lower cingulum
    bin_atlas = pe.Node(interface=niftyseg.UnaryMaths(), name='bin_atlas')
    workflow.connect(thr_atlas, 'out_file', bin_atlas, 'in_file')
    bin_atlas.inputs.operation = 'bin'

    # Create all the individual FA maps
    individual_fa = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_fa",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_fa,
                     'in_file')
    individual_fa.inputs.operation = 'fa'

    # Create all the individual MD maps
    individual_md = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_md",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_md,
                     'in_file')
    individual_md.inputs.operation = 'tr'

    # Create all the individual RD maps
    individual_rd = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_rd",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_rd,
                     'in_file')
    individual_rd.inputs.operation = 'rd'

    # Create all the individual AD maps
    individual_ad = pe.MapNode(interface=dtitk.TVtool(),
                               name="individual_ad",
                               iterfield=['in_file'])
    workflow.connect(groupwise_dtitk, 'output_node.out_res', individual_ad,
                     'in_file')
    individual_ad.inputs.operation = 'ad'

    # Combine all the warped FA images into a 4D image
    merged_4d_fa = pe.Node(interface=fsl.Merge(), name='merged_4d_fa')
    merged_4d_fa.inputs.dimension = 't'
    workflow.connect(individual_fa, 'out_file', merged_4d_fa, 'in_files')

    # Combine all the warped MD images into a 4D image
    merged_4d_md = pe.Node(interface=fsl.Merge(), name='merged_4d_md')
    merged_4d_md.inputs.dimension = 't'
    workflow.connect(individual_md, 'out_file', merged_4d_md, 'in_files')

    # Combine all the warped RD images into a 4D image
    merged_4d_rd = pe.Node(interface=fsl.Merge(), name='merged_4d_rd')
    merged_4d_rd.inputs.dimension = 't'
    workflow.connect(individual_rd, 'out_file', merged_4d_rd, 'in_files')

    # Combine all the warped AD images into a 4D image
    merged_4d_ad = pe.Node(interface=fsl.Merge(), name='merged_4d_ad')
    merged_4d_ad.inputs.dimension = 't'
    workflow.connect(individual_ad, 'out_file', merged_4d_ad, 'in_files')

    # Threshold the 4D FA image at 0
    merged_4d_fa_thresholded = pe.Node(interface=niftyseg.BinaryMaths(),
                                       name='merged_4d_fa_thresholded')
    merged_4d_fa_thresholded.inputs.operation = 'thr'
    merged_4d_fa_thresholded.inputs.operand_value = 0
    workflow.connect(merged_4d_fa, 'merged_file', merged_4d_fa_thresholded,
                     'in_file')

    # Extract the min value from the 4D FA image
    minimal_value_across_all_fa = pe.Node(interface=niftyseg.UnaryMaths(),
                                          name='minimal_value_across_all_fa')
    minimal_value_across_all_fa.inputs.operation = 'tmin'
    workflow.connect(merged_4d_fa_thresholded, 'out_file',
                     minimal_value_across_all_fa, 'in_file')

    # Create the mask image
    fa_mask = pe.Node(interface=niftyseg.UnaryMaths(), name='fa_mask')
    fa_mask.inputs.operation = 'bin'
    fa_mask.inputs.output_datatype = 'char'
    workflow.connect(minimal_value_across_all_fa, 'out_file', fa_mask,
                     'in_file')

    # Mask the mean FA image
    masked_mean_fa = pe.Node(interface=fsl.ApplyMask(), name='masked_mean_fa')
    workflow.connect(mean_fa, 'out_file', masked_mean_fa, 'in_file')
    workflow.connect(fa_mask, 'out_file', masked_mean_fa, 'mask_file')

    # Create the skeleton image
    skeleton = pe.Node(interface=fsl.TractSkeleton(), name='skeleton')
    skeleton.inputs.skeleton_file = True
    workflow.connect(masked_mean_fa, 'out_file', skeleton, 'in_file')

    # Threshold the skeleton image
    thresholded_skeleton = pe.Node(interface=niftyseg.BinaryMaths(),
                                   name='thresholded_skeleton')
    thresholded_skeleton.inputs.operation = 'thr'
    thresholded_skeleton.inputs.operand_value = skeleton_threshold
    workflow.connect(skeleton, 'skeleton_file', thresholded_skeleton,
                     'in_file')

    # Binarise the skeleton image
    binarised_skeleton = pe.Node(interface=niftyseg.UnaryMaths(),
                                 name='binarised_skeleton')
    binarised_skeleton.inputs.operation = 'bin'
    workflow.connect(thresholded_skeleton, 'out_file', binarised_skeleton,
                     'in_file')

    # Create skeleton distance map
    invert_mask1 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask1')
    invert_mask1.inputs.operation = 'mul'
    invert_mask1.inputs.operand_value = -1
    workflow.connect(fa_mask, 'out_file', invert_mask1, 'in_file')
    invert_mask2 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask2')
    invert_mask2.inputs.operation = 'add'
    invert_mask2.inputs.operand_value = 1
    workflow.connect(invert_mask1, 'out_file', invert_mask2, 'in_file')
    invert_mask3 = pe.Node(interface=niftyseg.BinaryMaths(),
                           name='invert_mask3')
    invert_mask3.inputs.operation = 'add'
    workflow.connect(invert_mask2, 'out_file', invert_mask3, 'in_file')
    workflow.connect(binarised_skeleton, 'out_file', invert_mask3,
                     'operand_file')
    distance_map = pe.Node(interface=fsl.DistanceMap(), name='distance_map')
    workflow.connect(invert_mask3, 'out_file', distance_map, 'in_file')

    # Project the FA values onto the skeleton
    all_fa_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_fa_projected')
    all_fa_projected.inputs.threshold = skeleton_threshold
    all_fa_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_fa_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_fa_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_fa_projected,
                     'data_file')
    workflow.connect(bin_atlas, 'out_file', all_fa_projected,
                     'search_mask_file')

    # Project the MD values onto the skeleton
    all_md_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_md_projected')
    all_md_projected.inputs.threshold = skeleton_threshold
    all_md_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_md_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_md_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_md_projected,
                     'data_file')
    workflow.connect(merged_4d_md, 'merged_file', all_md_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_md_projected,
                     'search_mask_file')

    # Project the RD values onto the skeleton
    all_rd_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_rd_projected')
    all_rd_projected.inputs.threshold = skeleton_threshold
    all_rd_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_rd_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_rd_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_rd_projected,
                     'data_file')
    workflow.connect(merged_4d_rd, 'merged_file', all_rd_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_rd_projected,
                     'search_mask_file')

    # Project the AD values onto the skeleton
    all_ad_projected = pe.Node(interface=fsl.TractSkeleton(),
                               name='all_ad_projected')
    all_ad_projected.inputs.threshold = skeleton_threshold
    all_ad_projected.inputs.project_data = True
    workflow.connect(masked_mean_fa, 'out_file', all_ad_projected, 'in_file')
    workflow.connect(distance_map, 'distance_map', all_ad_projected,
                     'distance_map')
    workflow.connect(merged_4d_fa, 'merged_file', all_ad_projected,
                     'data_file')
    workflow.connect(merged_4d_ad, 'merged_file', all_ad_projected,
                     'alt_data_file')
    workflow.connect(bin_atlas, 'out_file', all_ad_projected,
                     'search_mask_file')

    # Create an output node
    output_node = pe.Node(interface=niu.IdentityInterface(fields=[
        'mean_fa', 'all_fa_skeletonised', 'all_md_skeletonised',
        'all_rd_skeletonised', 'all_ad_skeletonised', 'skeleton',
        'skeleton_bin', 't_contrast_raw_stat', 't_contrast_uncorrected_pvalue',
        't_contrast_corrected_pvalue'
    ]),
                          name='output_node')

    # Connect the workflow to the output node
    workflow.connect(masked_mean_fa, 'out_file', output_node, 'mean_fa')
    workflow.connect(all_fa_projected, 'projected_data', output_node,
                     'all_fa_skeletonised')
    workflow.connect(all_md_projected, 'projected_data', output_node,
                     'all_md_skeletonised')
    workflow.connect(all_rd_projected, 'projected_data', output_node,
                     'all_rd_skeletonised')
    workflow.connect(all_ad_projected, 'projected_data', output_node,
                     'all_ad_skeletonised')
    workflow.connect(skeleton, 'skeleton_file', output_node, 'skeleton')
    workflow.connect(binarised_skeleton, 'out_file', output_node,
                     'skeleton_bin')

    # Run randomise if required and connect its output to the output node
    if design_mat is not None and design_con is not None:
        randomise = pe.Node(interface=fsl.Randomise(), name='randomise')
        randomise.inputs.base_name = 'stats_tbss'
        randomise.inputs.tfce2D = True
        randomise.inputs.num_perm = 5000
        workflow.connect(all_fa_projected, 'projected_data', randomise,
                         'in_file')
        randomise.inputs.design_mat = design_mat
        randomise.inputs.design_con = design_con
        workflow.connect(binarised_skeleton, 'out_file', randomise, 'mask')

        workflow.connect(randomise, 'tstat_files', output_node,
                         't_contrast_raw_stat')
        workflow.connect(randomise, 't_p_files', output_node,
                         't_contrast_uncorrected_pvalue')
        workflow.connect(randomise, 't_corrected_p_files', output_node,
                         't_contrast_corrected_pvalue')

    # Create nodes to rename the outputs
    mean_fa_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa', keep_ext=True),
                              name='mean_fa_renamer')
    workflow.connect(output_node, 'mean_fa', mean_fa_renamer, 'in_file')

    mean_sk_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa_skeleton', keep_ext=True),
                              name='mean_sk_renamer')
    workflow.connect(output_node, 'skeleton', mean_sk_renamer, 'in_file')

    bin_ske_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_mean_fa_skeleton_mask', keep_ext=True),
                              name='bin_ske_renamer')
    workflow.connect(output_node, 'skeleton_bin', bin_ske_renamer, 'in_file')

    fa_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_fa_skeletonised', keep_ext=True),
                              name='fa_skel_renamer')
    workflow.connect(output_node, 'all_fa_skeletonised', fa_skel_renamer,
                     'in_file')
    md_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_md_skeletonised', keep_ext=True),
                              name='md_skel_renamer')
    workflow.connect(output_node, 'all_md_skeletonised', md_skel_renamer,
                     'in_file')
    rd_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_rd_skeletonised', keep_ext=True),
                              name='rd_skel_renamer')
    workflow.connect(output_node, 'all_rd_skeletonised', rd_skel_renamer,
                     'in_file')
    ad_skel_renamer = pe.Node(interface=niu.Rename(
        format_string='tbss_all_ad_skeletonised', keep_ext=True),
                              name='ad_skel_renamer')
    workflow.connect(output_node, 'all_ad_skeletonised', ad_skel_renamer,
                     'in_file')

    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='data_sink')
    ds.inputs.base_directory = os.path.abspath(output_dir)

    # Connect the data sink
    workflow.connect(mean_fa_renamer, 'out_file', ds, '@mean_fa')
    workflow.connect(mean_sk_renamer, 'out_file', ds, '@skel_fa')
    workflow.connect(bin_ske_renamer, 'out_file', ds, '@bkel_fa')
    workflow.connect(fa_skel_renamer, 'out_file', ds, '@all_fa')
    workflow.connect(md_skel_renamer, 'out_file', ds, '@all_md')
    workflow.connect(rd_skel_renamer, 'out_file', ds, '@all_rd')
    workflow.connect(ad_skel_renamer, 'out_file', ds, '@all_ad')

    if design_mat is not None and design_con is not None:
        workflow.connect(output_node, 't_contrast_raw_stat', ds,
                         '@t_contrast_raw_stat')
        workflow.connect(output_node, 't_contrast_uncorrected_pvalue', ds,
                         '@t_contrast_uncorrected_pvalue')
        workflow.connect(output_node, 't_contrast_corrected_pvalue', ds,
                         '@t_contrast_corrected_pvalue')

    return workflow
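
A usage sketch, assuming create_dtitk_groupwise_workflow and the dtitk interfaces are available and FSLDIR is set; the tensor file names and design files are placeholders, and randomise only runs when both design files are supplied:

wf = create_cross_sectional_tbss_pipeline(
    in_files=['s1_tensor.nii.gz', 's2_tensor.nii.gz'],
    output_dir='/tmp/tbss',
    design_mat='design.mat',
    design_con='design.con')
wf.run(plugin='MultiProc')
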
def create_image_to_mesh_workflow(input_images,
                                  input_parcellations,
                                  input_label_id,
                                  result_dir,
                                  rigid_iteration=3,
                                  affine_iteration=3,
                                  reduction_rate=0.1,
                                  name='registrations_init'):
    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = result_dir
    workflow.base_output_dir = name

    # Create a sub-workflow for groupwise registration
    groupwise = create_atlas(itr_rigid=rigid_iteration,
                             itr_affine=affine_iteration,
                             itr_non_lin=0,
                             name='groupwise')
    groupwise.inputs.input_node.in_files = input_images
    groupwise.inputs.input_node.ref_file = input_images[0]

    # Extract the relevant label from the GIF parcellation
    extract_label = pe.MapNode(interface=MergeLabels(),
                               iterfield=['in_file'],
                               name='extract_label')
    extract_label.iterables = ("roi_list", [[l] for l in input_label_id])
    extract_label.inputs.in_file = input_parcellations

    # Remove spurious segmentation components: erosion.
    erode_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='ero', operand_value=1),
                                iterfield=['in_file'],
                                name='erode_binaries')
    workflow.connect(extract_label, 'out_file', erode_binaries, 'in_file')

    # Remove spurious segmentation components: dilation.
    dilate_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=1),
                                 iterfield=['in_file'],
                                 name='dilate_binaries')
    workflow.connect(erode_binaries, 'out_file', dilate_binaries, 'in_file')

    # Apply the relevant transformations to the ROIs
    apply_affine = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN'),
                              iterfield=['flo_file', 'trans_file'],
                              name='apply_affine')
    workflow.connect(groupwise, 'output_node.trans_files', apply_affine,
                     'trans_file')
    workflow.connect(groupwise, 'output_node.average_image', apply_affine,
                     'ref_file')
    workflow.connect(dilate_binaries, 'out_file', apply_affine, 'flo_file')

    # Compute the large ROI that corresponds to the union of all warped labels
    extract_union_roi = pe.Node(interface=niftyreg.RegAverage(),
                                name='extract_union_roi')
    workflow.connect(apply_affine, 'out_file', extract_union_roi, 'avg_files')

    # Binarise the average ROI
    binarise_roi = pe.Node(interface=niftyseg.UnaryMaths(operation='bin'),
                           name='binarise_roi')
    workflow.connect(extract_union_roi, 'out_file', binarise_roi, 'in_file')

    # Dilate the binarised union ROI
    dilate_roi = pe.Node(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=5),
                         name='dilate_roi')
    workflow.connect(binarise_roi, 'out_file', dilate_roi, 'in_file')

    # Refine the alignment of each image with a rigid registration step
    apply_rigid_refinement = pe.MapNode(interface=niftyreg.RegAladin(
        rig_only_flag=True, ln_val=1),
                                        iterfield=['flo_file', 'in_aff_file'],
                                        name='apply_rigid_refinement')
    apply_rigid_refinement.inputs.flo_file = input_images
    workflow.connect(groupwise, 'output_node.average_image',
                     apply_rigid_refinement, 'ref_file')
    workflow.connect(groupwise, 'output_node.trans_files',
                     apply_rigid_refinement, 'in_aff_file')
    workflow.connect(dilate_roi, 'out_file', apply_rigid_refinement,
                     'rmask_file')

    # Resample the binary masks with the refined rigid transformations
    final_resampling = pe.MapNode(
        interface=niftyreg.RegResample(inter_val='NN'),
        iterfield=['flo_file', 'trans_file'],
        name='final_resampling')
    workflow.connect(apply_rigid_refinement, 'aff_file', final_resampling,
                     'trans_file')
    workflow.connect(groupwise, 'output_node.average_image', final_resampling,
                     'ref_file')
    workflow.connect(dilate_binaries, 'out_file', final_resampling, 'flo_file')

    # Extract the mesh corresponding to the label
    extract_mesh = pe.MapNode(
        interface=Image2VtkMesh(in_reductionRate=reduction_rate),
        iterfield=['in_file'],
        name='extract_mesh')
    workflow.connect(final_resampling, 'out_file', extract_mesh, 'in_file')
    # workflow.connect(apply_rigid_refinement, 'aff_file', extract_mesh, 'matrix_file')

    # Create a renamer for the average image
    groupwise_renamer = pe.Node(interface=niu.Rename(format_string='atlas',
                                                     keep_ext=True),
                                name='groupwise_renamer')
    workflow.connect(groupwise, 'output_node.average_image', groupwise_renamer,
                     'in_file')

    # Create a datasink
    ds = pe.Node(nio.DataSink(parameterization=False), name='ds')
    ds.inputs.base_directory = result_dir
    workflow.connect(groupwise_renamer, 'out_file', ds, '@avg')
    workflow.connect(apply_rigid_refinement, 'res_file', ds, '@raf_mask')
    workflow.connect(extract_union_roi, 'out_file', ds, '@union_mask')
    workflow.connect(dilate_roi, 'out_file', ds, '@dilate_mask')
    workflow.connect(extract_mesh, 'out_file', ds, 'mesh_vtk')

    return workflow
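
A usage sketch, assuming create_atlas, MergeLabels and Image2VtkMesh come from the original module; file names and label ids are placeholders:

wf = create_image_to_mesh_workflow(
    input_images=['s1_t1.nii.gz', 's2_t1.nii.gz'],
    input_parcellations=['s1_par.nii.gz', 's2_par.nii.gz'],
    input_label_id=[17, 53],
    result_dir='/tmp/meshes')
wf.run()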