# Example #1
# 0
def create_get_deformation_shape_analysis(labels,
                                          reduction_rate,
                                          rigid_iteration=1,
                                          affine_iteration=2,
                                          dkw=10,
                                          dkt='Exact',
                                          okw=None,
                                          dtp=30,
                                          dsk=0.5,
                                          dcps=5,
                                          dcpp='x',
                                          dfcp='Off',
                                          dmi=200,
                                          dat=0.00005,
                                          dls=20,
                                          ods=None,
                                          ot=None,
                                          atlas_image='none',
                                          name='shape_analysis'):
    """Create a workflow that extracts meshes from segmentations and
    computes a deformation-based shape template over them.

    :param labels: label values to extract from the parcellations.
    :param reduction_rate: mesh decimation rate for the mesh extraction.
    :param rigid_iteration: rigid iterations of the groupwise registration
        (only used when atlas_image == 'none').
    :param affine_iteration: affine iterations of the groupwise registration
        (only used when atlas_image == 'none').
    :param dkw, dkt, okw, dtp, dsk, dcps, dcpp, dfcp, dmi, dat, dls, ods:
        deformation parameters forwarded to atlas_computation.
        Defaults: okw=[8], ods=[0.5].
    :param ot: object type list, default ["NonOrientedSurfaceMesh"].
        NOTE(review): currently not forwarded anywhere — kept for
        interface compatibility.
    :param atlas_image: if 'none', an average space is computed by a
        groupwise registration; otherwise the caller must feed
        'trans_files' and 'average_image' through the input node.
    :param name: name of the returned workflow.
    :return: the assembled nipype workflow.
    """
    # Use None sentinels instead of mutable default arguments (lists as
    # defaults are shared across calls).
    if okw is None:
        okw = [8]
    if ods is None:
        ods = [0.5]
    if ot is None:
        ot = ["NonOrientedSurfaceMesh"]

    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    # Create the input node. 'trans_files' and 'average_image' must be
    # declared here: they are the inputs used when the caller provides a
    # precomputed atlas (atlas_image != 'none'). The original code did not
    # declare them, so the else branch failed at connection time.
    input_node = pe.Node(niu.IdentityInterface(fields=[
        'input_images', 'input_ref', 'input_seg', 'subject_ids',
        'trans_files', 'average_image'
    ]),
                         name='input_node')

    # Create the output node
    output_node = pe.Node(niu.IdentityInterface(fields=[
        'extracted_meshes', 'out_template_vtk_file', 'out_template_CP_file',
        'out_template_MOM_file', 'out_template_vtk_files'
    ]),
                          name='output_node')

    # Create the workflow extracting the meshes in an average space. The
    # connections below do not depend on where the average space comes
    # from, so they are made only once (the original duplicated this whole
    # section in both branches).
    gw_binary_to_meshes = create_binary_to_meshes(
        label=labels, reduction_rate=reduction_rate)
    workflow.connect(input_node, 'input_images', gw_binary_to_meshes,
                     'input_node.input_images')
    workflow.connect(input_node, 'input_seg', gw_binary_to_meshes,
                     'input_node.input_parcellations')
    workflow.connect(gw_binary_to_meshes, 'output_node.output_meshes',
                     output_node, 'extracted_meshes')

    if atlas_image == 'none':
        # No atlas provided: compute the average space with a groupwise
        # registration sub-workflow.
        groupwise = create_atlas(itr_rigid=rigid_iteration,
                                 itr_affine=affine_iteration,
                                 itr_non_lin=0,
                                 verbose=False,
                                 name='groupwise')
        workflow.connect(input_node, 'input_images', groupwise,
                         'input_node.in_files')
        workflow.connect(input_node, 'input_ref', groupwise,
                         'input_node.ref_file')
        workflow.connect(groupwise, 'output_node.trans_files',
                         gw_binary_to_meshes, 'input_node.trans_files')
        workflow.connect(groupwise, 'output_node.average_image',
                         gw_binary_to_meshes, 'input_node.ref_file')
    else:
        # The caller provides the transformations and the average image.
        workflow.connect(input_node, 'trans_files', gw_binary_to_meshes,
                         'input_node.trans_files')
        workflow.connect(input_node, 'average_image', gw_binary_to_meshes,
                         'input_node.ref_file')

    # Compute the shape template from the extracted meshes.
    template_computation = atlas_computation(dkw=dkw,
                                             dkt=dkt,
                                             okw=okw,
                                             dtp=dtp,
                                             dsk=dsk,
                                             dcps=dcps,
                                             dcpp=dcpp,
                                             dfcp=dfcp,
                                             dmi=dmi,
                                             dat=dat,
                                             dls=dls,
                                             ods=ods,
                                             type_xml_file='All',
                                             name='template_computation')

    workflow.connect(gw_binary_to_meshes, 'output_node.output_meshes',
                     template_computation, 'input_node.input_vtk_meshes')
    workflow.connect(input_node, 'subject_ids', template_computation,
                     'input_node.subject_ids')
    workflow.connect(template_computation, 'output_node.out_template_vtk_file',
                     output_node, 'out_template_vtk_file')
    workflow.connect(template_computation, 'output_node.out_template_CP_file',
                     output_node, 'out_template_CP_file')
    workflow.connect(template_computation, 'output_node.out_template_MOM_file',
                     output_node, 'out_template_MOM_file')
    workflow.connect(template_computation,
                     'output_node.out_template_vtk_files', output_node,
                     'out_template_vtk_files')

    return workflow
# Example #2
# 0
def create_spatio_temporal_analysis(labels,
                                    reduction_rate,
                                    rigid_iteration=1,
                                    affine_iteration=2,
                                    scan_number=2,
                                    name='spatio_temporal_analysis'):
    """Assemble the spatio-temporal shape analysis workflow.

    The pipeline is: groupwise registration to an average space, mesh
    extraction per label, regression preprocessing (XML parameter files,
    age normalisation, initial shape), then the regression computation
    with residual deformation transport.

    :param labels: label values to extract from the parcellations.
    :param reduction_rate: mesh decimation rate for the mesh extraction.
    :param rigid_iteration: rigid iterations of the groupwise registration.
    :param affine_iteration: affine iterations of the groupwise registration.
    :param scan_number: number of scans per subject.
    :param name: name of the returned workflow.
    :return: the assembled nipype workflow.
    """
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    # Deformation parameters forwarded verbatim from the input node to the
    # regression preprocessing.
    xml_fields = ['xml_dkw', 'xml_dkt', 'xml_dtp', 'xml_dsk', 'xml_dcps',
                  'xml_dcpp', 'xml_dfcp', 'xml_dmi', 'xml_dat', 'xml_dls',
                  'xml_ods', 'xml_okw', 'xml_ot']

    # Input node: images, parcellations, demographics and parameters.
    input_node = pe.Node(niu.IdentityInterface(fields=[
        'input_images', 'input_parcellations', 'input_ref', 'label_indices',
        'ages', 'subject_ids'] + xml_fields),
                         name='input_node')

    # Output node: meshes, parameter files and regression results.
    output_node = pe.Node(niu.IdentityInterface(fields=[
        'extracted_meshes', 'param_diffeo_file', 'param_object_file',
        'out_AgeToOnsetNorm_file', 'out_centroid_mat_file',
        'out_init_shape_vtk_file', 'out_vertices_centroid_file',
        'transported_res_mom', 'transported_res_vect', 'out_file_CP',
        'out_file_MOM'
    ]),
                          name='output_node')

    # Groupwise registration sub-workflow defining the average space.
    groupwise = create_atlas(itr_rigid=rigid_iteration,
                             itr_affine=affine_iteration,
                             itr_non_lin=0,
                             name='groupwise')
    workflow.connect([
        (input_node, groupwise,
         [('input_images', 'input_node.in_files'),
          ('input_ref', 'input_node.ref_file')]),
    ])

    # Mesh extraction in the average space.
    meshes_workflow = create_binary_to_meshes(label=labels,
                                              reduction_rate=reduction_rate)
    workflow.connect([
        (input_node, meshes_workflow,
         [('input_images', 'input_node.input_images'),
          ('input_parcellations', 'input_node.input_parcellations')]),
        (groupwise, meshes_workflow,
         [('output_node.trans_files', 'input_node.trans_files'),
          ('output_node.average_image', 'input_node.ref_file')]),
        (meshes_workflow, output_node,
         [('output_node.output_meshes', 'extracted_meshes')]),
    ])

    # Regression preprocessing. Done for only one label; should be doable
    # for a set of labels that would be analysed together.
    preprocessing_regression = create_spatio_temporal_regression_preprocessing(
        label=labels,
        scan_number=scan_number,
    )
    workflow.connect(meshes_workflow, 'output_node.output_meshes',
                     preprocessing_regression, 'input_node.input_meshes')
    # Demographics and all XML parameters are forwarded field-for-field.
    for field in ['ages', 'subject_ids'] + xml_fields:
        workflow.connect(input_node, field,
                         preprocessing_regression, 'input_node.' + field)
    workflow.connect([
        (preprocessing_regression, output_node,
         [('output_node.out_xmlDiffeo', 'param_diffeo_file'),
          ('output_node.out_xmlObject', 'param_object_file'),
          ('output_node.out_AgeToOnsetNorm_file', 'out_AgeToOnsetNorm_file'),
          ('output_node.out_centroid_mat_file', 'out_centroid_mat_file'),
          ('output_node.out_init_shape_vtk_file', 'out_init_shape_vtk_file'),
          ('output_node.out_vertices_centroid_file',
           'out_vertices_centroid_file')]),
    ])

    # Regression computation and residual deformation transport.
    computation_regression = create_get_shape_distance_from_regression(
        scan_number=scan_number)
    workflow.connect([
        (meshes_workflow, computation_regression,
         [('output_node.output_meshes', 'input_node.input_meshes')]),
        (preprocessing_regression, computation_regression,
         [('output_node.out_xmlDiffeo', 'input_node.xmlDiffeo'),
          ('output_node.out_xmlObject', 'input_node.xmlObject'),
          ('output_node.out_init_shape_vtk_file',
           'input_node.baseline_vtk_file'),
          ('output_node.out_AgeToOnsetNorm_file',
           'input_node.in_AgeToOnsetNorm_file')]),
        (computation_regression, output_node,
         [('output_node.transported_res_mom', 'transported_res_mom'),
          ('output_node.transported_res_vect', 'transported_res_vect'),
          ('output_node.out_file_CP', 'out_file_CP'),
          ('output_node.out_file_MOM', 'out_file_MOM')]),
    ])
    return workflow
def create_symmetric_spatio_temporal_analysis(labels,
                                              reduction_rate,
                                              rigid_iteration=1,
                                              affine_iteration=2,
                                              scan_number=2,
                                              name='spatio_temporal_analysis'
                                              ):
    """Assemble a spatio-temporal shape analysis workflow that also
    processes the left/right mirrored version of every image, so that
    shape asymmetry can be analysed.

    :param labels: label values to extract; only labels[0] is used for
        the regression preprocessing.
    :param reduction_rate: mesh decimation rate for the mesh extraction.
    :param rigid_iteration: rigid iterations of the groupwise registration.
    :param affine_iteration: affine iterations of the groupwise registration.
    :param scan_number: number of scans per subject.
    :param name: name of the returned workflow.
    :return: the assembled nipype workflow.
    """
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    # Deformation parameters forwarded verbatim from the input node to the
    # regression preprocessing.
    xml_fields = ['xml_dkw', 'xml_dkt', 'xml_dtp', 'xml_dsk', 'xml_dcps',
                  'xml_dcpp', 'xml_dfcp', 'xml_dmi', 'xml_dat', 'xml_dls',
                  'xml_ods', 'xml_okw', 'xml_ot']

    # Input node: images, parcellations, demographics and parameters.
    input_node = pe.Node(niu.IdentityInterface(fields=[
        'input_images', 'input_parcellations', 'input_ref', 'label_indices',
        'ages', 'subject_ids'] + xml_fields),
        name='input_node')

    # Output node: meshes, parameter files and regression results.
    output_node = pe.Node(niu.IdentityInterface(fields=[
        'extracted_meshes', 'out_template_vtk_file', 'param_diffeo_file',
        'param_object_file', 'out_AgeToOnsetNorm_file',
        'out_init_shape_vtk_file', 'out_sym_vtk_files',
        'transported_res_mom', 'transported_res_vect', 'out_file_CP',
        'out_file_MOM']),
        name='output_node')

    # Mirror the images and parcellations before computing the atlas.
    symmetrisation = create_symmetrical_images()
    workflow.connect([
        (input_node, symmetrisation,
         [('input_images', 'input_node.input_images'),
          ('input_parcellations', 'input_node.input_parcellations')]),
    ])

    # Groupwise registration over original + mirrored images.
    groupwise = create_atlas(itr_rigid=rigid_iteration,
                             itr_affine=affine_iteration,
                             itr_non_lin=0,
                             verbose=False,
                             name='groupwise')
    workflow.connect([
        (symmetrisation, groupwise,
         [('output_node.LR_images', 'input_node.in_files')]),
        (input_node, groupwise, [('input_ref', 'input_node.ref_file')]),
    ])

    # Mesh extraction in the average space.
    meshes_workflow = create_binary_to_meshes(label=labels,
                                              reduction_rate=reduction_rate)
    workflow.connect([
        (symmetrisation, meshes_workflow,
         [('output_node.LR_images', 'input_node.input_images'),
          ('output_node.LR_parcellations',
           'input_node.input_parcellations')]),
        (groupwise, meshes_workflow,
         [('output_node.trans_files', 'input_node.trans_files'),
          ('output_node.average_image', 'input_node.ref_file')]),
        (meshes_workflow, output_node,
         [('output_node.output_meshes', 'extracted_meshes')]),
    ])

    # Order the mesh lists as [[L R] [L R] [L R]], with ages and subject
    # ids reordered accordingly.
    reorder_lists = create_lists_symmetrical_subject()
    workflow.connect([
        (meshes_workflow, reorder_lists,
         [('output_node.output_meshes', 'input_node.input_meshes')]),
        (input_node, reorder_lists,
         [('ages', 'input_node.ages'),
          ('subject_ids', 'input_node.subject_ids')]),
    ])

    # Regression preprocessing. Done for only one label; should be doable
    # for a set of labels that would be analysed together.
    preprocessing_regression = create_spatio_temporal_regression_preprocessing(
        label=labels[0],
        scan_number=scan_number)
    workflow.connect([
        (meshes_workflow, preprocessing_regression,
         [('output_node.output_meshes', 'input_node.input_meshes')]),
        (reorder_lists, preprocessing_regression,
         [('output_node.meshes_sorted_by_suj',
           'input_node.input_meshes_sorted'),
          ('output_node.ages_sorted_by_suj', 'input_node.ages'),
          ('output_node.subject_ids_sorted_by_suj',
           'input_node.subject_ids')]),
        (input_node, preprocessing_regression,
         [('ages', 'input_node.ages_per_suj'),
          ('subject_ids', 'input_node.subject_ids_by_suj')]),
    ])
    # All XML parameters are forwarded field-for-field.
    for field in xml_fields:
        workflow.connect(input_node, field,
                         preprocessing_regression, 'input_node.' + field)
    # The initial shape is exposed both as template and as initial shape.
    workflow.connect([
        (preprocessing_regression, output_node,
         [('output_node.out_xmlDiffeo', 'param_diffeo_file'),
          ('output_node.out_xmlObject', 'param_object_file'),
          ('output_node.out_AgeToOnsetNorm_file', 'out_AgeToOnsetNorm_file'),
          ('output_node.out_init_shape_vtk_file', 'out_template_vtk_file'),
          ('output_node.out_init_shape_vtk_file', 'out_init_shape_vtk_file'),
          ('output_node.out_sym_vtk_files', 'out_sym_vtk_files')]),
    ])

    # Regression computation and residual deformation transport.
    computation_regression = create_get_shape_distance_from_regression(
        scan_number=scan_number)
    workflow.connect([
        (preprocessing_regression, computation_regression,
         [('output_node.out_sym_vtk_files', 'input_node.input_meshes'),
          ('output_node.out_xmlDiffeo', 'input_node.xmlDiffeo'),
          ('output_node.out_xmlObject', 'input_node.xmlObject'),
          ('output_node.out_init_shape_vtk_file',
           'input_node.baseline_vtk_file'),
          ('output_node.out_AgeToOnsetNorm_file',
           'input_node.in_AgeToOnsetNorm_file')]),
        (computation_regression, output_node,
         [('output_node.transported_res_mom', 'transported_res_mom'),
          ('output_node.transported_res_vect', 'transported_res_vect'),
          ('output_node.out_file_CP', 'out_file_CP'),
          ('output_node.out_file_MOM', 'out_file_MOM')]),
    ])

    return workflow
def create_preprocessing_shape_analysis_epilepsy_flipping(labels,
                                              reduction_rate,
                                              rigid_iteration=1,
                                              affine_iteration=2,
                                              scan_number=2,
                                              name='spatio_temporal_analysis'
                                              ):
    """Preprocess a cohort in which a subgroup of subjects must be
    left/right flipped before the groupwise registration and the mesh
    extraction.

    Subjects listed in 'flip_id' have their image and segmentation
    LR-swapped; subjects in 'no_flip_id' are kept as-is. The two groups
    are then merged, registered groupwise, and meshes are extracted in
    the average space.

    :param labels: label values to extract from the parcellations.
    :param reduction_rate: mesh decimation rate for the mesh extraction.
    :param rigid_iteration: rigid iterations of the groupwise registration.
    :param affine_iteration: affine iterations of the groupwise registration.
    :param scan_number: number of scans per subject (unused here, kept for
        interface consistency with the sibling factories).
    :param name: name of the returned workflow.
    :return: the assembled nipype workflow.
    """
    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    # Input node: images, reference, flip/no-flip id lists and their
    # matching segmentations.
    input_node = pe.Node(niu.IdentityInterface(
        fields=['input_images',
                'input_ref',
                'flip_id',
                'no_flip_id',
                'no_flip_seg',
                'flip_seg',
                'subject_ids']),
        name='input_node')

    # Output node: meshes extracted in the average space.
    output_node = pe.Node(niu.IdentityInterface(
        fields=['extracted_meshes']),
        name='output_node')

    def _make_split_node(node_name, id_field, data_field):
        # Build and wire one node extracting from 'data_field' the entries
        # whose subject id belongs to 'id_field'.
        node = pe.Node(interface=Function(
            function=split_list,
            input_names=['including_id', 'all_id', 'list_data'],
            output_names='extracted_list'),
            name=node_name)
        workflow.connect(input_node, id_field, node, 'including_id')
        workflow.connect(input_node, 'subject_ids', node, 'all_id')
        workflow.connect(input_node, data_field, node, 'list_data')
        return node

    # Extract the sublists of images/segmentations to flip or to keep.
    split_list_to_flip_images = _make_split_node(
        'split_list_to_flip_images', 'flip_id', 'input_images')
    split_list_to_not_flip_images = _make_split_node(
        'split_list_to_not_flip_images', 'no_flip_id', 'input_images')
    split_list_to_flip_seg = _make_split_node(
        'split_list_to_flip_seg', 'flip_id', 'flip_seg')
    split_list_to_not_flip_seg = _make_split_node(
        'split_list_to_not_flip_seg', 'no_flip_id', 'no_flip_seg')

    # LR-swap the selected images.
    swap_images = pe.MapNode(interface=SwapDimImage(),
                             iterfield="image2reorient",
                             name="swap_images")
    swap_images.inputs.axe2flip = "LR"
    workflow.connect(split_list_to_flip_images, 'extracted_list',
                     swap_images, 'image2reorient')

    # LR-swap the matching parcellations.
    swap_parcellations = pe.MapNode(interface=SwapDimImage(),
                                    iterfield="image2reorient",
                                    name="swap_parcellations")
    swap_parcellations.inputs.axe2flip = "LR"
    workflow.connect(split_list_to_flip_seg, 'extracted_list',
                     swap_parcellations, 'image2reorient')

    # Recombine the untouched and the flipped lists.
    merge_lists_images = pe.Node(
        interface=niu.Merge(axis='vstack', numinputs=2),
        name='merge_lists_images')
    workflow.connect(split_list_to_not_flip_images, 'extracted_list',
                     merge_lists_images, 'in1')
    workflow.connect(swap_images, 'flipped_image',
                     merge_lists_images, 'in2')

    merge_lists_seg = pe.Node(
        interface=niu.Merge(axis='vstack', numinputs=2),
        name='merge_lists_seg')
    workflow.connect(split_list_to_not_flip_seg, 'extracted_list',
                     merge_lists_seg, 'in1')
    workflow.connect(swap_parcellations, 'flipped_image',
                     merge_lists_seg, 'in2')

    # Groupwise registration over the merged image list.
    groupwise = create_atlas(itr_rigid=rigid_iteration,
                             itr_affine=affine_iteration,
                             itr_non_lin=0,
                             verbose=False,
                             name='groupwise')
    workflow.connect(merge_lists_images, 'out',
                     groupwise, 'input_node.in_files')
    workflow.connect(input_node, 'input_ref',
                     groupwise, 'input_node.ref_file')

    # Mesh extraction in the average space.
    meshes_workflow = create_binary_to_meshes(
        label=labels, reduction_rate=reduction_rate)
    workflow.connect(merge_lists_images, 'out',
                     meshes_workflow, 'input_node.input_images')
    workflow.connect(merge_lists_seg, 'out',
                     meshes_workflow, 'input_node.input_parcellations')
    workflow.connect(groupwise, 'output_node.trans_files',
                     meshes_workflow, 'input_node.trans_files')
    workflow.connect(groupwise, 'output_node.average_image',
                     meshes_workflow, 'input_node.ref_file')
    workflow.connect(meshes_workflow, 'output_node.output_meshes',
                     output_node, 'extracted_meshes')

    return workflow
def create_image_to_mesh_workflow(input_images,
                                  input_parcellations,
                                  input_label_id,
                                  result_dir,
                                  rigid_iteration=3,
                                  affine_iteration=3,
                                  reduction_rate=0.1,
                                  name='registrations_init'):
    """Create a workflow turning labelled parcellations into VTK meshes in
    a groupwise average space.

    Pipeline: groupwise registration of the input images, extraction and
    morphological cleaning (erosion then dilation) of each requested
    label, resampling into the average space, a rigid refinement
    restricted to a mask built from the union of all warped labels, a
    final resampling of the cleaned labels with the refined
    transformations, and mesh extraction. Results are written to
    ``result_dir`` through a DataSink.

    :param input_images: list of input image files.
    :param input_parcellations: list of parcellation files (one per image).
    :param input_label_id: iterable of label values; each becomes an
        iterable of the extraction node.
    :param result_dir: base directory for the workflow and the DataSink.
    :param rigid_iteration: rigid iterations of the groupwise registration.
    :param affine_iteration: affine iterations of the groupwise registration.
    :param reduction_rate: mesh decimation rate for Image2VtkMesh.
    :param name: name of the returned workflow.
    :return: the assembled nipype workflow.
    """
    # Create the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = result_dir
    workflow.base_output_dir = name

    # Create a sub-workflow for groupwise registration; the first input
    # image serves as the initial reference.
    groupwise = create_atlas(itr_rigid=rigid_iteration,
                             itr_affine=affine_iteration,
                             itr_non_lin=0,
                             name='groupwise')
    groupwise.inputs.input_node.in_files = input_images
    groupwise.inputs.input_node.ref_file = input_images[0]

    # Extract the relevant label from the GIF parcellation; each label id
    # is a separate iterable so the downstream nodes run once per label.
    extract_label = pe.MapNode(interface=MergeLabels(),
                               iterfield=['in_file'],
                               name='extract_label')
    extract_label.iterables = ("roi_list", [[l] for l in input_label_id])
    extract_label.inputs.in_file = input_parcellations

    # Removing parasite segmentation: Erosion.
    erode_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='ero', operand_value=1),
                                iterfield=['in_file'],
                                name='erode_binaries')
    workflow.connect(extract_label, 'out_file', erode_binaries, 'in_file')

    # Removing parasite segmentation: Dilatation (restores the eroded
    # boundary of the remaining components).
    dilate_binaries = pe.MapNode(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=1),
                                 iterfield=['in_file'],
                                 name='dilate_binaries')
    workflow.connect(erode_binaries, 'out_file', dilate_binaries, 'in_file')

    # Apply the groupwise affine transformations to the cleaned ROIs
    # (nearest-neighbour to keep binary values).
    apply_affine = pe.MapNode(interface=niftyreg.RegResample(inter_val='NN'),
                              iterfield=['flo_file', 'trans_file'],
                              name='apply_affine')
    workflow.connect(groupwise, 'output_node.trans_files', apply_affine,
                     'trans_file')
    workflow.connect(groupwise, 'output_node.average_image', apply_affine,
                     'ref_file')
    workflow.connect(dilate_binaries, 'out_file', apply_affine, 'flo_file')

    # Compute the large ROI that corresponds to the union of all warped
    # labels (average of the warped binaries).
    extract_union_roi = pe.Node(interface=niftyreg.RegAverage(),
                                name='extract_union_roi')
    workflow.connect(apply_affine, 'res_file', extract_union_roi, 'in_files')

    # Binarise the average ROI
    binarise_roi = pe.Node(interface=niftyseg.UnaryMaths(operation='bin'),
                           name='binarise_roi')
    workflow.connect(extract_union_roi, 'out_file', binarise_roi, 'in_file')

    # Dilation of the binarised union ROI, used as the registration mask
    # for the rigid refinement below.
    dilate_roi = pe.Node(interface=niftyseg.BinaryMathsInteger(
        operation='dil', operand_value=5),
                         name='dilate_roi')
    workflow.connect(binarise_roi, 'out_file', dilate_roi, 'in_file')

    # Rigid refinement of each input image towards the average image,
    # initialised with the groupwise affine and masked by the dilated ROI.
    apply_rigid_refinement = pe.MapNode(interface=niftyreg.RegAladin(
        rig_only_flag=True, ln_val=1),
                                        iterfield=['flo_file', 'in_aff_file'],
                                        name='apply_rigid_refinement')
    apply_rigid_refinement.inputs.flo_file = input_images
    workflow.connect(groupwise, 'output_node.average_image',
                     apply_rigid_refinement, 'ref_file')
    workflow.connect(groupwise, 'output_node.trans_files',
                     apply_rigid_refinement, 'in_aff_file')
    workflow.connect(dilate_roi, 'out_file', apply_rigid_refinement,
                     'rmask_file')

    # Resample the cleaned binary labels with the refined rigid
    # transformations (nearest-neighbour to keep binary values).
    final_resampling = pe.MapNode(
        interface=niftyreg.RegResample(inter_val='NN'),
        iterfield=['flo_file', 'trans_file'],
        name='final_resampling')
    workflow.connect(apply_rigid_refinement, 'aff_file', final_resampling,
                     'trans_file')
    workflow.connect(groupwise, 'output_node.average_image', final_resampling,
                     'ref_file')
    workflow.connect(dilate_binaries, 'out_file', final_resampling, 'flo_file')

    # Extract the mesh corresponding to the label
    extract_mesh = pe.MapNode(
        interface=Image2VtkMesh(in_reductionRate=reduction_rate),
        iterfield=['in_file'],
        name='extract_mesh')
    workflow.connect(final_resampling, 'res_file', extract_mesh, 'in_file')
    # workflow.connect(apply_rigid_refinement, 'aff_file', extract_mesh, 'matrix_file')

    # Create a rename for the average image so it is sunk as 'atlas.<ext>'.
    groupwise_renamer = pe.Node(interface=niu.Rename(format_string='atlas',
                                                     keep_ext=True),
                                name='groupwise_renamer')
    workflow.connect(groupwise, 'output_node.average_image', groupwise_renamer,
                     'in_file')

    # Create a datasink collecting the atlas, masks and meshes.
    ds = pe.Node(nio.DataSink(parameterization=False), name='ds')
    ds.inputs.base_directory = result_dir
    workflow.connect(groupwise_renamer, 'out_file', ds, '@avg')
    workflow.connect(apply_rigid_refinement, 'res_file', ds, '@raf_mask')
    workflow.connect(extract_union_roi, 'out_file', ds, '@union_mask')
    workflow.connect(dilate_roi, 'out_file', ds, '@dilate_mask')
    workflow.connect(extract_mesh, 'out_file', ds, 'mesh_vtk')

    return workflow