Exemple #1
0
def brain_extraction(sub_id,
                     datasource,
                     sessions,
                     RESULT_DIR,
                     NIPYPE_CACHE,
                     reference,
                     t10=True):
    """Build the HD-BET brain-extraction workflow for one subject.

    Runs HD-BET over the T1 image of every session (MapNode) and, when
    ``t10`` is true, additionally over the reference T1_0 image, sinking
    the extracted brains and masks into ``RESULT_DIR`` through a DataSink.

    Parameters
    ----------
    sub_id : str
        Subject identifier, substituted for the 'subid' placeholder in
        the datasink paths.
    datasource : nipype node
        Node exposing 't1' (and 't1_0' when ``t10``) outputs.
    sessions : sequence of str
        Session names, used to rename the MapNode output sub-folders.
    RESULT_DIR : str
        Base directory for the DataSink.
    NIPYPE_CACHE : str
        Base directory for the workflow cache.
    reference : object
        Forwarded unchanged to ``datasink_base``.
    t10 : bool, optional
        Whether a reference T1_0 image should also be brain-extracted.

    Returns
    -------
    nipype.Workflow
        The assembled brain-extraction workflow.
    """
    # Per-session brain extraction across all T1 images.
    bet_node = nipype.MapNode(interface=HDBet(),
                              iterfield=['input_file'],
                              name='bet')
    bet_node.inputs.save_mask = 1
    bet_node.inputs.out_file = 'T1_preproc'

    t10_node = None
    if t10:
        # Single-run extraction for the reference (T1_0) image.
        t10_node = nipype.Node(interface=HDBet(), name='t1_0_bet')
        t10_node.inputs.save_mask = 1
        t10_node.inputs.out_file = 'T1_0_bet'

    datasink = nipype.Node(nipype.DataSink(base_directory=RESULT_DIR),
                           "datasink")

    # Map the MapNode iteration folders ('_bet0/', '_bet1/', ...) back
    # onto the human-readable session names.
    datasink.inputs.substitutions = (
        [('subid', sub_id)]
        + [('_bet{}/'.format(idx), sess + '/')
           for idx, sess in enumerate(sessions)])

    workflow = nipype.Workflow('brain_extraction_workflow',
                               base_dir=NIPYPE_CACHE)

    workflow.connect(datasource, 't1', bet_node, 'input_file')
    if t10:
        workflow.connect(datasource, 't1_0', t10_node, 'input_file')
        workflow.connect(t10_node, 'out_file', datasink,
                         'results.subid.T10.@T1_ref_bet')

    workflow.connect(bet_node, 'out_file', datasink,
                     'results.subid.@T1_preproc')
    workflow.connect(bet_node, 'out_mask', datasink,
                     'results.subid.@T1_mask')

    # datasink_base wires the shared reference/session sinks and returns
    # the (possibly augmented) workflow.
    return datasink_base(datasink,
                         datasource,
                         workflow,
                         sessions,
                         reference,
                         t10=t10)
Exemple #2
0
def longitudinal_registration(sub_id,
                              datasource,
                              sessions,
                              reference,
                              result_dir,
                              nipype_cache,
                              bet_workflow=None):
    """
    This is a workflow to register multi-modalities MR (T2, T1KM, FLAIR) to their
    reference T1 image, in a multiple time-point cohort. In particular, for each
    subject, this workflow will register the MR images in each time-point (tp)
    to the corresponding T1, then it will register all the T1 images to a reference T1
    (the one that is the closest in time to the radiotherapy session), and finally the
    reference T1 to the BPLCT. At the end, all the MR images will be saved both in T1 space
    (for each tp) and in CT space.

    Parameters
    ----------
    sub_id : str
        Subject identifier (replaces the 'subid' placeholder in sink paths).
    datasource : nipype node
        Node exposing the per-sequence inputs (see SEQUENCES) plus 't1_0',
        'reference', 't1_mask'/'t1_bet'/'t1_0_bet' depending on the options.
    sessions : sequence of str
        Session/time-point names.
    reference : bool-like
        Whether a reference CT (BPLCT) is available; enables the T1->CT
        registration branch.
    result_dir, nipype_cache : str
        DataSink base directory and workflow cache directory.
    bet_workflow : nipype.Workflow or None
        If given, brain-extracted T1s/masks are taken from it; otherwise
        they are expected from the datasource.

    Returns
    -------
    nipype.Workflow
        The assembled registration workflow.
    """
    # Non-linear (SyN) registration of each session's T1 to the reference T1.
    reg2T1 = nipype.MapNode(interface=AntsRegSyn(),
                            iterfield=['input_file'],
                            name='reg2T1')
    reg2T1.inputs.transformation = 's'
    reg2T1.inputs.num_dimensions = 3
    reg2T1.inputs.num_threads = 6

    if reference:
        # Rigid registration of the reference T1 (T1_0) to the BPLCT.
        regT12CT = nipype.MapNode(interface=AntsRegSyn(),
                                  iterfield=['input_file'],
                                  name='regT12CT')
        regT12CT.inputs.transformation = 'r'
        regT12CT.inputs.num_dimensions = 3
        regT12CT.inputs.num_threads = 4

    # One rigid registration node per non-T1 sequence (SEQUENCES[1:4]),
    # bringing each modality into its session's T1 space.
    reg_nodes = []
    for i in range(3):
        reg = nipype.MapNode(interface=AntsRegSyn(),
                             iterfield=['input_file', 'ref_file'],
                             name='ants_reg{}'.format(i))
        reg.inputs.transformation = 'r'
        reg.inputs.num_dimensions = 3
        reg.inputs.num_threads = 4
        reg.inputs.interpolation = 'BSpline'
        reg_nodes.append(reg)

    # Brain-mask application for each registered modality.
    apply_mask_nodes = []
    for i in range(3):
        masking = nipype.MapNode(interface=ApplyMask(),
                                 iterfield=['in_file', 'mask_file'],
                                 name='masking{}'.format(i))
        apply_mask_nodes.append(masking)

    # Apply the concatenated transforms to bring each modality into CT
    # (or T1_ref, when no reference CT exists) space.
    apply_ts_nodes = []
    for i in range(3):
        apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                  iterfield=['input_image', 'transforms'],
                                  name='apply_ts{}'.format(i))
        apply_ts_nodes.append(apply_ts)
    # Apply ts nodes for T1_ref normalization
    apply_ts_nodes1 = []
    for i in range(3):
        apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                  iterfield=['input_image', 'transforms'],
                                  name='apply_ts1{}'.format(i))
        apply_ts_nodes1.append(apply_ts)

    # Split nodes (one per sequence incl. T1) chunking merged lists back
    # into one-element-per-session pieces.
    split_ds_nodes = []
    for i in range(4):
        split_ds = nipype.Node(interface=Split(), name='split_ds{}'.format(i))
        split_ds.inputs.splits = [1] * len(sessions)
        split_ds_nodes.append(split_ds)

    apply_ts_t1 = nipype.MapNode(interface=ApplyTransforms(),
                                 iterfield=['input_image', 'transforms'],
                                 name='apply_ts_t1')
    merge_nodes = []
    # With a reference CT the transform chain gains one extra element
    # (the T1_ref->CT matrix), hence the larger merge and shifted indices.
    if reference:
        iterfields = ['in1', 'in2', 'in3', 'in4']
        iterfields_t1 = ['in1', 'in2', 'in3']
        if_0 = 2
    else:
        iterfields = ['in1', 'in2', 'in3']
        iterfields_t1 = ['in1', 'in2']
        if_0 = 1

    # Transform-list merges (per modality) for normalization to CT.
    for i in range(3):
        merge = nipype.MapNode(interface=Merge(len(iterfields)),
                               iterfield=iterfields,
                               name='merge{}'.format(i))
        merge.inputs.ravel_inputs = True
        merge_nodes.append(merge)
    # Merging transforms for normalization to T1_ref
    merge_nodes1 = []
    for i in range(3):
        merge = nipype.MapNode(interface=Merge(3),
                               iterfield=['in1', 'in2', 'in3'],
                               name='merge1{}'.format(i))
        merge.inputs.ravel_inputs = True
        merge_nodes1.append(merge)

    merge_ts_t1 = nipype.MapNode(interface=Merge(len(iterfields_t1)),
                                 iterfield=iterfields_t1,
                                 name='merge_t1')
    merge_ts_t1.inputs.ravel_inputs = True

    # have to create a fake merge of the transformation from t10 to CT in order
    # to have the same number if matrices as input in mapnode
    fake_merge = nipype.Node(interface=Merge(len(sessions)), name='fake_merge')

    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")

    # Rename MapNode iteration folders/files to session-based output names.
    substitutions = [('subid', sub_id)]
    for i, session in enumerate(sessions):
        # NOTE(review): 'session'.format(i) has no placeholder, so every
        # iteration appends the same ('session', <name>) pair and only the
        # first session's value can ever apply — possibly meant
        # 'session{}'.format(i); confirm against the expected sink layout.
        substitutions += [('session'.format(i), session)]
        substitutions += [('_masking0{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'CT1_preproc.nii.gz')]
        substitutions += [('_reg2T1{}/antsreg0GenericAffine.mat'.format(i),
                           session + '/' + 'reg2T1_ref.mat')]
        substitutions += [('_reg2T1{}/antsreg1Warp.nii.gz'.format(i),
                           session + '/' + 'reg2T1_ref_warp.nii.gz')]
        substitutions += [('_reg2T1{}/antsregWarped.nii.gz'.format(i),
                           session + '/' + 'T1_reg2T1_ref.nii.gz')]
        substitutions += [('_regT12CT{}/antsreg0GenericAffine.mat'.format(i),
                           '/regT1_ref2CT.mat')]
        substitutions += [('_masking1{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'T2_preproc.nii.gz')]
        substitutions += [('_masking2{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'FLAIR_preproc.nii.gz')]
        substitutions += [('_apply_ts0{}/CT1_trans.nii.gz'.format(i),
                           session + '/' + 'CT1_reg2CT.nii.gz')]
        substitutions += [('_apply_ts1{}/T2_trans.nii.gz'.format(i),
                           session + '/' + 'T2_reg2CT.nii.gz')]
        substitutions += [('_apply_ts2{}/FLAIR_trans.nii.gz'.format(i),
                           session + '/' + 'FLAIR_reg2CT.nii.gz')]
        substitutions += [('_apply_ts_t1{}/T1_trans.nii.gz'.format(i),
                           session + '/' + 'T1_reg2CT.nii.gz')]
        substitutions += [('_apply_ts10{}/CT1_trans.nii.gz'.format(i),
                           session + '/' + 'CT1_reg2T1_ref.nii.gz')]
        substitutions += [('_apply_ts11{}/T2_trans.nii.gz'.format(i),
                           session + '/' + 'T2_reg2T1_ref.nii.gz')]
        substitutions += [('_apply_ts12{}/FLAIR_trans.nii.gz'.format(i),
                           session + '/' + 'FLAIR_reg2T1_ref.nii.gz')]

    datasink.inputs.substitutions = substitutions
    # Create Workflow
    workflow = nipype.Workflow('registration_workflow', base_dir=nipype_cache)

    # Rigidly register each non-T1 modality to its session's T1.
    for i, reg in enumerate(reg_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], reg, 'input_file')
        workflow.connect(datasource, SEQUENCES[0], reg, 'ref_file')
    # bring every MR in CT space
    for i, node in enumerate(apply_ts_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], node, 'input_image')
        if reference:
            workflow.connect(datasource, 'reference', node, 'reference_image')
        else:
            workflow.connect(datasource, 't1_0', node, 'reference_image')
        workflow.connect(merge_nodes[i], 'out', node, 'transforms')
        workflow.connect(node, 'output_image', datasink,
                         'results.subid.@{}_reg2CT'.format(SEQUENCES[i + 1]))
    # bring every MR in T1_ref space
    for i, node in enumerate(apply_ts_nodes1):
        workflow.connect(datasource, SEQUENCES[i + 1], node, 'input_image')
        workflow.connect(datasource, 't1_0', node, 'reference_image')
        workflow.connect(merge_nodes1[i], 'out', node, 'transforms')
        workflow.connect(
            node, 'output_image', datasink,
            'results.subid.@{}_reg2T1_ref'.format(SEQUENCES[i + 1]))

    # Assemble per-modality transform chains for CT space: optionally the
    # T1_ref->CT matrix (in1, via fake_merge), then warp, T1->T1_ref matrix,
    # and modality->T1 matrix, using the if_0 offset computed above.
    for i, node in enumerate(merge_nodes):
        workflow.connect(reg_nodes[i], 'regmat', node, 'in{}'.format(if_0 + 2))
        workflow.connect(reg2T1, 'regmat', node, 'in{}'.format(if_0 + 1))
        workflow.connect(reg2T1, 'warp_file', node, 'in{}'.format(if_0))
        if reference:
            workflow.connect(fake_merge, 'out', node, 'in1')

    # Transform chains for T1_ref space (no CT matrix involved).
    for i, node in enumerate(merge_nodes1):
        workflow.connect(reg_nodes[i], 'regmat', node, 'in3')
        workflow.connect(reg2T1, 'regmat', node, 'in2')
        workflow.connect(reg2T1, 'warp_file', node, 'in1')

    # Apply the T1 brain mask (from BET workflow or datasource) to each
    # registered modality and sink the result.
    for i, mask in enumerate(apply_mask_nodes):
        workflow.connect(reg_nodes[i], 'reg_file', mask, 'in_file')
        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_mask', mask, 'mask_file')
        else:
            workflow.connect(datasource, 't1_mask', mask, 'mask_file')
        workflow.connect(mask, 'out_file', datasink,
                         'results.subid.@{}_preproc'.format(SEQUENCES[i + 1]))
    if bet_workflow is not None:
        workflow.connect(bet_workflow, 'bet.out_file', reg2T1, 'input_file')
        workflow.connect(bet_workflow, 't1_0_bet.out_file', reg2T1, 'ref_file')
    else:
        workflow.connect(datasource, 't1_bet', reg2T1, 'input_file')
        workflow.connect(datasource, 't1_0_bet', reg2T1, 'ref_file')

    if reference:
        # Replicate the single T1_ref->CT matrix once per session so the
        # MapNode merges receive equally-sized lists (see fake_merge note).
        for i, sess in enumerate(sessions):
            workflow.connect(regT12CT, 'regmat', fake_merge,
                             'in{}'.format(i + 1))
            workflow.connect(regT12CT, 'regmat', datasink,
                             'results.subid.{0}.@regT12CT_mat'.format(sess))
        workflow.connect(datasource, 'reference', regT12CT, 'ref_file')
        workflow.connect(datasource, 't1_0', regT12CT, 'input_file')
        workflow.connect(fake_merge, 'out', merge_ts_t1, 'in1')
        workflow.connect(datasource, 'reference', apply_ts_t1,
                         'reference_image')
    else:
        workflow.connect(datasource, 't1_0', apply_ts_t1, 'reference_image')

    workflow.connect(datasource, 't1', apply_ts_t1, 'input_image')

    # T1 transform chain into CT (or T1_ref) space.
    workflow.connect(merge_ts_t1, 'out', apply_ts_t1, 'transforms')
    workflow.connect(reg2T1, 'regmat', merge_ts_t1, 'in{}'.format(if_0 + 1))
    workflow.connect(reg2T1, 'warp_file', merge_ts_t1, 'in{}'.format(if_0))

    # Sink the T1->T1_ref registration products and the resampled T1.
    workflow.connect(reg2T1, 'warp_file', datasink,
                     'results.subid.@reg2CT_warp')
    workflow.connect(reg2T1, 'regmat', datasink, 'results.subid.@reg2CT_mat')
    workflow.connect(reg2T1, 'reg_file', datasink, 'results.subid.@T12T1_ref')
    workflow.connect(apply_ts_t1, 'output_image', datasink,
                     'results.subid.@T1_reg2CT')

    if bet_workflow is not None:
        workflow = datasink_base(datasink, datasource, workflow, sessions,
                                 reference)
    else:
        workflow = datasink_base(datasink,
                                 datasource,
                                 workflow,
                                 sessions,
                                 reference,
                                 extra_nodes=['t1_bet'])

    return workflow
Exemple #3
0
def tumor_segmentation(datasource,
                       sub_id,
                       sessions,
                       gtv_model,
                       tumor_model,
                       result_dir,
                       nipype_cache,
                       reference,
                       reg_workflow=None,
                       bet_workflow=None):
    """Build the tumor/GTV segmentation workflow for one subject.

    Segments the tumor with HD-GlioPredict (4 modalities) and the GTV and a
    2-modality tumor with nnUNet, then normalizes the segmentations to CT
    space (when ``reference`` is set) and to T1_ref space, sinking all
    results into ``result_dir``.

    Parameters
    ----------
    datasource : nipype node
        Source of the preprocessed images (and of the registration
        transforms when ``reg_workflow`` is None).
    sub_id : str
        Subject identifier (replaces 'subid' in sink paths).
    sessions : sequence of str
        Session names, used for output renaming.
    gtv_model, tumor_model : str
        nnUNet model folders for GTV and 2-modality tumor inference.
    result_dir, nipype_cache : str
        DataSink base directory and workflow cache directory.
    reference : bool-like
        Whether a reference CT exists (enables the extra CT-space branch).
    reg_workflow, bet_workflow : nipype.Workflow or None
        Optional upstream workflows supplying masked images, transforms
        and the brain-extracted T1.

    Returns
    -------
    nipype.Workflow
        The assembled segmentation workflow.
    """
    # Transform-chain geometry depends only on whether a reference CT
    # exists.  This must be computed unconditionally: merge_ts_t1ref below
    # reads iterfields_t1 whenever `reference` is set, even when a
    # reg_workflow is provided (previously that path raised NameError).
    if reference:
        iterfields_t1 = ['in1', 'in2', 'in3']
        if_0 = 2
    else:
        iterfields_t1 = ['in1', 'in2']
        if_0 = 1

    if reg_workflow is None:
        # Without an upstream registration workflow the transforms come
        # from the datasource and are merged here.
        merge_ts_t1 = nipype.MapNode(interface=Merge(len(iterfields_t1)),
                                     iterfield=iterfields_t1,
                                     name='merge_t1')
        merge_ts_t1.inputs.ravel_inputs = True

    # Nearest-neighbour resampling keeps the label maps discrete.
    apply_ts_gtv = nipype.MapNode(interface=ApplyTransforms(),
                                  iterfield=['input_image', 'transforms'],
                                  name='apply_ts_gtv')
    apply_ts_gtv.inputs.interpolation = 'NearestNeighbor'
    apply_ts_tumor = nipype.MapNode(interface=ApplyTransforms(),
                                    iterfield=['input_image', 'transforms'],
                                    name='apply_ts_tumor')
    apply_ts_tumor.inputs.interpolation = 'NearestNeighbor'
    apply_ts_tumor1 = nipype.MapNode(interface=ApplyTransforms(),
                                     iterfield=['input_image', 'transforms'],
                                     name='apply_ts_tumor1')
    apply_ts_tumor1.inputs.interpolation = 'NearestNeighbor'

    if reference:
        # Extra branch normalizing the segmentations to T1_ref space.
        merge_ts_t1ref = nipype.MapNode(interface=Merge(len(iterfields_t1)),
                                        iterfield=['in1', 'in2'],
                                        name='merge_t1ref')
        merge_ts_t1ref.inputs.ravel_inputs = True
        apply_ts_gtv_t1ref = nipype.MapNode(
            interface=ApplyTransforms(),
            iterfield=['input_image', 'transforms'],
            name='apply_ts_gtv_t1ref')
        apply_ts_gtv_t1ref.inputs.interpolation = 'NearestNeighbor'
        apply_ts_tumor_t1ref = nipype.MapNode(
            interface=ApplyTransforms(),
            iterfield=['input_image', 'transforms'],
            name='apply_ts_tumor_t1ref')
        apply_ts_tumor_t1ref.inputs.interpolation = 'NearestNeighbor'
        apply_ts_tumor1_t1ref = nipype.MapNode(
            interface=ApplyTransforms(),
            iterfield=['input_image', 'transforms'],
            name='apply_ts_tumor1_t1ref')
        apply_ts_tumor1_t1ref.inputs.interpolation = 'NearestNeighbor'
        outname = 'reg2CT'
    else:
        outname = 'reg2T1ref'

    # HD-GlioPredict tumor segmentation on all four modalities.
    tumor_seg = nipype.MapNode(interface=HDGlioPredict(),
                               iterfield=['t1', 'ct1', 't2', 'flair'],
                               name='tumor_segmentation')
    tumor_seg.inputs.out_file = 'segmentation'

    # Pair CT1 + FLAIR as the nnUNet input images.
    mi = nipype.MapNode(Merge(2), iterfield=['in1', 'in2'], name='merge')

    gtv_seg_data_prep = nipype.MapNode(interface=NNUnetPreparation(),
                                       iterfield=['images'],
                                       name='gtv_seg_data_prep')

    gtv_seg = nipype.MapNode(interface=NNUnetInference(),
                             iterfield=['input_folder'],
                             name='gtv_segmentation')
    gtv_seg.inputs.model_folder = gtv_model

    tumor_seg_2mods = nipype.MapNode(interface=NNUnetInference(),
                                     iterfield=['input_folder'],
                                     name='tumor_seg_2mods')
    tumor_seg_2mods.inputs.model_folder = tumor_model

    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")

    # Rename MapNode iteration folders/files to session-based output names.
    substitutions = [('/segmentation.nii.gz', '/Tumor_predicted.nii.gz')]
    substitutions += [('subid', sub_id)]
    for i, session in enumerate(sessions):
        substitutions += [('_tumor_segmentation{}/'.format(i), session + '/')]
        substitutions += [('_gtv_segmentation{}/subject1'.format(i),
                           session + '/GTV_predicted')]
        substitutions += [('_tumor_seg_2mods{}/subject1'.format(i),
                           session + '/Tumor_predicted_2modalities')]
        substitutions += [
            ('_apply_ts_gtv{}/subject1_trans.nii.gz'.format(i),
             session + '/' + 'GTV_predicted_{}.nii.gz'.format(outname))
        ]
        substitutions += [
            ('_apply_ts_tumor1{}/subject1_trans.nii.gz'.format(i), session +
             '/' + 'Tumor_predicted_2modalities_{}.nii.gz'.format(outname))
        ]
        substitutions += [
            ('_apply_ts_tumor{}/segmentation_trans.nii.gz'.format(i),
             session + '/' + 'Tumor_predicted_{}.nii.gz'.format(outname))
        ]

        substitutions += [
            ('_apply_ts_gtv_t1ref{}/subject1_trans.nii.gz'.format(i),
             session + '/' + 'GTV_predicted_reg2T1ref.nii.gz')
        ]
        substitutions += [
            ('_apply_ts_tumor1_t1ref{}/subject1_trans.nii.gz'.format(i),
             session + '/' + 'Tumor_predicted_2modalities_reg2T1ref.nii.gz')
        ]
        substitutions += [
            ('_apply_ts_tumor_t1ref{}/segmentation_trans.nii.gz'.format(i),
             session + '/' + 'Tumor_predicted_reg2T1ref.nii.gz')
        ]
    datasink.inputs.substitutions = substitutions

    # Create Workflow
    workflow = nipype.Workflow('tumor_segmentation_workflow',
                               base_dir=nipype_cache)

    # Connect from registration workflow, if provided
    if reg_workflow is not None:
        workflow.connect(reg_workflow, 'masking0.out_file', mi, 'in1')
        workflow.connect(reg_workflow, 'masking2.out_file', mi, 'in2')
        workflow.connect(reg_workflow, 'masking0.out_file', tumor_seg, 'ct1')
        workflow.connect(reg_workflow, 'masking1.out_file', tumor_seg, 't2')
        workflow.connect(reg_workflow, 'masking2.out_file', tumor_seg, 'flair')
        workflow.connect(bet_workflow, 'bet.out_file', tumor_seg, 't1')
        workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_tumor,
                         'transforms')
        workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_gtv,
                         'transforms')
        workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_tumor1,
                         'transforms')
        if reference:
            workflow.connect(reg_workflow, 'reg2T1.regmat', merge_ts_t1ref,
                             'in2')
            workflow.connect(reg_workflow, 'reg2T1.warp_file', merge_ts_t1ref,
                             'in1')
    else:
        # Standalone mode: transforms and preprocessed images come straight
        # from the datasource.
        workflow.connect(datasource, 'reg2t1_mat', merge_ts_t1,
                         'in{}'.format(if_0 + 1))
        workflow.connect(datasource, 'reg2t1_warp', merge_ts_t1,
                         'in{}'.format(if_0))
        if reference:
            workflow.connect(datasource, 't12ct_mat', merge_ts_t1, 'in1')
            workflow.connect(datasource, 'reg2t1_mat', merge_ts_t1ref, 'in1')
            workflow.connect(datasource, 'reg2t1_warp', merge_ts_t1ref, 'in2')
        workflow.connect(merge_ts_t1, 'out', apply_ts_tumor, 'transforms')
        workflow.connect(merge_ts_t1, 'out', apply_ts_gtv, 'transforms')
        workflow.connect(merge_ts_t1, 'out', apply_ts_tumor1, 'transforms')
        workflow.connect(datasource, 'ct1_preproc', mi, 'in1')
        workflow.connect(datasource, 'flair_preproc', mi, 'in2')
        workflow.connect(datasource, 'ct1_preproc', tumor_seg, 'ct1')
        workflow.connect(datasource, 't2_preproc', tumor_seg, 't2')
        workflow.connect(datasource, 'flair_preproc', tumor_seg, 'flair')
        workflow.connect(datasource, 't1_preproc', tumor_seg, 't1')

    # Connect from datasource
    if reference:
        workflow.connect(merge_ts_t1ref, 'out', apply_ts_tumor_t1ref,
                         'transforms')
        workflow.connect(merge_ts_t1ref, 'out', apply_ts_gtv_t1ref,
                         'transforms')
        workflow.connect(merge_ts_t1ref, 'out', apply_ts_tumor1_t1ref,
                         'transforms')
        workflow.connect(datasource, 'reference', apply_ts_gtv,
                         'reference_image')
        workflow.connect(datasource, 'reference', apply_ts_tumor1,
                         'reference_image')
        workflow.connect(datasource, 'reference', apply_ts_tumor,
                         'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_gtv_t1ref,
                         'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_tumor1_t1ref,
                         'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_tumor_t1ref,
                         'reference_image')
    else:
        workflow.connect(datasource, 't1_0', apply_ts_gtv, 'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_tumor1,
                         'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_tumor, 'reference_image')

    # Connect other nodes

    # Nodes to prepare the data before nnUNet inference
    workflow.connect(mi, 'out', gtv_seg_data_prep, 'images')

    # Nodes to segment GTV and tumor using nnUNet
    workflow.connect(gtv_seg_data_prep, 'output_folder', gtv_seg,
                     'input_folder')
    workflow.connect(gtv_seg_data_prep, 'output_folder', tumor_seg_2mods,
                     'input_folder')

    # Nodes to normalize segmentations to CT space
    workflow.connect(gtv_seg, 'output_file', apply_ts_gtv, 'input_image')
    workflow.connect(tumor_seg_2mods, 'output_file', apply_ts_tumor1,
                     'input_image')
    workflow.connect(tumor_seg, 'out_file', apply_ts_tumor, 'input_image')

    # Connect datasink nodes to save outputs
    workflow.connect(tumor_seg, 'out_file', datasink,
                     'results.subid.@tumor_seg')
    workflow.connect(gtv_seg, 'output_file', datasink,
                     'results.subid.@gtv_seg')
    workflow.connect(tumor_seg_2mods, 'output_file', datasink,
                     'results.subid.@tumor_seg_2mods')
    workflow.connect(apply_ts_gtv, 'output_image', datasink,
                     'results.subid.@gtv_reg2CT')
    workflow.connect(apply_ts_tumor, 'output_image', datasink,
                     'results.subid.@tumor_reg2CT')
    workflow.connect(apply_ts_tumor1, 'output_image', datasink,
                     'results.subid.@tumor1_reg2CT')
    if reference:
        workflow.connect(tumor_seg_2mods, 'output_file', apply_ts_tumor1_t1ref,
                         'input_image')
        workflow.connect(tumor_seg, 'out_file', apply_ts_tumor_t1ref,
                         'input_image')
        workflow.connect(gtv_seg, 'output_file', apply_ts_gtv_t1ref,
                         'input_image')
        workflow.connect(apply_ts_gtv_t1ref, 'output_image', datasink,
                         'results.subid.@gtv_reg2T1ref')
        workflow.connect(apply_ts_tumor_t1ref, 'output_image', datasink,
                         'results.subid.@tumor_reg2T1ref')
        workflow.connect(apply_ts_tumor1_t1ref, 'output_image', datasink,
                         'results.subid.@tumor1_reg2T1ref')

    workflow = datasink_base(datasink, datasource, workflow, sessions,
                             reference)

    return workflow
Exemple #4
0
def single_tp_registration(sub_id,
                           datasource,
                           session,
                           reference,
                           result_dir,
                           nipype_cache,
                           bet_workflow=None):
    """
    This is a workflow to register multi-modalities MR (T2, T1KM, FLAIR) to their
    reference T1 image, in a single time-point cohort. In particular, for each
    subject, this workflow will register the MR images in the provided time-point (tp)
    to the corresponding T1, then it will register the T1 image to the BPLCT (if
    present). At the end, all the MR images will be saved both in T1 space and in
    CT space.

    Parameters
    ----------
    sub_id : str
        Subject identifier (replaces 'subid' in sink paths).
    datasource : nipype node
        Node exposing the per-sequence inputs (see SEQUENCES) plus
        'reference' and 't1_mask'/'t1_bet' depending on the options.
    session : sequence of str
        One-element sequence containing the session name.
    reference : bool-like
        Whether a reference CT (BPLCT) is available.
    result_dir, nipype_cache : str
        DataSink base directory and workflow cache directory.
    bet_workflow : nipype.Workflow or None
        If given, brain-extracted T1/mask are taken from it; otherwise
        they are expected from the datasource.

    Returns
    -------
    nipype.Workflow
        The assembled single-time-point registration workflow.
    """
    # Only one time point is processed; unwrap the single session name.
    session = session[0]
    if reference:
        # Rigid registration of the T1 to the BPLCT.
        regT12CT = nipype.MapNode(interface=AntsRegSyn(),
                                  iterfield=['input_file'],
                                  name='regT12CT')
        regT12CT.inputs.transformation = 'r'
        regT12CT.inputs.num_dimensions = 3
        regT12CT.inputs.num_threads = 4

    # One rigid registration node per non-T1 sequence (SEQUENCES[1:4]),
    # bringing each modality into T1 space.
    reg_nodes = []
    for i in range(3):
        reg = nipype.MapNode(interface=AntsRegSyn(),
                             iterfield=['input_file', 'ref_file'],
                             name='ants_reg{}'.format(i))
        reg.inputs.transformation = 'r'
        reg.inputs.num_dimensions = 3
        reg.inputs.num_threads = 4
        reg.inputs.interpolation = 'BSpline'
        reg_nodes.append(reg)

    # Brain-mask application for each registered modality.
    apply_mask_nodes = []
    for i in range(3):
        masking = nipype.MapNode(interface=ApplyMask(),
                                 iterfield=['in_file', 'mask_file'],
                                 name='masking{}'.format(i))
        apply_mask_nodes.append(masking)

    if reference:
        # Nodes for resampling the masked modalities into CT space.
        apply_ts_nodes = []
        for i in range(3):
            apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                      iterfield=['input_image', 'transforms'],
                                      name='apply_ts{}'.format(i))
            apply_ts_nodes.append(apply_ts)

        apply_ts_t1 = nipype.MapNode(interface=ApplyTransforms(),
                                     iterfield=['input_image', 'transforms'],
                                     name='apply_ts_t1')

        # NOTE(review): these merge nodes are built and populated below but
        # never connected to any ApplyTransforms input — apply_ts uses the
        # T1->CT matrix directly; confirm whether they are dead code.
        merge_nodes = []
        for i in range(3):
            merge = nipype.MapNode(interface=Merge(2),
                                   iterfield=['in1', 'in2'],
                                   name='merge{}'.format(i))
            merge.inputs.ravel_inputs = True
            merge_nodes.append(merge)

    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")

    # Rename MapNode iteration folders/files to session-based output names.
    substitutions = [('subid', sub_id)]
    substitutions += [('session', session)]
    substitutions += [('_regT12CT0/antsreg0GenericAffine.mat',
                       '/reg2T1_ref.mat')]
    substitutions += [('_masking00/antsregWarped_masked.nii.gz',
                       session + '/' + 'CT1_preproc.nii.gz')]
    substitutions += [('_regT12CT/antsreg0GenericAffine.mat',
                       '/regT1_ref2CT.mat')]
    substitutions += [('_masking10/antsregWarped_masked.nii.gz',
                       session + '/' + 'T2_preproc.nii.gz')]
    substitutions += [('_masking20/antsregWarped_masked.nii.gz',
                       session + '/' + 'FLAIR_preproc.nii.gz')]
    substitutions += [('_apply_ts00/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'CT1_reg2CT.nii.gz')]
    substitutions += [('_apply_ts10/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'T2_reg2CT.nii.gz')]
    substitutions += [('_apply_ts20/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'FLAIR_reg2CT.nii.gz')]
    substitutions += [('_apply_ts_t10/T1_preproc_trans.nii.gz',
                       session + '/' + 'T1_reg2CT.nii.gz')]

    datasink.inputs.substitutions = substitutions
    # Create Workflow
    workflow = nipype.Workflow('registration_workflow', base_dir=nipype_cache)

    # Rigidly register each non-T1 modality to the T1.
    for i, reg in enumerate(reg_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], reg, 'input_file')
        workflow.connect(datasource, SEQUENCES[0], reg, 'ref_file')
    # bring every MR in CT space
    if reference:
        for i, node in enumerate(merge_nodes):
            workflow.connect(reg_nodes[i], 'regmat', node, 'in2')
            workflow.connect(regT12CT, 'regmat', node, 'in1')
        for i, node in enumerate(apply_ts_nodes):
            workflow.connect(apply_mask_nodes[i], 'out_file', node,
                             'input_image')
            workflow.connect(datasource, 'reference', node, 'reference_image')
            workflow.connect(regT12CT, 'regmat', node, 'transforms')
            workflow.connect(
                node, 'output_image', datasink,
                'results.subid.@{}_reg2CT'.format(SEQUENCES[i + 1]))

        workflow.connect(regT12CT, 'regmat', datasink,
                         'results.subid.{0}.@regT12CT_mat'.format(session))
        workflow.connect(datasource, 'reference', regT12CT, 'ref_file')
        workflow.connect(datasource, 't1', regT12CT, 'input_file')

        # Resample the (brain-extracted) T1 into CT space.
        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_file', apply_ts_t1,
                             'input_image')
        else:
            workflow.connect(datasource, 't1_bet', apply_ts_t1, 'input_image')
        workflow.connect(datasource, 'reference', apply_ts_t1,
                         'reference_image')
        workflow.connect(apply_ts_t1, 'output_image', datasink,
                         'results.subid.@T1_reg2CT')
        workflow.connect(regT12CT, 'regmat', apply_ts_t1, 'transforms')

    # Apply the T1 brain mask (from BET workflow or datasource) to each
    # registered modality and sink the result.
    for i, mask in enumerate(apply_mask_nodes):
        workflow.connect(reg_nodes[i], 'reg_file', mask, 'in_file')
        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_mask', mask, 'mask_file')
        else:
            workflow.connect(datasource, 't1_mask', mask, 'mask_file')
        workflow.connect(mask, 'out_file', datasink,
                         'results.subid.@{}_preproc'.format(SEQUENCES[i + 1]))

    if bet_workflow is not None:
        workflow = datasink_base(datasink,
                                 datasource,
                                 workflow, [session],
                                 reference,
                                 t10=False)
    else:
        workflow = datasink_base(datasink,
                                 datasource,
                                 workflow, [session],
                                 reference,
                                 extra_nodes=['t1_bet'],
                                 t10=False)

    return workflow