Example #1
    def workflow(self):

        self.datasource()
        datasource = self.data_source
        nipype_cache = self.nipype_cache
        result_dir = self.result_dir
        sub_id = self.sub_id
        regex = self.regex
        roi_selection = self.roi_selection
        if datasource is not None:

            workflow = nipype.Workflow('rtstruct_extraction_workflow', base_dir=nipype_cache)
        
            datasink = nipype.Node(nipype.DataSink(base_directory=result_dir), "datasink")
            substitutions = [('subid', sub_id)]
            substitutions += [('results/', '{}/'.format(self.workflow_name))]
    
            ss_convert = nipype.MapNode(interface=RTStructureCoverter(),
                                        iterfield=['reference_ct', 'input_ss'],
                                        name='ss_convert')
            mha_convert = nipype.MapNode(interface=MHA2NIIConverter(),
                                         iterfield=['input_folder'],
                                         name='mha_convert')
            
            if roi_selection:
                select = nipype.MapNode(interface=CheckRTStructures(),
                                        iterfield=['rois', 'dose_file'],
                                        name='select_gtv')
                workflow.connect(mha_convert, 'out_files', select, 'rois')
                workflow.connect(datasource, 'rt_dose', select, 'dose_file')
                workflow.connect(select, 'checked_roi', datasink,
                                 'results.subid.@masks')
            else:
                workflow.connect(mha_convert, 'out_files', datasink,
                                 'results.subid.@masks')

            for i, session in enumerate(self.rt['session']):
                substitutions += [('_select_gtv{}/'.format(i), session + '/')]
                substitutions += [('_voxelizer{}/'.format(i), session + '/')]
                substitutions += [('_mha_convert{}/'.format(i), session + '/')]

            datasink.inputs.substitutions = substitutions
        
            workflow.connect(datasource, 'rtct_nifti', ss_convert, 'reference_ct')
            workflow.connect(datasource, 'rts_dcm', ss_convert, 'input_ss')
            workflow.connect(ss_convert, 'out_structures', mha_convert, 'input_folder')
    
            workflow = self.datasink(workflow, datasink)
        else:
            workflow = nipype.Workflow('rtstruct_extraction_workflow', base_dir=nipype_cache)

        return workflow
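A workflow built this way is executed through nipype's plugin system. A minimal driver sketch, assuming 'pipeline' is an instance of the class this method belongs to (the plugin choice and process count are illustrative, not part of the example):

# Hypothetical driver for the RT-structure extraction workflow above.
wf = pipeline.workflow()
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})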
Example #2
def brain_extraction(sub_id,
                     datasource,
                     sessions,
                     RESULT_DIR,
                     NIPYPE_CACHE,
                     reference,
                     t10=True):

    bet = nipype.MapNode(interface=HDBet(),
                         iterfield=['input_file'],
                         name='bet')
    bet.inputs.save_mask = 1
    bet.inputs.out_file = 'T1_preproc'

    if t10:
        bet_t10 = nipype.Node(interface=HDBet(), name='t1_0_bet')
        bet_t10.inputs.save_mask = 1
        bet_t10.inputs.out_file = 'T1_0_bet'

    datasink = nipype.Node(nipype.DataSink(base_directory=RESULT_DIR),
                           "datasink")

    substitutions = [('subid', sub_id)]
    for i, session in enumerate(sessions):

        substitutions += [('_bet{}/'.format(i), session + '/')]

    datasink.inputs.substitutions = substitutions
    # Create Workflow
    workflow = nipype.Workflow('brain_extraction_workflow',
                               base_dir=NIPYPE_CACHE)

    workflow.connect(datasource, 't1', bet, 'input_file')
    if t10:
        workflow.connect(datasource, 't1_0', bet_t10, 'input_file')
        workflow.connect(bet_t10, 'out_file', datasink,
                         'results.subid.T10.@T1_ref_bet')

    workflow.connect(bet, 'out_file', datasink, 'results.subid.@T1_preproc')
    workflow.connect(bet, 'out_mask', datasink, 'results.subid.@T1_mask')

    workflow = datasink_base(datasink,
                             datasource,
                             workflow,
                             sessions,
                             reference,
                             t10=t10)

    return workflow
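brain_extraction wires everything against a caller-supplied datasource node. A minimal invocation sketch, assuming a datasource that exposes 't1' (and 't1_0' when t10=True); paths and session names are placeholders:

# Hypothetical call; the datasource node comes from the caller.
wf = brain_extraction('sub-001', datasource, ['session1', 'session2'],
                      '/data/results', '/data/cache', reference=False, t10=True)
wf.run()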
Example #3
            datasource.inputs.base_directory = BASE_DIR
            datasource.inputs.template = '*'
            datasource.inputs.sort_filelist = True
            datasource.inputs.field_template = dict(
                reference='%s/%s/%s.nii.gz', to_reg='%s/%s/%s.nii.gz')
            datasource.inputs.template_args = dict(
                to_reg=[['sub_id', 'sessions', 'contrasts']],
                reference=[['sub_id', 'ref_tp', 'contrasts']])
            datasource.inputs.raise_on_empty = False
            datasource.inputs.contrasts = contrast
            datasource.inputs.sub_id = sub.split('/')[-1]
            datasource.inputs.sessions = sessions
            datasource.inputs.ref_tp = ref_tp

            rf_1 = nipype.MapNode(interface=fsl.RobustFOV(),
                                  iterfield=['in_file'],
                                  name='rf_1')
            rf_ref = nipype.Node(interface=fsl.RobustFOV(), name='rf_ref')

            bet_1 = nipype.MapNode(interface=HDBet(),
                                   iterfield=['input_file'],
                                   name='bet_1')
            bet_1.inputs.save_mask = 1
            bet_1.inputs.out_file = '{}_bet'.format(contrast)
            bet_ref = nipype.Node(interface=HDBet(), name='bet_ref')
            bet_ref.inputs.save_mask = 1
            bet_ref.inputs.out_file = '{}_bet'.format(contrast)

            datasink = nipype.Node(nipype.DataSink(base_directory=RESULT_DIR),
                                   "datasink")
            substitutions = [('contrast', contrast)]
Example #4
            datasource = nipype.Node(
                interface=nipype.DataGrabber(infields=['contrasts', 'sub_id', 'sessions', 'ref_tp'],
                                             outfields=['reference', 'to_reg']), name='datasource')
            datasource.inputs.base_directory = base_dir
            datasource.inputs.template = '*'
            datasource.inputs.sort_filelist = True
            datasource.inputs.field_template = dict(reference='%s/%s/%sCT.nii.gz',
                                                    to_reg='%s/%s/%s.nii.gz')
            datasource.inputs.template_args = dict(to_reg=[['sub_id', 'sessions', 'contrasts']],
                                                   reference=[['sub_id', 'ref_tp', '']])
            datasource.inputs.raise_on_empty = False
            datasource.inputs.contrasts = contrast
            datasource.inputs.sub_id = sub
            datasource.inputs.sessions = sessions
            datasource.inputs.ref_tp = ref_tp
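            # With this configuration, DataGrabber fills each field_template
            # with the matching template_args, e.g. reference expands to
            # '<sub>/<ref_tp>/CT.nii.gz' (note the empty third argument) and
            # to_reg to '<sub>/<session>/<contrast>.nii.gz' for every session.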

            reg = nipype.MapNode(interface=AntsRegSyn(), iterfield=['input_file'], name='ants_reg')
            reg.inputs.transformation = 'r'
            reg.inputs.num_dimensions = 3
            reg.inputs.num_threads = 4

            datasink = nipype.Node(nipype.DataSink(base_directory=result_dir), "datasink")
            substitutions = [('contrast', contrast), ('sub', sub)]
            for i, session in enumerate(sessions):
                substitutions += [('_ants_reg{}/'.format(i), session + '/')]
            datasink.inputs.substitutions = substitutions

            workflow = nipype.Workflow('registration_workflow', base_dir=cache_dir)
            workflow.connect(datasource, 'reference', reg, 'ref_file')
            workflow.connect(datasource, 'to_reg', reg, 'input_file')
            workflow.connect(reg, 'reg_file', datasink, 'registration.contrast.sub.@reg_image')
            workflow.connect(reg, 'regmat', datasink, 'registration.contrast.sub.@affine_mat')
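The fragment ends after the datasink connections; a run step in the same style would presumably follow:

# Hypothetical continuation: execute the registration workflow.
workflow.run()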
Example #5
datasource.inputs.field_template = dict(directory='*/*/%s/1-*')

inputnode_rt = nipype.Node(
    interface=util.IdentityInterface(fields=['rt_files']),
    name='inputnode_rt')
inputnode_rt.iterables = ('rt_files', rt_files)

datasource_rt = nipype.Node(
    interface=nipype.DataGrabber(infields=['rt_files'], outfields=['directory']),
    name='datasource_rt')
datasource_rt.inputs.base_directory = base_dir
datasource_rt.inputs.template = '*'
datasource_rt.inputs.sort_filelist = True
datasource_rt.inputs.field_template = dict(directory='*/*/%s/1-*')

dc_rt = nipype.MapNode(interface=DicomCheck(), iterfield=['dicom_dir'], name='dc_rt')
dc_rt.inputs.working_dir = result_dir

dc = nipype.MapNode(interface=DicomCheck(), iterfield=['dicom_dir'], name='dc')
dc.inputs.working_dir = result_dir

converter = nipype.MapNode(interface=Dcm2niix(),
                           iterfield=['source_dir', 'out_filename', 'output_dir'],
                           name='converter')
converter.inputs.compress = 'y'
converter.inputs.philips_float = False
converter.inputs.merge_imgs = True

check = nipype.MapNode(interface=ConversionCheck(),
                       iterfield=['in_file', 'file_name'],
                       name='check_conversion')
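This fragment defines the DICOM-check, conversion, and conversion-check nodes but stops before they are connected; Example #11 below chains the same nodes, and the analogous wiring here would presumably be:

# Hypothetical wiring, mirroring the connections used in Example #11.
workflow.connect(dc, 'outdir', converter, 'source_dir')
workflow.connect(dc, 'scan_name', converter, 'out_filename')
workflow.connect(dc, 'scan_name', check, 'file_name')
workflow.connect(converter, 'converted_files', check, 'in_file')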
Example #6
        datasource.inputs.template_args = dict(
            to_reg=[['sub_id', 'sessions', 'contrasts']],
            reference=[['sub_id', 'ref_tp', 'contrasts']])
        datasource.inputs.raise_on_empty = False
        datasource.inputs.contrasts = contrast
        datasource.inputs.sub_id = sub.split('/')[-1]
        datasource.inputs.sessions = sessions
        datasource.inputs.ref_tp = ref_tp

        rs_ref = nipype.Node(interface=ResampleImage(), name='rs_ref')
        rs_ref.inputs.new_size = '1x1x1'
        rs_ref.inputs.mode = 0
        rs_ref.inputs.interpolation = 0
        rs_ref.inputs.dimensions = 3

        merge_1 = nipype.MapNode(interface=Merge(2),
                                 iterfield=['in1', 'in2'],
                                 name='merge_1')
        merge_1.inputs.ravel_inputs = True

        split_1 = nipype.MapNode(interface=Split(),
                                 iterfield=['inlist'],
                                 name='split_1')
        split_1.inputs.squeeze = True
        split_1.inputs.splits = [1, 2]

        fast_1 = nipype.MapNode(interface=fsl.FAST(),
                                iterfield=['in_files'],
                                name='fast_1')
        fast_1.inputs.img_type = 1
        fast_1.inputs.segments = True
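        # fsl.FAST with img_type=1 treats each input as T1-weighted and, with
        # segments=True, also writes one binary mask per tissue class
        # (CSF/GM/WM) alongside the partial-volume maps.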
Example #7
sub_list = creste_sub_list(base_dir)

datasource = nipype.Node(interface=nipype.DataGrabber(
    infields=['sub_id'], outfields=['ct', 'rtstruct']),
                         name='datasource')
datasource.inputs.base_directory = base_dir
datasource.inputs.template = '*'
datasource.inputs.sort_filelist = True
datasource.inputs.field_template = dict(ct='%s/CT.nii.gz',
                                        rtstruct='%s/RTSTRUCT/*.dcm')
datasource.inputs.template_args = dict(ct=[['sub_id']], rtstruct=[['sub_id']])
datasource.inputs.raise_on_empty = False
datasource.inputs.sub_id = sub_list

voxelizer = nipype.MapNode(interface=Voxelizer(),
                           iterfield=['reference', 'struct_file'],
                           name='voxelizer')
voxelizer.inputs.regular_expression = '.*PTV.*'
voxelizer.inputs.multi_structs = True
voxelizer.inputs.binarization = True

features = nipype.MapNode(interface=CLGlobalFeatures(),
                          iterfield=['in_file', 'mask'],
                          name='features_extraction')
features.inputs.first_order = True
features.inputs.cooccurence = True
features.inputs.run_length = True
features.inputs.int_vol_hist = True
features.inputs.local_intensity = True
features.inputs.volume = True
features.inputs.id = True
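The radiomics fragment above stops before any nodes are connected; a sketch of the presumable wiring, assuming the voxelizer exposes an 'out_files' output and using a placeholder cache directory:

# Hypothetical wiring of the voxelizer / feature-extraction nodes.
workflow = nipype.Workflow('features_extraction', base_dir='/data/cache')
workflow.connect(datasource, 'ct', voxelizer, 'reference')
workflow.connect(datasource, 'rtstruct', voxelizer, 'struct_file')
workflow.connect(datasource, 'ct', features, 'in_file')
workflow.connect(voxelizer, 'out_files', features, 'mask')  # name assumed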
Example #8
def longitudinal_registration(sub_id,
                              datasource,
                              sessions,
                              reference,
                              result_dir,
                              nipype_cache,
                              bet_workflow=None):
    """
    This is a workflow to register multi-modalities MR (T2, T1KM, FLAIR) to their 
    reference T1 image, in multiple time-points cohort. In particular, for each 
    subject, this workflow will register the MR images in each time-point (tp)
    to the corresponding T1, then it will register all the T1 images to a reference T1
    (the one that is the closest in time to the radiotherapy session), and finally the
    reference T1 to the BPLCT. At the end, all the MR images will be saved both in T1 space
    (for each tp) and in CT space.
    """
    reg2T1 = nipype.MapNode(interface=AntsRegSyn(),
                            iterfield=['input_file'],
                            name='reg2T1')
    reg2T1.inputs.transformation = 's'
    reg2T1.inputs.num_dimensions = 3
    reg2T1.inputs.num_threads = 6

    if reference:
        regT12CT = nipype.MapNode(interface=AntsRegSyn(),
                                  iterfield=['input_file'],
                                  name='regT12CT')
        regT12CT.inputs.transformation = 'r'
        regT12CT.inputs.num_dimensions = 3
        regT12CT.inputs.num_threads = 4

    reg_nodes = []
    for i in range(3):
        reg = nipype.MapNode(interface=AntsRegSyn(),
                             iterfield=['input_file', 'ref_file'],
                             name='ants_reg{}'.format(i))
        reg.inputs.transformation = 'r'
        reg.inputs.num_dimensions = 3
        reg.inputs.num_threads = 4
        reg.inputs.interpolation = 'BSpline'
        reg_nodes.append(reg)

    apply_mask_nodes = []
    for i in range(3):
        masking = nipype.MapNode(interface=ApplyMask(),
                                 iterfield=['in_file', 'mask_file'],
                                 name='masking{}'.format(i))
        apply_mask_nodes.append(masking)

    apply_ts_nodes = []
    for i in range(3):
        apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                  iterfield=['input_image', 'transforms'],
                                  name='apply_ts{}'.format(i))
        apply_ts_nodes.append(apply_ts)
    # Apply ts nodes for T1_ref normalization
    apply_ts_nodes1 = []
    for i in range(3):
        apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                  iterfield=['input_image', 'transforms'],
                                  name='apply_ts1{}'.format(i))
        apply_ts_nodes1.append(apply_ts)

    split_ds_nodes = []
    for i in range(4):
        split_ds = nipype.Node(interface=Split(), name='split_ds{}'.format(i))
        split_ds.inputs.splits = [1] * len(sessions)
        split_ds_nodes.append(split_ds)

    apply_ts_t1 = nipype.MapNode(interface=ApplyTransforms(),
                                 iterfield=['input_image', 'transforms'],
                                 name='apply_ts_t1')
    merge_nodes = []
    if reference:
        iterfields = ['in1', 'in2', 'in3', 'in4']
        iterfields_t1 = ['in1', 'in2', 'in3']
        if_0 = 2
    else:
        iterfields = ['in1', 'in2', 'in3']
        iterfields_t1 = ['in1', 'in2']
        if_0 = 1

    for i in range(3):
        merge = nipype.MapNode(interface=Merge(len(iterfields)),
                               iterfield=iterfields,
                               name='merge{}'.format(i))
        merge.inputs.ravel_inputs = True
        merge_nodes.append(merge)
    # Merging transforms for normalization to T1_ref
    merge_nodes1 = []
    for i in range(3):
        merge = nipype.MapNode(interface=Merge(3),
                               iterfield=['in1', 'in2', 'in3'],
                               name='merge1{}'.format(i))
        merge.inputs.ravel_inputs = True
        merge_nodes1.append(merge)

    merge_ts_t1 = nipype.MapNode(interface=Merge(len(iterfields_t1)),
                                 iterfield=iterfields_t1,
                                 name='merge_t1')
    merge_ts_t1.inputs.ravel_inputs = True

    # have to create a fake merge of the transformation from t10 to CT in order
    # to have the same number of matrices as inputs in the mapnode
    fake_merge = nipype.Node(interface=Merge(len(sessions)), name='fake_merge')

    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")

    substitutions = [('subid', sub_id)]
    for i, session in enumerate(sessions):
        substitutions += [('session', session)]
        substitutions += [('_masking0{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'CT1_preproc.nii.gz')]
        substitutions += [('_reg2T1{}/antsreg0GenericAffine.mat'.format(i),
                           session + '/' + 'reg2T1_ref.mat')]
        substitutions += [('_reg2T1{}/antsreg1Warp.nii.gz'.format(i),
                           session + '/' + 'reg2T1_ref_warp.nii.gz')]
        substitutions += [('_reg2T1{}/antsregWarped.nii.gz'.format(i),
                           session + '/' + 'T1_reg2T1_ref.nii.gz')]
        substitutions += [('_regT12CT{}/antsreg0GenericAffine.mat'.format(i),
                           '/regT1_ref2CT.mat')]
        substitutions += [('_masking1{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'T2_preproc.nii.gz')]
        substitutions += [('_masking2{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'FLAIR_preproc.nii.gz')]
        substitutions += [('_apply_ts0{}/CT1_trans.nii.gz'.format(i),
                           session + '/' + 'CT1_reg2CT.nii.gz')]
        substitutions += [('_apply_ts1{}/T2_trans.nii.gz'.format(i),
                           session + '/' + 'T2_reg2CT.nii.gz')]
        substitutions += [('_apply_ts2{}/FLAIR_trans.nii.gz'.format(i),
                           session + '/' + 'FLAIR_reg2CT.nii.gz')]
        substitutions += [('_apply_ts_t1{}/T1_trans.nii.gz'.format(i),
                           session + '/' + 'T1_reg2CT.nii.gz')]
        substitutions += [('_apply_ts10{}/CT1_trans.nii.gz'.format(i),
                           session + '/' + 'CT1_reg2T1_ref.nii.gz')]
        substitutions += [('_apply_ts11{}/T2_trans.nii.gz'.format(i),
                           session + '/' + 'T2_reg2T1_ref.nii.gz')]
        substitutions += [('_apply_ts12{}/FLAIR_trans.nii.gz'.format(i),
                           session + '/' + 'FLAIR_reg2T1_ref.nii.gz')]

    datasink.inputs.substitutions = substitutions
    # Create Workflow
    workflow = nipype.Workflow('registration_workflow', base_dir=nipype_cache)

    for i, reg in enumerate(reg_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], reg, 'input_file')
        workflow.connect(datasource, SEQUENCES[0], reg, 'ref_file')
    # bring every MR in CT space
    for i, node in enumerate(apply_ts_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], node, 'input_image')
        if reference:
            workflow.connect(datasource, 'reference', node, 'reference_image')
        else:
            workflow.connect(datasource, 't1_0', node, 'reference_image')
        workflow.connect(merge_nodes[i], 'out', node, 'transforms')
        workflow.connect(node, 'output_image', datasink,
                         'results.subid.@{}_reg2CT'.format(SEQUENCES[i + 1]))
    # bring every MR in T1_ref space
    for i, node in enumerate(apply_ts_nodes1):
        workflow.connect(datasource, SEQUENCES[i + 1], node, 'input_image')
        workflow.connect(datasource, 't1_0', node, 'reference_image')
        workflow.connect(merge_nodes1[i], 'out', node, 'transforms')
        workflow.connect(
            node, 'output_image', datasink,
            'results.subid.@{}_reg2T1_ref'.format(SEQUENCES[i + 1]))

    for i, node in enumerate(merge_nodes):
        workflow.connect(reg_nodes[i], 'regmat', node, 'in{}'.format(if_0 + 2))
        workflow.connect(reg2T1, 'regmat', node, 'in{}'.format(if_0 + 1))
        workflow.connect(reg2T1, 'warp_file', node, 'in{}'.format(if_0))
        if reference:
            workflow.connect(fake_merge, 'out', node, 'in1')

    for i, node in enumerate(merge_nodes1):
        workflow.connect(reg_nodes[i], 'regmat', node, 'in3')
        workflow.connect(reg2T1, 'regmat', node, 'in2')
        workflow.connect(reg2T1, 'warp_file', node, 'in1')

    for i, mask in enumerate(apply_mask_nodes):
        workflow.connect(reg_nodes[i], 'reg_file', mask, 'in_file')
        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_mask', mask, 'mask_file')
        else:
            workflow.connect(datasource, 't1_mask', mask, 'mask_file')
        workflow.connect(mask, 'out_file', datasink,
                         'results.subid.@{}_preproc'.format(SEQUENCES[i + 1]))
    if bet_workflow is not None:
        workflow.connect(bet_workflow, 'bet.out_file', reg2T1, 'input_file')
        workflow.connect(bet_workflow, 't1_0_bet.out_file', reg2T1, 'ref_file')
    else:
        workflow.connect(datasource, 't1_bet', reg2T1, 'input_file')
        workflow.connect(datasource, 't1_0_bet', reg2T1, 'ref_file')

    if reference:
        for i, sess in enumerate(sessions):
            workflow.connect(regT12CT, 'regmat', fake_merge,
                             'in{}'.format(i + 1))
            workflow.connect(regT12CT, 'regmat', datasink,
                             'results.subid.{0}.@regT12CT_mat'.format(sess))
        workflow.connect(datasource, 'reference', regT12CT, 'ref_file')
        workflow.connect(datasource, 't1_0', regT12CT, 'input_file')
        workflow.connect(fake_merge, 'out', merge_ts_t1, 'in1')
        workflow.connect(datasource, 'reference', apply_ts_t1,
                         'reference_image')
    else:
        workflow.connect(datasource, 't1_0', apply_ts_t1, 'reference_image')

    workflow.connect(datasource, 't1', apply_ts_t1, 'input_image')

    workflow.connect(merge_ts_t1, 'out', apply_ts_t1, 'transforms')
    workflow.connect(reg2T1, 'regmat', merge_ts_t1, 'in{}'.format(if_0 + 1))
    workflow.connect(reg2T1, 'warp_file', merge_ts_t1, 'in{}'.format(if_0))

    workflow.connect(reg2T1, 'warp_file', datasink,
                     'results.subid.@reg2CT_warp')
    workflow.connect(reg2T1, 'regmat', datasink, 'results.subid.@reg2CT_mat')
    workflow.connect(reg2T1, 'reg_file', datasink, 'results.subid.@T12T1_ref')
    workflow.connect(apply_ts_t1, 'output_image', datasink,
                     'results.subid.@T1_reg2CT')

    if bet_workflow is not None:
        workflow = datasink_base(datasink, datasource, workflow, sessions,
                                 reference)
    else:
        workflow = datasink_base(datasink,
                                 datasource,
                                 workflow,
                                 sessions,
                                 reference,
                                 extra_nodes=['t1_bet'])

    return workflow
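A sketch of invoking the longitudinal registration on top of a brain-extraction workflow; paths are placeholders, and the datasource must expose the SEQUENCES fields used above:

# Hypothetical call chaining brain extraction and registration.
bet_wf = brain_extraction('sub-001', datasource, sessions,
                          '/data/results', '/data/cache', reference=True)
reg_wf = longitudinal_registration('sub-001', datasource, sessions, True,
                                   '/data/results', '/data/cache',
                                   bet_workflow=bet_wf)
reg_wf.run()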
Example #9
def tumor_segmentation(datasource,
                       sub_id,
                       sessions,
                       gtv_model,
                       tumor_model,
                       result_dir,
                       nipype_cache,
                       reference,
                       reg_workflow=None,
                       bet_workflow=None):

    if reg_workflow is None:
        if reference:
            iterfields_t1 = ['in1', 'in2', 'in3']
            if_0 = 2
        else:
            iterfields_t1 = ['in1', 'in2']
            if_0 = 1
        merge_ts_t1 = nipype.MapNode(interface=Merge(len(iterfields_t1)),
                                     iterfield=iterfields_t1,
                                     name='merge_t1')
        merge_ts_t1.inputs.ravel_inputs = True

    apply_ts_gtv = nipype.MapNode(interface=ApplyTransforms(),
                                  iterfield=['input_image', 'transforms'],
                                  name='apply_ts_gtv')
    apply_ts_gtv.inputs.interpolation = 'NearestNeighbor'
    apply_ts_tumor = nipype.MapNode(interface=ApplyTransforms(),
                                    iterfield=['input_image', 'transforms'],
                                    name='apply_ts_tumor')
    apply_ts_tumor.inputs.interpolation = 'NearestNeighbor'
    apply_ts_tumor1 = nipype.MapNode(interface=ApplyTransforms(),
                                     iterfield=['input_image', 'transforms'],
                                     name='apply_ts_tumor1')
    apply_ts_tumor1.inputs.interpolation = 'NearestNeighbor'

    if reference:
        merge_ts_t1ref = nipype.MapNode(interface=Merge(len(iterfields_t1)),
                                        iterfield=['in1', 'in2'],
                                        name='merge_t1ref')
        merge_ts_t1ref.inputs.ravel_inputs = True
        apply_ts_gtv_t1ref = nipype.MapNode(
            interface=ApplyTransforms(),
            iterfield=['input_image', 'transforms'],
            name='apply_ts_gtv_t1ref')
        apply_ts_gtv_t1ref.inputs.interpolation = 'NearestNeighbor'
        apply_ts_tumor_t1ref = nipype.MapNode(
            interface=ApplyTransforms(),
            iterfield=['input_image', 'transforms'],
            name='apply_ts_tumor_t1ref')
        apply_ts_tumor_t1ref.inputs.interpolation = 'NearestNeighbor'
        apply_ts_tumor1_t1ref = nipype.MapNode(
            interface=ApplyTransforms(),
            iterfield=['input_image', 'transforms'],
            name='apply_ts_tumor1_t1ref')
        apply_ts_tumor1_t1ref.inputs.interpolation = 'NearestNeighbor'
        outname = 'reg2CT'
    else:
        outname = 'reg2T1ref'

    tumor_seg = nipype.MapNode(interface=HDGlioPredict(),
                               iterfield=['t1', 'ct1', 't2', 'flair'],
                               name='tumor_segmentation')
    tumor_seg.inputs.out_file = 'segmentation'

    mi = nipype.MapNode(Merge(2), iterfield=['in1', 'in2'], name='merge')

    gtv_seg_data_prep = nipype.MapNode(interface=NNUnetPreparation(),
                                       iterfield=['images'],
                                       name='gtv_seg_data_prep')

    gtv_seg = nipype.MapNode(interface=NNUnetInference(),
                             iterfield=['input_folder'],
                             name='gtv_segmentation')
    gtv_seg.inputs.model_folder = gtv_model

    tumor_seg_2mods = nipype.MapNode(interface=NNUnetInference(),
                                     iterfield=['input_folder'],
                                     name='tumor_seg_2mods')
    tumor_seg_2mods.inputs.model_folder = tumor_model

    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")

    substitutions = [('/segmentation.nii.gz', '/Tumor_predicted.nii.gz')]
    substitutions += [('subid', sub_id)]
    for i, session in enumerate(sessions):
        substitutions += [('_tumor_segmentation{}/'.format(i), session + '/')]
        substitutions += [('_gtv_segmentation{}/subject1'.format(i),
                           session + '/GTV_predicted')]
        substitutions += [('_tumor_seg_2mods{}/subject1'.format(i),
                           session + '/Tumor_predicted_2modalities')]
        substitutions += [
            ('_apply_ts_gtv{}/subject1_trans.nii.gz'.format(i),
             session + '/' + 'GTV_predicted_{}.nii.gz'.format(outname))
        ]
        substitutions += [
            ('_apply_ts_tumor1{}/subject1_trans.nii.gz'.format(i), session +
             '/' + 'Tumor_predicted_2modalities_{}.nii.gz'.format(outname))
        ]
        substitutions += [
            ('_apply_ts_tumor{}/segmentation_trans.nii.gz'.format(i),
             session + '/' + 'Tumor_predicted_{}.nii.gz'.format(outname))
        ]

        substitutions += [
            ('_apply_ts_gtv_t1ref{}/subject1_trans.nii.gz'.format(i),
             session + '/' + 'GTV_predicted_reg2T1ref.nii.gz')
        ]
        substitutions += [
            ('_apply_ts_tumor1_t1ref{}/subject1_trans.nii.gz'.format(i),
             session + '/' + 'Tumor_predicted_2modalities_reg2T1ref.nii.gz')
        ]
        substitutions += [
            ('_apply_ts_tumor_t1ref{}/segmentation_trans.nii.gz'.format(i),
             session + '/' + 'Tumor_predicted_reg2T1ref.nii.gz')
        ]
    datasink.inputs.substitutions = substitutions

    # Create Workflow
    workflow = nipype.Workflow('tumor_segmentation_workflow',
                               base_dir=nipype_cache)

    # Connect from registration workflow, if provided
    if reg_workflow is not None:
        workflow.connect(reg_workflow, 'masking0.out_file', mi, 'in1')
        workflow.connect(reg_workflow, 'masking2.out_file', mi, 'in2')
        workflow.connect(reg_workflow, 'masking0.out_file', tumor_seg, 'ct1')
        workflow.connect(reg_workflow, 'masking1.out_file', tumor_seg, 't2')
        workflow.connect(reg_workflow, 'masking2.out_file', tumor_seg, 'flair')
        workflow.connect(bet_workflow, 'bet.out_file', tumor_seg, 't1')
        workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_tumor,
                         'transforms')
        workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_gtv,
                         'transforms')
        workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_tumor1,
                         'transforms')
        if reference:
            workflow.connect(reg_workflow, 'reg2T1.regmat', merge_ts_t1ref,
                             'in2')
            workflow.connect(reg_workflow, 'reg2T1.warp_file', merge_ts_t1ref,
                             'in1')
    else:
        #         for i in range(len(sessions)):
        #             workflow.connect(datasource, 't12ct_mat', fake_merge,
        #                              'in{}'.format(i+1))
        workflow.connect(datasource, 'reg2t1_mat', merge_ts_t1,
                         'in{}'.format(if_0 + 1))
        workflow.connect(datasource, 'reg2t1_warp', merge_ts_t1,
                         'in{}'.format(if_0))
        if reference:
            workflow.connect(datasource, 't12ct_mat', merge_ts_t1, 'in1')
            workflow.connect(datasource, 'reg2t1_mat', merge_ts_t1ref, 'in1')
            workflow.connect(datasource, 'reg2t1_warp', merge_ts_t1ref, 'in2')
        workflow.connect(merge_ts_t1, 'out', apply_ts_tumor, 'transforms')
        workflow.connect(merge_ts_t1, 'out', apply_ts_gtv, 'transforms')
        workflow.connect(merge_ts_t1, 'out', apply_ts_tumor1, 'transforms')
        workflow.connect(datasource, 'ct1_preproc', mi, 'in1')
        workflow.connect(datasource, 'flair_preproc', mi, 'in2')
        workflow.connect(datasource, 'ct1_preproc', tumor_seg, 'ct1')
        workflow.connect(datasource, 't2_preproc', tumor_seg, 't2')
        workflow.connect(datasource, 'flair_preproc', tumor_seg, 'flair')
        workflow.connect(datasource, 't1_preproc', tumor_seg, 't1')

    # Connect from datasource
    if reference:
        workflow.connect(merge_ts_t1ref, 'out', apply_ts_tumor_t1ref,
                         'transforms')
        workflow.connect(merge_ts_t1ref, 'out', apply_ts_gtv_t1ref,
                         'transforms')
        workflow.connect(merge_ts_t1ref, 'out', apply_ts_tumor1_t1ref,
                         'transforms')
        workflow.connect(datasource, 'reference', apply_ts_gtv,
                         'reference_image')
        workflow.connect(datasource, 'reference', apply_ts_tumor1,
                         'reference_image')
        workflow.connect(datasource, 'reference', apply_ts_tumor,
                         'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_gtv_t1ref,
                         'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_tumor1_t1ref,
                         'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_tumor_t1ref,
                         'reference_image')
    else:
        workflow.connect(datasource, 't1_0', apply_ts_gtv, 'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_tumor1,
                         'reference_image')
        workflow.connect(datasource, 't1_0', apply_ts_tumor, 'reference_image')

    # Connect other nodes

    # Nodes to prepare the data before nnUNet inference
    workflow.connect(mi, 'out', gtv_seg_data_prep, 'images')

    # Nodes to segment GTV and tumor using nnUNet
    workflow.connect(gtv_seg_data_prep, 'output_folder', gtv_seg,
                     'input_folder')
    workflow.connect(gtv_seg_data_prep, 'output_folder', tumor_seg_2mods,
                     'input_folder')

    # Nodes to normalize segmentations to CT space
    workflow.connect(gtv_seg, 'output_file', apply_ts_gtv, 'input_image')
    workflow.connect(tumor_seg_2mods, 'output_file', apply_ts_tumor1,
                     'input_image')
    workflow.connect(tumor_seg, 'out_file', apply_ts_tumor, 'input_image')

    # Connect datasink nodes to save outputs
    workflow.connect(tumor_seg, 'out_file', datasink,
                     'results.subid.@tumor_seg')
    workflow.connect(gtv_seg, 'output_file', datasink,
                     'results.subid.@gtv_seg')
    workflow.connect(tumor_seg_2mods, 'output_file', datasink,
                     'results.subid.@tumor_seg_2mods')
    workflow.connect(apply_ts_gtv, 'output_image', datasink,
                     'results.subid.@gtv_reg2CT')
    workflow.connect(apply_ts_tumor, 'output_image', datasink,
                     'results.subid.@tumor_reg2CT')
    workflow.connect(apply_ts_tumor1, 'output_image', datasink,
                     'results.subid.@tumor1_reg2CT')
    if reference:
        workflow.connect(tumor_seg_2mods, 'output_file', apply_ts_tumor1_t1ref,
                         'input_image')
        workflow.connect(tumor_seg, 'out_file', apply_ts_tumor_t1ref,
                         'input_image')
        workflow.connect(gtv_seg, 'output_file', apply_ts_gtv_t1ref,
                         'input_image')
        workflow.connect(apply_ts_gtv_t1ref, 'output_image', datasink,
                         'results.subid.@gtv_reg2T1ref')
        workflow.connect(apply_ts_tumor_t1ref, 'output_image', datasink,
                         'results.subid.@tumor_reg2T1ref')
        workflow.connect(apply_ts_tumor1_t1ref, 'output_image', datasink,
                         'results.subid.@tumor1_reg2T1ref')

    workflow = datasink_base(datasink, datasource, workflow, sessions,
                             reference)

    return workflow
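tumor_segmentation can reuse nodes from the registration and brain-extraction workflows through the dotted 'node.port' notation seen above. A chaining sketch; model folders and paths are placeholders, and reg_wf/bet_wf come from Examples #8 and #2:

# Hypothetical call reusing the registration workflow from Example #8.
seg_wf = tumor_segmentation(datasource, 'sub-001', sessions,
                            '/models/gtv', '/models/tumor',
                            '/data/results', '/data/cache', True,
                            reg_workflow=reg_wf, bet_workflow=bet_wf)
seg_wf.run()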
Example #10
def single_tp_registration(sub_id,
                           datasource,
                           session,
                           reference,
                           result_dir,
                           nipype_cache,
                           bet_workflow=None):
    """
    This is a workflow to register multi-modalities MR (T2, T1KM, FLAIR) to their 
    reference T1 image, in one single time-point cohort. In particular, for each 
    subject, this workflow will register the MR images in the provided time-point (tp)
    to the corresponding T1, then it will register the T1 image to the BPLCT (if present)'
    '. At the end, all the MR images will be saved both in T1 space and in CT space.
    """
    session = session[0]
    if reference:
        regT12CT = nipype.MapNode(interface=AntsRegSyn(),
                                  iterfield=['input_file'],
                                  name='regT12CT')
        regT12CT.inputs.transformation = 'r'
        regT12CT.inputs.num_dimensions = 3
        regT12CT.inputs.num_threads = 4

    reg_nodes = []
    for i in range(3):
        reg = nipype.MapNode(interface=AntsRegSyn(),
                             iterfield=['input_file', 'ref_file'],
                             name='ants_reg{}'.format(i))
        reg.inputs.transformation = 'r'
        reg.inputs.num_dimensions = 3
        reg.inputs.num_threads = 4
        reg.inputs.interpolation = 'BSpline'
        reg_nodes.append(reg)

    apply_mask_nodes = []
    for i in range(3):
        masking = nipype.MapNode(interface=ApplyMask(),
                                 iterfield=['in_file', 'mask_file'],
                                 name='masking{}'.format(i))
        apply_mask_nodes.append(masking)

    if reference:
        apply_ts_nodes = []
        for i in range(3):
            apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                      iterfield=['input_image', 'transforms'],
                                      name='apply_ts{}'.format(i))
            apply_ts_nodes.append(apply_ts)

        apply_ts_t1 = nipype.MapNode(interface=ApplyTransforms(),
                                     iterfield=['input_image', 'transforms'],
                                     name='apply_ts_t1')

        merge_nodes = []
        for i in range(3):
            merge = nipype.MapNode(interface=Merge(2),
                                   iterfield=['in1', 'in2'],
                                   name='merge{}'.format(i))
            merge.inputs.ravel_inputs = True
            merge_nodes.append(merge)

    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")

    substitutions = [('subid', sub_id)]
    substitutions += [('session', session)]
    substitutions += [('_regT12CT0/antsreg0GenericAffine.mat',
                       '/reg2T1_ref.mat')]
    substitutions += [('_masking00/antsregWarped_masked.nii.gz',
                       session + '/' + 'CT1_preproc.nii.gz')]
    substitutions += [('_regT12CT/antsreg0GenericAffine.mat',
                       '/regT1_ref2CT.mat')]
    substitutions += [('_masking10/antsregWarped_masked.nii.gz',
                       session + '/' + 'T2_preproc.nii.gz')]
    substitutions += [('_masking20/antsregWarped_masked.nii.gz',
                       session + '/' + 'FLAIR_preproc.nii.gz')]
    substitutions += [('_apply_ts00/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'CT1_reg2CT.nii.gz')]
    substitutions += [('_apply_ts10/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'T2_reg2CT.nii.gz')]
    substitutions += [('_apply_ts20/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'FLAIR_reg2CT.nii.gz')]
    substitutions += [('_apply_ts_t10/T1_preproc_trans.nii.gz',
                       session + '/' + 'T1_reg2CT.nii.gz')]

    datasink.inputs.substitutions = substitutions
    # Create Workflow
    workflow = nipype.Workflow('registration_workflow', base_dir=nipype_cache)

    for i, reg in enumerate(reg_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], reg, 'input_file')
        workflow.connect(datasource, SEQUENCES[0], reg, 'ref_file')
    # bring every MR in CT space
    if reference:
        for i, node in enumerate(merge_nodes):
            workflow.connect(reg_nodes[i], 'regmat', node, 'in2')
            workflow.connect(regT12CT, 'regmat', node, 'in1')
        for i, node in enumerate(apply_ts_nodes):
            workflow.connect(apply_mask_nodes[i], 'out_file', node,
                             'input_image')
            workflow.connect(datasource, 'reference', node, 'reference_image')
            workflow.connect(regT12CT, 'regmat', node, 'transforms')
            workflow.connect(
                node, 'output_image', datasink,
                'results.subid.@{}_reg2CT'.format(SEQUENCES[i + 1]))

        workflow.connect(regT12CT, 'regmat', datasink,
                         'results.subid.{0}.@regT12CT_mat'.format(session))
        workflow.connect(datasource, 'reference', regT12CT, 'ref_file')
        workflow.connect(datasource, 't1', regT12CT, 'input_file')

        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_file', apply_ts_t1,
                             'input_image')
        else:
            workflow.connect(datasource, 't1_bet', apply_ts_t1, 'input_image')
        workflow.connect(datasource, 'reference', apply_ts_t1,
                         'reference_image')
        workflow.connect(apply_ts_t1, 'output_image', datasink,
                         'results.subid.@T1_reg2CT')
        workflow.connect(regT12CT, 'regmat', apply_ts_t1, 'transforms')

    for i, mask in enumerate(apply_mask_nodes):
        workflow.connect(reg_nodes[i], 'reg_file', mask, 'in_file')
        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_mask', mask, 'mask_file')
        else:
            workflow.connect(datasource, 't1_mask', mask, 'mask_file')
        workflow.connect(mask, 'out_file', datasink,
                         'results.subid.@{}_preproc'.format(SEQUENCES[i + 1]))

    if bet_workflow is not None:
        workflow = datasink_base(datasink,
                                 datasource,
                                 workflow, [session],
                                 reference,
                                 t10=False)
    else:
        workflow = datasink_base(datasink,
                                 datasource,
                                 workflow, [session],
                                 reference,
                                 extra_nodes=['t1_bet'],
                                 t10=False)

    return workflow
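A minimal invocation sketch; note that session is passed as a one-element list, and paths are placeholders:

# Hypothetical single time-point call.
wf = single_tp_registration('sub-001', datasource, ['session1'], True,
                            '/data/results', '/data/cache')
wf.run()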
Example #11
    def convertion_workflow(self):

        self.datasource()

        datasource = self.data_source
        ref_sequence = self.ref_sequence
        t10 = self.t10
        sub_id = self.sub_id
        result_dir = self.result_dir
        nipype_cache = self.nipype_cache
        sequences = self.sequences
        reference = self.reference
        rt_data = self.rt
        if rt_data is not None:
            rt_session = rt_data['session']

        workflow = nipype.Workflow('data_convertion_workflow',
                                   base_dir=nipype_cache)

        datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                               "datasink")
        substitutions = [('subid', sub_id)]
        substitutions += [('results/', '{}/'.format(self.workflow_name))]
        if isinstance(ref_sequence, list):
            to_convert = sequences + ref_sequence
        else:
            to_convert = sequences + [ref_sequence]
        if rt_data is not None:
            rt_sequences = [
                x for x in rt_data.keys()
                if rt_data[x] and x != 'session' and x != 'labels'
            ]
            workflow.connect(datasource, 'rt', datasink, 'results.subid.@rt')
            to_convert = to_convert + rt_sequences
        else:
            rt_sequences = []

        if reference:
            to_convert.append('reference')
        if t10:
            to_convert.append('t1_0')
        if self.ct_sessions:
            to_convert.append('ct')

        for seq in to_convert:
            if seq not in rt_sequences:
                dc = nipype.MapNode(interface=DicomCheck(),
                                    iterfield=['dicom_dir'],
                                    name='dc{}'.format(seq))
                workflow.connect(datasource, seq, dc, 'dicom_dir')
                converter = nipype.MapNode(
                    interface=Dcm2niix(),
                    iterfield=['source_dir', 'out_filename'],
                    name='converter{}'.format(seq))
                converter.inputs.compress = 'y'
                converter.inputs.philips_float = False
                if seq == 'reference' or seq == 'ct':
                    converter.inputs.merge_imgs = True
                else:
                    converter.inputs.merge_imgs = False
                check = nipype.MapNode(interface=ConversionCheck(),
                                       iterfield=['in_file', 'file_name'],
                                       name='check_conversion{}'.format(seq))

                workflow.connect(dc, 'outdir', converter, 'source_dir')
                workflow.connect(dc, 'scan_name', converter, 'out_filename')
                workflow.connect(dc, 'scan_name', check, 'file_name')
                workflow.connect(converter, 'converted_files', check,
                                 'in_file')
                if seq == 'reference':
                    workflow.connect(
                        check, 'out_file', datasink,
                        'results.subid.REF.@{}_converted'.format(seq))
                elif seq == 't1_0':
                    workflow.connect(
                        check, 'out_file', datasink,
                        'results.subid.T10.@{}_converted'.format(seq))
                else:
                    workflow.connect(check, 'out_file', datasink,
                                     'results.subid.@{}_converted'.format(seq))
                    for i, session in enumerate(self.session_names[seq]):
                        substitutions += [(('_converter{0}{1}/'.format(seq, i),
                                            session + '/'))]
            else:
                if seq != 'rtstruct':
                    if seq == 'rtct':
                        converter = nipype.MapNode(
                            interface=Dcm2niix(),
                            iterfield=['source_dir', 'out_filename'],
                            name='converter{}'.format(seq))
                        converter.inputs.compress = 'y'
                        converter.inputs.philips_float = False
                        converter.inputs.merge_imgs = True
                    else:
                        converter = nipype.MapNode(
                            interface=DoseConverter(),
                            iterfield=['input_dose', 'out_name'],
                            name='converter{}'.format(seq))
                    if seq == 'doses':
                        converter = nipype.MapNode(
                            interface=DoseConverter(),
                            iterfield=['input_dose'],
                            name='converter{}'.format(seq))
                        get_dose = nipype.MapNode(interface=GetRefRTDose(),
                                                  iterfield=['doses'],
                                                  name='get_doses')
                        workflow.connect(datasource, 'doses', get_dose,
                                         'doses')
                        workflow.connect(get_dose, 'dose_file', converter,
                                         'input_dose')
                        converter.inputs.out_name = 'Unused_RTDOSE.nii.gz'
                        workflow.connect(
                            converter, 'out_file', datasink,
                            'results.subid.@{}_converted'.format(seq))
                    else:
                        dc = nipype.MapNode(interface=DicomCheck(),
                                            iterfield=['dicom_dir'],
                                            name='dc{}'.format(seq))
                        workflow.connect(datasource, seq, dc, 'dicom_dir')
                        if seq == 'rtct':
                            check = nipype.MapNode(
                                interface=ConversionCheck(),
                                iterfield=['in_file', 'file_name'],
                                name='check_conversion{}'.format(seq))

                            workflow.connect(dc, 'outdir', converter,
                                             'source_dir')
                            workflow.connect(dc, 'scan_name', converter,
                                             'out_filename')
                            workflow.connect(dc, 'scan_name', check,
                                             'file_name')
                            workflow.connect(converter, 'converted_files',
                                             check, 'in_file')
                            workflow.connect(
                                check, 'out_file', datasink,
                                'results.subid.@{}_converted'.format(seq))
                        else:
                            workflow.connect(dc, 'dose_file', converter,
                                             'input_dose')
                            workflow.connect(dc, 'scan_name', converter,
                                             'out_name')
                            workflow.connect(
                                converter, 'out_file', datasink,
                                'results.subid.@{}_converted'.format(seq))
                else:
                    dc = nipype.MapNode(interface=DicomCheck(),
                                        iterfield=['dicom_dir'],
                                        name='dc{}'.format(seq))
                    workflow.connect(datasource, seq, dc, 'dicom_dir')
                    workflow.connect(dc, 'outdir', datasink,
                                     'results.subid.@rtstruct')
                    for i, session in enumerate(rt_session):
                        substitutions += [
                            (('_dc{0}{1}/checked_dicoms'.format(seq, i),
                              session + '/RTSTRUCT_used'))
                        ]
                for i, session in enumerate(rt_session):
                    substitutions += [(('_converter{0}{1}/'.format(seq, i),
                                        session + '/'))]

        substitutions += [('_converterreference0/', '')]
        substitutions += [('_convertert1_00/', '')]

        datasink.inputs.substitutions = substitutions

        return workflow
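DataSink applies each (old, new) substitution pair, in order, to every output path it writes. A standalone sketch of the mechanism with illustrative values:

# Minimal illustration of DataSink path substitutions.
import nipype
ds = nipype.Node(nipype.DataSink(base_directory='/data/out'), name='datasink')
# 'results/subid/...' is rewritten to 'conversion/sub-001/...'.
ds.inputs.substitutions = [('subid', 'sub-001'), ('results/', 'conversion/')]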
Example #12
    def sorting_workflow(self,
                         subject_name_position=-3,
                         renaming=False,
                         mr_classification=True,
                         checkpoints=None,
                         sub_checkpoints=None):

        nipype_cache = os.path.join(self.nipype_cache, 'data_sorting')
        result_dir = self.result_dir

        workflow = nipype.Workflow('sorting_workflow', base_dir=nipype_cache)
        datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                               "datasink")

        #         prep = nipype.Node(interface=FolderPreparation(), name='prep')
        #         prep.inputs.input_dir = self.base_dir
        #         create_list = nipype.Node(interface=CreateSubjectsList(), name='cl')
        #         create_list.inputs.input_dir = self.base_dir
        file_check = nipype.Node(interface=FileCheck(), name='fc')
        file_check.inputs.input_dir = self.base_dir
        file_check.inputs.subject_name_position = subject_name_position
        file_check.inputs.renaming = renaming
        prep = nipype.MapNode(interface=FolderPreparation(),
                              name='prep',
                              iterfield=['input_list'])
        sort = nipype.MapNode(interface=FolderSorting(),
                              name='sort',
                              iterfield=['input_dir'])
        mr_rt_merge = nipype.MapNode(interface=Merge(2),
                                     name='mr_rt_merge',
                                     iterfield=['in1', 'in2'])
        mr_rt_merge.inputs.ravel_inputs = True
        merging = nipype.Node(interface=FolderMerge(), name='merge')
        if mr_classification:
            if checkpoints is None or sub_checkpoints is None:
                raise Exception('MRClass weights were not provided, MR image '
                                'classification cannot be performed!')
            mrclass = nipype.MapNode(interface=MRClass(),
                                     name='mrclass',
                                     iterfield=['mr_images'])
            mrclass.inputs.checkpoints = checkpoints
            mrclass.inputs.sub_checkpoints = sub_checkpoints
        else:
            mr_rt_merge.inputs.in1 = None
        rt_sorting = nipype.MapNode(interface=RTDataSorting(),
                                    name='rt_sorting',
                                    iterfield=['input_dir'])

        #         workflow.connect(create_list, 'file_list', file_check, 'input_file')
        workflow.connect(file_check, 'out_list', prep, 'input_list')
        workflow.connect(prep, 'out_folder', sort, 'input_dir')
        workflow.connect(sort, 'out_folder', rt_sorting, 'input_dir')
        if mr_classification:
            workflow.connect(sort, 'mr_images', mrclass, 'mr_images')
            workflow.connect(mrclass, 'out_folder', mr_rt_merge, 'in1')

            workflow.connect(rt_sorting, 'out_folder', mr_rt_merge, 'in2')
            workflow.connect(mr_rt_merge, 'out', merging, 'input_list')
            workflow.connect(merging, 'out_folder', datasink, '@rt_sorted')
        else:
            workflow.connect(rt_sorting, 'out_folder', datasink, '@rt_sorted')
            substitutions = [(r'_rt_sorting\d+/', '')]
            datasink.inputs.regexp_substitutions = substitutions

        return workflow
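A sketch of building and running the sorting workflow; checkpoint paths are placeholders and 'pipeline' stands in for an instance of the enclosing class:

# Hypothetical invocation with MRClass weights supplied.
wf = pipeline.sorting_workflow(renaming=True,
                               checkpoints='/weights/mrclass.pth',
                               sub_checkpoints='/weights/mrclass_sub.pth')
wf.run(plugin='Linear')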
Example #13
    def sorting_workflow(self,
                         bp_class_mr_cp,
                         mrclass_cp,
                         mrclass_sub_cp,
                         bp_class_ct_cp,
                         bp=['hnc', 'hncKM'],
                         subject_name_position=-3,
                         renaming=False,
                         mrrt_max_time_diff=15,
                         rert_max_time=42,
                         bp_class_ct_th=0.33,
                         bp_class_mr_th=0.5,
                         mr_classification=True):

        mrclass_bp = [x for x in bp if x in ['hnc', 'abd-pel']]
        if mr_classification and not mrclass_bp:
            print('MRClass will not run')
            mr_classification = False
        folder2merge = 4
        folder2merge_iterfields = ['in1', 'in2', 'in3', 'in4']
        #         else:
        #             mr_classiffication = True
        #             folder2merge = 3
        #             folder2merge_iterfields = ['in1', 'in2', 'in3']

        nipype_cache = os.path.join(self.nipype_cache, 'data_sorting')
        result_dir = self.result_dir

        workflow = nipype.Workflow('sorting_workflow', base_dir=nipype_cache)
        datasink = nipype.Node(interface=SinkSorting(), name='datasink')
        datasink.inputs.out_folder = result_dir

        file_check = nipype.Node(interface=FileCheck(), name='fc')
        file_check.inputs.input_dir = self.input_dir
        file_check.inputs.subject_name_position = subject_name_position
        file_check.inputs.renaming = renaming
        prep = nipype.MapNode(interface=FolderPreparation(),
                              name='prep',
                              iterfield=['input_list'])
        bp_class_ct = nipype.MapNode(interface=ImageClassification(),
                                     name='bpclass_ct',
                                     iterfield=['images2label'])
        bp_class_ct.inputs.checkpoints = bp_class_ct_cp
        bp_class_ct.inputs.body_part = bp
        bp_class_ct.inputs.network = 'bpclass'
        bp_class_ct.inputs.modality = 'CT'
        bp_class_ct.inputs.probability_th = bp_class_ct_th
        mr_rt_merge = nipype.MapNode(interface=Merge(folder2merge),
                                     name='mr_rt_merge',
                                     iterfield=folder2merge_iterfields)
        mr_rt_merge.inputs.ravel_inputs = True
        merging = nipype.Node(interface=FolderMerge(), name='merge')
        merging.inputs.mrrt_max_time_diff = mrrt_max_time_diff
        merging.inputs.rert_max_time = rert_max_time
        if mr_classification:
            if mrclass_cp is None or mrclass_sub_cp is None:
                raise Exception('MRClass weights were not provided, MR image '
                                'classification cannot be performed!')
            mrclass = nipype.MapNode(interface=ImageClassification(),
                                     name='mrclass',
                                     iterfield=['images2label'])
            mrclass.inputs.checkpoints = mrclass_cp
            mrclass.inputs.sub_checkpoints = mrclass_sub_cp
            mrclass.inputs.body_part = mrclass_bp
            mrclass.inputs.network = 'mrclass'
            mrclass.inputs.modality = 'MR'

        bp_class_mr = nipype.MapNode(interface=ImageClassification(),
                                     name='bpclass_mr',
                                     iterfield=['images2label'])
        bp_class_mr.inputs.checkpoints = bp_class_mr_cp
        bp_class_mr.inputs.body_part = mrclass_bp
        bp_class_mr.inputs.network = 'bpclass'
        bp_class_mr.inputs.modality = 'MR'
        bp_class_mr.inputs.probability_th = bp_class_mr_th
        #         else:
        #             mr_rt_merge.inputs.in3 = 'None'
        rt_sorting = nipype.MapNode(interface=RTDataSorting(),
                                    name='rt_sorting',
                                    iterfield=['input_dir'])

        pet_sorting = nipype.MapNode(interface=PETDataSorting(),
                                     name='pet_sorting',
                                     iterfield=['input_dir'])

        workflow.connect(file_check, 'out_list', prep, 'input_list')
        workflow.connect(prep, 'out_folder', rt_sorting, 'input_dir')
        workflow.connect(prep, 'out_folder', pet_sorting, 'input_dir')
        workflow.connect(prep, 'for_inference_ct', bp_class_ct, 'images2label')
        workflow.connect(prep, 'for_inference_mr', bp_class_mr, 'images2label')
        workflow.connect(bp_class_ct, 'output_dict', mr_rt_merge, 'in1')
        workflow.connect(rt_sorting, 'output_dict', mr_rt_merge, 'in2')
        workflow.connect(pet_sorting, 'output_dict', mr_rt_merge, 'in4')
        workflow.connect(mr_rt_merge, 'out', merging, 'input_list')
        workflow.connect(merging, 'out_folder', datasink, 'tosink')
        if mr_classification:
            workflow.connect(bp_class_mr, 'labeled_images', mrclass,
                             'images2label')
            workflow.connect(mrclass, 'output_dict', mr_rt_merge, 'in3')
        else:
            workflow.connect(bp_class_mr, 'output_dict', mr_rt_merge, 'in3')

        return workflow
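In this variant the four folders merged per subject are, in order: CT body-part labels (in1), sorted RT data (in2), MR classification output (in3), and sorted PET data (in4). A call sketch with placeholder checkpoint paths:

# Hypothetical invocation of the extended sorting workflow.
wf = pipeline.sorting_workflow(bp_class_mr_cp='/weights/bp_mr.pth',
                               mrclass_cp='/weights/mrclass.pth',
                               mrclass_sub_cp='/weights/mrclass_sub.pth',
                               bp_class_ct_cp='/weights/bp_ct.pth',
                               bp=['hnc'])
wf.run()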
Example #14
        datasource.inputs.field_template = dict(t1='%s/%s/T1.nii.gz', ct1='%s/%s/CT1.nii.gz',
                                                t2='%s/%s/T2.nii.gz', flair='%s/%s/FLAIR.nii.gz',
                                                reference='%s/%s/CT.nii.gz',
                                                t1_0='%s/%s/T1.nii.gz')
        datasource.inputs.template_args = dict(t1=[['sub_id', 'sessions']],
                                               ct1=[['sub_id', 'sessions']],
                                               t2=[['sub_id', 'sessions']],
                                               flair=[['sub_id', 'sessions']],
                                               reference=[['sub_id', 'ref_ct']],
                                               t1_0=[['sub_id', 'ref_t1']])
        datasource.inputs.sub_id = sub_id
        datasource.inputs.sessions = sessions
        datasource.inputs.ref_ct = 'REF'
        datasource.inputs.ref_t1 = 'T10'

        bet = nipype.MapNode(interface=HDBet(), iterfield=['input_file'], name='bet')
        bet.inputs.save_mask = 1
        bet.inputs.out_file = 'T1_bet'

        bet_t10 = nipype.Node(interface=HDBet(), name='t1_0_bet')
        bet_t10.inputs.save_mask = 1
        bet_t10.inputs.out_file = 'T1_0_bet'

        reg2T1 = nipype.MapNode(interface=AntsRegSyn(), iterfield=['input_file'], name='reg2T1')
        reg2T1.inputs.transformation = 's'
        reg2T1.inputs.num_dimensions = 3
        reg2T1.inputs.num_threads = 6

        regT12CT = nipype.MapNode(interface=AntsRegSyn(), iterfield=['input_file'], name='regT12CT')
        regT12CT.inputs.transformation = 'r'
        regT12CT.inputs.num_dimensions = 3