def workflow(self):
    """Build the radiomics feature-extraction workflow.

    For every MR-RT and OT session, pairs each registered scan with each
    predicted tumor mask (and each user-requested scan with each
    user-requested ROI) and attaches one ``FeatureExtraction`` node per
    (image, ROI) combination, sinking the resulting CSV files under
    ``results/<sub_id>/<session>/``.

    Returns:
        nipype.Workflow: the fully wired feature-extraction workflow.
    """
    images = self.images
    rois = self.rois
    datasource = self.data_source
    dict_sequences = self.dict_sequences
    nipype_cache = self.nipype_cache
    result_dir = self.result_dir
    sub_id = self.sub_id
    # NOTE(review): hardcoded absolute path to the pyradiomics parameter
    # file — should come from configuration; kept as-is for compatibility.
    param_file = '/home/fsforazz/git/core/resources/Params_MR.yaml'
    toextract = {**dict_sequences['MR-RT'], **dict_sequences['OT']}
    workflow = nipype.Workflow('features_extraction_workflow',
                               base_dir=nipype_cache)
    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    substitutions = [('subid', sub_id)]
    substitutions += [('results/', '{}/'.format(self.workflow_name))]

    def _add_extraction(key, image_name, roi_name):
        # One FeatureExtraction node per (image, ROI) pair; image and ROI
        # are pulled from the datasource by field name, the CSV output is
        # sunk under the session folder.
        features = nipype.Node(
            interface=FeatureExtraction(),
            name='features_extraction_{}{}'.format(image_name, roi_name))
        features.inputs.parameter_file = param_file
        workflow.connect(datasource, image_name, features, 'input_image')
        workflow.connect(datasource, roi_name, features, 'rois')
        workflow.connect(features, 'feature_files', datasink,
                         'results.subid.{0}.@csv_file_{1}{2}'.format(
                             key, image_name, roi_name))

    for key in toextract:
        session = toextract[key]
        if session['scans'] is None:
            continue
        scans = session['scans']
        # Registered scans are cross-combined with network-predicted masks.
        reg_scans = [x for x in scans if x.endswith('_reg')]
        segmented_masks = [x for x in scans if x in
                           ['GTVPredicted', 'TumorPredicted',
                            'GTVPredicted-2modalities']]
        # Explicitly requested images/ROIs are cross-combined separately.
        add_scans = [x for x in scans if x in images]
        add_masks = [x for x in scans if x in rois]
        for image in reg_scans:
            for roi in segmented_masks:
                _add_extraction(
                    key,
                    '{}_{}_reg'.format(key, image.split('_')[0]),
                    '{}_{}'.format(key, roi.split('.nii.gz')[0]))
        for image in add_scans:
            for roi in add_masks:
                _add_extraction(
                    key,
                    '{}_{}'.format(key, image),
                    '{}_{}'.format(key, roi.split('.nii.gz')[0]))

    datasink.inputs.substitutions = substitutions
    return workflow
def workflow(self):
    """Build the brain-extraction workflow.

    For every scan in the MR-RT and OT sessions: reorient to standard
    orientation, run HD-BET skull stripping and, for sequences listed in
    ``TON4``, apply N4 bias-field correction to the brain-extracted image.
    Results and brain masks are sunk under ``results/<sub_id>/<session>/``.

    Returns:
        nipype.Workflow: the fully wired brain-extraction workflow.
    """
    # self.datasource()
    datasource = self.data_source
    dict_sequences = self.dict_sequences
    nipype_cache = self.nipype_cache
    result_dir = self.result_dir
    sub_id = self.sub_id
    tobet = {**dict_sequences['MR-RT'], **dict_sequences['OT']}
    workflow = nipype.Workflow('brain_extraction_workflow',
                               base_dir=nipype_cache)
    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    substitutions = [('subid', sub_id)]
    substitutions += [('results/', '{}/'.format(self.workflow_name))]
    substitutions += [('_preproc_corrected.', '_preproc.')]
    datasink.inputs.substitutions = substitutions
    for key in tobet:
        files = []
        # if tobet[key]['ref'] is not None:
        #     files.append(tobet[key]['ref'])
        if tobet[key]['scans'] is not None:
            files = files + tobet[key]['scans']
        for el in files:
            # BUG FIX: the original used el.strip(self.extention), but
            # str.strip removes a *character set* from both ends, not a
            # suffix — a scan name beginning/ending with any character of
            # the extension (e.g. 'z', 'i', 'n', 'g', '.') would be
            # mangled. Remove the extension as an exact suffix instead.
            if el.endswith(self.extention):
                el = el[:-len(self.extention)]
            node_name = '{0}_{1}'.format(key, el)
            bet = nipype.Node(interface=HDBet(),
                              name='{}_bet'.format(node_name), serial=True)
            bet.inputs.save_mask = 1
            bet.inputs.out_file = '{}_preproc'.format(el)
            reorient = nipype.Node(interface=Reorient2Std(),
                                   name='{}_reorient'.format(node_name))
            if el in TON4:
                # Bias-field correction restricted to the brain mask.
                n4 = nipype.Node(interface=N4BiasFieldCorrection(),
                                 name='{}_n4'.format(node_name))
                workflow.connect(bet, 'out_file', n4, 'input_image')
                workflow.connect(bet, 'out_mask', n4, 'mask_image')
                workflow.connect(
                    n4, 'output_image', datasink,
                    'results.subid.{0}.@{1}_preproc'.format(key, el))
            else:
                workflow.connect(
                    bet, 'out_file', datasink,
                    'results.subid.{0}.@{1}_preproc'.format(key, el))
            workflow.connect(
                bet, 'out_mask', datasink,
                'results.subid.{0}.@{1}_preproc_mask'.format(key, el))
            # Graph edges, not execution order: reorient feeds BET.
            workflow.connect(reorient, 'out_file', bet, 'input_file')
            workflow.connect(datasource, node_name, reorient, 'in_file')
    return workflow
def workflow(self):
    """Build the RT-structure extraction workflow (MapNode variant).

    Converts DICOM RT structure sets to NIfTI masks via an intermediate
    MHA step, optionally keeping only the ROI selected by
    ``CheckRTStructures`` against the dose file. If no datasource is
    available, an empty workflow with the same name is returned.

    Returns:
        nipype.Workflow: the wired (or empty) extraction workflow.
    """
    self.datasource()
    datasource = self.data_source
    nipype_cache = self.nipype_cache
    result_dir = self.result_dir
    sub_id = self.sub_id
    regex = self.regex  # NOTE(review): unused in this method.
    roi_selection = self.roi_selection
    if datasource is not None:
        workflow = nipype.Workflow('rtstruct_extraction_workflow',
                                   base_dir=nipype_cache)
        datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                               "datasink")
        substitutions = [('subid', sub_id)]
        substitutions += [('results/', '{}/'.format(self.workflow_name))]
        # One conversion per session: RTSTRUCT DICOM -> MHA structures.
        ss_convert = nipype.MapNode(interface=RTStructureCoverter(),
                                    iterfield=['reference_ct', 'input_ss'],
                                    name='ss_convert')
        # MHA structures -> NIfTI masks.
        mha_convert = nipype.MapNode(interface=MHA2NIIConverter(),
                                     iterfield=['input_folder'],
                                     name='mha_convert')
        if roi_selection:
            # Keep only the ROI that matches the dose distribution.
            select = nipype.MapNode(interface=CheckRTStructures(),
                                    iterfield=['rois', 'dose_file'],
                                    name='select_gtv')
            workflow.connect(mha_convert, 'out_files', select, 'rois')
            workflow.connect(datasource, 'rt_dose', select, 'dose_file')
            workflow.connect(select, 'checked_roi', datasink,
                             'results.subid.@masks')
        else:
            workflow.connect(mha_convert, 'out_files', datasink,
                             'results.subid.@masks')
        # Rename the numbered MapNode output folders to session names.
        for i, session in enumerate(self.rt['session']):
            substitutions += [(('_select_gtv{}/'.format(i), session + '/'))]
            substitutions += [(('_voxelizer{}/'.format(i), session + '/'))]
            substitutions += [(('_mha_convert{}/'.format(i), session + '/'))]
        datasink.inputs.substitutions = substitutions
        workflow.connect(datasource, 'rtct_nifti', ss_convert, 'reference_ct')
        workflow.connect(datasource, 'rts_dcm', ss_convert, 'input_ss')
        workflow.connect(ss_convert, 'out_structures', mha_convert,
                         'input_folder')
        workflow = self.datasink(workflow, datasink)
    else:
        # No data found: return an empty workflow so callers can still run.
        workflow = nipype.Workflow('rtstruct_extraction_workflow',
                                   base_dir=nipype_cache)
    return workflow
def brain_extraction(sub_id, datasource, sessions, RESULT_DIR, NIPYPE_CACHE,
                     reference, t10=True):
    """Build a brain-extraction workflow over all sessions' T1 images.

    Runs HD-BET on every session's T1 (MapNode) and, when ``t10`` is
    true, also on the reference time-point image ``t1_0``. Outputs and
    masks are sunk under ``results/<sub_id>/``; the workflow is finished
    via ``datasink_base``.

    Returns:
        nipype.Workflow: the wired brain-extraction workflow.
    """
    # Skull-strip every session's T1 in one MapNode.
    t1_bet = nipype.MapNode(interface=HDBet(), iterfield=['input_file'],
                            name='bet')
    t1_bet.inputs.save_mask = 1
    t1_bet.inputs.out_file = 'T1_preproc'

    ref_bet = None
    if t10:
        # Single node for the reference time-point image.
        ref_bet = nipype.Node(interface=HDBet(), name='t1_0_bet')
        ref_bet.inputs.save_mask = 1
        ref_bet.inputs.out_file = 'T1_0_bet'

    sink = nipype.Node(nipype.DataSink(base_directory=RESULT_DIR), "datasink")
    # Map each numbered MapNode folder ('_bet0/', ...) onto its session.
    subs = [('subid', sub_id)]
    subs.extend(('_bet{}/'.format(idx), name + '/')
                for idx, name in enumerate(sessions))
    sink.inputs.substitutions = subs

    wf = nipype.Workflow('brain_extraction_workflow', base_dir=NIPYPE_CACHE)
    wf.connect(datasource, 't1', t1_bet, 'input_file')
    if t10:
        wf.connect(datasource, 't1_0', ref_bet, 'input_file')
        wf.connect(ref_bet, 'out_file', sink, 'results.subid.T10.@T1_ref_bet')
    wf.connect(t1_bet, 'out_file', sink, 'results.subid.@T1_preproc')
    wf.connect(t1_bet, 'out_mask', sink, 'results.subid.@T1_mask')

    return datasink_base(sink, datasource, wf, sessions, reference, t10=t10)
def workflow(self):
    """Build the lung-segmentation workflow.

    For every scan in the OT sessions: resample/preprocess the image
    (``LungSegmentationPreproc``), run the trained network
    (``LungSegmentationInference``) and sink the segmented lungs under
    ``results/<sub_id>/<session>/``.

    Returns:
        nipype.Workflow: the fully wired lung-segmentation workflow.
    """
    datasource = self.data_source
    dict_sequences = self.dict_sequences
    nipype_cache = self.nipype_cache
    result_dir = self.result_dir
    sub_id = self.sub_id
    toseg = {**dict_sequences['OT']}
    workflow = nipype.Workflow('lung_segmentation_workflow',
                               base_dir=nipype_cache)
    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    substitutions = [('subid', sub_id)]
    substitutions += [('results/', '{}/'.format(self.workflow_name))]
    substitutions += [('_preproc_corrected.', '_preproc.')]
    datasink.inputs.substitutions = substitutions
    for key in toseg:
        files = []
        # if tobet[key]['ref'] is not None:
        #     files.append(tobet[key]['ref'])
        if toseg[key]['scans'] is not None:
            files = files + toseg[key]['scans']
        for el in files:
            # BUG FIX: the original used el.strip(self.extention);
            # str.strip removes a *character set* from both ends, not a
            # suffix, so scan names whose edge characters occur in the
            # extension string would be mangled. Remove the extension as
            # an exact suffix instead.
            if el.endswith(self.extention):
                el = el[:-len(self.extention)]
            node_name = '{0}_{1}'.format(key, el)
            preproc = nipype.Node(interface=LungSegmentationPreproc(),
                                  name='{}_ls_preproc'.format(node_name))
            preproc.inputs.new_spacing = self.new_spacing
            lung_seg = nipype.Node(interface=LungSegmentationInference(),
                                   name='{}_ls'.format(node_name))
            lung_seg.inputs.weights = self.network_weights
            workflow.connect(datasource, node_name, preproc, 'in_file')
            workflow.connect(preproc, 'tensor', lung_seg, 'tensor')
            workflow.connect(preproc, 'image_info', lung_seg, 'image_info')
            workflow.connect(
                lung_seg, 'segmented_lungs', datasink,
                'results.subid.{0}.@{1}_segmented_lungs'.format(key, el))
    return workflow
# Node setup for the temporal-analysis preprocessing workflow.
# NOTE(review): this is an excerpt of a larger function — `contrast`,
# `sub`, `ref_tp`, `sessions`, `datasource`, `RESULT_DIR`, `CACHE_DIR`
# and `sub_name` are defined outside the visible code.
# Crop field of view for every image to be registered (MapNode) ...
rf_1 = nipype.MapNode(interface=fsl.RobustFOV(), iterfield=['in_file'],
                      name='rf_1')
# ... and once for the reference image.
rf_ref = nipype.Node(interface=fsl.RobustFOV(), name='rf_ref')
# HD-BET skull stripping for the moving images.
bet_1 = nipype.MapNode(interface=HDBet(), iterfield=['input_file'],
                       name='bet_1')
bet_1.inputs.save_mask = 1
bet_1.inputs.out_file = '{}_bet'.format(contrast)
# HD-BET for the reference image.
# NOTE(review): bet_ref is configured but not connected in this excerpt —
# presumably wired to rf_ref further down; verify in the full function.
bet_ref = nipype.Node(interface=HDBet(), name='bet_ref')
bet_ref.inputs.save_mask = 1
bet_ref.inputs.out_file = '{}_bet'.format(contrast)
datasink = nipype.Node(nipype.DataSink(base_directory=RESULT_DIR),
                       "datasink")
# Placeholder -> concrete-name mapping for the sunk paths.
substitutions = [('contrast', contrast), ('sub', sub.split('/')[-1]),
                 ('session', ref_tp + '_reference_tp')]
for i, session in enumerate(sessions):
    substitutions += [('_bet_1{}/'.format(i), session + '/')]
datasink.inputs.substitutions = substitutions
workflow = nipype.Workflow('temporal_analysis_preproc_workflow',
                           base_dir=os.path.join(
                               CACHE_DIR, sub_name + '_' + contrast))
workflow.connect(datasource, 'reference', rf_ref, 'in_file')
workflow.connect(datasource, 'to_reg', rf_1, 'in_file')
workflow.connect(rf_1, 'out_roi', bet_1, 'input_file')
# Datasource configuration plus rigid-registration workflow and execution.
# NOTE(review): excerpt of a larger function — `contrast`, `sub`,
# `sessions`, `ref_tp`, `result_dir` and `cache_dir` come from outside
# the visible code.
# Path templates: reference is '<sub>/<ref_tp>/CT.nii.gz', moving images
# are '<sub>/<session>/<contrast>.nii.gz'.
datasource.inputs.field_template = dict(reference='%s/%s/%sCT.nii.gz',
                                        to_reg='%s/%s/%s.nii.gz')
datasource.inputs.template_args = dict(
    to_reg=[['sub_id', 'sessions', 'contrasts']],
    reference=[['sub_id', 'ref_tp', '']])
datasource.inputs.raise_on_empty = False
datasource.inputs.contrasts = contrast
datasource.inputs.sub_id = sub
datasource.inputs.sessions = sessions
datasource.inputs.ref_tp = ref_tp
# Rigid ('r') 3D ANTs registration of every session image to the reference.
reg = nipype.MapNode(interface=AntsRegSyn(), iterfield=['input_file'],
                     name='ants_reg')
reg.inputs.transformation = 'r'
reg.inputs.num_dimensions = 3
reg.inputs.num_threads = 4
datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                       "datasink")
substitutions = [('contrast', contrast), ('sub', sub)]
# Map numbered MapNode folders onto session names.
for i, session in enumerate(sessions):
    substitutions += [('_ants_reg{}/'.format(i), session + '/')]
datasink.inputs.substitutions = substitutions
workflow = nipype.Workflow('registration_workflow', base_dir=cache_dir)
workflow.connect(datasource, 'reference', reg, 'ref_file')
workflow.connect(datasource, 'to_reg', reg, 'input_file')
workflow.connect(reg, 'reg_file', datasink,
                 'registration.contrast.sub.@reg_image')
workflow.connect(reg, 'regmat', datasink,
                 'registration.contrast.sub.@affine_mat')
workflow.connect(datasource, 'reference', datasink,
                 'registration.contrast.sub.@reference')
# workflow.run()
# Run immediately with 4 parallel processes.
workflow.run('MultiProc', plugin_args={'n_procs': 4})
def workflow(self):
    """Build the multi-session registration workflow.

    Strategy visible in the code below:
      1. Pick a reference scan in the MR-RT session (first match from
         ``POSSIBLE_REF``) and, if an RTCT exists, rigidly register that
         MR-RT reference to the RTCT (``reg_mr2ct``).
      2. For every session, pick that session's reference scan, deformably
         register it to the MR-RT reference (``reg_mr_rt``) and, via
         composed transforms, into RTCT space.
      3. Rigidly register every remaining scan to its session reference
         (``reg``) and compose transforms to bring it into MR-RT and RTCT
         space with ``ApplyTransforms``.

    Returns:
        nipype.Workflow: the fully wired registration workflow.
    """
    datasource = self.data_source
    dict_sequences = self.dict_sequences
    nipype_cache = self.nipype_cache
    result_dir = self.result_dir
    sub_id = self.sub_id
    toreg = {**dict_sequences['MR-RT'], **dict_sequences['OT']}
    workflow = nipype.Workflow('registration_workflow',
                               base_dir=nipype_cache)
    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    substitutions = [('subid', sub_id)]
    substitutions += [('results/', '{}/'.format(self.workflow_name))]
    mr_rt_ref = None
    rtct = None
    if dict_sequences['MR-RT'] and self.normilize_mr_rt:
        # First MR-RT session provides the global reference image.
        ref_session = list(dict_sequences['MR-RT'].keys())[0]
        ref_scans = dict_sequences['MR-RT'][ref_session]['scans']
        # for/else/break ladder: stop at the first POSSIBLE_REF match.
        for pr in POSSIBLE_REF:
            for scan in ref_scans:
                if pr in scan.split('_')[0]:
                    mr_rt_ref = '{0}_{1}_preproc'.format(
                        ref_session, scan.split('_')[0])
                    mr_rt_ref_name = '{}_preproc'.format(
                        scan.split('_')[0])
                    break
            else:
                continue
            break
    if dict_sequences['RT'] and self.normilize_rtct:
        rt_session = list(dict_sequences['RT'].keys())[0]
        ct_name = dict_sequences['RT'][rt_session]['rtct']
        if ct_name is not None and mr_rt_ref is not None:
            # FIXME(review): the second format() argument (ct_name) is
            # silently ignored — only {0} exists in the template.
            rtct = '{0}_rtct'.format(rt_session, ct_name)
            # Rigid registration MR-RT reference -> RTCT.
            reg_mr2ct = nipype.Node(interface=AntsRegSyn(),
                                    name='{}_lin_reg'.format(rt_session))
            reg_mr2ct.inputs.transformation = 'r'
            reg_mr2ct.inputs.num_dimensions = 3
            reg_mr2ct.inputs.num_threads = 4
            reg_mr2ct.inputs.out_prefix = '{}_reg2RTCT'.format(
                mr_rt_ref_name)
            reg_mr2ct.inputs.interpolation = 'BSpline'
            workflow.connect(datasource, mr_rt_ref, reg_mr2ct, 'input_file')
            workflow.connect(datasource, rtct, reg_mr2ct, 'ref_file')
            workflow.connect(
                reg_mr2ct, 'regmat', datasink,
                'results.subid.{0}.@{1}_reg2RTCT_mat'.format(
                    ref_session, mr_rt_ref_name))
            workflow.connect(
                reg_mr2ct, 'reg_file', datasink,
                'results.subid.{0}.@{1}_reg2RTCT'.format(
                    ref_session, mr_rt_ref_name))
            # Rename ANTs default output names to the project convention.
            substitutions += [
                ('{}_reg2RTCTWarped.nii.gz'.format(mr_rt_ref_name),
                 '{}_reg2RTCT.nii.gz'.format(mr_rt_ref_name))]
            substitutions += [
                ('{}_reg2RTCT0GenericAffine.mat'.format(mr_rt_ref_name),
                 '{}_reg2RTCT_linear_mat.mat'.format(mr_rt_ref_name))]
    for key in toreg:
        session = toreg[key]
        if session['scans'] is not None:
            scans = session['scans']
            # Masks are never registered directly.
            scans = [x for x in scans if 'mask' not in x]
            ref = None
            # Same for/else/break ladder: pick this session's reference.
            for pr in POSSIBLE_REF:
                for scan in scans:
                    if pr in scan:
                        ref = '{0}_{1}_preproc'.format(
                            key, scan.split('_')[0])
                        # Assumes scans hold '<name>_preproc' entries —
                        # the chosen reference is removed from the list
                        # of scans still to be registered.
                        scans.remove('{}_preproc'.format(
                            scan.split('_')[0]))
                        ref_name = scan.split('_')[0]
                        # The reference itself is sunk unchanged, renamed
                        # from *_preproc to *_reg via substitutions.
                        workflow.connect(
                            datasource, ref, datasink,
                            'results.subid.{0}.@{1}_reg'.format(
                                key, ref_name))
                        substitutions += [
                            ('{}_preproc'.format(scan.split('_')[0]),
                             '{}_reg'.format(scan.split('_')[0]))]
                        break
                else:
                    continue
                break
            if ref is not None:
                if mr_rt_ref is not None and key != ref_session:
                    # Deformable ('s') registration of this session's
                    # reference to the MR-RT reference.
                    reg_mr_rt = nipype.Node(
                        interface=AntsRegSyn(),
                        name='{}_def_reg'.format(key))
                    reg_mr_rt.inputs.transformation = 's'
                    reg_mr_rt.inputs.num_dimensions = 3
                    reg_mr_rt.inputs.num_threads = 6
                    reg_mr_rt.inputs.out_prefix = '{}_reg2MR_RT'.format(
                        ref_name)
                    workflow.connect(datasource, ref, reg_mr_rt,
                                     'input_file')
                    workflow.connect(datasource, mr_rt_ref, reg_mr_rt,
                                     'ref_file')
                    workflow.connect(
                        reg_mr_rt, 'regmat', datasink,
                        'results.subid.{0}.@{1}_reg2MR_RT_linear_mat'.
                        format(key, ref_name))
                    workflow.connect(
                        reg_mr_rt, 'reg_file', datasink,
                        'results.subid.{0}.@{1}_reg2MR_RT'.format(
                            key, ref_name))
                    workflow.connect(
                        reg_mr_rt, 'warp_file', datasink,
                        'results.subid.{0}.@{1}_reg2MR_RT_warp'.format(
                            key, ref_name))
                    substitutions += [
                        ('{}_reg2MR_RT0GenericAffine.mat'.format(ref_name),
                         '{}_reg2MR_RT_linear_mat.mat'.format(ref_name))]
                    substitutions += [
                        ('{}_reg2MR_RT1Warp.nii.gz'.format(ref_name),
                         '{}_reg2MR_RT_warp.nii.gz'.format(ref_name))]
                    substitutions += [
                        ('{}_reg2MR_RTWarped.nii.gz'.format(ref_name),
                         '{}_reg2MR_RT.nii.gz'.format(ref_name))]
                    if rtct is not None and key != ref_session:
                        # Bring the session reference into RTCT space by
                        # composing mr2ct affine + mr_rt warp + affine.
                        apply_ts_rt_ref = nipype.Node(
                            interface=ApplyTransforms(),
                            name='{}_norm2RT'.format(ref_name))
                        apply_ts_rt_ref.inputs.output_image = (
                            '{}_reg2RTCT.nii.gz'.format(ref_name))
                        workflow.connect(datasource, ref, apply_ts_rt_ref,
                                         'input_image')
                        workflow.connect(datasource, rtct, apply_ts_rt_ref,
                                         'reference_image')
                        workflow.connect(
                            apply_ts_rt_ref, 'output_image', datasink,
                            'results.subid.{0}.@{1}_reg2RTCT'.format(
                                key, ref_name))
                        # Transform order (in1..in4, last applied first).
                        merge_rt_ref = nipype.Node(
                            interface=Merge(4),
                            name='{}_merge_rt'.format(ref_name))
                        merge_rt_ref.inputs.ravel_inputs = True
                        workflow.connect(reg_mr2ct, 'regmat',
                                         merge_rt_ref, 'in1')
                        workflow.connect(reg_mr_rt, 'regmat',
                                         merge_rt_ref, 'in3')
                        workflow.connect(reg_mr_rt, 'warp_file',
                                         merge_rt_ref, 'in2')
                        workflow.connect(merge_rt_ref, 'out',
                                         apply_ts_rt_ref, 'transforms')
                for el in scans:
                    # FIXME(review): str.strip removes a *character set*,
                    # not a suffix — scan names whose edge characters are
                    # in the extension string get mangled; consider exact
                    # suffix removal instead.
                    el = el.strip(self.extention)
                    el_name = el.split('_')[0]
                    node_name = '{0}_{1}'.format(key, el)
                    # Rigid registration of the scan to the session ref.
                    reg = nipype.Node(interface=AntsRegSyn(),
                                      name='{}_lin_reg'.format(node_name))
                    reg.inputs.transformation = 'r'
                    reg.inputs.num_dimensions = 3
                    reg.inputs.num_threads = 4
                    reg.inputs.interpolation = 'BSpline'
                    reg.inputs.out_prefix = '{}_reg'.format(el_name)
                    workflow.connect(datasource, node_name, reg,
                                     'input_file')
                    workflow.connect(datasource, ref, reg, 'ref_file')
                    workflow.connect(
                        reg, 'reg_file', datasink,
                        'results.subid.{0}.@{1}_reg'.format(key, el_name))
                    workflow.connect(
                        reg, 'regmat', datasink,
                        'results.subid.{0}.@{1}_regmat'.format(
                            key, el_name))
                    substitutions += [
                        ('{}_regWarped.nii.gz'.format(el_name),
                         '{}_reg.nii.gz'.format(el_name))]
                    substitutions += [
                        ('{}_reg0GenericAffine.mat'.format(el_name),
                         '{}_linear_regmat.mat'.format(el_name))]
                    if mr_rt_ref is not None and key != ref_session:
                        # Scan -> MR-RT space: compose warp + both affines.
                        merge = nipype.Node(
                            interface=Merge(3),
                            name='{}_merge_MR_RT'.format(node_name))
                        merge.inputs.ravel_inputs = True
                        workflow.connect(reg, 'regmat', merge, 'in3')
                        workflow.connect(reg_mr_rt, 'regmat', merge, 'in2')
                        workflow.connect(reg_mr_rt, 'warp_file', merge,
                                         'in1')
                        apply_ts = nipype.Node(
                            interface=ApplyTransforms(),
                            name='{}_norm2MR_RT'.format(node_name))
                        apply_ts.inputs.output_image = (
                            '{}_reg2MR_RT.nii.gz'.format(el_name))
                        workflow.connect(merge, 'out', apply_ts,
                                         'transforms')
                        workflow.connect(datasource, node_name, apply_ts,
                                         'input_image')
                        workflow.connect(datasource, mr_rt_ref, apply_ts,
                                         'reference_image')
                        workflow.connect(
                            apply_ts, 'output_image', datasink,
                            'results.subid.{0}.@{1}_reg2MR_RT'.format(
                                key, el_name))
                    if rtct is not None:
                        # Scan -> RTCT space.
                        apply_ts_rt = nipype.Node(
                            interface=ApplyTransforms(),
                            name='{}_norm2RT'.format(node_name))
                        apply_ts_rt.inputs.output_image = (
                            '{}_reg2RTCT.nii.gz'.format(el_name))
                        workflow.connect(datasource, node_name,
                                         apply_ts_rt, 'input_image')
                        workflow.connect(datasource, rtct, apply_ts_rt,
                                         'reference_image')
                        workflow.connect(
                            apply_ts_rt, 'output_image', datasink,
                            'results.subid.{0}.@{1}_reg2RTCT'.format(
                                key, el_name))
                        if key != ref_session:
                            # Non-reference session: four transforms
                            # (mr2ct affine, MR-RT warp+affine, scan
                            # affine).
                            merge_rt = nipype.Node(
                                interface=Merge(4),
                                name='{}_merge_rt'.format(node_name))
                            merge_rt.inputs.ravel_inputs = True
                            workflow.connect(reg_mr2ct, 'regmat',
                                             merge_rt, 'in1')
                            workflow.connect(reg, 'regmat', merge_rt,
                                             'in4')
                            workflow.connect(reg_mr_rt, 'regmat',
                                             merge_rt, 'in3')
                            workflow.connect(reg_mr_rt, 'warp_file',
                                             merge_rt, 'in2')
                            workflow.connect(merge_rt, 'out', apply_ts_rt,
                                             'transforms')
                        else:
                            # Reference session: only mr2ct and scan
                            # affines are needed.
                            merge_rt = nipype.Node(
                                interface=Merge(2),
                                name='{}_merge_rt'.format(node_name))
                            merge_rt.inputs.ravel_inputs = True
                            workflow.connect(reg_mr2ct, 'regmat',
                                             merge_rt, 'in1')
                            workflow.connect(reg, 'regmat', merge_rt,
                                             'in2')
                            workflow.connect(merge_rt, 'out', apply_ts_rt,
                                             'transforms')
    datasink.inputs.substitutions = substitutions
    return workflow
def workflow(self):
    """Build the RT-structure extraction workflow (per-session variant).

    For each RT session with both an RTCT and an RTSTRUCT: convert the
    structure set to MHA then NIfTI and, if a dose file is available and
    ``roi_selection`` is enabled, keep only the ROI selected by
    ``CheckRTStructures`` against the dose distribution.

    Returns:
        nipype.Workflow: the wired extraction workflow.
    """
    # self.datasource()
    datasource = self.data_source
    dict_sequences = self.dict_sequences
    nipype_cache = self.nipype_cache
    result_dir = self.result_dir
    sub_id = self.sub_id
    regex = self.regex  # NOTE(review): unused in this method.
    roi_selection = self.roi_selection
    workflow = nipype.Workflow('rtstruct_extraction_workflow',
                               base_dir=nipype_cache)
    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    substitutions = [('subid', sub_id)]
    substitutions += [('results/', '{}/'.format(self.workflow_name))]
    substitutions += [('_mha_convert/', '/')]
    rt_sessions = dict_sequences['RT']
    for key in rt_sessions:
        rt_files = rt_sessions[key]
        # Dose priority: physical > RBE > other; without any dose file
        # ROI selection is disabled for the remaining sessions.
        if rt_files['phy_dose'] is not None:
            dose_name = '{0}_phy_dose'.format(key)
        elif rt_files['rbe_dose'] is not None:
            dose_name = '{0}_rbe_dose'.format(key)
        elif rt_files['ot_dose'] is not None:
            dose_name = '{0}_ot_dose'.format(key)
        else:
            roi_selection = False
        if rt_files['rtct'] is not None and rt_files[
                'rtstruct'] is not None:
            # NOTE(review): node names 'ss_convert'/'mha_convert'/
            # 'select_gtv' are not keyed by session — with more than one
            # RT session the names would collide; confirm only a single
            # RT session is expected here.
            ss_convert = nipype.Node(interface=RTStructureCoverter(),
                                     name='ss_convert')
            mha_convert = nipype.Node(interface=MHA2NIIConverter(),
                                      name='mha_convert')
            if roi_selection:
                select = nipype.Node(interface=CheckRTStructures(),
                                     name='select_gtv')
                workflow.connect(mha_convert, 'out_files', select, 'rois')
                workflow.connect(datasource, dose_name, select,
                                 'dose_file')
                workflow.connect(select, 'checked_roi', datasink,
                                 'results.subid.{}.@masks'.format(key))
            else:
                workflow.connect(mha_convert, 'out_files', datasink,
                                 'results.subid.{}.@masks'.format(key))
            datasink.inputs.substitutions = substitutions
            workflow.connect(datasource, '{0}_rtct'.format(key),
                             ss_convert, 'reference_ct')
            workflow.connect(datasource, '{0}_rtstruct'.format(key),
                             ss_convert, 'input_ss')
            workflow.connect(ss_convert, 'out_structures', mha_convert,
                             'input_folder')
        else:
            print(
                'NO RTCT OR RTSTRUCT!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
    # NOTE(review): a large commented-out duplicate of the MapNode-based
    # implementation was removed here; see version control history.
    return workflow
def tumor_segmentation(datasource, sub_id, sessions, gtv_model, tumor_model, result_dir, nipype_cache, reference, reg_workflow=None, bet_workflow=None): if reg_workflow is None: if reference: iterfields_t1 = ['in1', 'in2', 'in3'] if_0 = 2 else: iterfields_t1 = ['in1', 'in2'] if_0 = 1 merge_ts_t1 = nipype.MapNode(interface=Merge(len(iterfields_t1)), iterfield=iterfields_t1, name='merge_t1') merge_ts_t1.inputs.ravel_inputs = True apply_ts_gtv = nipype.MapNode(interface=ApplyTransforms(), iterfield=['input_image', 'transforms'], name='apply_ts_gtv') apply_ts_gtv.inputs.interpolation = 'NearestNeighbor' apply_ts_tumor = nipype.MapNode(interface=ApplyTransforms(), iterfield=['input_image', 'transforms'], name='apply_ts_tumor') apply_ts_tumor.inputs.interpolation = 'NearestNeighbor' apply_ts_tumor1 = nipype.MapNode(interface=ApplyTransforms(), iterfield=['input_image', 'transforms'], name='apply_ts_tumor1') apply_ts_tumor1.inputs.interpolation = 'NearestNeighbor' if reference: merge_ts_t1ref = nipype.MapNode(interface=Merge(len(iterfields_t1)), iterfield=['in1', 'in2'], name='merge_t1ref') merge_ts_t1ref.inputs.ravel_inputs = True apply_ts_gtv_t1ref = nipype.MapNode( interface=ApplyTransforms(), iterfield=['input_image', 'transforms'], name='apply_ts_gtv_t1ref') apply_ts_gtv_t1ref.inputs.interpolation = 'NearestNeighbor' apply_ts_tumor_t1ref = nipype.MapNode( interface=ApplyTransforms(), iterfield=['input_image', 'transforms'], name='apply_ts_tumor_t1ref') apply_ts_tumor_t1ref.inputs.interpolation = 'NearestNeighbor' apply_ts_tumor1_t1ref = nipype.MapNode( interface=ApplyTransforms(), iterfield=['input_image', 'transforms'], name='apply_ts_tumor1_t1ref') apply_ts_tumor1_t1ref.inputs.interpolation = 'NearestNeighbor' outname = 'reg2CT' else: outname = 'reg2T1ref' tumor_seg = nipype.MapNode(interface=HDGlioPredict(), iterfield=['t1', 'ct1', 't2', 'flair'], name='tumor_segmentation') tumor_seg.inputs.out_file = 'segmentation' mi = nipype.MapNode(Merge(2), iterfield=['in1', 
'in2'], name='merge') gtv_seg_data_prep = nipype.MapNode(interface=NNUnetPreparation(), iterfield=['images'], name='gtv_seg_data_prep') gtv_seg = nipype.MapNode(interface=NNUnetInference(), iterfield=['input_folder'], name='gtv_segmentation') gtv_seg.inputs.model_folder = gtv_model tumor_seg_2mods = nipype.MapNode(interface=NNUnetInference(), iterfield=['input_folder'], name='tumor_seg_2mods') tumor_seg_2mods.inputs.model_folder = tumor_model datasink = nipype.Node(nipype.DataSink(base_directory=result_dir), "datasink") substitutions = [('/segmentation.nii.gz', '/Tumor_predicted.nii.gz')] substitutions += [('subid', sub_id)] for i, session in enumerate(sessions): substitutions += [('_tumor_segmentation{}/'.format(i), session + '/')] substitutions += [('_gtv_segmentation{}/subject1'.format(i), session + '/GTV_predicted')] substitutions += [('_tumor_seg_2mods{}/subject1'.format(i), session + '/Tumor_predicted_2modalities')] substitutions += [ ('_apply_ts_gtv{}/subject1_trans.nii.gz'.format(i), session + '/' + 'GTV_predicted_{}.nii.gz'.format(outname)) ] substitutions += [ ('_apply_ts_tumor1{}/subject1_trans.nii.gz'.format(i), session + '/' + 'Tumor_predicted_2modalities_{}.nii.gz'.format(outname)) ] substitutions += [ ('_apply_ts_tumor{}/segmentation_trans.nii.gz'.format(i), session + '/' + 'Tumor_predicted_{}.nii.gz'.format(outname)) ] substitutions += [ ('_apply_ts_gtv_t1ref{}/subject1_trans.nii.gz'.format(i), session + '/' + 'GTV_predicted_reg2T1ref.nii.gz') ] substitutions += [ ('_apply_ts_tumor1_t1ref{}/subject1_trans.nii.gz'.format(i), session + '/' + 'Tumor_predicted_2modalities_reg2T1ref.nii.gz') ] substitutions += [ ('_apply_ts_tumor_t1ref{}/segmentation_trans.nii.gz'.format(i), session + '/' + 'Tumor_predicted_reg2T1ref.nii.gz') ] datasink.inputs.substitutions = substitutions # Create Workflow workflow = nipype.Workflow('tumor_segmentation_workflow', base_dir=nipype_cache) # Connect from registration workflow, if provided if reg_workflow is not None: 
workflow.connect(reg_workflow, 'masking0.out_file', mi, 'in1') workflow.connect(reg_workflow, 'masking2.out_file', mi, 'in2') workflow.connect(reg_workflow, 'masking0.out_file', tumor_seg, 'ct1') workflow.connect(reg_workflow, 'masking1.out_file', tumor_seg, 't2') workflow.connect(reg_workflow, 'masking2.out_file', tumor_seg, 'flair') workflow.connect(bet_workflow, 'bet.out_file', tumor_seg, 't1') workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_tumor, 'transforms') workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_gtv, 'transforms') workflow.connect(reg_workflow, 'merge_t1.out', apply_ts_tumor1, 'transforms') if reference: workflow.connect(reg_workflow, 'reg2T1.regmat', merge_ts_t1ref, 'in2') workflow.connect(reg_workflow, 'reg2T1.warp_file', merge_ts_t1ref, 'in1') else: # for i in range(len(sessions)): # workflow.connect(datasource, 't12ct_mat', fake_merge, # 'in{}'.format(i+1)) workflow.connect(datasource, 'reg2t1_mat', merge_ts_t1, 'in{}'.format(if_0 + 1)) workflow.connect(datasource, 'reg2t1_warp', merge_ts_t1, 'in{}'.format(if_0)) if reference: workflow.connect(datasource, 't12ct_mat', merge_ts_t1, 'in1') workflow.connect(datasource, 'reg2t1_mat', merge_ts_t1ref, 'in1') workflow.connect(datasource, 'reg2t1_warp', merge_ts_t1ref, 'in2') workflow.connect(merge_ts_t1, 'out', apply_ts_tumor, 'transforms') workflow.connect(merge_ts_t1, 'out', apply_ts_gtv, 'transforms') workflow.connect(merge_ts_t1, 'out', apply_ts_tumor1, 'transforms') workflow.connect(datasource, 'ct1_preproc', mi, 'in1') workflow.connect(datasource, 'flair_preproc', mi, 'in2') workflow.connect(datasource, 'ct1_preproc', tumor_seg, 'ct1') workflow.connect(datasource, 't2_preproc', tumor_seg, 't2') workflow.connect(datasource, 'flair_preproc', tumor_seg, 'flair') workflow.connect(datasource, 't1_preproc', tumor_seg, 't1') # Connect from datasource if reference: workflow.connect(merge_ts_t1ref, 'out', apply_ts_tumor_t1ref, 'transforms') workflow.connect(merge_ts_t1ref, 'out', 
apply_ts_gtv_t1ref, 'transforms') workflow.connect(merge_ts_t1ref, 'out', apply_ts_tumor1_t1ref, 'transforms') workflow.connect(datasource, 'reference', apply_ts_gtv, 'reference_image') workflow.connect(datasource, 'reference', apply_ts_tumor1, 'reference_image') workflow.connect(datasource, 'reference', apply_ts_tumor, 'reference_image') workflow.connect(datasource, 't1_0', apply_ts_gtv_t1ref, 'reference_image') workflow.connect(datasource, 't1_0', apply_ts_tumor1_t1ref, 'reference_image') workflow.connect(datasource, 't1_0', apply_ts_tumor_t1ref, 'reference_image') else: workflow.connect(datasource, 't1_0', apply_ts_gtv, 'reference_image') workflow.connect(datasource, 't1_0', apply_ts_tumor1, 'reference_image') workflow.connect(datasource, 't1_0', apply_ts_tumor, 'reference_image') # Connect other nodes # Nodes to prepare the data before nnUNet inference workflow.connect(mi, 'out', gtv_seg_data_prep, 'images') # Nodes to segment GTV and tumor using nnUNet workflow.connect(gtv_seg_data_prep, 'output_folder', gtv_seg, 'input_folder') workflow.connect(gtv_seg_data_prep, 'output_folder', tumor_seg_2mods, 'input_folder') # Nodes to normalize segmentations to CT space workflow.connect(gtv_seg, 'output_file', apply_ts_gtv, 'input_image') workflow.connect(tumor_seg_2mods, 'output_file', apply_ts_tumor1, 'input_image') workflow.connect(tumor_seg, 'out_file', apply_ts_tumor, 'input_image') # Connect datasink nodes to save outputs workflow.connect(tumor_seg, 'out_file', datasink, 'results.subid.@tumor_seg') workflow.connect(gtv_seg, 'output_file', datasink, 'results.subid.@gtv_seg') workflow.connect(tumor_seg_2mods, 'output_file', datasink, 'results.subid.@tumor_seg_2mods') workflow.connect(apply_ts_gtv, 'output_image', datasink, 'results.subid.@gtv_reg2CT') workflow.connect(apply_ts_tumor, 'output_image', datasink, 'results.subid.@tumor_reg2CT') workflow.connect(apply_ts_tumor1, 'output_image', datasink, 'results.subid.@tumor1_reg2CT') if reference: 
workflow.connect(tumor_seg_2mods, 'output_file', apply_ts_tumor1_t1ref, 'input_image') workflow.connect(tumor_seg, 'out_file', apply_ts_tumor_t1ref, 'input_image') workflow.connect(gtv_seg, 'output_file', apply_ts_gtv_t1ref, 'input_image') workflow.connect(apply_ts_gtv_t1ref, 'output_image', datasink, 'results.subid.@gtv_reg2T1ref') workflow.connect(apply_ts_tumor_t1ref, 'output_image', datasink, 'results.subid.@tumor_reg2T1ref') workflow.connect(apply_ts_tumor1_t1ref, 'output_image', datasink, 'results.subid.@tumor1_reg2T1ref') workflow = datasink_base(datasink, datasource, workflow, sessions, reference) return workflow
def longitudinal_registration(sub_id, datasource, sessions, reference,
                              result_dir, nipype_cache, bet_workflow=None):
    """
    Workflow to register multi-modality MR images (T2, T1KM, FLAIR) to their
    reference T1 image in a multiple time-point cohort.

    For each subject, this workflow registers the MR images in each
    time-point (tp) to the corresponding T1, then registers all the T1 images
    to a reference T1 (the one that is the closest in time to the
    radiotherapy session), and finally the reference T1 to the BPLCT. At the
    end, all the MR images will be saved both in T1 space (for each tp) and
    in CT space.

    Parameters
    ----------
    sub_id : subject identifier, used in the datasink path substitutions.
    datasource : nipype node exposing the per-session input images.
    sessions : list of session (time-point) names for this subject.
    reference : truthy when a reference CT (BPLCT) is available; enables the
        T1_ref -> CT registration branch and the CT-space outputs.
    result_dir : base directory handed to the DataSink.
    nipype_cache : base directory for the workflow working files.
    bet_workflow : optional brain-extraction sub-workflow; when None, the
        pre-computed 't1_bet' / 't1_0_bet' / 't1_mask' datasource fields are
        used instead.

    Returns
    -------
    The fully connected nipype Workflow.
    """
    # Per-tp T1 -> reference-T1 registration; 's' = rigid + affine +
    # deformable SyN (antsRegistrationSyN convention).
    reg2T1 = nipype.MapNode(interface=AntsRegSyn(),
                            iterfield=['input_file'],
                            name='reg2T1')
    reg2T1.inputs.transformation = 's'
    reg2T1.inputs.num_dimensions = 3
    reg2T1.inputs.num_threads = 6

    if reference:
        # Reference T1 -> BPLCT rigid ('r') registration, only created when
        # a reference CT is available.
        regT12CT = nipype.MapNode(interface=AntsRegSyn(),
                                  iterfield=['input_file'],
                                  name='regT12CT')
        regT12CT.inputs.transformation = 'r'
        regT12CT.inputs.num_dimensions = 3
        regT12CT.inputs.num_threads = 4

    # One rigid registration per secondary sequence (SEQUENCES[1:4]), each
    # registered to the same-tp T1 (SEQUENCES[0]).
    reg_nodes = []
    for i in range(3):
        reg = nipype.MapNode(interface=AntsRegSyn(),
                             iterfield=['input_file', 'ref_file'],
                             name='ants_reg{}'.format(i))
        reg.inputs.transformation = 'r'
        reg.inputs.num_dimensions = 3
        reg.inputs.num_threads = 4
        reg.inputs.interpolation = 'BSpline'
        reg_nodes.append(reg)

    # Brain masking of each registered secondary sequence.
    apply_mask_nodes = []
    for i in range(3):
        masking = nipype.MapNode(interface=ApplyMask(),
                                 iterfield=['in_file', 'mask_file'],
                                 name='masking{}'.format(i))
        apply_mask_nodes.append(masking)

    # Transform application nodes to bring every sequence into CT space.
    apply_ts_nodes = []
    for i in range(3):
        apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                  iterfield=['input_image', 'transforms'],
                                  name='apply_ts{}'.format(i))
        apply_ts_nodes.append(apply_ts)
    # Apply ts nodes for T1_ref normalization
    apply_ts_nodes1 = []
    for i in range(3):
        apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                  iterfield=['input_image', 'transforms'],
                                  name='apply_ts1{}'.format(i))
        apply_ts_nodes1.append(apply_ts)

    # NOTE(review): split_ds_nodes are created but never connected anywhere
    # below — looks like leftover scaffolding; confirm before removing.
    split_ds_nodes = []
    for i in range(4):
        split_ds = nipype.Node(interface=Split(),
                               name='split_ds{}'.format(i))
        split_ds.inputs.splits = [1] * len(sessions)
        split_ds_nodes.append(split_ds)

    apply_ts_t1 = nipype.MapNode(interface=ApplyTransforms(),
                                 iterfield=['input_image', 'transforms'],
                                 name='apply_ts_t1')

    # The transform chain is one element longer when the extra T1_ref -> CT
    # matrix participates; if_0 is the 1-based Merge input slot receiving the
    # reg2T1 warp field.
    merge_nodes = []
    if reference:
        iterfields = ['in1', 'in2', 'in3', 'in4']
        iterfields_t1 = ['in1', 'in2', 'in3']
        if_0 = 2
    else:
        iterfields = ['in1', 'in2', 'in3']
        iterfields_t1 = ['in1', 'in2']
        if_0 = 1
    for i in range(3):
        merge = nipype.MapNode(interface=Merge(len(iterfields)),
                               iterfield=iterfields,
                               name='merge{}'.format(i))
        merge.inputs.ravel_inputs = True
        merge_nodes.append(merge)
    # Merging transforms for normalization to T1_ref
    merge_nodes1 = []
    for i in range(3):
        merge = nipype.MapNode(interface=Merge(3),
                               iterfield=['in1', 'in2', 'in3'],
                               name='merge1{}'.format(i))
        merge.inputs.ravel_inputs = True
        merge_nodes1.append(merge)
    merge_ts_t1 = nipype.MapNode(interface=Merge(len(iterfields_t1)),
                                 iterfield=iterfields_t1,
                                 name='merge_t1')
    merge_ts_t1.inputs.ravel_inputs = True
    # have to create a fake merge of the transformation from t10 to CT in
    # order to have the same number of matrices as input in mapnode
    fake_merge = nipype.Node(interface=Merge(len(sessions)),
                             name='fake_merge')

    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    # Map nipype's auto-generated MapNode output paths onto the per-session
    # result layout.
    substitutions = [('subid', sub_id)]
    for i, session in enumerate(sessions):
        # NOTE(review): 'session'.format(i) has no placeholder, so every
        # iteration appends the identical ('session', <name>) pair and only
        # the first session name ever takes effect — possibly meant to be
        # 'session{}'.format(i); confirm against the datasink container
        # template before changing.
        substitutions += [('session'.format(i), session)]
        substitutions += [('_masking0{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'CT1_preproc.nii.gz')]
        substitutions += [('_reg2T1{}/antsreg0GenericAffine.mat'.format(i),
                           session + '/' + 'reg2T1_ref.mat')]
        substitutions += [('_reg2T1{}/antsreg1Warp.nii.gz'.format(i),
                           session + '/' + 'reg2T1_ref_warp.nii.gz')]
        substitutions += [('_reg2T1{}/antsregWarped.nii.gz'.format(i),
                           session + '/' + 'T1_reg2T1_ref.nii.gz')]
        substitutions += [('_regT12CT{}/antsreg0GenericAffine.mat'.format(i),
                           '/regT1_ref2CT.mat')]
        substitutions += [('_masking1{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'T2_preproc.nii.gz')]
        substitutions += [('_masking2{}/antsregWarped_masked.nii.gz'.format(i),
                           session + '/' + 'FLAIR_preproc.nii.gz')]
        substitutions += [('_apply_ts0{}/CT1_trans.nii.gz'.format(i),
                           session + '/' + 'CT1_reg2CT.nii.gz')]
        substitutions += [('_apply_ts1{}/T2_trans.nii.gz'.format(i),
                           session + '/' + 'T2_reg2CT.nii.gz')]
        substitutions += [('_apply_ts2{}/FLAIR_trans.nii.gz'.format(i),
                           session + '/' + 'FLAIR_reg2CT.nii.gz')]
        substitutions += [('_apply_ts_t1{}/T1_trans.nii.gz'.format(i),
                           session + '/' + 'T1_reg2CT.nii.gz')]
        substitutions += [('_apply_ts10{}/CT1_trans.nii.gz'.format(i),
                           session + '/' + 'CT1_reg2T1_ref.nii.gz')]
        substitutions += [('_apply_ts11{}/T2_trans.nii.gz'.format(i),
                           session + '/' + 'T2_reg2T1_ref.nii.gz')]
        substitutions += [('_apply_ts12{}/FLAIR_trans.nii.gz'.format(i),
                           session + '/' + 'FLAIR_reg2T1_ref.nii.gz')]
    datasink.inputs.substitutions = substitutions
    # Create Workflow
    workflow = nipype.Workflow('registration_workflow', base_dir=nipype_cache)

    # Register each secondary sequence to the same-tp T1.
    for i, reg in enumerate(reg_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], reg, 'input_file')
        workflow.connect(datasource, SEQUENCES[0], reg, 'ref_file')
    # bring every MR in CT space
    for i, node in enumerate(apply_ts_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], node, 'input_image')
        if reference:
            workflow.connect(datasource, 'reference', node,
                             'reference_image')
        else:
            # No CT available: T1_ref space is the final common space.
            workflow.connect(datasource, 't1_0', node, 'reference_image')
        workflow.connect(merge_nodes[i], 'out', node, 'transforms')
        workflow.connect(node, 'output_image', datasink,
                         'results.subid.@{}_reg2CT'.format(SEQUENCES[i + 1]))
    # bring every MR in T1_ref space
    for i, node in enumerate(apply_ts_nodes1):
        workflow.connect(datasource, SEQUENCES[i + 1], node, 'input_image')
        workflow.connect(datasource, 't1_0', node, 'reference_image')
        workflow.connect(merge_nodes1[i], 'out', node, 'transforms')
        workflow.connect(
            node, 'output_image', datasink,
            'results.subid.@{}_reg2T1_ref'.format(SEQUENCES[i + 1]))
    # Assemble the per-sequence transform chains: sequence->T1 matrix,
    # T1->T1_ref matrix + warp, and (if reference) T1_ref->CT matrix in in1.
    for i, node in enumerate(merge_nodes):
        workflow.connect(reg_nodes[i], 'regmat', node,
                         'in{}'.format(if_0 + 2))
        workflow.connect(reg2T1, 'regmat', node, 'in{}'.format(if_0 + 1))
        workflow.connect(reg2T1, 'warp_file', node, 'in{}'.format(if_0))
        if reference:
            workflow.connect(fake_merge, 'out', node, 'in1')
    for i, node in enumerate(merge_nodes1):
        workflow.connect(reg_nodes[i], 'regmat', node, 'in3')
        workflow.connect(reg2T1, 'regmat', node, 'in2')
        workflow.connect(reg2T1, 'warp_file', node, 'in1')
    # Mask every registered sequence with the (BET or precomputed) T1 mask.
    for i, mask in enumerate(apply_mask_nodes):
        workflow.connect(reg_nodes[i], 'reg_file', mask, 'in_file')
        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_mask', mask, 'mask_file')
        else:
            workflow.connect(datasource, 't1_mask', mask, 'mask_file')
        workflow.connect(mask, 'out_file', datasink,
                         'results.subid.@{}_preproc'.format(SEQUENCES[i + 1]))
    if bet_workflow is not None:
        workflow.connect(bet_workflow, 'bet.out_file', reg2T1, 'input_file')
        workflow.connect(bet_workflow, 't1_0_bet.out_file', reg2T1,
                         'ref_file')
    else:
        workflow.connect(datasource, 't1_bet', reg2T1, 'input_file')
        workflow.connect(datasource, 't1_0_bet', reg2T1, 'ref_file')
    if reference:
        # Replicate the single T1_ref->CT matrix once per session so the
        # MapNode merge receives one transform per time-point.
        for i, sess in enumerate(sessions):
            workflow.connect(regT12CT, 'regmat', fake_merge,
                             'in{}'.format(i + 1))
            workflow.connect(regT12CT, 'regmat', datasink,
                             'results.subid.{0}.@regT12CT_mat'.format(sess))
        workflow.connect(datasource, 'reference', regT12CT, 'ref_file')
        workflow.connect(datasource, 't1_0', regT12CT, 'input_file')
        workflow.connect(fake_merge, 'out', merge_ts_t1, 'in1')
        workflow.connect(datasource, 'reference', apply_ts_t1,
                         'reference_image')
    else:
        workflow.connect(datasource, 't1_0', apply_ts_t1, 'reference_image')
    workflow.connect(datasource, 't1', apply_ts_t1, 'input_image')
    workflow.connect(merge_ts_t1, 'out', apply_ts_t1, 'transforms')
    workflow.connect(reg2T1, 'regmat', merge_ts_t1, 'in{}'.format(if_0 + 1))
    workflow.connect(reg2T1, 'warp_file', merge_ts_t1, 'in{}'.format(if_0))
    workflow.connect(reg2T1, 'warp_file', datasink,
                     'results.subid.@reg2CT_warp')
    workflow.connect(reg2T1, 'regmat', datasink,
                     'results.subid.@reg2CT_mat')
    workflow.connect(reg2T1, 'reg_file', datasink,
                     'results.subid.@T12T1_ref')
    workflow.connect(apply_ts_t1, 'output_image', datasink,
                     'results.subid.@T1_reg2CT')

    if bet_workflow is not None:
        workflow = datasink_base(datasink, datasource, workflow, sessions,
                                 reference)
    else:
        # No BET sub-workflow: the datasink must also persist the
        # pre-computed brain-extracted T1s.
        workflow = datasink_base(datasink, datasource, workflow, sessions,
                                 reference, extra_nodes=['t1_bet'])

    return workflow
def single_tp_registration(sub_id, datasource, session, reference, result_dir,
                           nipype_cache, bet_workflow=None):
    """
    Workflow to register multi-modality MR images (T2, T1KM, FLAIR) to their
    reference T1 image in a single time-point cohort.

    For each subject, this workflow registers the MR images in the provided
    time-point (tp) to the corresponding T1, then registers the T1 image to
    the BPLCT (if present). At the end, all the MR images will be saved both
    in T1 space and in CT space.

    Parameters
    ----------
    sub_id : subject identifier, used in the datasink path substitutions.
    datasource : nipype node exposing the input images.
    session : one-element list holding the session name.
    reference : truthy when a reference CT (BPLCT) is available; enables the
        T1 -> CT registration branch and the CT-space outputs.
    result_dir : base directory handed to the DataSink.
    nipype_cache : base directory for the workflow working files.
    bet_workflow : optional brain-extraction sub-workflow; when None, the
        pre-computed 't1_bet' / 't1_mask' datasource fields are used instead.

    Returns
    -------
    The fully connected nipype Workflow.
    """
    # Only one time-point is expected.
    session = session[0]
    if reference:
        # T1 -> BPLCT rigid ('r') registration.
        regT12CT = nipype.MapNode(interface=AntsRegSyn(),
                                  iterfield=['input_file'],
                                  name='regT12CT')
        regT12CT.inputs.transformation = 'r'
        regT12CT.inputs.num_dimensions = 3
        regT12CT.inputs.num_threads = 4

    # One rigid registration per secondary sequence (SEQUENCES[1:4]), each
    # registered to the T1 (SEQUENCES[0]).
    reg_nodes = []
    for i in range(3):
        reg = nipype.MapNode(interface=AntsRegSyn(),
                             iterfield=['input_file', 'ref_file'],
                             name='ants_reg{}'.format(i))
        reg.inputs.transformation = 'r'
        reg.inputs.num_dimensions = 3
        reg.inputs.num_threads = 4
        reg.inputs.interpolation = 'BSpline'
        reg_nodes.append(reg)

    # Brain masking of each registered secondary sequence.
    apply_mask_nodes = []
    for i in range(3):
        masking = nipype.MapNode(interface=ApplyMask(),
                                 iterfield=['in_file', 'mask_file'],
                                 name='masking{}'.format(i))
        apply_mask_nodes.append(masking)

    if reference:
        # CT-space resampling nodes, only needed when a CT is available.
        apply_ts_nodes = []
        for i in range(3):
            apply_ts = nipype.MapNode(interface=ApplyTransforms(),
                                      iterfield=['input_image', 'transforms'],
                                      name='apply_ts{}'.format(i))
            apply_ts_nodes.append(apply_ts)
        apply_ts_t1 = nipype.MapNode(interface=ApplyTransforms(),
                                     iterfield=['input_image', 'transforms'],
                                     name='apply_ts_t1')
        # NOTE(review): these merge nodes receive the two matrices below but
        # their 'out' is never consumed — possibly leftover; confirm.
        merge_nodes = []
        for i in range(3):
            merge = nipype.MapNode(interface=Merge(2),
                                   iterfield=['in1', 'in2'],
                                   name='merge{}'.format(i))
            merge.inputs.ravel_inputs = True
            merge_nodes.append(merge)

    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    # Map nipype's auto-generated MapNode output paths onto the session
    # result layout.
    substitutions = [('subid', sub_id)]
    substitutions += [('session', session)]
    substitutions += [('_regT12CT0/antsreg0GenericAffine.mat',
                       '/reg2T1_ref.mat')]
    substitutions += [('_masking00/antsregWarped_masked.nii.gz',
                       session + '/' + 'CT1_preproc.nii.gz')]
    substitutions += [('_regT12CT/antsreg0GenericAffine.mat',
                       '/regT1_ref2CT.mat')]
    substitutions += [('_masking10/antsregWarped_masked.nii.gz',
                       session + '/' + 'T2_preproc.nii.gz')]
    substitutions += [('_masking20/antsregWarped_masked.nii.gz',
                       session + '/' + 'FLAIR_preproc.nii.gz')]
    substitutions += [('_apply_ts00/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'CT1_reg2CT.nii.gz')]
    substitutions += [('_apply_ts10/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'T2_reg2CT.nii.gz')]
    substitutions += [('_apply_ts20/antsregWarped_masked_trans.nii.gz',
                       session + '/' + 'FLAIR_reg2CT.nii.gz')]
    substitutions += [('_apply_ts_t10/T1_preproc_trans.nii.gz',
                       session + '/' + 'T1_reg2CT.nii.gz')]
    datasink.inputs.substitutions = substitutions
    # Create Workflow
    workflow = nipype.Workflow('registration_workflow', base_dir=nipype_cache)

    # Register each secondary sequence to the T1.
    for i, reg in enumerate(reg_nodes):
        workflow.connect(datasource, SEQUENCES[i + 1], reg, 'input_file')
        workflow.connect(datasource, SEQUENCES[0], reg, 'ref_file')
    # bring every MR in CT space
    if reference:
        for i, node in enumerate(merge_nodes):
            workflow.connect(reg_nodes[i], 'regmat', node, 'in2')
            workflow.connect(regT12CT, 'regmat', node, 'in1')
        for i, node in enumerate(apply_ts_nodes):
            workflow.connect(apply_mask_nodes[i], 'out_file', node,
                             'input_image')
            workflow.connect(datasource, 'reference', node,
                             'reference_image')
            workflow.connect(regT12CT, 'regmat', node, 'transforms')
            workflow.connect(
                node, 'output_image', datasink,
                'results.subid.@{}_reg2CT'.format(SEQUENCES[i + 1]))
        workflow.connect(regT12CT, 'regmat', datasink,
                         'results.subid.{0}.@regT12CT_mat'.format(session))
        workflow.connect(datasource, 'reference', regT12CT, 'ref_file')
        workflow.connect(datasource, 't1', regT12CT, 'input_file')
        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_file', apply_ts_t1,
                             'input_image')
        else:
            workflow.connect(datasource, 't1_bet', apply_ts_t1,
                             'input_image')
        workflow.connect(datasource, 'reference', apply_ts_t1,
                         'reference_image')
        workflow.connect(apply_ts_t1, 'output_image', datasink,
                         'results.subid.@T1_reg2CT')
        workflow.connect(regT12CT, 'regmat', apply_ts_t1, 'transforms')
    # Mask every registered sequence with the (BET or precomputed) T1 mask.
    for i, mask in enumerate(apply_mask_nodes):
        workflow.connect(reg_nodes[i], 'reg_file', mask, 'in_file')
        if bet_workflow is not None:
            workflow.connect(bet_workflow, 'bet.out_mask', mask, 'mask_file')
        else:
            workflow.connect(datasource, 't1_mask', mask, 'mask_file')
        workflow.connect(mask, 'out_file', datasink,
                         'results.subid.@{}_preproc'.format(SEQUENCES[i + 1]))

    if bet_workflow is not None:
        workflow = datasink_base(datasink, datasource, workflow, [session],
                                 reference, t10=False)
    else:
        # No BET sub-workflow: the datasink must also persist the
        # pre-computed brain-extracted T1.
        workflow = datasink_base(datasink, datasource, workflow, [session],
                                 reference, extra_nodes=['t1_bet'],
                                 t10=False)

    return workflow
def convertion_workflow(self):
    """
    Build the DICOM -> NIfTI conversion workflow for all MR, reference,
    T1_0, CT and RT data of one subject.

    Plain image series go through DicomCheck -> Dcm2niix -> ConversionCheck;
    RT doses go through DicomCheck/GetRefRTDose -> DoseConverter; RTSTRUCT
    directories are checked and sunk unconverted. Datasink substitutions map
    the auto-generated nipype node folders onto the per-session layout.

    Returns
    -------
    The fully connected nipype Workflow.
    """
    self.datasource()
    datasource = self.data_source
    ref_sequence = self.ref_sequence
    t10 = self.t10
    sub_id = self.sub_id
    result_dir = self.result_dir
    nipype_cache = self.nipype_cache
    sequences = self.sequences
    reference = self.reference
    rt_data = self.rt
    if rt_data is not None:
        rt_session = rt_data['session']
    workflow = nipype.Workflow('data_convertion_workflow',
                               base_dir=nipype_cache)
    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    substitutions = [('subid', sub_id)]
    substitutions += [('results/', '{}/'.format(self.workflow_name))]
    # ref_sequence may be a single sequence name or a list of them.
    if type(ref_sequence) == list:
        to_convert = sequences + ref_sequence
    else:
        to_convert = sequences + [ref_sequence]
    if rt_data is not None:
        # RT modalities present for this subject ('session'/'labels' are
        # bookkeeping keys, not convertible data).
        rt_sequences = [
            x for x in rt_data.keys()
            if rt_data[x] and x != 'session' and x != 'labels'
        ]
        workflow.connect(datasource, 'rt', datasink, 'results.subid.@rt')
        to_convert = to_convert + rt_sequences
    else:
        rt_sequences = []
    if reference:
        to_convert.append('reference')
    if t10:
        to_convert.append('t1_0')
    if self.ct_sessions:
        to_convert.append('ct')
    for seq in to_convert:
        if seq not in rt_sequences:
            # Plain image series: check -> dcm2niix -> conversion check.
            dc = nipype.MapNode(interface=DicomCheck(),
                                iterfield=['dicom_dir'],
                                name='dc{}'.format(seq))
            workflow.connect(datasource, seq, dc, 'dicom_dir')
            converter = nipype.MapNode(
                interface=Dcm2niix(),
                iterfield=['source_dir', 'out_filename'],
                name='converter{}'.format(seq))
            converter.inputs.compress = 'y'
            converter.inputs.philips_float = False
            # Reference and CT series are merged into a single volume.
            if seq == 'reference' or seq == 'ct':
                converter.inputs.merge_imgs = True
            else:
                converter.inputs.merge_imgs = False
            check = nipype.MapNode(interface=ConversionCheck(),
                                   iterfield=['in_file', 'file_name'],
                                   name='check_conversion{}'.format(seq))
            workflow.connect(dc, 'outdir', converter, 'source_dir')
            workflow.connect(dc, 'scan_name', converter, 'out_filename')
            workflow.connect(dc, 'scan_name', check, 'file_name')
            workflow.connect(converter, 'converted_files', check, 'in_file')
            # Reference and T1_0 land in dedicated sub-folders.
            if seq == 'reference':
                workflow.connect(
                    check, 'out_file', datasink,
                    'results.subid.REF.@{}_converted'.format(seq))
            elif seq == 't1_0':
                workflow.connect(
                    check, 'out_file', datasink,
                    'results.subid.T10.@{}_converted'.format(seq))
            else:
                workflow.connect(
                    check, 'out_file', datasink,
                    'results.subid.@{}_converted'.format(seq))
            for i, session in enumerate(self.session_names[seq]):
                substitutions += [(('_converter{0}{1}/'.format(seq, i),
                                   session + '/'))]
        else:
            if seq != 'rtstruct':
                if seq == 'rtct':
                    # RT planning CT: converted with dcm2niix, merged.
                    converter = nipype.MapNode(
                        interface=Dcm2niix(),
                        iterfield=['source_dir', 'out_filename'],
                        name='converter{}'.format(seq))
                    converter.inputs.compress = 'y'
                    converter.inputs.philips_float = False
                    converter.inputs.merge_imgs = True
                else:
                    # Dose volumes use the dedicated dose converter.
                    converter = nipype.MapNode(
                        interface=DoseConverter(),
                        iterfield=['input_dose', 'out_name'],
                        name='converter{}'.format(seq))
                if seq == 'doses':
                    # 'doses' gets a fixed output name and the reference
                    # dose file is picked by GetRefRTDose first.
                    converter = nipype.MapNode(
                        interface=DoseConverter(),
                        iterfield=['input_dose'],
                        name='converter{}'.format(seq))
                    get_dose = nipype.MapNode(interface=GetRefRTDose(),
                                              iterfield=['doses'],
                                              name='get_doses')
                    workflow.connect(datasource, 'doses', get_dose, 'doses')
                    workflow.connect(get_dose, 'dose_file', converter,
                                     'input_dose')
                    converter.inputs.out_name = 'Unused_RTDOSE.nii.gz'
                    workflow.connect(
                        converter, 'out_file', datasink,
                        'results.subid.@{}_converted'.format(seq))
                else:
                    dc = nipype.MapNode(interface=DicomCheck(),
                                        iterfield=['dicom_dir'],
                                        name='dc{}'.format(seq))
                    workflow.connect(datasource, seq, dc, 'dicom_dir')
                    if seq == 'rtct':
                        check = nipype.MapNode(
                            interface=ConversionCheck(),
                            iterfield=['in_file', 'file_name'],
                            name='check_conversion{}'.format(seq))
                        workflow.connect(dc, 'outdir', converter,
                                         'source_dir')
                        workflow.connect(dc, 'scan_name', converter,
                                         'out_filename')
                        workflow.connect(dc, 'scan_name', check, 'file_name')
                        workflow.connect(converter, 'converted_files',
                                         check, 'in_file')
                        workflow.connect(
                            check, 'out_file', datasink,
                            'results.subid.@{}_converted'.format(seq))
                    else:
                        workflow.connect(dc, 'dose_file', converter,
                                         'input_dose')
                        workflow.connect(dc, 'scan_name', converter,
                                         'out_name')
                        workflow.connect(
                            converter, 'out_file', datasink,
                            'results.subid.@{}_converted'.format(seq))
            else:
                # RTSTRUCT: no conversion, sink the checked DICOM directory.
                dc = nipype.MapNode(interface=DicomCheck(),
                                    iterfield=['dicom_dir'],
                                    name='dc{}'.format(seq))
                workflow.connect(datasource, seq, dc, 'dicom_dir')
                workflow.connect(dc, 'outdir', datasink,
                                 'results.subid.@rtstruct')
                for i, session in enumerate(rt_session):
                    substitutions += [
                        (('_dc{0}{1}/checked_dicoms'.format(seq, i),
                          session + '/RTSTRUCT_used'))
                    ]
            for i, session in enumerate(rt_session):
                substitutions += [(('_converter{0}{1}/'.format(seq, i),
                                   session + '/'))]
    # Reference and T1_0 have a single MapNode folder each to strip.
    substitutions += [('_converterreference0/', '')]
    substitutions += [('_convertert1_00/', '')]
    datasink.inputs.substitutions = substitutions

    return workflow
def sorting_workflow(self, subject_name_position=-3, renaming=False,
                     mr_classiffication=True, checkpoints=None,
                     sub_checkpoints=None):
    """
    Build the workflow that sorts raw input data into the per-subject,
    per-session folder structure.

    The chain is: FileCheck (discover and optionally rename files) ->
    FolderPreparation -> FolderSorting -> RTDataSorting, optionally followed
    by MRClass-based MR image classification whose output is merged with the
    sorted RT data before being sunk.

    Parameters
    ----------
    subject_name_position : int
        Path-component index (from the end) where the subject name sits.
    renaming : bool
        Whether FileCheck should rename the discovered files.
    mr_classiffication : bool
        Run MRClass to classify MR images; requires both checkpoint sets.
        (Name kept as-is for backward compatibility with existing callers.)
    checkpoints, sub_checkpoints
        MRClass network weights; mandatory when mr_classiffication is True.

    Returns
    -------
    The fully connected nipype Workflow.

    Raises
    ------
    Exception
        If classification is requested without the MRClass weights.
    """
    nipype_cache = os.path.join(self.nipype_cache, 'data_sorting')
    result_dir = self.result_dir

    workflow = nipype.Workflow('sorting_workflow', base_dir=nipype_cache)
    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")

    file_check = nipype.Node(interface=FileCheck(), name='fc')
    file_check.inputs.input_dir = self.base_dir
    file_check.inputs.subject_name_position = subject_name_position
    file_check.inputs.renaming = renaming
    prep = nipype.MapNode(interface=FolderPreparation(), name='prep',
                          iterfield=['input_list'])
    sort = nipype.MapNode(interface=FolderSorting(), name='sort',
                          iterfield=['input_dir'])
    mr_rt_merge = nipype.MapNode(interface=Merge(2), name='mr_rt_merge',
                                 iterfield=['in1', 'in2'])
    mr_rt_merge.inputs.ravel_inputs = True
    merging = nipype.Node(interface=FolderMerge(), name='merge')
    if mr_classiffication:
        if checkpoints is None or sub_checkpoints is None:
            raise Exception('MRClass weights were not provided, MR image '
                            'classification cannot be performed!')
        mrclass = nipype.MapNode(interface=MRClass(), name='mrclass',
                                 iterfield=['mr_images'])
        mrclass.inputs.checkpoints = checkpoints
        mrclass.inputs.sub_checkpoints = sub_checkpoints
    else:
        # Without classification there is no MR branch to merge.
        mr_rt_merge.inputs.in1 = None

    rt_sorting = nipype.MapNode(interface=RTDataSorting(),
                                name='rt_sorting',
                                iterfield=['input_dir'])

    workflow.connect(file_check, 'out_list', prep, 'input_list')
    workflow.connect(prep, 'out_folder', sort, 'input_dir')
    workflow.connect(sort, 'out_folder', rt_sorting, 'input_dir')
    if mr_classiffication:
        workflow.connect(sort, 'mr_images', mrclass, 'mr_images')
        workflow.connect(mrclass, 'out_folder', mr_rt_merge, 'in1')
        workflow.connect(rt_sorting, 'out_folder', mr_rt_merge, 'in2')
        workflow.connect(mr_rt_merge, 'out', merging, 'input_list')
        workflow.connect(merging, 'out_folder', datasink, '@rt_sorted')
    else:
        workflow.connect(rt_sorting, 'out_folder', datasink, '@rt_sorted')

    # Strip the auto-generated '_rt_sortingN/' MapNode folders from the
    # sunk paths. Raw string: '\d' is an invalid escape sequence in a
    # normal string literal (SyntaxWarning on Python 3.12+).
    substitutions = [(r'_rt_sorting\d+/', '')]
    datasink.inputs.regexp_substitutions = substitutions

    return workflow
def convertion_workflow(self):
    """
    Build the DICOM -> NIfTI conversion workflow driven by the
    `dict_sequences` description ('MR-RT', 'OT' and 'RT' groups).

    MR-RT/OT scans go through DicomCheck -> Dcm2niix -> ConversionCheck;
    RT physical/RBE doses through DicomCheck -> DoseConverter; 'ot' doses
    through GetRefRTDose -> DoseConverter; the RT planning CT through the
    Dcm2niix chain; RTSTRUCT directories are checked and sunk unconverted.

    Returns
    -------
    The fully connected nipype Workflow.
    """
    self.datasource()
    datasource = self.data_source
    dict_sequences = self.dict_sequences
    nipype_cache = self.nipype_cache
    result_dir = self.result_dir
    sub_id = self.sub_id
    toprocess = {**dict_sequences['MR-RT'], **dict_sequences['OT']}
    workflow = nipype.Workflow('data_convertion_workflow',
                               base_dir=nipype_cache)
    datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                           "datasink")
    substitutions = [('subid', sub_id)]
    substitutions += [('results/', '{}/'.format(self.workflow_name))]
    substitutions += [('checked_dicoms', 'RTSTRUCT_used')]
    datasink.inputs.substitutions = substitutions

    for key in toprocess:
        files = []
        if toprocess[key]['scans'] is not None:
            files = files + toprocess[key]['scans']
        for el in files:
            # NOTE(review): str.strip removes a *character set*, not a
            # suffix, so e.g. extension letters at the start of the name
            # are stripped too — node names elsewhere rely on the current
            # behavior, so it is kept; confirm before switching to
            # split/removesuffix.
            el = el.strip(self.extention)
            node_name = '{0}_{1}'.format(key, el)
            dc = nipype.Node(interface=DicomCheck(),
                             name='{}_dc'.format(node_name))
            workflow.connect(datasource, node_name, dc, 'dicom_dir')
            converter = nipype.Node(interface=Dcm2niix(),
                                    name='{}_convert'.format(node_name))
            converter.inputs.compress = 'y'
            converter.inputs.philips_float = False
            # CT series are merged into a single volume.
            if el == 'CT':
                converter.inputs.merge_imgs = True
            else:
                converter.inputs.merge_imgs = False
            check = nipype.Node(interface=ConversionCheck(),
                                name='{}_cc'.format(node_name))
            workflow.connect(dc, 'outdir', converter, 'source_dir')
            workflow.connect(dc, 'scan_name', converter, 'out_filename')
            workflow.connect(dc, 'scan_name', check, 'file_name')
            workflow.connect(converter, 'converted_files', check, 'in_file')
            workflow.connect(
                check, 'out_file', datasink,
                'results.subid.{0}.@{1}_converted'.format(key, el))
            # (A dead duplicate ConversionCheck node that was re-created
            # here, after its last use, has been removed.)

    for key in dict_sequences['RT']:
        doses = []
        if dict_sequences['RT'][key]['phy_dose'] is not None:
            doses.append('{}_phy_dose'.format(key))
        if dict_sequences['RT'][key]['rbe_dose'] is not None:
            doses.append('{}_rbe_dose'.format(key))
        for el in doses:
            el = el.strip(self.extention)
            node_name = el.strip(self.extention)
            converter = nipype.Node(interface=DoseConverter(),
                                    name='{}_dose_conv'.format(node_name))
            dc = nipype.Node(interface=DicomCheck(),
                             name='{}_dc'.format(node_name))
            workflow.connect(datasource, node_name, dc, 'dicom_dir')
            workflow.connect(dc, 'dose_file', converter, 'input_dose')
            workflow.connect(dc, 'scan_name', converter, 'out_name')
            workflow.connect(
                converter, 'out_file', datasink,
                'results.subid.{0}.@{1}_converted'.format(key, el))
        if dict_sequences['RT'][key]['ot_dose'] is not None:
            # 'Other' doses: the reference dose file is selected first and
            # converted under a fixed output name.
            el = '{}_ot_dose'.format(key)
            node_name = el.strip(self.extention)
            converter = nipype.Node(interface=DoseConverter(),
                                    name='{}_convert'.format(node_name))
            get_dose = nipype.Node(interface=GetRefRTDose(),
                                   name='{}_get_dose'.format(node_name))
            workflow.connect(datasource, node_name, get_dose, 'doses')
            workflow.connect(get_dose, 'dose_file', converter, 'input_dose')
            converter.inputs.out_name = 'Unused_RTDOSE.nii.gz'
            workflow.connect(
                converter, 'out_file', datasink,
                'results.subid.{0}.@{1}_converted'.format(key, el))
        if dict_sequences['RT'][key]['rtct'] is not None:
            # RT planning CT: standard dcm2niix chain, merged volume.
            el = '{}_rtct'.format(key)
            node_name = el.strip(self.extention)
            converter = nipype.Node(interface=Dcm2niix(),
                                    name='{}_convert'.format(node_name))
            converter.inputs.compress = 'y'
            converter.inputs.philips_float = False
            converter.inputs.merge_imgs = True
            dc = nipype.Node(interface=DicomCheck(),
                             name='{}_dc'.format(node_name))
            workflow.connect(datasource, node_name, dc, 'dicom_dir')
            check = nipype.Node(interface=ConversionCheck(),
                                name='{}_cc'.format(node_name))
            workflow.connect(dc, 'outdir', converter, 'source_dir')
            workflow.connect(dc, 'scan_name', converter, 'out_filename')
            workflow.connect(dc, 'scan_name', check, 'file_name')
            workflow.connect(converter, 'converted_files', check, 'in_file')
            workflow.connect(
                check, 'out_file', datasink,
                'results.subid.{0}.@{1}_converted'.format(key, el))
        if dict_sequences['RT'][key]['rtstruct'] is not None:
            # RTSTRUCT: no conversion, sink the checked DICOM directory.
            el = '{}_rtstruct'.format(key)
            node_name = el.strip(self.extention)
            dc = nipype.Node(interface=DicomCheck(),
                             name='{}_dc'.format(node_name))
            workflow.connect(datasource, node_name, dc, 'dicom_dir')
            workflow.connect(dc, 'outdir', datasink,
                             'results.subid.{0}.@rtstruct'.format(key, el))

    return workflow