def baseline_workflow(projectid, subjectid, sessionid, master_config, phase, interpMode, pipeline_name): """ Run autoworkup on a single session This is the main function to call when processing a data set with T1 & T2 data. ExperimentBaseDirectoryPrefix is the base of the directory to place results, T1Images & T2Images are the lists of images to be used in the auto-workup. atlas_fname_wpath is the path and filename of the atlas to use. """ if not 'auxlmk' in master_config['components'] or not 'tissue_classify' in master_config['components']: print "Baseline DataSink requires 'AUXLMK' and/or 'TISSUE_CLASSIFY'!!!" raise NotImplementedError # master_config['components'].append('auxlmk') # master_config['components'].append('tissue_classify') assert phase in ['atlas-based-reference', 'subject-based-reference'], "Unknown phase! Valid entries: 'atlas-based-reference', 'subject-based-reference'" baw201 = pe.Workflow(name=pipeline_name) inputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasLandmarkFilename', 'atlasWeightFilename', 'LLSModel', 'inputTemplateModel', 'template_t1', 'atlasDefinition', 'T1s', 'T2s', 'PDs', 'FLs', 'OTHERs']), run_without_submitting=True, name='inputspec') outputsSpec = pe.Node(interface=IdentityInterface(fields=['t1_average', 't2_average', 'pd_average', 'fl_average', 'posteriorImages', 'outputLabels', 'outputHeadLabels', 'tc_atlas2session_tx', 'tc_atlas2sessionInverse_tx', 'BCD_ACPC_T1_CROPPED', 'outputLandmarksInACPCAlignedSpace', 'outputLandmarksInInputSpace', 'output_tx', 'LMIatlasToSubject_tx', 'writeBranded2DImage', 'UpdatedPosteriorsList' # Longitudinal ]), run_without_submitting=True, name='outputspec') print """ denoise image filter """ print """ Merge all T1 and T2 List """ makeDenoiseInImageList = pe.Node(Function(function=MakeOutFileList, input_names=['T1List', 'T2List', 'PDList', 'FLList', 'OtherList','postfix','PrimaryT1'], output_names=['inImageList','outImageList','imageTypeList']), run_without_submitting=True, 
name="99_makeDenoiseInImageList") baw201.connect(inputsSpec, 'T1s', makeDenoiseInImageList, 'T1List') baw201.connect(inputsSpec, 'T2s', makeDenoiseInImageList, 'T2List') baw201.connect(inputsSpec, 'PDs', makeDenoiseInImageList, 'PDList') makeDenoiseInImageList.inputs.FLList= []# an emptyList HACK makeDenoiseInImageList.inputs.PrimaryT1= None # an emptyList HACK makeDenoiseInImageList.inputs.postfix = "_UNM_denoised.nii.gz" # HACK tissueClassifyWF.connect( inputsSpec, 'FLList', makeDenoiseInImageList, 'FLList' ) baw201.connect(inputsSpec, 'OTHERs', makeDenoiseInImageList, 'OtherList') print """ Denoise: """ DenoiseInputImgs = pe.MapNode( interface=UnbiasedNonLocalMeans(), name='denoiseInputImgs', iterfield=['inputVolume', 'outputVolume']) DenoiseInputImgs.inputs.rc= [1,1,1] DenoiseInputImgs.inputs.rs= [4,4,4] DenoiseInputImgs.plugin_args = modify_qsub_args(master_config['queue'], '200M', 1, 2, hard=False) baw201.connect([ (makeDenoiseInImageList, DenoiseInputImgs, [('inImageList', 'inputVolume')]), (makeDenoiseInImageList, DenoiseInputImgs, [('outImageList','outputVolume')]) ]) makeDenoiseOutImageList = pe.Node(Function(function=GenerateSeparateImageTypeList, input_names=['inFileList','inTypeList'], output_names=['T1List', 'T2List', 'PDList', 'FLList', 'OtherList']), run_without_submitting=True, name="99_makeDenoiseOutImageList") baw201.connect(DenoiseInputImgs, 'outputVolume', makeDenoiseOutImageList, 'inFileList') baw201.connect(makeDenoiseInImageList, 'imageTypeList', makeDenoiseOutImageList, 'inTypeList') DoReverseMapping = False # Set to true for debugging outputs if 'auxlmk' in master_config['components']: DoReverseMapping = True myLocalLMIWF = CreateLandmarkInitializeWorkflow("LandmarkInitialize", interpMode, DoReverseMapping) baw201.connect([(makeDenoiseOutImageList, myLocalLMIWF, [(('T1List', get_list_element,0), 'inputspec.inputVolume' )]), (inputsSpec, myLocalLMIWF, [('atlasLandmarkFilename', 'inputspec.atlasLandmarkFilename'), ('atlasWeightFilename', 
'inputspec.atlasWeightFilename'), ('LLSModel', 'inputspec.LLSModel'), ('inputTemplateModel', 'inputspec.inputTemplateModel'), ('template_t1', 'inputspec.atlasVolume')]), (myLocalLMIWF, outputsSpec, [('outputspec.outputResampledCroppedVolume','BCD_ACPC_T1_CROPPED'), ('outputspec.outputLandmarksInACPCAlignedSpace', 'outputLandmarksInACPCAlignedSpace'), ('outputspec.outputLandmarksInInputSpace', 'outputLandmarksInInputSpace'), ('outputspec.outputTransform', 'output_tx'), ('outputspec.atlasToSubjectTransform','LMIatlasToSubject_tx'), ('outputspec.writeBranded2DImage', 'writeBranded2DImage')]) ]) if 'tissue_classify' in master_config['components']: myLocalTCWF = CreateTissueClassifyWorkflow("TissueClassify", master_config['queue'], master_config['long_q'], interpMode) baw201.connect([(makeDenoiseOutImageList,myLocalTCWF, [('T1List','inputspec.T1List')]), (makeDenoiseOutImageList,myLocalTCWF, [('T2List','inputspec.T2List')]), (inputsSpec, myLocalTCWF, [('atlasDefinition', 'inputspec.atlasDefinition'), (('T1s', getAllT1sLength), 'inputspec.T1_count'), ('PDs', 'inputspec.PDList'), ('FLs', 'inputspec.FLList'), ('OTHERs', 'inputspec.OtherList')]), (myLocalLMIWF, myLocalTCWF, [('outputspec.outputResampledCroppedVolume', 'inputspec.PrimaryT1'), ('outputspec.atlasToSubjectTransform', 'inputspec.atlasToSubjectInitialTransform')]), (myLocalTCWF, outputsSpec, [('outputspec.t1_average', 't1_average'), ('outputspec.t2_average', 't2_average'), ('outputspec.pd_average', 'pd_average'), ('outputspec.fl_average', 'fl_average'), ('outputspec.posteriorImages', 'posteriorImages'), ('outputspec.outputLabels', 'outputLabels'), ('outputspec.outputHeadLabels', 'outputHeadLabels'), ('outputspec.atlasToSubjectTransform', 'tc_atlas2session_tx'), ('outputspec.atlasToSubjectInverseTransform', 'tc_atlas2sessionInverse_tx')]), ]) dsName = "{0}_ds_{1}".format(phase, sessionid) DataSink = pe.Node(name=dsName, interface=nio.DataSink()) DataSink.overwrite = master_config['ds_overwrite'] 
DataSink.inputs.container = '{0}/{1}/{2}'.format(projectid, subjectid, sessionid) DataSink.inputs.base_directory = master_config['resultdir'] baw201.connect([(outputsSpec, DataSink, # TODO: change to myLocalTCWF -> DataSink [(('t1_average', convertToList), 'TissueClassify.@t1'), (('t2_average', convertToList), 'TissueClassify.@t2'), (('pd_average', convertToList), 'TissueClassify.@pd'), (('fl_average', convertToList), 'TissueClassify.@fl')]), ]) baw201.connect([(outputsSpec, DataSink, # TODO: change to myLocalLMIWF -> DataSink [('outputLandmarksInACPCAlignedSpace', 'ACPCAlign.@outputLandmarks_ACPC'), ('writeBranded2DImage', 'ACPCAlign.@writeBranded2DImage'), ('BCD_ACPC_T1_CROPPED', 'ACPCAlign.@BCD_ACPC_T1_CROPPED'), ('outputLandmarksInInputSpace', 'ACPCAlign.@outputLandmarks_Input'), ('output_tx', 'ACPCAlign.@output_tx'), ('LMIatlasToSubject_tx', 'ACPCAlign.@LMIatlasToSubject_tx'),] ) ] ) currentFixWMPartitioningName = "_".join(['FixWMPartitioning', str(subjectid), str(sessionid)]) FixWMNode = pe.Node(interface=Function(function=FixWMPartitioning, input_names=['brainMask', 'PosteriorsList'], output_names=['UpdatedPosteriorsList', 'MatchingFGCodeList', 'MatchingLabelList', 'nonAirRegionMask']), name=currentFixWMPartitioningName) baw201.connect([(myLocalTCWF, FixWMNode, [('outputspec.outputLabels', 'brainMask'), (('outputspec.posteriorImages', flattenDict), 'PosteriorsList')]), (FixWMNode, outputsSpec, [('UpdatedPosteriorsList', 'UpdatedPosteriorsList')]), ]) currentBRAINSCreateLabelMapName = 'BRAINSCreateLabelMapFromProbabilityMaps_' + str(subjectid) + "_" + str(sessionid) BRAINSCreateLabelMapNode = pe.Node(interface=BRAINSCreateLabelMapFromProbabilityMaps(), name=currentBRAINSCreateLabelMapName) ## TODO: Fix the file names BRAINSCreateLabelMapNode.inputs.dirtyLabelVolume = 'fixed_headlabels_seg.nii.gz' BRAINSCreateLabelMapNode.inputs.cleanLabelVolume = 'fixed_brainlabels_seg.nii.gz' baw201.connect([(FixWMNode, BRAINSCreateLabelMapNode, 
[('UpdatedPosteriorsList','inputProbabilityVolume'), ('MatchingFGCodeList', 'foregroundPriors'), ('MatchingLabelList', 'priorLabelCodes'), ('nonAirRegionMask', 'nonAirRegionMask')]), (BRAINSCreateLabelMapNode, DataSink, [('cleanLabelVolume', 'TissueClassify.@outputLabels'), ('dirtyLabelVolume', 'TissueClassify.@outputHeadLabels')]), (myLocalTCWF, DataSink, [('outputspec.atlasToSubjectTransform', 'TissueClassify.@atlas2session_tx'), ('outputspec.atlasToSubjectInverseTransform', 'TissueClassify.@atlas2sessionInverse_tx')]), (FixWMNode, DataSink, [('UpdatedPosteriorsList', 'TissueClassify.@posteriors')]), ]) currentAccumulateLikeTissuePosteriorsName = 'AccumulateLikeTissuePosteriors_' + str(subjectid) + "_" + str(sessionid) AccumulateLikeTissuePosteriorsNode = pe.Node(interface=Function(function=AccumulateLikeTissuePosteriors, input_names=['posteriorImages'], output_names=['AccumulatePriorsList', 'AccumulatePriorsNames']), name=currentAccumulateLikeTissuePosteriorsName) baw201.connect([(FixWMNode, AccumulateLikeTissuePosteriorsNode, [('UpdatedPosteriorsList', 'posteriorImages')]), (AccumulateLikeTissuePosteriorsNode, DataSink, [('AccumulatePriorsList', 'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir')])]) return baw201
def CreateMALFWorkflow(WFname, onlyT1, master_config,BASE_DATA_GRABBER_DIR, runFixFusionLabelMap=True): from nipype.interfaces import ants CLUSTER_QUEUE=master_config['queue'] CLUSTER_QUEUE_LONG=master_config['long_q'] MALFWF = pe.Workflow(name=WFname) inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image', #Desired image to create label map for 'subj_t2_image', #Desired image to create label map for 'subj_lmks', #The landmarks corresponding to t1_image 'subj_fixed_head_labels', #The fixed head labels from BABC 'subj_left_hemisphere', #The warped left hemisphere mask 'atlasWeightFilename', #The static weights file name 'labelBaseFilename' #Atlas label base name ex) neuro_lbls.nii.gz ]), run_without_submitting=True, name='inputspec') outputsSpec = pe.Node(interface=IdentityInterface(fields=['MALF_HDAtlas20_2015_label', 'MALF_HDAtlas20_2015_CSFVBInjected_label', 'MALF_HDAtlas20_2015_fs_standard_label', 'MALF_HDAtlas20_2015_lobar_label', 'MALF_extended_snapshot']), run_without_submitting=True, name='outputspec') BLICreator = dict() A2SantsRegistrationPreMALF_SyN = dict() fixedROIAuto = dict() movingROIAuto = dict() labelMapResample = dict() NewlabelMapResample = dict() malf_atlas_mergeindex = 1; print 'malf_atlas_db_base' print master_config['malf_atlas_db_base'] malfAtlasDict = readMalfAtlasDbBase( master_config['malf_atlas_db_base'] ) malfAtlases = dict() sessionMakeMultimodalInput = dict() atlasMakeMultimodalInput = dict() warpedAtlasT1MergeNode = pe.Node(interface=Merge(len(malfAtlasDict)),name="T1sMergeAtlas") warpedAtlasLblMergeNode = pe.Node(interface=Merge(len(malfAtlasDict)),name="LblMergeAtlas") NewwarpedAtlasLblMergeNode = pe.Node(interface=Merge(len(malfAtlasDict)),name="fswmLblMergeAtlas") for malf_atlas_subject in malfAtlasDict: ## Need DataGrabber Here For the Atlas malfAtlases[malf_atlas_subject] = pe.Node(interface = IdentityInterface( fields=['t1', 't2', 'label', 'lmks']), name='malfAtlasInput'+malf_atlas_subject) 
malfAtlases[malf_atlas_subject].inputs.t1 = malfAtlasDict[malf_atlas_subject]['t1'] malfAtlases[malf_atlas_subject].inputs.t2 = malfAtlasDict[malf_atlas_subject]['t2'] malfAtlases[malf_atlas_subject].inputs.label = malfAtlasDict[malf_atlas_subject]['label'] malfAtlases[malf_atlas_subject].inputs.lmks = malfAtlasDict[malf_atlas_subject]['lmks'] ## Create BLI first ######################################################## # Run BLI atlas_to_subject ######################################################## BLICreator[malf_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI_"+malf_atlas_subject) BLICreator[malf_atlas_subject].inputs.outputTransformFilename = "landmarkInitializer_{0}_to_subject_transform.h5".format(malf_atlas_subject) MALFWF.connect(inputsSpec, 'atlasWeightFilename', BLICreator[malf_atlas_subject], 'inputWeightFilename') MALFWF.connect(malfAtlases[malf_atlas_subject], 'lmks', BLICreator[malf_atlas_subject], 'inputMovingLandmarkFilename') MALFWF.connect(inputsSpec, 'subj_lmks', BLICreator[malf_atlas_subject], 'inputFixedLandmarkFilename') ##### Initialize with ANTS Transform For SyN currentAtlasToSubjectantsRegistration = 'SyN_AtlasToSubjectANTsPreMALF_'+malf_atlas_subject A2SantsRegistrationPreMALF_SyN[malf_atlas_subject] = pe.Node(interface=ants.Registration(), name=currentAtlasToSubjectantsRegistration) many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG,4,2,16), 'overwrite': True} A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].plugin_args = many_cpu_ANTsSyN_options_dictionary A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.num_threads = -1 A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.dimension = 3 #### DEBUGGIN A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.transforms = ["Affine","Affine","SyN","SyN"] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.transform_parameters = [[0.1],[0.1],[0.1, 3, 0],[0.1, 3, 0]] if onlyT1: 
A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.metric = ['MI','MI','CC','CC'] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.metric_weight = [1.0,1.0,1.0,1.0] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sampling_percentage = [.5,.5,1.0,1.0] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.radius_or_number_of_bins = [32,32,4,4] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sampling_strategy = ['Regular','Regular',None,None] else: A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.metric = ['MI',['MI','MI'],'CC',['CC','CC']] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.metric_weight = [1.0,[1.0,1.0],1.0,[1.0,1.0]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sampling_percentage = [.5,[.5,0.5],1.0,[1.0,1.0]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.radius_or_number_of_bins = [32,[32,32],4,[4,4]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sampling_strategy = ['Regular',['Regular','Regular'],None,[None,None]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.number_of_iterations = [[1000,1000,500],[500,500],[500,500],[500,70]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.convergence_threshold = [1e-8,1e-6,1e-8,1e-6] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.convergence_window_size = [12] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.use_histogram_matching = [True,True,True,True] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.shrink_factors = [[8, 4, 2],[2, 1],[8, 4],[2, 1]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.smoothing_sigmas = [[3, 2, 1],[1, 0],[3, 2],[1, 0]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sigma_units = ["vox","vox","vox","vox"] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.use_estimate_learning_rate_once = [False,False,False,False] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.write_composite_transform = True # 
Required for initialize_transforms_per_stage A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.collapse_output_transforms = False # Mutually Exclusive with initialize_transforms_per_stage A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.initialize_transforms_per_stage = True ## NO NEED FOR THIS A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.save_state = 'SavedInternalSyNState.h5' A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.output_transform_prefix = malf_atlas_subject+'_ToSubjectPreMALF_SyN' A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.winsorize_lower_quantile = 0.01 A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.winsorize_upper_quantile = 0.99 A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.output_warped_image = malf_atlas_subject + '_2subject.nii.gz' ## NO NEED FOR THIS A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlas.nii.gz' A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.float = True ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons UseRegistrationMasking = True if UseRegistrationMasking == True: from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto fixedROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(), name="fixedROIAUTOMask_"+malf_atlas_subject) fixedROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize=10 fixedROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz" movingROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(), name="movingROIAUTOMask_"+malf_atlas_subject) fixedROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize=10 movingROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz" MALFWF.connect(inputsSpec, 'subj_t1_image',fixedROIAuto[malf_atlas_subject],'inputVolume') MALFWF.connect(malfAtlases[malf_atlas_subject], 't1', 
movingROIAuto[malf_atlas_subject],'inputVolume') MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'fixed_image_mask') MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'moving_image_mask') MALFWF.connect(BLICreator[malf_atlas_subject],'outputTransformFilename', A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'initial_moving_transform') """ multimodal ants registration if t2 exists """ sessionMakeMultimodalInput[malf_atlas_subject] = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2'], output_names=['outFNs']), run_without_submitting=True, name="sessionMakeMultimodalInput_"+malf_atlas_subject) MALFWF.connect(inputsSpec, 'subj_t1_image', sessionMakeMultimodalInput[malf_atlas_subject], 'inFN1') if not onlyT1: MALFWF.connect(inputsSpec, 'subj_t2_image', sessionMakeMultimodalInput[malf_atlas_subject], 'inFN2') else: pass atlasMakeMultimodalInput[malf_atlas_subject] = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2'], output_names=['outFNs']), run_without_submitting=True, name="atlasMakeMultimodalInput"+malf_atlas_subject) MALFWF.connect(malfAtlases[malf_atlas_subject], 't1', atlasMakeMultimodalInput[malf_atlas_subject], 'inFN1') if not onlyT1: MALFWF.connect(malfAtlases[malf_atlas_subject], 't2', atlasMakeMultimodalInput[malf_atlas_subject], 'inFN2') else: pass MALFWF.connect(sessionMakeMultimodalInput[malf_atlas_subject], 'outFNs', A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'fixed_image') MALFWF.connect(atlasMakeMultimodalInput[malf_atlas_subject], 'outFNs', A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'moving_image') MALFWF.connect(A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'warped_image', warpedAtlasT1MergeNode,'in'+str(malf_atlas_mergeindex) ) ### Original labelmap resampling labelMapResample[malf_atlas_subject] = 
pe.Node(interface=ants.ApplyTransforms(),name="WLABEL_"+malf_atlas_subject) many_cpu_labelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,1,1,1), 'overwrite': True} labelMapResample[malf_atlas_subject].plugin_args = many_cpu_labelMapResample_options_dictionary labelMapResample[malf_atlas_subject].inputs.dimension=3 labelMapResample[malf_atlas_subject].inputs.output_image=malf_atlas_subject+'_2_subj_lbl.nii.gz' labelMapResample[malf_atlas_subject].inputs.interpolation='MultiLabel' labelMapResample[malf_atlas_subject].inputs.default_value=0 labelMapResample[malf_atlas_subject].inputs.invert_transform_flags=[False] MALFWF.connect( A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'composite_transform', labelMapResample[malf_atlas_subject],'transforms') MALFWF.connect( inputsSpec, 'subj_t1_image', labelMapResample[malf_atlas_subject],'reference_image') MALFWF.connect( malfAtlases[malf_atlas_subject], 'label', labelMapResample[malf_atlas_subject],'input_image') MALFWF.connect(labelMapResample[malf_atlas_subject],'output_image',warpedAtlasLblMergeNode,'in'+str(malf_atlas_mergeindex) ) ### New labelmap resampling NewlabelMapResample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),name="FSWM_WLABEL_"+malf_atlas_subject) many_cpu_NewlabelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,1,1,1), 'overwrite': True} NewlabelMapResample[malf_atlas_subject].plugin_args = many_cpu_NewlabelMapResample_options_dictionary NewlabelMapResample[malf_atlas_subject].inputs.dimension=3 NewlabelMapResample[malf_atlas_subject].inputs.output_image=malf_atlas_subject+'fswm_2_subj_lbl.nii.gz' NewlabelMapResample[malf_atlas_subject].inputs.interpolation='MultiLabel' NewlabelMapResample[malf_atlas_subject].inputs.default_value=0 NewlabelMapResample[malf_atlas_subject].inputs.invert_transform_flags=[False] MALFWF.connect( A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'composite_transform', 
NewlabelMapResample[malf_atlas_subject],'transforms') MALFWF.connect( inputsSpec, 'subj_t1_image', NewlabelMapResample[malf_atlas_subject],'reference_image') MALFWF.connect( malfAtlases[malf_atlas_subject], 'label', NewlabelMapResample[malf_atlas_subject],'input_image') MALFWF.connect(NewlabelMapResample[malf_atlas_subject],'output_image',NewwarpedAtlasLblMergeNode,'in'+str(malf_atlas_mergeindex) ) malf_atlas_mergeindex += 1 ## Now work on cleaning up the label maps from FixLabelMapsTools import FixLabelMapFromNeuromorphemetrics2012 from FixLabelMapsTools import RecodeLabelMap ### Original NeuroMorphometrica merged fusion jointFusion = pe.Node(interface=ants.JointFusion(),name="JointFusion") many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,8,4,4), 'overwrite': True} jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary jointFusion.inputs.dimension=3 jointFusion.inputs.modalities=1 jointFusion.inputs.method='Joint[0.1,2]' jointFusion.inputs.output_label_image='MALF_HDAtlas20_2015_label.nii.gz' MALFWF.connect(warpedAtlasT1MergeNode,'out',jointFusion,'warped_intensity_images') MALFWF.connect(warpedAtlasLblMergeNode,'out',jointFusion,'warped_label_images') MALFWF.connect(inputsSpec, 'subj_t1_image',jointFusion,'target_image') MALFWF.connect(jointFusion, 'output_label_image', outputsSpec,'MALF_HDAtlas20_2015_label') ## post processing of jointfusion injectSurfaceCSFandVBIntoLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012, input_names=['fusionFN','FixedHeadFN','LeftHemisphereFN','outFN', 'OUT_DICT'], output_names=['fixedFusionLabelFN']), name="injectSurfaceCSFandVBIntoLabelMap") injectSurfaceCSFandVBIntoLabelMap.inputs.outFN = 'MALF_HDAtlas20_2015_CSFVBInjected_label.nii.gz' FREESURFER_DICT = { 'BRAINSTEM': 16, 'RH_CSF':24, 'LH_CSF':24, 'BLOOD': 15000, 'UNKNOWN': 999, 'CONNECTED': [11,12,13,9,17,26,50,51,52,48,53,58] } injectSurfaceCSFandVBIntoLabelMap.inputs.OUT_DICT = FREESURFER_DICT 
MALFWF.connect(jointFusion, 'output_label_image', injectSurfaceCSFandVBIntoLabelMap, 'fusionFN') MALFWF.connect(inputsSpec, 'subj_fixed_head_labels', injectSurfaceCSFandVBIntoLabelMap, 'FixedHeadFN') MALFWF.connect(inputsSpec, 'subj_left_hemisphere', injectSurfaceCSFandVBIntoLabelMap, 'LeftHemisphereFN') ## We need to recode values to ensure that the labels match FreeSurer as close as possible by merging ## some labels together to standard FreeSurfer confenventions (i.e. for WMQL) RECODE_LABELS_2_Standard_FSWM = [ (15071,47),(15072,47),(15073,47),(15145,1011),(15157,1011),(15161,1011), (15179,1012),(15141,1014),(15151,1017),(15163,1018),(15165,1019),(15143,1027), (15191,1028),(15193,1028),(15185,1030),(15201,1030),(15175,1031),(15195,1031), (15173,1035),(15144,2011),(15156,2011),(15160,2011),(15178,2012),(15140,2014), (15150,2017),(15162,2018),(15164,2019),(15142,2027),(15190,2028),(15192,2028), (15184,2030),(15174,2031),(15194,2031),(15172,2035),(15200,2030)] ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE): RecodeToStandardFSWM = pe.Node(Function(function=RecodeLabelMap, input_names=['InputFileName','OutputFileName','RECODE_TABLE'], output_names=['OutputFileName']), name="RecodeToStandardFSWM") RecodeToStandardFSWM.inputs.RECODE_TABLE = RECODE_LABELS_2_Standard_FSWM RecodeToStandardFSWM.inputs.OutputFileName = 'MALF_HDAtlas20_2015_fs_standard_label.nii.gz' MALFWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN',RecodeToStandardFSWM,'InputFileName') MALFWF.connect(injectSurfaceCSFandVBIntoLabelMap,'fixedFusionLabelFN',outputsSpec,'MALF_HDAtlas20_2015_CSFVBInjected_label') MALFWF.connect(RecodeToStandardFSWM,'OutputFileName',outputsSpec,'MALF_HDAtlas20_2015_fs_standard_label') ## MALF_SNAPSHOT_WRITER for Segmented result checking: MALF_SNAPSHOT_WRITERNodeName = "MALF_ExtendedMALF_SNAPSHOT_WRITER" MALF_SNAPSHOT_WRITER = pe.Node(interface=BRAINSSnapShotWriter(), name=MALF_SNAPSHOT_WRITERNodeName) 
MALF_SNAPSHOT_WRITER.inputs.outputFilename = 'MALF_HDAtlas20_2015_CSFVBInjected_label.png' # output specification MALF_SNAPSHOT_WRITER.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0] MALF_SNAPSHOT_WRITER.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22] MALFWF.connect([(inputsSpec, MALF_SNAPSHOT_WRITER, [( 'subj_t1_image','inputVolumes')]), (injectSurfaceCSFandVBIntoLabelMap, MALF_SNAPSHOT_WRITER, [('fixedFusionLabelFN', 'inputBinaryVolumes')]) ]) MALFWF.connect(MALF_SNAPSHOT_WRITER,'outputFilename',outputsSpec,'MALF_extended_snapshot') ## Lobar Pacellation by recoding if master_config['relabel2lobes_filename'] != None: print "Generate relabeled version based on {0}".format(master_config['relabel2lobes_filename']) RECODE_LABELS_2_LobarPacellation = readRecodingList( master_config['relabel2lobes_filename'] ) RecordToFSLobes = pe.Node(Function(function=RecodeLabelMap, input_names=['InputFileName','OutputFileName','RECODE_TABLE'], output_names=['OutputFileName']), name="RecordToFSLobes") RecordToFSLobes.inputs.RECODE_TABLE = RECODE_LABELS_2_LobarPacellation RecordToFSLobes.inputs.OutputFileName = 'MALF_HDAtlas20_2015_lobar_label.nii.gz' MALFWF.connect(RecodeToStandardFSWM, 'OutputFileName',RecordToFSLobes,'InputFileName') MALFWF.connect(RecordToFSLobes,'OutputFileName',outputsSpec,'MALF_HDAtlas20_2015_lobar_label') return MALFWF
def segmentation(projectid, subjectid, sessionid, master_config, onlyT1=True, pipeline_name=''): import os.path import nipype.pipeline.engine as pe import nipype.interfaces.io as nio from nipype.interfaces import ants from nipype.interfaces.utility import IdentityInterface, Function, Merge # Set universal pipeline options from nipype import config config.update_config(master_config) assert config.get('execution', 'plugin') == master_config['execution']['plugin'] from PipeLineFunctionHelpers import ClipT1ImageWithBrainMask from WorkupT1T2BRAINSCut import CreateBRAINSCutWorkflow from utilities.distributed import modify_qsub_args from SEMTools import BRAINSSnapShotWriter baw200 = pe.Workflow(name=pipeline_name) # HACK: print for debugging for key, itme in master_config.items(): print "-" * 30 print key, ":", itme print "-" * 30 #END HACK inputsSpec = pe.Node(interface=IdentityInterface(fields=['t1_average', 't2_average', 'template_t1', 'hncma-atlas', 'LMIatlasToSubject_tx', 'inputLabels', 'inputHeadLabels', 'posteriorImages', 'TissueClassifyatlasToSubjectInverseTransform', 'UpdatedPosteriorsList']), run_without_submitting=True, name='inputspec') # outputsSpec = pe.Node(interface=IdentityInterface(fields=[...]), # run_without_submitting=True, name='outputspec') currentClipT1ImageWithBrainMaskName = 'ClipT1ImageWithBrainMask_' + str(subjectid) + "_" + str(sessionid) ClipT1ImageWithBrainMaskNode = pe.Node(interface=Function(function=ClipT1ImageWithBrainMask, input_names=['t1_image', 'brain_labels', 'clipped_file_name'], output_names=['clipped_file']), name=currentClipT1ImageWithBrainMaskName) ClipT1ImageWithBrainMaskNode.inputs.clipped_file_name = 'clipped_from_BABC_labels_t1.nii.gz' baw200.connect([(inputsSpec, ClipT1ImageWithBrainMaskNode, [('t1_average', 't1_image'), ('inputLabels', 'brain_labels')])]) currentAtlasToSubjectantsRegistration = 'AtlasToSubjectANTsRegistration_' + str(subjectid) + "_" + str(sessionid) AtlasToSubjectantsRegistration = 
pe.Node(interface=ants.Registration(), name=currentAtlasToSubjectantsRegistration) AtlasToSubjectantsRegistration.inputs.dimension = 3 AtlasToSubjectantsRegistration.inputs.transforms = ["Affine", "SyN"] AtlasToSubjectantsRegistration.inputs.transform_parameters = [[0.1], [0.15, 3.0, 0.0]] AtlasToSubjectantsRegistration.inputs.metric = ['Mattes', 'CC'] AtlasToSubjectantsRegistration.inputs.sampling_strategy = ['Regular', None] AtlasToSubjectantsRegistration.inputs.sampling_percentage = [1.0, 1.0] AtlasToSubjectantsRegistration.inputs.metric_weight = [1.0, 1.0] AtlasToSubjectantsRegistration.inputs.radius_or_number_of_bins = [32, 4] AtlasToSubjectantsRegistration.inputs.number_of_iterations = [[1000, 1000, 1000], [10000, 500, 500, 200]] AtlasToSubjectantsRegistration.inputs.convergence_threshold = [5e-7, 5e-7] AtlasToSubjectantsRegistration.inputs.convergence_window_size = [25, 25] AtlasToSubjectantsRegistration.inputs.use_histogram_matching = [True, True] AtlasToSubjectantsRegistration.inputs.shrink_factors = [[4, 2, 1], [5, 4, 2, 1]] AtlasToSubjectantsRegistration.inputs.smoothing_sigmas = [[4, 2, 0], [5, 4, 2, 0]] AtlasToSubjectantsRegistration.inputs.sigma_units = ["vox","vox"] AtlasToSubjectantsRegistration.inputs.use_estimate_learning_rate_once = [False, False] AtlasToSubjectantsRegistration.inputs.write_composite_transform = True AtlasToSubjectantsRegistration.inputs.collapse_output_transforms = True AtlasToSubjectantsRegistration.inputs.output_transform_prefix = 'AtlasToSubject_' AtlasToSubjectantsRegistration.inputs.winsorize_lower_quantile = 0.025 AtlasToSubjectantsRegistration.inputs.winsorize_upper_quantile = 0.975 AtlasToSubjectantsRegistration.inputs.collapse_linear_transforms_to_fixed_image_header = False AtlasToSubjectantsRegistration.inputs.output_warped_image = 'atlas2subject.nii.gz' AtlasToSubjectantsRegistration.inputs.output_inverse_warped_image = 'subject2atlas.nii.gz' baw200.connect([(inputsSpec, AtlasToSubjectantsRegistration, 
[('LMIatlasToSubject_tx', 'initial_moving_transform'), ('t1_average', 'fixed_image'), ('template_t1', 'moving_image')]) ]) myLocalSegWF = CreateBRAINSCutWorkflow(projectid, subjectid, sessionid, master_config['queue'], master_config['long_q'], t1Only=onlyT1) MergeStage2AverageImagesName = "99_mergeAvergeStage2Images_" + str(sessionid) MergeStage2AverageImages = pe.Node(interface=Merge(2), run_without_submitting=True, name=MergeStage2AverageImagesName) baw200.connect([(inputsSpec, myLocalSegWF, [('t1_average', 'inputspec.T1Volume'), ('posteriorImages', "inputspec.posteriorDictionary"), ('inputLabels', 'inputspec.RegistrationROI'),]), (inputsSpec, MergeStage2AverageImages, [('t1_average', 'in1')]), (AtlasToSubjectantsRegistration, myLocalSegWF, [('composite_transform', 'inputspec.atlasToSubjectTransform')]) ]) if not onlyT1: baw200.connect([(inputsSpec, myLocalSegWF, [('t2_average', 'inputspec.T2Volume')]), (inputsSpec, MergeStage2AverageImages, [('t2_average', 'in2')])]) file_count = 15 # Count of files to merge into MergeSessionSubjectToAtlas else: file_count = 14 # Count of files to merge into MergeSessionSubjectToAtlas ## NOTE: Element 0 of AccumulatePriorsList is the accumulated GM tissue # baw200.connect([(AccumulateLikeTissuePosteriorsNode, myLocalSegWF, # [(('AccumulatePriorsList', getListIndex, 0), "inputspec.TotalGM")]), # ]) ### Now define where the final organized outputs should go. 
DataSink = pe.Node(nio.DataSink(), name="CleanedDenoisedSegmentation_DS_" + str(subjectid) + "_" + str(sessionid)) DataSink.overwrite = master_config['ds_overwrite'] DataSink.inputs.base_directory = master_config['resultdir'] # DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'BRAINSCut') # DataSink.inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid) DataSink.inputs.substitutions = [('Segmentations', os.path.join(projectid, subjectid, sessionid, 'CleanedDenoisedRFSegmentations')), ('subjectANNLabel_', ''), ('ANNContinuousPrediction', ''), ('subject.nii.gz', '.nii.gz'), ('_seg.nii.gz', '_seg.nii.gz'), ('.nii.gz', '_seg.nii.gz'), ('_seg_seg', '_seg')] baw200.connect([(myLocalSegWF, DataSink, [('outputspec.outputBinaryLeftCaudate', 'Segmentations.@LeftCaudate'), ('outputspec.outputBinaryRightCaudate', 'Segmentations.@RightCaudate'), ('outputspec.outputBinaryLeftHippocampus', 'Segmentations.@LeftHippocampus'), ('outputspec.outputBinaryRightHippocampus', 'Segmentations.@RightHippocampus'), ('outputspec.outputBinaryLeftPutamen', 'Segmentations.@LeftPutamen'), ('outputspec.outputBinaryRightPutamen', 'Segmentations.@RightPutamen'), ('outputspec.outputBinaryLeftThalamus', 'Segmentations.@LeftThalamus'), ('outputspec.outputBinaryRightThalamus', 'Segmentations.@RightThalamus'), ('outputspec.outputBinaryLeftAccumben', 'Segmentations.@LeftAccumben'), ('outputspec.outputBinaryRightAccumben', 'Segmentations.@RightAccumben'), ('outputspec.outputBinaryLeftGlobus', 'Segmentations.@LeftGlobus'), ('outputspec.outputBinaryRightGlobus', 'Segmentations.@RightGlobus'), ('outputspec.outputLabelImageName', 'Segmentations.@LabelImageName'), ('outputspec.outputCSVFileName', 'Segmentations.@CSVFileName')]), # (myLocalSegWF, DataSink, [('outputspec.cleaned_labels', 'Segmentations.@cleaned_labels')]) ]) MergeStage2BinaryVolumesName = "99_MergeStage2BinaryVolumes_" + str(sessionid) MergeStage2BinaryVolumes 
= pe.Node(interface=Merge(12), run_without_submitting=True, name=MergeStage2BinaryVolumesName) baw200.connect([(myLocalSegWF, MergeStage2BinaryVolumes, [('outputspec.outputBinaryLeftAccumben', 'in1'), ('outputspec.outputBinaryLeftCaudate', 'in2'), ('outputspec.outputBinaryLeftPutamen', 'in3'), ('outputspec.outputBinaryLeftGlobus', 'in4'), ('outputspec.outputBinaryLeftThalamus', 'in5'), ('outputspec.outputBinaryLeftHippocampus', 'in6'), ('outputspec.outputBinaryRightAccumben', 'in7'), ('outputspec.outputBinaryRightCaudate', 'in8'), ('outputspec.outputBinaryRightPutamen', 'in9'), ('outputspec.outputBinaryRightGlobus', 'in10'), ('outputspec.outputBinaryRightThalamus', 'in11'), ('outputspec.outputBinaryRightHippocampus', 'in12')]) ]) ## SnapShotWriter for Segmented result checking: SnapShotWriterNodeName = "SnapShotWriter_" + str(sessionid) SnapShotWriter = pe.Node(interface=BRAINSSnapShotWriter(), name=SnapShotWriterNodeName) SnapShotWriter.inputs.outputFilename = 'snapShot' + str(sessionid) + '.png' # output specification SnapShotWriter.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0] SnapShotWriter.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22] baw200.connect([(MergeStage2AverageImages, SnapShotWriter, [('out', 'inputVolumes')]), (MergeStage2BinaryVolumes, SnapShotWriter, [('out', 'inputBinaryVolumes')]), (SnapShotWriter, DataSink, [('outputFilename', 'Segmentations.@outputSnapShot')]) ]) currentAntsLabelWarpToSubject = 'AntsLabelWarpToSubject' + str(subjectid) + "_" + str(sessionid) AntsLabelWarpToSubject = pe.Node(interface=ants.ApplyTransforms(), name=currentAntsLabelWarpToSubject) AntsLabelWarpToSubject.inputs.dimension = 3 AntsLabelWarpToSubject.inputs.output_image = 'warped_hncma_atlas_seg.nii.gz' AntsLabelWarpToSubject.inputs.interpolation = "MultiLabel" baw200.connect([(AtlasToSubjectantsRegistration, AntsLabelWarpToSubject, [('composite_transform', 'transforms')]), (inputsSpec, AntsLabelWarpToSubject, [('t1_average', 
'reference_image'), ('hncma-atlas', 'input_image')]) ]) ##### ### Now define where the final organized outputs should go. AntsLabelWarpedToSubject_DSName = "AntsLabelWarpedToSubject_DS_" + str(sessionid) AntsLabelWarpedToSubject_DS = pe.Node(nio.DataSink(), name=AntsLabelWarpedToSubject_DSName) AntsLabelWarpedToSubject_DS.overwrite = master_config['ds_overwrite'] AntsLabelWarpedToSubject_DS.inputs.base_directory = master_config['resultdir'] AntsLabelWarpedToSubject_DS.inputs.substitutions = [('AntsLabelWarpedToSubject', os.path.join(projectid, subjectid, sessionid, 'AntsLabelWarpedToSubject'))] baw200.connect([(AntsLabelWarpToSubject, AntsLabelWarpedToSubject_DS, [('output_image', 'AntsLabelWarpedToSubject')])]) MergeSessionSubjectToAtlasName = "99_MergeSessionSubjectToAtlas_" + str(sessionid) MergeSessionSubjectToAtlas = pe.Node(interface=Merge(file_count), run_without_submitting=True, name=MergeSessionSubjectToAtlasName) baw200.connect([(myLocalSegWF, MergeSessionSubjectToAtlas, [('outputspec.outputBinaryLeftAccumben', 'in1'), ('outputspec.outputBinaryLeftCaudate', 'in2'), ('outputspec.outputBinaryLeftPutamen', 'in3'), ('outputspec.outputBinaryLeftGlobus', 'in4'), ('outputspec.outputBinaryLeftThalamus', 'in5'), ('outputspec.outputBinaryLeftHippocampus', 'in6'), ('outputspec.outputBinaryRightAccumben', 'in7'), ('outputspec.outputBinaryRightCaudate', 'in8'), ('outputspec.outputBinaryRightPutamen', 'in9'), ('outputspec.outputBinaryRightGlobus', 'in10'), ('outputspec.outputBinaryRightThalamus', 'in11'), ('outputspec.outputBinaryRightHippocampus', 'in12')]), # (FixWMPartitioningNode, MergeSessionSubjectToAtlas, [('UpdatedPosteriorsList', 'in13')]), (inputsSpec, MergeSessionSubjectToAtlas, [('UpdatedPosteriorsList', 'in13')]), (inputsSpec, MergeSessionSubjectToAtlas, [('t1_average', 'in14')]) ]) if not onlyT1: assert file_count == 15 baw200.connect([(inputsSpec, MergeSessionSubjectToAtlas, [('t2_average', 'in15')])]) LinearSubjectToAtlasANTsApplyTransformsName = 
'LinearSubjectToAtlasANTsApplyTransforms_' + str(sessionid) LinearSubjectToAtlasANTsApplyTransforms = pe.MapNode(interface=ants.ApplyTransforms(), iterfield=['input_image'], name=LinearSubjectToAtlasANTsApplyTransformsName) LinearSubjectToAtlasANTsApplyTransforms.inputs.interpolation = 'Linear' baw200.connect([(AtlasToSubjectantsRegistration, LinearSubjectToAtlasANTsApplyTransforms, [('inverse_composite_transform', 'transforms')]), (inputsSpec, LinearSubjectToAtlasANTsApplyTransforms, [('template_t1', 'reference_image')]), (MergeSessionSubjectToAtlas, LinearSubjectToAtlasANTsApplyTransforms, [('out', 'input_image')]) ]) MergeMultiLabelSessionSubjectToAtlasName = "99_MergeMultiLabelSessionSubjectToAtlas_" + str(sessionid) MergeMultiLabelSessionSubjectToAtlas = pe.Node(interface=Merge(2), run_without_submitting=True, name=MergeMultiLabelSessionSubjectToAtlasName) baw200.connect([(inputsSpec, MergeMultiLabelSessionSubjectToAtlas, [('inputLabels', 'in1'), ('inputHeadLabels', 'in2')]) ]) ### This is taking this sessions RF label map back into NAC atlas space. #{ MultiLabelSubjectToAtlasANTsApplyTransformsName = 'MultiLabelSubjectToAtlasANTsApplyTransforms_' + str(sessionid) + '_map' MultiLabelSubjectToAtlasANTsApplyTransforms = pe.MapNode(interface=ants.ApplyTransforms(), iterfield=['input_image'], name=MultiLabelSubjectToAtlasANTsApplyTransformsName) MultiLabelSubjectToAtlasANTsApplyTransforms.inputs.interpolation = 'MultiLabel' baw200.connect([(AtlasToSubjectantsRegistration, MultiLabelSubjectToAtlasANTsApplyTransforms, [('inverse_composite_transform', 'transforms')]), (inputsSpec, MultiLabelSubjectToAtlasANTsApplyTransforms, [('template_t1', 'reference_image')]), (MergeMultiLabelSessionSubjectToAtlas, MultiLabelSubjectToAtlasANTsApplyTransforms, [('out', 'input_image')]) ]) #} ### Now we must take the sessions to THIS SUBJECTS personalized atlas. #{ #} ### Now define where the final organized outputs should go. 
Subj2Atlas_DSName = "SubjectToAtlas_DS_" + str(sessionid) Subj2Atlas_DS = pe.Node(nio.DataSink(), name=Subj2Atlas_DSName) Subj2Atlas_DS.overwrite = master_config['ds_overwrite'] Subj2Atlas_DS.inputs.base_directory = master_config['resultdir'] Subj2Atlas_DS.inputs.regexp_substitutions = [(r'_LinearSubjectToAtlasANTsApplyTransforms_[^/]*', r'' + sessionid + '/')] baw200.connect([(LinearSubjectToAtlasANTsApplyTransforms, Subj2Atlas_DS, [('output_image', 'SubjectToAtlasWarped.@linear_output_images')])]) Subj2AtlasTransforms_DSName = "SubjectToAtlasTransforms_DS_" + str(sessionid) Subj2AtlasTransforms_DS = pe.Node(nio.DataSink(), name=Subj2AtlasTransforms_DSName) Subj2AtlasTransforms_DS.overwrite = master_config['ds_overwrite'] Subj2AtlasTransforms_DS.inputs.base_directory = master_config['resultdir'] Subj2AtlasTransforms_DS.inputs.regexp_substitutions = [(r'SubjectToAtlasWarped', r'SubjectToAtlasWarped/' + sessionid + '/')] baw200.connect([(AtlasToSubjectantsRegistration, Subj2AtlasTransforms_DS, [('composite_transform', 'SubjectToAtlasWarped.@composite_transform'), ('inverse_composite_transform', 'SubjectToAtlasWarped.@inverse_composite_transform')])]) # baw200.connect([(MultiLabelSubjectToAtlasANTsApplyTransforms, Subj2Atlas_DS, [('output_image', 'SubjectToAtlasWarped.@multilabel_output_images')])]) if master_config['execution']['plugin'] == 'SGE': # for some nodes, the qsub call needs to be modified on the cluster AtlasToSubjectantsRegistration.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True, 'qsub_args': modify_qsub_args(master_config['queue'], '9000M', 4, hard=False)} SnapShotWriter.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True, 'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1, 1, hard=False)} LinearSubjectToAtlasANTsApplyTransforms.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True, 'qsub_args': 
modify_qsub_args(master_config['queue'], '1000M', 1, hard=True)} MultiLabelSubjectToAtlasANTsApplyTransforms.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True, 'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1, hard=True)} return baw200
def generate_single_session_template_WF(projectid, subjectid, sessionid, onlyT1, master_config, phase, interpMode,
                                        pipeline_name, doDenoise=True):
    """Build the per-session BAW autoworkup Nipype workflow (``baw201``).

    This is the main entry point for processing one session's T1 (and optionally
    T2/PD/FL) data.  Depending on the entries in ``master_config['components']``
    it conditionally wires in denoising, landmark initialization (BCD),
    tissue classification (BABC), BRAINSCut segmentation, atlas-to-subject
    label-map warping, and MALF whole-brain labeling stages.

    Parameters
    ----------
    projectid, subjectid, sessionid :
        Identifiers used to name nodes and to build the DataSink container path.
    onlyT1 : bool
        True when no T2 data is available; controls T2-dependent connections.
    master_config : dict
        Experiment configuration (components list, queue, result/cache dirs, ...).
    phase : str
        One of 'atlas-based-reference' or 'subject-based-reference' (asserted below).
    interpMode :
        Interpolation mode forwarded to the landmark-initialize and
        tissue-classify sub-workflows.
    pipeline_name : str
        Name given to the top-level ``pe.Workflow``.
    doDenoise : bool, optional
        When True (default), run UnbiasedNonLocalMeans denoising on all inputs
        before any other processing.

    Returns
    -------
    pe.Workflow
        The fully wired ``baw201`` workflow.
    """
    # Historical guard kept for reference (superseded by the component asserts below):
    # if not 'landmark' in master_config['components'] or not 'auxlmk' in master_config['components'] or not 'tissue_classify' in master_config['components']:
    #     print "Baseline DataSink requires 'AUXLMK' and/or 'TISSUE_CLASSIFY'!!!"
    #     raise NotImplementedError
    # master_config['components'].append('auxlmk')
    # master_config['components'].append('tissue_classify')

    # Validate the requested phase and inter-component dependencies up front.
    assert phase in ['atlas-based-reference',
                     'subject-based-reference'], "Unknown phase! Valid entries: 'atlas-based-reference', 'subject-based-reference'"

    if 'tissue_classify' in master_config['components']:
        # tissue_classify consumes the BCD outputs wired up in the 'landmark' section below.
        assert ('landmark' in master_config['components']), "tissue_classify Requires landmark step!"
    # NOT TRUE
    # if 'landmark' in master_config['components']:
    #     assert 'denoise' in master_config['components'], "landmark Requires denoise step!"
    if 'malf_2015_wholebrain' in master_config['components']:
        # MALF consumes BResample['template_leftHemisphere'], created only by warp_atlas_to_subject.
        assert ('warp_atlas_to_subject' in master_config['components']), "malf_2015_wholebrain requires warp_atlas_to_subject!"

    from workflows.atlasNode import MakeAtlasNode

    baw201 = pe.Workflow(name=pipeline_name)

    # ------------------------------------------------------------------
    # Workflow-level input/output identity nodes.
    # ------------------------------------------------------------------
    inputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasLandmarkFilename', 'atlasWeightFilename',
                                                             'LLSModel', 'inputTemplateModel', 'template_t1',
                                                             'atlasDefinition', 'T1s', 'T2s', 'PDs', 'FLs',
                                                             'OTHERs', 'hncma_atlas',
                                                             'template_rightHemisphere',
                                                             'template_leftHemisphere',
                                                             'template_WMPM2_labels',
                                                             'template_nac_labels',
                                                             'template_ventricles']),
                         run_without_submitting=True, name='inputspec')

    outputsSpec = pe.Node(interface=IdentityInterface(fields=['t1_average', 't2_average', 'pd_average', 'fl_average',
                                                              'posteriorImages', 'outputLabels', 'outputHeadLabels',
                                                              'atlasToSubjectTransform',
                                                              'atlasToSubjectInverseTransform',
                                                              'atlasToSubjectRegistrationState',
                                                              'BCD_ACPC_T1_CROPPED',
                                                              'outputLandmarksInACPCAlignedSpace',
                                                              'outputLandmarksInInputSpace',
                                                              'output_tx', 'LMIatlasToSubject_tx',
                                                              'writeBranded2DImage',
                                                              'brainStemMask',
                                                              'UpdatedPosteriorsList'  # Longitudinal
                                                              ]),
                          run_without_submitting=True, name='outputspec')

    # Single DataSink for this session; container groups results by project/subject/session.
    dsName = "{0}_ds_{1}".format(phase, sessionid)
    DataSink = pe.Node(name=dsName, interface=nio.DataSink())
    DataSink.overwrite = master_config['ds_overwrite']
    DataSink.inputs.container = '{0}/{1}/{2}'.format(projectid, subjectid, sessionid)
    DataSink.inputs.base_directory = master_config['resultdir']

    # ------------------------------------------------------------------
    # Atlas sources.  In the atlas-based phase everything comes from the
    # shared atlas cache; in the subject-based phase, warped per-subject
    # atlases are grabbed from the previous run's result directory.
    # ------------------------------------------------------------------
    atlas_static_directory = master_config['atlascache']
    if master_config['workflow_phase'] == 'atlas-based-reference':
        atlas_warped_directory = master_config['atlascache']
        atlasABCNode_XML = MakeAtlasNode(atlas_warped_directory, 'BABCXMLAtlas_{0}'.format(sessionid),
                                         ['W_BRAINSABCSupport'])
        baw201.connect(atlasABCNode_XML, 'ExtendedAtlasDefinition_xml', inputsSpec, 'atlasDefinition')

        atlasABCNode_W = MakeAtlasNode(atlas_warped_directory, 'BABCAtlas_W{0}'.format(sessionid),
                                       ['W_BRAINSABCSupport', 'W_LabelMapsSupport'])
        baw201.connect([(atlasABCNode_W, inputsSpec,
                         [('hncma_atlas', 'hncma_atlas'),
                          ('template_leftHemisphere', 'template_leftHemisphere'),
                          ('template_rightHemisphere', 'template_rightHemisphere'),
                          ('template_WMPM2_labels', 'template_WMPM2_labels'),
                          ('template_nac_labels', 'template_nac_labels'),
                          ('template_ventricles', 'template_ventricles')]
                         )]
                       )
        ## These landmarks are only relevant for the atlas-based-reference case
        atlasBCDNode_W = MakeAtlasNode(atlas_warped_directory, 'BBCDAtlas_W{0}'.format(sessionid),
                                       ['W_BCDSupport'])
        baw201.connect([(atlasBCDNode_W, inputsSpec,
                         [('template_t1', 'template_t1'),
                          ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'),
                          ]),
                        ])
        ## Needed for both segmentation and template building prep
        atlasBCUTNode_W = MakeAtlasNode(atlas_warped_directory, 'BBCUTAtlas_W{0}'.format(sessionid),
                                        ['W_BRAINSCutSupport'])

    elif master_config['workflow_phase'] == 'subject-based-reference':
        print(master_config['previousresult'])
        atlas_warped_directory = os.path.join(master_config['previousresult'], subjectid, 'Atlas')

        # Grab the subject-specific BRAINSCut probability maps / spherical
        # coordinate images produced by the previous (atlas-based) run.
        atlasBCUTNode_W = pe.Node(interface=nio.DataGrabber(infields=['subject'],
                                                            outfields=["l_accumben_ProbabilityMap",
                                                                       "r_accumben_ProbabilityMap",
                                                                       "l_caudate_ProbabilityMap",
                                                                       "r_caudate_ProbabilityMap",
                                                                       "l_globus_ProbabilityMap",
                                                                       "r_globus_ProbabilityMap",
                                                                       "l_hippocampus_ProbabilityMap",
                                                                       "r_hippocampus_ProbabilityMap",
                                                                       "l_putamen_ProbabilityMap",
                                                                       "r_putamen_ProbabilityMap",
                                                                       "l_thalamus_ProbabilityMap",
                                                                       "r_thalamus_ProbabilityMap",
                                                                       "phi", "rho", "theta"
                                                                       ]),
                                  name='PerSubject_atlasBCUTNode_W')
        atlasBCUTNode_W.inputs.base_directory = master_config['previousresult']
        atlasBCUTNode_W.inputs.subject = subjectid
        atlasBCUTNode_W.inputs.field_template = {
            'l_accumben_ProbabilityMap': '%s/Atlas/AVG_l_accumben_ProbabilityMap.nii.gz',
            'r_accumben_ProbabilityMap': '%s/Atlas/AVG_r_accumben_ProbabilityMap.nii.gz',
            'l_caudate_ProbabilityMap': '%s/Atlas/AVG_l_caudate_ProbabilityMap.nii.gz',
            'r_caudate_ProbabilityMap': '%s/Atlas/AVG_r_caudate_ProbabilityMap.nii.gz',
            'l_globus_ProbabilityMap': '%s/Atlas/AVG_l_globus_ProbabilityMap.nii.gz',
            'r_globus_ProbabilityMap': '%s/Atlas/AVG_r_globus_ProbabilityMap.nii.gz',
            'l_hippocampus_ProbabilityMap': '%s/Atlas/AVG_l_hippocampus_ProbabilityMap.nii.gz',
            'r_hippocampus_ProbabilityMap': '%s/Atlas/AVG_r_hippocampus_ProbabilityMap.nii.gz',
            'l_putamen_ProbabilityMap': '%s/Atlas/AVG_l_putamen_ProbabilityMap.nii.gz',
            'r_putamen_ProbabilityMap': '%s/Atlas/AVG_r_putamen_ProbabilityMap.nii.gz',
            'l_thalamus_ProbabilityMap': '%s/Atlas/AVG_l_thalamus_ProbabilityMap.nii.gz',
            'r_thalamus_ProbabilityMap': '%s/Atlas/AVG_r_thalamus_ProbabilityMap.nii.gz',
            'phi': '%s/Atlas/AVG_phi.nii.gz',
            'rho': '%s/Atlas/AVG_rho.nii.gz',
            'theta': '%s/Atlas/AVG_theta.nii.gz'
        }
        atlasBCUTNode_W.inputs.template_args = {
            'l_accumben_ProbabilityMap': [['subject']],
            'r_accumben_ProbabilityMap': [['subject']],
            'l_caudate_ProbabilityMap': [['subject']],
            'r_caudate_ProbabilityMap': [['subject']],
            'l_globus_ProbabilityMap': [['subject']],
            'r_globus_ProbabilityMap': [['subject']],
            'l_hippocampus_ProbabilityMap': [['subject']],
            'r_hippocampus_ProbabilityMap': [['subject']],
            'l_putamen_ProbabilityMap': [['subject']],
            'r_putamen_ProbabilityMap': [['subject']],
            'l_thalamus_ProbabilityMap': [['subject']],
            'r_thalamus_ProbabilityMap': [['subject']],
            'phi': [['subject']],
            'rho': [['subject']],
            'theta': [['subject']]
        }
        atlasBCUTNode_W.inputs.template = '*'
        atlasBCUTNode_W.inputs.sort_filelist = True
        atlasBCUTNode_W.inputs.raise_on_empty = True  # fail loudly if previous-result files are missing

        # Grab the subject-average template images (BABC XML, label maps, T1, landmarks).
        template_DG = pe.Node(interface=nio.DataGrabber(infields=['subject'],
                                                        outfields=['outAtlasXMLFullPath',
                                                                   'hncma_atlas',
                                                                   'template_leftHemisphere',
                                                                   'template_rightHemisphere',
                                                                   'template_WMPM2_labels',
                                                                   'template_nac_labels',
                                                                   'template_ventricles',
                                                                   'template_t1',
                                                                   'template_landmarks_50Lmks_fcsv'
                                                                   ]),
                              name='Template_DG')
        template_DG.inputs.base_directory = master_config['previousresult']
        template_DG.inputs.subject = subjectid
        template_DG.inputs.field_template = {'outAtlasXMLFullPath': '%s/Atlas/AtlasDefinition_%s.xml',
                                             'hncma_atlas': '%s/Atlas/AVG_hncma_atlas.nii.gz',
                                             'template_leftHemisphere': '%s/Atlas/AVG_template_leftHemisphere.nii.gz',
                                             'template_rightHemisphere': '%s/Atlas/AVG_template_rightHemisphere.nii.gz',
                                             'template_WMPM2_labels': '%s/Atlas/AVG_template_WMPM2_labels.nii.gz',
                                             'template_nac_labels': '%s/Atlas/AVG_template_nac_labels.nii.gz',
                                             'template_ventricles': '%s/Atlas/AVG_template_ventricles.nii.gz',
                                             'template_t1': '%s/Atlas/AVG_T1.nii.gz',
                                             'template_landmarks_50Lmks_fcsv': '%s/Atlas/AVG_LMKS.fcsv',
                                             }
        # outAtlasXMLFullPath needs 'subject' twice: once for the directory, once for the filename.
        template_DG.inputs.template_args = {'outAtlasXMLFullPath': [['subject', 'subject']],
                                            'hncma_atlas': [['subject']],
                                            'template_leftHemisphere': [['subject']],
                                            'template_rightHemisphere': [['subject']],
                                            'template_WMPM2_labels': [['subject']],
                                            'template_nac_labels': [['subject']],
                                            'template_ventricles': [['subject']],
                                            'template_t1': [['subject']],
                                            'template_landmarks_50Lmks_fcsv': [['subject']]
                                            }
        template_DG.inputs.template = '*'
        template_DG.inputs.sort_filelist = True
        template_DG.inputs.raise_on_empty = True

        baw201.connect(template_DG, 'outAtlasXMLFullPath', inputsSpec, 'atlasDefinition')
        baw201.connect([(template_DG, inputsSpec,
                         [  ## Already connected ('template_t1','template_t1'),
                          ('hncma_atlas', 'hncma_atlas'),
                          ('template_leftHemisphere', 'template_leftHemisphere'),
                          ('template_rightHemisphere', 'template_rightHemisphere'),
                          ('template_WMPM2_labels', 'template_WMPM2_labels'),
                          ('template_nac_labels', 'template_nac_labels'),
                          ('template_ventricles', 'template_ventricles')]
                         )]
                       )
        ## These landmarks are only relevant for the atlas-based-reference case
        baw201.connect([(template_DG, inputsSpec,
                         [('template_t1', 'template_t1'),
                          ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'),
                          ]),
                        ])

    else:
        assert 0 == 1, "Invalid workflow type specified for singleSession"

    # Static (unwarped) BCD support files: landmark weights, LLS model, template model.
    atlasBCDNode_S = MakeAtlasNode(atlas_static_directory, 'BBCDAtlas_S{0}'.format(sessionid),
                                   ['S_BCDSupport'])
    baw201.connect([(atlasBCDNode_S, inputsSpec,
                     [('template_weights_50Lmks_wts', 'atlasWeightFilename'),
                      ('LLSModel_50Lmks_h5', 'LLSModel'),
                      ('T1_50Lmks_mdl', 'inputTemplateModel')
                      ]),
                    ])

    # ------------------------------------------------------------------
    # Optional denoising of all input images before any other stage.
    # When disabled, downstream stages read straight from inputsSpec
    # (which exposes the same 'T1s'/'T2s'/... fields).
    # ------------------------------------------------------------------
    if doDenoise:
        print("\ndenoise image filter\n")
        makeDenoiseInImageList = pe.Node(Function(function=MakeOutFileList,
                                                  input_names=['T1List', 'T2List', 'PDList', 'FLList',
                                                               'OtherList', 'postfix', 'PrimaryT1'],
                                                  output_names=['inImageList', 'outImageList', 'imageTypeList']),
                                         run_without_submitting=True, name="99_makeDenoiseInImageList")
        baw201.connect(inputsSpec, 'T1s', makeDenoiseInImageList, 'T1List')
        baw201.connect(inputsSpec, 'T2s', makeDenoiseInImageList, 'T2List')
        baw201.connect(inputsSpec, 'PDs', makeDenoiseInImageList, 'PDList')
        makeDenoiseInImageList.inputs.FLList = []  # an emptyList HACK
        makeDenoiseInImageList.inputs.PrimaryT1 = None  # an emptyList HACK
        makeDenoiseInImageList.inputs.postfix = "_UNM_denoised.nii.gz"
        # HACK baw201.connect( inputsSpec, 'FLList', makeDenoiseInImageList, 'FLList' )
        baw201.connect(inputsSpec, 'OTHERs', makeDenoiseInImageList, 'OtherList')

        print("\nDenoise:\n")
        # One denoise job per input image; outputVolume names come from makeDenoiseInImageList.
        DenoiseInputImgs = pe.MapNode(interface=UnbiasedNonLocalMeans(),
                                      name='denoiseInputImgs',
                                      iterfield=['inputVolume', 'outputVolume'])
        DenoiseInputImgs.inputs.rc = [1, 1, 1]  # search/comparison radii for the UNLM filter
        DenoiseInputImgs.inputs.rs = [4, 4, 4]
        # NOTE(review): memory argument is .2 here vs '200M' in the older baseline
        # variant — presumably the same request in different units; confirm against
        # modify_qsub_args' signature.
        DenoiseInputImgs.plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], .2, 1, 1),
                                        'overwrite': True}
        baw201.connect([(makeDenoiseInImageList, DenoiseInputImgs, [('inImageList', 'inputVolume')]),
                        (makeDenoiseInImageList, DenoiseInputImgs, [('outImageList', 'outputVolume')])
                        ])

        print("\nMerge all T1 and T2 List\n")
        # Re-split the flat denoised file list back into per-modality lists so the
        # node exposes the same field names ('T1s', 'T2s', ...) as inputsSpec.
        makePreprocessingOutList = pe.Node(Function(function=GenerateSeparateImageTypeList,
                                                    input_names=['inFileList', 'inTypeList'],
                                                    output_names=['T1s', 'T2s', 'PDs', 'FLs', 'OtherList']),
                                           run_without_submitting=True, name="99_makePreprocessingOutList")
        baw201.connect(DenoiseInputImgs, 'outputVolume', makePreprocessingOutList, 'inFileList')
        baw201.connect(makeDenoiseInImageList, 'imageTypeList', makePreprocessingOutList, 'inTypeList')
    else:
        makePreprocessingOutList = inputsSpec

    # ------------------------------------------------------------------
    # Landmark initialization (BCD / ACPC alignment).
    # ------------------------------------------------------------------
    if 'landmark' in master_config['components']:
        DoReverseMapping = False  # Set to true for debugging outputs
        if 'auxlmk' in master_config['components']:
            DoReverseMapping = True
        myLocalLMIWF = CreateLandmarkInitializeWorkflow("LandmarkInitialize", interpMode, DoReverseMapping)

        baw201.connect([(makePreprocessingOutList, myLocalLMIWF,
                         [(('T1s', get_list_element, 0), 'inputspec.inputVolume')]),  # first T1 drives BCD
                        (inputsSpec, myLocalLMIWF,
                         [('atlasLandmarkFilename', 'inputspec.atlasLandmarkFilename'),
                          ('atlasWeightFilename', 'inputspec.atlasWeightFilename'),
                          ('LLSModel', 'inputspec.LLSModel'),
                          ('inputTemplateModel', 'inputspec.inputTemplateModel'),
                          ('template_t1', 'inputspec.atlasVolume')]),
                        (myLocalLMIWF, outputsSpec,
                         [('outputspec.outputResampledCroppedVolume', 'BCD_ACPC_T1_CROPPED'),
                          ('outputspec.outputLandmarksInACPCAlignedSpace', 'outputLandmarksInACPCAlignedSpace'),
                          ('outputspec.outputLandmarksInInputSpace', 'outputLandmarksInInputSpace'),
                          ('outputspec.outputTransform', 'output_tx'),
                          ('outputspec.atlasToSubjectTransform', 'LMIatlasToSubject_tx'),
                          ('outputspec.writeBranded2DImage', 'writeBranded2DImage')])
                        ])

        baw201.connect([(outputsSpec, DataSink,  # TODO: change to myLocalLMIWF -> DataSink
                         [('outputLandmarksInACPCAlignedSpace', 'ACPCAlign.@outputLandmarks_ACPC'),
                          ('writeBranded2DImage', 'ACPCAlign.@writeBranded2DImage'),
                          ('BCD_ACPC_T1_CROPPED', 'ACPCAlign.@BCD_ACPC_T1_CROPPED'),
                          ('outputLandmarksInInputSpace', 'ACPCAlign.@outputLandmarks_Input'),
                          ('output_tx', 'ACPCAlign.@output_tx'),
                          ('LMIatlasToSubject_tx', 'ACPCAlign.@LMIatlasToSubject_tx'),
                          ]
                         )
                        ]
                       )

    # ------------------------------------------------------------------
    # Tissue classification (BABC) plus WM-partition fixup, label-map
    # creation, posterior accumulation, and brainstem refinement.
    # ------------------------------------------------------------------
    if 'tissue_classify' in master_config['components']:
        useRegistrationMask = master_config['use_registration_masking']
        myLocalTCWF = CreateTissueClassifyWorkflow("TissueClassify", master_config, interpMode, useRegistrationMask)
        baw201.connect([(makePreprocessingOutList, myLocalTCWF, [('T1s', 'inputspec.T1List')]),
                        (makePreprocessingOutList, myLocalTCWF, [('T2s', 'inputspec.T2List')]),
                        (inputsSpec, myLocalTCWF, [('atlasDefinition', 'inputspec.atlasDefinition'),
                                                   ('template_t1', 'inputspec.atlasVolume'),
                                                   (('T1s', getAllT1sLength), 'inputspec.T1_count'),
                                                   ('PDs', 'inputspec.PDList'),
                                                   ('FLs', 'inputspec.FLList'),
                                                   ('OTHERs', 'inputspec.OtherList')
                                                   ]),
                        (myLocalLMIWF, myLocalTCWF,
                         [('outputspec.outputResampledCroppedVolume', 'inputspec.PrimaryT1'),
                          ('outputspec.atlasToSubjectTransform', 'inputspec.atlasToSubjectInitialTransform')]),
                        (myLocalTCWF, outputsSpec, [('outputspec.t1_average', 't1_average'),
                                                    ('outputspec.t2_average', 't2_average'),
                                                    ('outputspec.pd_average', 'pd_average'),
                                                    ('outputspec.fl_average', 'fl_average'),
                                                    ('outputspec.posteriorImages', 'posteriorImages'),
                                                    ('outputspec.outputLabels', 'outputLabels'),
                                                    ('outputspec.outputHeadLabels', 'outputHeadLabels'),
                                                    ('outputspec.atlasToSubjectTransform', 'atlasToSubjectTransform'),
                                                    ('outputspec.atlasToSubjectInverseTransform',
                                                     'atlasToSubjectInverseTransform'),
                                                    ('outputspec.atlasToSubjectRegistrationState',
                                                     'atlasToSubjectRegistrationState')
                                                    ]),
                        ])

        baw201.connect([(outputsSpec, DataSink,  # TODO: change to myLocalTCWF -> DataSink
                         [(('t1_average', convertToList), 'TissueClassify.@t1'),
                          (('t2_average', convertToList), 'TissueClassify.@t2'),
                          (('pd_average', convertToList), 'TissueClassify.@pd'),
                          (('fl_average', convertToList), 'TissueClassify.@fl')])
                        ])

        # Re-assign WM/GM partitions in the posteriors using the brain mask.
        currentFixWMPartitioningName = "_".join(['FixWMPartitioning', str(subjectid), str(sessionid)])
        FixWMNode = pe.Node(interface=Function(function=FixWMPartitioning,
                                               input_names=['brainMask', 'PosteriorsList'],
                                               output_names=['UpdatedPosteriorsList', 'MatchingFGCodeList',
                                                             'MatchingLabelList', 'nonAirRegionMask']),
                            name=currentFixWMPartitioningName)

        baw201.connect([(myLocalTCWF, FixWMNode, [('outputspec.outputLabels', 'brainMask'),
                                                  (('outputspec.posteriorImages', flattenDict), 'PosteriorsList')]),
                        (FixWMNode, outputsSpec, [('UpdatedPosteriorsList', 'UpdatedPosteriorsList')]),
                        ])

        currentBRAINSCreateLabelMapName = 'BRAINSCreateLabelMapFromProbabilityMaps_' + str(subjectid) + "_" + str(
            sessionid)
        BRAINSCreateLabelMapNode = pe.Node(interface=BRAINSCreateLabelMapFromProbabilityMaps(),
                                           name=currentBRAINSCreateLabelMapName)

        ## TODO: Fix the file names
        BRAINSCreateLabelMapNode.inputs.dirtyLabelVolume = 'fixed_headlabels_seg.nii.gz'
        BRAINSCreateLabelMapNode.inputs.cleanLabelVolume = 'fixed_brainlabels_seg.nii.gz'

        baw201.connect([(FixWMNode, BRAINSCreateLabelMapNode, [('UpdatedPosteriorsList', 'inputProbabilityVolume'),
                                                               ('MatchingFGCodeList', 'foregroundPriors'),
                                                               ('MatchingLabelList', 'priorLabelCodes'),
                                                               ('nonAirRegionMask', 'nonAirRegionMask')]),
                        (BRAINSCreateLabelMapNode, DataSink,
                         [  # brainstem code below replaces this ('cleanLabelVolume', 'TissueClassify.@outputLabels'),
                          ('dirtyLabelVolume', 'TissueClassify.@outputHeadLabels')]),
                        (myLocalTCWF, DataSink,
                         [('outputspec.atlasToSubjectTransform', 'TissueClassify.@atlas2session_tx'),
                          ('outputspec.atlasToSubjectInverseTransform', 'TissueClassify.@atlas2sessionInverse_tx')]),
                        (FixWMNode, DataSink, [('UpdatedPosteriorsList', 'TissueClassify.@posteriors')]),
                        ])

        currentAccumulateLikeTissuePosteriorsName = 'AccumulateLikeTissuePosteriors_' + str(subjectid) + "_" + str(
            sessionid)
        AccumulateLikeTissuePosteriorsNode = pe.Node(interface=Function(function=AccumulateLikeTissuePosteriors,
                                                                        input_names=['posteriorImages'],
                                                                        output_names=['AccumulatePriorsList',
                                                                                      'AccumulatePriorsNames']),
                                                     name=currentAccumulateLikeTissuePosteriorsName)
        baw201.connect([(FixWMNode, AccumulateLikeTissuePosteriorsNode, [('UpdatedPosteriorsList',
                                                                          'posteriorImages')]),
                        (AccumulateLikeTissuePosteriorsNode, DataSink,
                         [('AccumulatePriorsList', 'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir')])])

        """
        brain stem adds on feature
        inputs:
            - landmark (fcsv) file
            - fixed brainlabels seg.nii.gz
        output:
            - complete_brainlabels_seg.nii.gz Segmentation
        """
        myLocalBrainStemWF = CreateBrainstemWorkflow("BrainStem",
                                                     master_config['queue'],
                                                     "complete_brainlabels_seg.nii.gz")

        baw201.connect([(myLocalLMIWF, myLocalBrainStemWF,
                         [('outputspec.outputLandmarksInACPCAlignedSpace', 'inputspec.inputLandmarkFilename')]),
                        (BRAINSCreateLabelMapNode, myLocalBrainStemWF,
                         [('cleanLabelVolume', 'inputspec.inputTissueLabelFilename')])
                        ])

        # NOTE(review): 'ouputTissuelLabelFilename' is misspelled, but it must match the
        # port name exported by CreateBrainstemWorkflow — do not "fix" it here alone.
        baw201.connect(myLocalBrainStemWF, 'outputspec.ouputTissuelLabelFilename', DataSink,
                       'TissueClassify.@complete_brainlabels_seg')

    ###########################
    # ------------------------------------------------------------------
    # Optional BRAINSCut subcortical segmentation.
    # ------------------------------------------------------------------
    do_BRAINSCut_Segmentation = DetermineIfSegmentationShouldBeDone(master_config)
    if do_BRAINSCut_Segmentation:
        from workflows.segmentation import segmentation
        from workflows.WorkupT1T2BRAINSCut import GenerateWFName

        sname = 'segmentation'
        segWF = segmentation(projectid, subjectid, sessionid, master_config, onlyT1, pipeline_name=sname)

        baw201.connect([(inputsSpec, segWF,
                         [('template_t1', 'inputspec.template_t1')
                          ])
                        ])
        baw201.connect([(atlasBCUTNode_W, segWF,
                         [('rho', 'inputspec.rho'),
                          ('phi', 'inputspec.phi'),
                          ('theta', 'inputspec.theta'),
                          ('l_caudate_ProbabilityMap', 'inputspec.l_caudate_ProbabilityMap'),
                          ('r_caudate_ProbabilityMap', 'inputspec.r_caudate_ProbabilityMap'),
                          ('l_hippocampus_ProbabilityMap', 'inputspec.l_hippocampus_ProbabilityMap'),
                          ('r_hippocampus_ProbabilityMap', 'inputspec.r_hippocampus_ProbabilityMap'),
                          ('l_putamen_ProbabilityMap', 'inputspec.l_putamen_ProbabilityMap'),
                          ('r_putamen_ProbabilityMap', 'inputspec.r_putamen_ProbabilityMap'),
                          ('l_thalamus_ProbabilityMap', 'inputspec.l_thalamus_ProbabilityMap'),
                          ('r_thalamus_ProbabilityMap', 'inputspec.r_thalamus_ProbabilityMap'),
                          ('l_accumben_ProbabilityMap', 'inputspec.l_accumben_ProbabilityMap'),
                          ('r_accumben_ProbabilityMap', 'inputspec.r_accumben_ProbabilityMap'),
                          ('l_globus_ProbabilityMap', 'inputspec.l_globus_ProbabilityMap'),
                          ('r_globus_ProbabilityMap', 'inputspec.r_globus_ProbabilityMap')
                          ]
                         )])

        atlasBCUTNode_S = MakeAtlasNode(atlas_static_directory,
                                        'BBCUTAtlas_S{0}'.format(sessionid), ['S_BRAINSCutSupport'])
        baw201.connect(atlasBCUTNode_S, 'trainModelFile_txtD0060NT0060_gz',
                       segWF, 'inputspec.trainModelFile_txtD0060NT0060_gz')

        ## baw201_outputspec = baw201.get_node('outputspec')
        baw201.connect([(myLocalTCWF, segWF,
                         [('outputspec.t1_average', 'inputspec.t1_average'),
                          ('outputspec.atlasToSubjectRegistrationState', 'inputspec.atlasToSubjectRegistrationState'),
                          ('outputspec.outputLabels', 'inputspec.inputLabels'),
                          ('outputspec.posteriorImages', 'inputspec.posteriorImages'),
                          ('outputspec.outputHeadLabels', 'inputspec.inputHeadLabels')
                          ]
                         ),
                        (myLocalLMIWF, segWF,
                         [('outputspec.atlasToSubjectTransform', 'inputspec.LMIatlasToSubject_tx')
                          ]
                         ),
                        (FixWMNode, segWF,
                         [('UpdatedPosteriorsList', 'inputspec.UpdatedPosteriorsList')
                          ]
                         ),
                        ])
        if not onlyT1:
            baw201.connect([(myLocalTCWF, segWF, [('outputspec.t2_average', 'inputspec.t2_average')])])

    # ------------------------------------------------------------------
    # Optional warping of atlas label maps / priors into subject space.
    # ------------------------------------------------------------------
    if 'warp_atlas_to_subject' in master_config['components']:
        ##
        ##~/src/NEP-build/bin/BRAINSResample
        # --warpTransform AtlasToSubjectPreBABC_Composite.h5
        # --inputVolume /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/Atlas/hncma-atlas.nii.gz
        # --referenceVolume /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/singleSession_KID1_KT1/LandmarkInitialize/BROIAuto_cropped/Cropped_BCD_ACPC_Aligned.nii.gz
        # !--outputVolume hncma.nii.gz
        # !--interpolationMode NearestNeighbor
        # !--pixelType short
        ##
        ##

        ## TODO : SHOULD USE BRAINSCut transform that was refined even further!

        BResample = dict()

        # Multi-label atlases: nearest-neighbor resampling preserves label values.
        AtlasLabelMapsToResample = [
            'hncma_atlas',
            'template_WMPM2_labels',
            'template_nac_labels',
        ]
        for atlasImage in AtlasLabelMapsToResample:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                                                 'overwrite': True}
            BResample[atlasImage].inputs.pixelType = 'short'
            BResample[atlasImage].inputs.interpolationMode = 'NearestNeighbor'
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"
            baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume')
            baw201.connect(inputsSpec, atlasImage, BResample[atlasImage], 'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage)

        # Binary masks (stored as distance maps per the inline note): linear resampling.
        AtlasBinaryMapsToResample = [
            'template_rightHemisphere',
            'template_leftHemisphere',
            'template_ventricles']
        for atlasImage in AtlasBinaryMapsToResample:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                                                 'overwrite': True}
            BResample[atlasImage].inputs.pixelType = 'binary'
            BResample[
                atlasImage].inputs.interpolationMode = 'Linear'  ## Conversion to distance map, so use linear to resample distance map
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"
            baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume')
            baw201.connect(inputsSpec, atlasImage, BResample[atlasImage], 'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage)

        # Continuous-valued BRAINSCut priors and spherical coordinates: float + linear.
        BRAINSCutAtlasImages = [
            'rho',
            'phi',
            'theta',
            'l_caudate_ProbabilityMap',
            'r_caudate_ProbabilityMap',
            'l_hippocampus_ProbabilityMap',
            'r_hippocampus_ProbabilityMap',
            'l_putamen_ProbabilityMap',
            'r_putamen_ProbabilityMap',
            'l_thalamus_ProbabilityMap',
            'r_thalamus_ProbabilityMap',
            'l_accumben_ProbabilityMap',
            'r_accumben_ProbabilityMap',
            'l_globus_ProbabilityMap',
            'r_globus_ProbabilityMap'
        ]
        for atlasImage in BRAINSCutAtlasImages:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BCUTBRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                                                 'overwrite': True}
            BResample[atlasImage].inputs.pixelType = 'float'
            BResample[
                atlasImage].inputs.interpolationMode = 'Linear'  ## Conversion to distance map, so use linear to resample distance map
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"
            baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume')
            baw201.connect(atlasBCUTNode_W, atlasImage, BResample[atlasImage], 'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage)

        # Split WM into left/right hemispheres using the warped hemisphere masks
        # and the warped HNCMA atlas (ventricle mask).
        WhiteMatterHemisphereNode = pe.Node(interface=Function(function=CreateLeftRightWMHemispheres,
                                                               input_names=['BRAINLABELSFile',
                                                                            'HDCMARegisteredVentricleMaskFN',
                                                                            'LeftHemisphereMaskName',
                                                                            'RightHemisphereMaskName',
                                                                            'WM_LeftHemisphereFileName',
                                                                            'WM_RightHemisphereFileName'],
                                                               output_names=['WM_LeftHemisphereFileName',
                                                                             'WM_RightHemisphereFileName']),
                                            name="WhiteMatterHemisphere")
        WhiteMatterHemisphereNode.inputs.WM_LeftHemisphereFileName = "left_hemisphere_wm.nii.gz"
        WhiteMatterHemisphereNode.inputs.WM_RightHemisphereFileName = "right_hemisphere_wm.nii.gz"

        baw201.connect(myLocalBrainStemWF, 'outputspec.ouputTissuelLabelFilename',
                       WhiteMatterHemisphereNode, 'BRAINLABELSFile')
        baw201.connect(BResample['hncma_atlas'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'HDCMARegisteredVentricleMaskFN')
        baw201.connect(BResample['template_leftHemisphere'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'LeftHemisphereMaskName')
        baw201.connect(BResample['template_rightHemisphere'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'RightHemisphereMaskName')

        baw201.connect(WhiteMatterHemisphereNode, 'WM_LeftHemisphereFileName',
                       DataSink, 'WarpedAtlas2Subject.@LeftHemisphereWM')
        baw201.connect(WhiteMatterHemisphereNode, 'WM_RightHemisphereFileName',
                       DataSink, 'WarpedAtlas2Subject.@RightHemisphereWM')

    # ------------------------------------------------------------------
    # Optional MALF whole-brain labeling (depends on warp_atlas_to_subject
    # for BResample['template_leftHemisphere'] — enforced by the assert at top).
    # ------------------------------------------------------------------
    if 'malf_2015_wholebrain' in master_config['components']:
        ## HACK Do MALF labeling
        ## HACK FOR NOW SHOULD BE MORE ELEGANT FROM THE .config file
        BASE_DATA_GRABBER_DIR = '/Shared/johnsonhj/HDNI/ReferenceData/Neuromorphometrics/2012Subscription'
        if onlyT1:
            print("T1 only processing in baseline")
        else:
            print("Multimodal processing in baseline")
        myLocalMALF = CreateMALFWorkflow("MALF", onlyT1, master_config, BASE_DATA_GRABBER_DIR)
        baw201.connect(myLocalTCWF, 'outputspec.t1_average', myLocalMALF, 'inputspec.subj_t1_image')
        baw201.connect(myLocalTCWF, 'outputspec.t2_average', myLocalMALF, 'inputspec.subj_t2_image')
        baw201.connect(myLocalBrainStemWF, 'outputspec.ouputTissuelLabelFilename',
                       myLocalMALF, 'inputspec.subj_fixed_head_labels')
        baw201.connect(BResample['template_leftHemisphere'], 'outputVolume',
                       myLocalMALF, 'inputspec.subj_left_hemisphere')
        baw201.connect(myLocalLMIWF, 'outputspec.outputLandmarksInACPCAlignedSpace',
                       myLocalMALF, 'inputspec.subj_lmks')
        baw201.connect(atlasBCDNode_S, 'template_weights_50Lmks_wts',
                       myLocalMALF, 'inputspec.atlasWeightFilename')

        # Identity node so the label base filename can be set/overridden externally.
        inputLabelFileMALFnameSpec = pe.Node(interface=IdentityInterface(fields=['labelBaseFilename']),
                                             run_without_submitting=True,
                                             name="inputLabelFileMALFnameSpec")
        baw201.connect(inputLabelFileMALFnameSpec, 'labelBaseFilename',
                       myLocalMALF, 'inputspec.labelBaseFilename')

        baw201.connect(myLocalMALF, 'outputspec.MALF_HDAtlas20_2015_label',
                       DataSink, 'TissueClassify.@MALF_HDAtlas20_2015_label')
        baw201.connect(myLocalMALF, 'outputspec.MALF_HDAtlas20_2015_CSFVBInjected_label',
                       DataSink, 'TissueClassify.@MALF_HDAtlas20_2015_CSFVBInjected_label')
        baw201.connect(myLocalMALF, 'outputspec.MALF_HDAtlas20_2015_fs_standard_label',
                       DataSink, 'TissueClassify.@MALF_HDAtlas20_2015_fs_standard_label')
        baw201.connect(myLocalMALF, 'outputspec.MALF_HDAtlas20_2015_lobar_label',
                       DataSink, 'TissueClassify.@MALF_HDAtlas20_2015_lobar_label')
        baw201.connect(myLocalMALF, 'outputspec.MALF_extended_snapshot',
                       DataSink, 'TissueClassify.@MALF_extended_snapshot')

    return baw201
def CreateLandmarkInitializeWorkflow(WFname, master_config, InterpolationMode, PostACPCAlignToAtlas, DoReverseInit, useEMSP=False, Debug=False):
    """
    Create the landmark-initialization sub-workflow.

    Runs BRAINSConstellationDetector (BCD) on the input volume to ACPC-align it and
    detect landmarks, then BRAINSLandmarkInitializer (BLI) to compute an
    atlas-to-subject initializer transform from the detected landmarks, and
    BRAINSROIAuto to produce a cropped ACPC-aligned volume.

    :param WFname: name for the returned nipype Workflow
    :param master_config: dict of pipeline settings; 'queue' and 'long_q' are read here
    :param InterpolationMode: interpolation mode handed to BCD (inputs.interpolationMode)
    :param PostACPCAlignToAtlas: when True, BCD also receives the atlas volume so the
        ACPC alignment can be driven by the atlas (useful after longitudinal template
        building); otherwise alignment is defined by the landmark points alone
    :param DoReverseInit: when True, additionally compute the subject-to-atlas
        initializer (BLI2Atlas) and resample the subject into atlas space (debug aid)
    :param useEMSP: when True, feed a pre-selected landmark file to BCD
        via 'inputLandmarksEMSP'
    :param Debug: with DoReverseInit, also resample the atlas back into subject space
    :return: the configured nipype Workflow (landmarkInitializeWF)

    NOTE(review): an earlier call site in this file invokes
    CreateLandmarkInitializeWorkflow with only three arguments
    ("LandmarkInitialize", interpMode, DoReverseMapping); verify that call site
    against this signature — it appears to predate the master_config parameter.
    """
    CLUSTER_QUEUE=master_config['queue']
    CLUSTER_QUEUE_LONG=master_config['long_q']

    landmarkInitializeWF = pe.Workflow(name=WFname)

    #############
    # Workflow inputs: the T1 to align, the atlas landmark/weight files, and the
    # models consumed by BCD.
    inputsSpec = pe.Node(interface=IdentityInterface(fields=['inputVolume', 'atlasLandmarkFilename', 'atlasWeightFilename',
                                                             'LLSModel', 'inputTemplateModel', 'atlasVolume', 'EMSP']),
                         run_without_submitting=True, name='inputspec')
    #############
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['outputLandmarksInACPCAlignedSpace',
                                                              'outputResampledVolume', 'outputResampledCroppedVolume',
                                                              'outputLandmarksInInputSpace', 'writeBranded2DImage',
                                                              'outputTransform', 'outputMRML',
                                                              'atlasToSubjectTransform'
                                                              ]),
                          run_without_submitting=True, name='outputspec')

    ########################################################
    # Run ACPC Detect on first T1 Image - Base Image
    ########################################################
    BCD = pe.Node(interface=BRAINSConstellationDetector(), name="BCD")
    many_cpu_BCD_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG,4,2,4), 'overwrite': True}
    BCD.plugin_args = many_cpu_BCD_options_dictionary
    ## Use program default: BCD.inputs.inputTemplateModel = T1ACPCModelFile
    # BCD.inputs.outputVolume = "BCD_OUT" + "_ACPC_InPlace.nii.gz"  #$# T1AcpcImageList
    BCD.inputs.outputTransform = "BCD" + "_Original2ACPC_transform.h5"
    BCD.inputs.outputResampledVolume = "BCD" + "_ACPC.nii.gz"
    BCD.inputs.outputLandmarksInInputSpace = "BCD" + "_Original.fcsv"
    BCD.inputs.outputLandmarksInACPCAlignedSpace = "BCD" + "_ACPC_Landmarks.fcsv"
    BCD.inputs.writeBranded2DImage = "BCD"+"_Branded2DQCimage.png"
    # BCD.inputs.outputMRML = "BCD" + "_Scene.mrml"
    BCD.inputs.interpolationMode = InterpolationMode
    BCD.inputs.houghEyeDetectorMode = 1  # Look for dark eyes like on a T1 image, 0=Look for bright eyes like in a T2 image
    BCD.inputs.acLowerBound = 80.0  # Chop the data set 80mm below the AC PC point.

    # Entries below are of the form:
    landmarkInitializeWF.connect(inputsSpec, 'inputVolume', BCD, 'inputVolume')
    landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BCD, 'atlasLandmarkWeights')
    landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename',BCD, 'atlasLandmarks')
    landmarkInitializeWF.connect(inputsSpec, 'LLSModel', BCD, 'LLSModel')
    landmarkInitializeWF.connect(inputsSpec, 'inputTemplateModel', BCD, 'inputTemplateModel')

    # If EMSP, pre-selected landmarks are given, force to use.
    if useEMSP:
        print("*** Use pre-selected landmark file for Landmark Detection")
        landmarkInitializeWF.connect(inputsSpec, 'EMSP', BCD, 'inputLandmarksEMSP')

    # If the atlas volume is from this subject (i.e. after template building for the longitudinal phase) then set this to True
    # Otherwise, it is probably best to let the ACPC alignment be fully defined by the landmark points themselves.
    if PostACPCAlignToAtlas:
        landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', BCD, 'atlasVolume')

    ########################################################
    # Run BLI atlas_to_subject
    ########################################################
    # Fixed landmarks are the subject's ACPC-space landmarks from BCD; moving
    # landmarks are the atlas landmarks, yielding an atlas->subject initializer.
    BLI = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI")
    BLI.inputs.outputTransformFilename = "landmarkInitializer_atlas_to_subject_transform.h5"

    landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BLI, 'inputWeightFilename')
    landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI, 'inputMovingLandmarkFilename')
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInACPCAlignedSpace', BLI, 'inputFixedLandmarkFilename')

    ## This is for debugging purposes, and it is not intended for general use.
    if DoReverseInit == True:
        ########################################################
        # Run BLI subject_to_atlas
        ########################################################
        # Same landmark pairing as BLI above, but with fixed/moving roles swapped.
        BLI2Atlas = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI2Atlas")
        BLI2Atlas.inputs.outputTransformFilename = "landmarkInitializer_subject_to_atlas_transform.h5"

        landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BLI2Atlas, 'inputWeightFilename')
        landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI2Atlas, 'inputFixedLandmarkFilename')
        landmarkInitializeWF.connect(BCD, 'outputLandmarksInInputSpace', BLI2Atlas, 'inputMovingLandmarkFilename')

        Resample2Atlas = pe.Node(interface=BRAINSResample(), name="Resample2Atlas")
        Resample2Atlas.inputs.interpolationMode = "Linear"
        Resample2Atlas.inputs.outputVolume = "subject2atlas.nii.gz"

        landmarkInitializeWF.connect(inputsSpec, 'inputVolume', Resample2Atlas, 'inputVolume')
        landmarkInitializeWF.connect(BLI2Atlas, 'outputTransformFilename', Resample2Atlas, 'warpTransform')

    if (DoReverseInit == True) and (Debug == True):
        # Debug-only round trip: warp the atlas into subject ACPC space.
        ResampleFromAtlas = pe.Node(interface=BRAINSResample(), name="ResampleFromAtlas")
        ResampleFromAtlas.inputs.interpolationMode = "Linear"
        ResampleFromAtlas.inputs.outputVolume = "atlas2subject.nii.gz"

        landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', ResampleFromAtlas, 'inputVolume')
        landmarkInitializeWF.connect(BLI, 'outputTransformFilename', ResampleFromAtlas, 'warpTransform')
        landmarkInitializeWF.connect(BCD, 'outputResampledVolume', ResampleFromAtlas, 'referenceVolume')

    # Auto-crop the ACPC-aligned volume around the detected head region.
    BROIAUTO = pe.Node(interface=BRAINSROIAuto(), name="BROIAuto_cropped")
    many_cpu_BROIAUTO_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG,4,2,4), 'overwrite': True}
    BROIAUTO.plugin_args = many_cpu_BROIAUTO_options_dictionary
    BROIAUTO.inputs.outputVolume = "Cropped_BCD_ACPC_Aligned.nii.gz"
    BROIAUTO.inputs.ROIAutoDilateSize = 10
    BROIAUTO.inputs.cropOutput = True
    landmarkInitializeWF.connect(BCD, 'outputResampledVolume', BROIAUTO, 'inputVolume')
    landmarkInitializeWF.connect(BROIAUTO, 'outputVolume', outputsSpec, 'outputResampledCroppedVolume')

    # Expose BCD/BLI results on the workflow's output spec.
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInACPCAlignedSpace', outputsSpec, 'outputLandmarksInACPCAlignedSpace')
    landmarkInitializeWF.connect(BCD, 'outputResampledVolume', outputsSpec, 'outputResampledVolume')
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInInputSpace', outputsSpec, 'outputLandmarksInInputSpace')
    landmarkInitializeWF.connect(BCD, 'outputTransform', outputsSpec, 'outputTransform')
    landmarkInitializeWF.connect(BCD, 'outputMRML', outputsSpec, 'outputMRML')
    landmarkInitializeWF.connect(BCD, 'writeBranded2DImage', outputsSpec, 'writeBranded2DImage')
    landmarkInitializeWF.connect(BLI, 'outputTransformFilename', outputsSpec, 'atlasToSubjectTransform')

    return landmarkInitializeWF
def _template_runner(argv, environment, experiment, pipeline_options, cluster):
    """
    Build and run (or print) one subject-atlas template workflow per subject.

    For every subject returned by the session database this constructs a nipype
    workflow that (1) grabs each session's baseline tissue-classify results,
    (2) averages the per-session T1 images for an initial template, (3) runs two
    ANTs template-building iterations, (4) writes an atlas XML definition and an
    averaged landmark file, and (5) sinks everything under experiment['resultdir'].

    :param argv: parsed command-line arguments; keys read here: 'SUBJECTS',
        '--use-sentinal', '--use-shuffle', '--dotfilename'
    :param environment: environment settings; 'prefix' is read
    :param experiment: experiment settings; 'cachedir', 'resultdir', 'dbfile',
        'previousresult' and 'atlascache' are read
    :param pipeline_options: base nipype options, specialized per subject by
        nipype_options()
    :param cluster: cluster settings; 'queue' and 'long_q' are read
    :return: None.  Side effect: runs (or, with --dotfilename, only prints)
        the workflow for each subject that is ready.
    """
    print("Getting subjects from database...")
    # subjects = argv["--subjects"].split(',')
    # Build database before parallel section
    subjects, subjects_sessions_dictionary = get_subjects_sessions_dictionary(argv['SUBJECTS'],
                                                                              experiment['cachedir'],
                                                                              experiment['resultdir'],
                                                                              environment['prefix'],
                                                                              experiment['dbfile'],
                                                                              argv['--use-sentinal'],
                                                                              argv['--use-shuffle'])

    # Quick preliminary sanity check: every requested subject must have sessions.
    for thisSubject in subjects:
        if len(subjects_sessions_dictionary[thisSubject]) == 0:
            print("ERROR: subject {0} has no sessions found. Did you supply a valid subject id on the command line?".format(thisSubject))
            sys.exit(-1)

    for thisSubject in subjects:
        print("Processing atlas generation for this subject: {0}".format(thisSubject))
        print("=" * 80)
        print("Copying Atlas directory and determining appropriate Nipype options...")
        subj_pipeline_options = nipype_options(argv, pipeline_options, cluster, experiment, environment)  # Generate Nipype options
        print("Dispatching jobs to the system...")
        ######
        ###### Now start workflow construction
        ######
        # Set universal pipeline options
        nipype_config.update_config(subj_pipeline_options)

        # Only process a subject when every one of its sessions already has a
        # t1_average from the previous (baseline) experiment.
        ready_for_template_building = True
        for thisSession in subjects_sessions_dictionary[thisSubject]:
            path_test = os.path.join(experiment['previousresult'], '*/{0}/{1}/TissueClassify/t1_average_BRAINSABC.nii.gz'.format(thisSubject, thisSession))
            t1_file_result = glob.glob(path_test)
            if len(t1_file_result) != 1:
                print("Incorrect number of t1 images found for data grabber {0}".format(t1_file_result))
                print(" at path {0}".format(path_test))
                ready_for_template_building = False
        if not ready_for_template_building:
            print("TEMPORARY SKIPPING: Not ready to process {0}".format(thisSubject))
            continue

        base_output_directory = os.path.join(subj_pipeline_options['logging']['log_directory'], thisSubject)
        template = pe.Workflow(name='SubjectAtlas_Template_' + thisSubject)
        template.base_dir = base_output_directory

        subjectNode = pe.Node(interface=IdentityInterface(fields=['subject']), run_without_submitting=True,
                              name='99_subjectIterator')
        subjectNode.inputs.subject = thisSubject

        sessionsExtractorNode = pe.Node(Function(function=getSessionsFromSubjectDictionary,
                                                 input_names=['subject_session_dictionary', 'subject'],
                                                 output_names=['sessions']),
                                        run_without_submitting=True, name="99_sessionsExtractor")
        sessionsExtractorNode.inputs.subject_session_dictionary = subjects_sessions_dictionary

        # NOTE(review): run_without_submitting is also passed into the DataGrabber
        # constructors below; only the pe.MapNode keyword is meaningful — confirm
        # the interface silently ignores it before removing.
        baselineOptionalDG = pe.MapNode(nio.DataGrabber(infields=['subject', 'session'],
                                                        outfields=['t2_average', 'pd_average', 'fl_average'],
                                                        run_without_submitting=True),
                                        run_without_submitting=True,
                                        iterfield=['session'], name='BaselineOptional_DG')
        baselineOptionalDG.inputs.base_directory = experiment['previousresult']
        baselineOptionalDG.inputs.sort_filelist = True
        baselineOptionalDG.inputs.raise_on_empty = False  # optional modalities may be missing
        baselineOptionalDG.inputs.template = '*'
        baselineOptionalDG.inputs.field_template = {'t2_average': '*/%s/%s/TissueClassify/t2_average_BRAINSABC.nii.gz',
                                                    'pd_average': '*/%s/%s/TissueClassify/pd_average_BRAINSABC.nii.gz',
                                                    'fl_average': '*/%s/%s/TissueClassify/fl_average_BRAINSABC.nii.gz'}
        baselineOptionalDG.inputs.template_args = {'t2_average': [['subject', 'session']],
                                                   'pd_average': [['subject', 'session']],
                                                   'fl_average': [['subject', 'session']]}

        baselineRequiredDG = pe.MapNode(nio.DataGrabber(infields=['subject', 'session'],
                                                        outfields=['t1_average', 'brainMaskLabels',
                                                                   'posteriorImages', 'passive_intensities',
                                                                   'passive_masks', 'BCD_ACPC_Landmarks_fcsv'],
                                                        run_without_submitting=True),
                                        run_without_submitting=True,
                                        iterfield=['session'], name='Baseline_DG')
        baselineRequiredDG.inputs.base_directory = experiment['previousresult']
        baselineRequiredDG.inputs.sort_filelist = True
        baselineRequiredDG.inputs.raise_on_empty = True  # these baseline files must exist
        baselineRequiredDG.inputs.template = '*'
        posterior_files = ['AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS', 'NOTCSF', 'NOTGM',
                           'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB', 'WM']
        passive_intensities_files = ['rho.nii.gz', 'phi.nii.gz', 'theta.nii.gz',
                                     'l_thalamus_ProbabilityMap.nii.gz', 'r_accumben_ProbabilityMap.nii.gz',
                                     'l_globus_ProbabilityMap.nii.gz', 'l_accumben_ProbabilityMap.nii.gz',
                                     'l_caudate_ProbabilityMap.nii.gz', 'l_putamen_ProbabilityMap.nii.gz',
                                     'r_thalamus_ProbabilityMap.nii.gz', 'r_putamen_ProbabilityMap.nii.gz',
                                     'r_caudate_ProbabilityMap.nii.gz', 'r_hippocampus_ProbabilityMap.nii.gz',
                                     'r_globus_ProbabilityMap.nii.gz', 'l_hippocampus_ProbabilityMap.nii.gz']
        passive_mask_files = ['template_WMPM2_labels.nii.gz', 'hncma_atlas.nii.gz', 'template_nac_labels.nii.gz',
                              'template_leftHemisphere.nii.gz', 'template_rightHemisphere.nii.gz',
                              'template_ventricles.nii.gz', 'template_headregion.nii.gz']
        baselineRequiredDG.inputs.field_template = {'t1_average': '*/%s/%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
                                                    'brainMaskLabels': '*/%s/%s/TissueClassify/complete_brainlabels_seg.nii.gz',
                                                    'BCD_ACPC_Landmarks_fcsv': '*/%s/%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
                                                    'posteriorImages': '*/%s/%s/TissueClassify/POSTERIOR_%s.nii.gz',
                                                    'passive_intensities': '*/%s/%s/WarpedAtlas2Subject/%s',
                                                    'passive_masks': '*/%s/%s/WarpedAtlas2Subject/%s',
                                                    }
        baselineRequiredDG.inputs.template_args = {'t1_average': [['subject', 'session']],
                                                   'brainMaskLabels': [['subject', 'session']],
                                                   'BCD_ACPC_Landmarks_fcsv': [['subject', 'session']],
                                                   'posteriorImages': [['subject', 'session', posterior_files]],
                                                   'passive_intensities': [['subject', 'session', passive_intensities_files]],
                                                   'passive_masks': [['subject', 'session', passive_mask_files]]
                                                   }

        # Fold the per-session grabber outputs into the dictionary structures the
        # template-building iterations consume.
        MergeByExtendListElementsNode = pe.Node(Function(function=MergeByExtendListElements,
                                                         input_names=['t1s', 't2s', 'pds', 'fls', 'labels',
                                                                      'posteriors', 'passive_intensities', 'passive_masks'],
                                                         output_names=['ListOfImagesDictionaries',
                                                                       'registrationImageTypes', 'interpolationMapping']),
                                                run_without_submitting=True, name="99_MergeByExtendListElements")

        template.connect([(subjectNode, baselineRequiredDG, [('subject', 'subject')]),
                          (subjectNode, baselineOptionalDG, [('subject', 'subject')]),
                          (subjectNode, sessionsExtractorNode, [('subject', 'subject')]),
                          (sessionsExtractorNode, baselineRequiredDG, [('sessions', 'session')]),
                          (sessionsExtractorNode, baselineOptionalDG, [('sessions', 'session')]),
                          (baselineRequiredDG, MergeByExtendListElementsNode,
                           [('t1_average', 't1s'),
                            ('brainMaskLabels', 'labels'),
                            (('posteriorImages', ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'posteriors')]),
                          (baselineOptionalDG, MergeByExtendListElementsNode,
                           [('t2_average', 't2s'),
                            ('pd_average', 'pds'),
                            ('fl_average', 'fls')]),
                          (baselineRequiredDG, MergeByExtendListElementsNode,
                           [(('passive_intensities', ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'passive_intensities')]),
                          (baselineRequiredDG, MergeByExtendListElementsNode,
                           [(('passive_masks', ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'passive_masks')])
                          ])

        # Simple voxel average of the session T1s seeds the first iteration.
        myInitAvgWF = pe.Node(interface=ants.AverageImages(), name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
        myInitAvgWF.inputs.dimension = 3
        myInitAvgWF.inputs.normalize = True
        myInitAvgWF.inputs.num_threads = -1
        template.connect(baselineRequiredDG, 't1_average', myInitAvgWF, "images")

        ####################################################################################################
        # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
        # if numSessions == 1:
        #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
        ####################################################################################################
        CLUSTER_QUEUE = cluster['queue']
        CLUSTER_QUEUE_LONG = cluster['long_q']
        buildTemplateIteration1 = BAWantsRegistrationTemplateBuildSingleIterationWF('iteration01', CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)
        # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
        buildTemplateIteration2 = BAWantsRegistrationTemplateBuildSingleIterationWF('Iteration02', CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)

        CreateAtlasXMLAndCleanedDeformedAveragesNode = pe.Node(interface=Function(function=CreateAtlasXMLAndCleanedDeformedAverages,
                                                                                  input_names=['t1_image', 'deformed_list',
                                                                                               'AtlasTemplate', 'outDefinition'],
                                                                                  output_names=['outAtlasFullPath', 'clean_deformed_list']),
                                                               # This is a lot of work, so submit it run_without_submitting=True,
                                                               run_without_submitting=True,  # HACK: THIS NODE REALLY SHOULD RUN ON THE CLUSTER!
                                                               name='99_CreateAtlasXMLAndCleanedDeformedAverages')

        if subj_pipeline_options['plugin_name'].startswith('SGE'):  # for some nodes, the qsub call needs to be modified on the cluster
            CreateAtlasXMLAndCleanedDeformedAveragesNode.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'],
                                                                        'qsub_args': modify_qsub_args(cluster['queue'], 1, 1, 1),
                                                                        'overwrite': True}
            for bt in [buildTemplateIteration1, buildTemplateIteration2]:
                BeginANTS = bt.get_node("BeginANTS")
                BeginANTS.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                                         'qsub_args': modify_qsub_args(cluster['queue'], 7, 4, 16)}
                wimtdeformed = bt.get_node("wimtdeformed")
                wimtdeformed.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                                            'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 2)}
                #AvgAffineTransform = bt.get_node("AvgAffineTransform")
                #AvgAffineTransform.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                #                                  'qsub_args': modify_qsub_args(cluster['queue'], 2, 1, 1)}
                wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
                wimtPassivedeformed.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                                                   'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 4)}

        # Running off previous baseline experiment
        NACCommonAtlas = MakeAtlasNode(experiment['atlascache'], 'NACCommonAtlas_{0}'.format('subject'), ['S_BRAINSABCSupport'])  ## HACK : replace 'subject' with subject id once this is a loop rather than an iterable.

        template.connect([(myInitAvgWF, buildTemplateIteration1, [('output_average_image', 'inputspec.fixed_image')]),
                          (MergeByExtendListElementsNode, buildTemplateIteration1,
                           [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
                            ('registrationImageTypes', 'inputspec.registrationImageTypes'),
                            ('interpolationMapping', 'inputspec.interpolationMapping')]),
                          (buildTemplateIteration1, buildTemplateIteration2, [('outputspec.template', 'inputspec.fixed_image')]),
                          (MergeByExtendListElementsNode, buildTemplateIteration2,
                           [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
                            ('registrationImageTypes', 'inputspec.registrationImageTypes'),
                            ('interpolationMapping', 'inputspec.interpolationMapping')]),
                          (subjectNode, CreateAtlasXMLAndCleanedDeformedAveragesNode, [(('subject', xml_filename), 'outDefinition')]),
                          (NACCommonAtlas, CreateAtlasXMLAndCleanedDeformedAveragesNode, [('ExtendedAtlasDefinition_xml_in', 'AtlasTemplate')]),
                          (buildTemplateIteration2, CreateAtlasXMLAndCleanedDeformedAveragesNode,
                           [('outputspec.template', 't1_image'),
                            ('outputspec.passive_deformed_templates', 'deformed_list')]),
                          ])

        ## Generate an average lmks file.
        myAverageLmk = pe.Node(interface=GenerateAverageLmkFile(), name="myAverageLmk")
        myAverageLmk.inputs.outputLandmarkFile = "AVG_LMKS.fcsv"
        template.connect(baselineRequiredDG, 'BCD_ACPC_Landmarks_fcsv', myAverageLmk, 'inputLandmarkFiles')

        # Create DataSinks
        SubjectAtlas_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
        SubjectAtlas_DataSink.overwrite = subj_pipeline_options['ds_overwrite']
        SubjectAtlas_DataSink.inputs.base_directory = experiment['resultdir']

        template.connect([(subjectNode, SubjectAtlas_DataSink, [('subject', 'container')]),
                          (CreateAtlasXMLAndCleanedDeformedAveragesNode, SubjectAtlas_DataSink, [('outAtlasFullPath', 'Atlas.@definitions')]),
                          (CreateAtlasXMLAndCleanedDeformedAveragesNode, SubjectAtlas_DataSink, [('clean_deformed_list', 'Atlas.@passive_deformed_templates')]),
                          (subjectNode, SubjectAtlas_DataSink, [(('subject', outputPattern), 'regexp_substitutions')]),
                          (buildTemplateIteration2, SubjectAtlas_DataSink, [('outputspec.template', 'Atlas.@template')]),
                          (myAverageLmk, SubjectAtlas_DataSink, [('outputLandmarkFile', 'Atlas.@outputLandmarkFile')]),
                          ])

        dotfilename = argv['--dotfilename']
        if dotfilename is not None:
            # Dry run: emit the workflow graph only.
            print("WARNING: Printing workflow, but not running pipeline")
            print_workflow(template, plugin=subj_pipeline_options['plugin_name'], dotfilename=dotfilename)
        else:
            run_workflow(template, plugin=subj_pipeline_options['plugin_name'], plugin_args=subj_pipeline_options['plugin_args'])
def segmentation(projectid, subjectid, sessionid, master_config, onlyT1=True, pipeline_name=""):
    """
    Build the per-session BRAINSCut segmentation workflow.

    Refines the atlas-to-subject registration with an ANTs SyN stage, runs the
    BRAINSCut sub-workflow to segment subcortical structures, sinks the cleaned
    binary segmentations plus a QC snapshot, and warps the session results back
    into atlas space.

    :param projectid: project identifier; used in DataSink output paths
    :param subjectid: subject identifier; used in node names and output paths
    :param sessionid: session identifier; used in node names and output paths
    :param master_config: dict of pipeline settings; keys read here include
        'long_q', 'queue', 'ds_overwrite', 'resultdir', 'plugin_name' and
        'plugin_args'
    :param onlyT1: when False, the T2 average is wired into BRAINSCut and the
        snapshot/atlas merges carry one extra file (file_count 15 vs 14)
    :param pipeline_name: name given to the returned nipype Workflow
    :return: the configured nipype Workflow (baw200)
    """
    import os.path
    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces import ants
    from nipype.interfaces.utility import IdentityInterface, Function, Merge

    # Set universal pipeline options
    from nipype import config

    config.update_config(master_config)

    from PipeLineFunctionHelpers import clip_t1_image_with_brain_mask
    from .WorkupT1T2BRAINSCut import create_brains_cut_workflow
    from utilities.distributed import modify_qsub_args
    from nipype.interfaces.semtools import BRAINSSnapShotWriter

    # CLUSTER_QUEUE=master_config['queue']
    CLUSTER_QUEUE_LONG = master_config["long_q"]
    baw200 = pe.Workflow(name=pipeline_name)

    # HACK: print for debugging
    # NOTE(review): 'itme' is a typo for 'item' (harmless local name).
    for key, itme in list(master_config.items()):
        print(("-" * 30))
        print((key, ":", itme))
    print(("-" * 30))
    # END HACK

    inputsSpec = pe.Node(
        interface=IdentityInterface(fields=[
            "t1_average",
            "t2_average",
            "template_t1",
            "hncma_atlas",
            "LMIatlasToSubject_tx",
            "inputLabels",
            "inputHeadLabels",
            "posteriorImages",
            "UpdatedPosteriorsList",
            "atlasToSubjectRegistrationState",
            "rho",
            "phi",
            "theta",
            "l_caudate_ProbabilityMap",
            "r_caudate_ProbabilityMap",
            "l_hippocampus_ProbabilityMap",
            "r_hippocampus_ProbabilityMap",
            "l_putamen_ProbabilityMap",
            "r_putamen_ProbabilityMap",
            "l_thalamus_ProbabilityMap",
            "r_thalamus_ProbabilityMap",
            "l_accumben_ProbabilityMap",
            "r_accumben_ProbabilityMap",
            "l_globus_ProbabilityMap",
            "r_globus_ProbabilityMap",
            "trainModelFile_txtD0060NT0060_gz",
        ]),
        run_without_submitting=True,
        name="inputspec",
    )

    # outputsSpec = pe.Node(interface=IdentityInterface(fields=[...]),
    #                       run_without_submitting=True, name='outputspec')

    # Clip the T1 to the brain region defined by the BABC labels.
    currentClipT1ImageWithBrainMaskName = ("ClipT1ImageWithBrainMask_" + str(subjectid) + "_" + str(sessionid))
    ClipT1ImageWithBrainMaskNode = pe.Node(
        interface=Function(
            function=clip_t1_image_with_brain_mask,
            input_names=["t1_image", "brain_labels", "clipped_file_name"],
            output_names=["clipped_file"],
        ),
        name=currentClipT1ImageWithBrainMaskName,
    )
    ClipT1ImageWithBrainMaskNode.inputs.clipped_file_name = (
        "clipped_from_BABC_labels_t1.nii.gz")
    baw200.connect([(
        inputsSpec,
        ClipT1ImageWithBrainMaskNode,
        [("t1_average", "t1_image"), ("inputLabels", "brain_labels")],
    )])

    currentA2SantsRegistrationPostABCSyN = ("A2SantsRegistrationPostABCSyN_" + str(subjectid) + "_" + str(sessionid))
    ## INFO: It would be great to update the BRAINSABC atlasToSubjectTransform at this point, but
    ## That requires more testing, and fixes to ANTS to properly collapse transforms.
    ## For now we are simply creating a dummy node to pass through
    A2SantsRegistrationPostABCSyN = pe.Node(
        interface=ants.Registration(),
        name=currentA2SantsRegistrationPostABCSyN)
    many_cpu_ANTsSyN_options_dictionary = {
        "qsub_args": modify_qsub_args(CLUSTER_QUEUE_LONG, 8, 8, 16),
        "overwrite": True,
    }
    A2SantsRegistrationPostABCSyN.plugin_args = many_cpu_ANTsSyN_options_dictionary
    # Shared registration parameter set; resumes from the saved BABC state.
    common_ants_registration_settings(
        antsRegistrationNode=A2SantsRegistrationPostABCSyN,
        registrationTypeDescription="A2SantsRegistrationPostABCSyN",
        output_transform_prefix="AtlasToSubjectPostBABC_SyN",
        output_warped_image="atlas2subjectPostBABC.nii.gz",
        output_inverse_warped_image="subject2atlasPostBABC.nii.gz",
        save_state="SavedInternalSyNStatePostBABC.h5",
        invert_initial_moving_transform=False,
        initial_moving_transform=None,
    )

    ## INFO: Try multi-modal registration here
    baw200.connect([(
        inputsSpec,
        A2SantsRegistrationPostABCSyN,
        [
            ("atlasToSubjectRegistrationState", "restore_state"),
            ("t1_average", "fixed_image"),
            ("template_t1", "moving_image"),
        ],
    )])

    myLocalSegWF = create_brains_cut_workflow(
        projectid,
        subjectid,
        sessionid,
        master_config["queue"],
        master_config["long_q"],
        "Segmentation",
        onlyT1,
    )

    # Collects the average images (T1, optionally T2) for the QC snapshot.
    MergeStage2AverageImagesName = "99_mergeAvergeStage2Images_" + str(
        sessionid)
    MergeStage2AverageImages = pe.Node(
        interface=Merge(2),
        run_without_submitting=True,
        name=MergeStage2AverageImagesName,
    )

    baw200.connect([
        (
            inputsSpec,
            myLocalSegWF,
            [
                ("t1_average", "inputspec.T1Volume"),
                ("template_t1", "inputspec.template_t1"),
                ("posteriorImages", "inputspec.posteriorDictionary"),
                ("inputLabels", "inputspec.RegistrationROI"),
            ],
        ),
        (inputsSpec, MergeStage2AverageImages, [("t1_average", "in1")]),
        (
            A2SantsRegistrationPostABCSyN,
            myLocalSegWF,
            [("composite_transform", "inputspec.atlasToSubjectTransform")],
        ),
    ])

    # Wire every probability map / spherical coordinate image into BRAINSCut.
    baw200.connect([(
        inputsSpec,
        myLocalSegWF,
        [
            ("rho", "inputspec.rho"),
            ("phi", "inputspec.phi"),
            ("theta", "inputspec.theta"),
            ("l_caudate_ProbabilityMap", "inputspec.l_caudate_ProbabilityMap"),
            ("r_caudate_ProbabilityMap", "inputspec.r_caudate_ProbabilityMap"),
            (
                "l_hippocampus_ProbabilityMap",
                "inputspec.l_hippocampus_ProbabilityMap",
            ),
            (
                "r_hippocampus_ProbabilityMap",
                "inputspec.r_hippocampus_ProbabilityMap",
            ),
            ("l_putamen_ProbabilityMap", "inputspec.l_putamen_ProbabilityMap"),
            ("r_putamen_ProbabilityMap", "inputspec.r_putamen_ProbabilityMap"),
            (
                "l_thalamus_ProbabilityMap",
                "inputspec.l_thalamus_ProbabilityMap",
            ),
            (
                "r_thalamus_ProbabilityMap",
                "inputspec.r_thalamus_ProbabilityMap",
            ),
            (
                "l_accumben_ProbabilityMap",
                "inputspec.l_accumben_ProbabilityMap",
            ),
            (
                "r_accumben_ProbabilityMap",
                "inputspec.r_accumben_ProbabilityMap",
            ),
            ("l_globus_ProbabilityMap", "inputspec.l_globus_ProbabilityMap"),
            ("r_globus_ProbabilityMap", "inputspec.r_globus_ProbabilityMap"),
            (
                "trainModelFile_txtD0060NT0060_gz",
                "inputspec.trainModelFile_txtD0060NT0060_gz",
            ),
        ],
    )])

    if not onlyT1:
        baw200.connect([
            (inputsSpec, myLocalSegWF, [("t2_average", "inputspec.T2Volume")]),
            (inputsSpec, MergeStage2AverageImages, [("t2_average", "in2")]),
        ])
        file_count = 15  # Count of files to merge into MergeSessionSubjectToAtlas
    else:
        file_count = 14  # Count of files to merge into MergeSessionSubjectToAtlas

    ## NOTE: Element 0 of AccumulatePriorsList is the accumulated GM tissue
    # baw200.connect([(AccumulateLikeTissuePosteriorsNode, myLocalSegWF,
    #                  [(('AccumulatePriorsList', get_list_index, 0), "inputspec.TotalGM")]),
    #                 ])

    ### Now define where the final organized outputs should go.
    DataSink = pe.Node(
        nio.DataSink(),
        name="CleanedDenoisedSegmentation_DS_" + str(subjectid) + "_" + str(sessionid),
    )
    DataSink.overwrite = master_config["ds_overwrite"]
    DataSink.inputs.base_directory = master_config["resultdir"]
    # DataSink.inputs.regexp_substitutions = generate_output_patern(projectid, subjectid, sessionid,'BRAINSCut')
    # DataSink.inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid)
    # Substitutions are applied in order to rewrite BRAINSCut filenames into the
    # project/subject/session results layout.
    DataSink.inputs.substitutions = [
        (
            "Segmentations",
            os.path.join(projectid, subjectid, sessionid, "CleanedDenoisedRFSegmentations"),
        ),
        ("subjectANNLabel_", ""),
        ("ANNContinuousPrediction", ""),
        ("subject.nii.gz", ".nii.gz"),
        ("_seg.nii.gz", "_seg.nii.gz"),
        (".nii.gz", "_seg.nii.gz"),
        ("_seg_seg", "_seg"),
    ]

    baw200.connect([
        (
            myLocalSegWF,
            DataSink,
            [
                (
                    "outputspec.outputBinaryLeftCaudate",
                    "Segmentations.@LeftCaudate",
                ),
                (
                    "outputspec.outputBinaryRightCaudate",
                    "Segmentations.@RightCaudate",
                ),
                (
                    "outputspec.outputBinaryLeftHippocampus",
                    "Segmentations.@LeftHippocampus",
                ),
                (
                    "outputspec.outputBinaryRightHippocampus",
                    "Segmentations.@RightHippocampus",
                ),
                (
                    "outputspec.outputBinaryLeftPutamen",
                    "Segmentations.@LeftPutamen",
                ),
                (
                    "outputspec.outputBinaryRightPutamen",
                    "Segmentations.@RightPutamen",
                ),
                (
                    "outputspec.outputBinaryLeftThalamus",
                    "Segmentations.@LeftThalamus",
                ),
                (
                    "outputspec.outputBinaryRightThalamus",
                    "Segmentations.@RightThalamus",
                ),
                (
                    "outputspec.outputBinaryLeftAccumben",
                    "Segmentations.@LeftAccumben",
                ),
                (
                    "outputspec.outputBinaryRightAccumben",
                    "Segmentations.@RightAccumben",
                ),
                ("outputspec.outputBinaryLeftGlobus", "Segmentations.@LeftGlobus"),
                (
                    "outputspec.outputBinaryRightGlobus",
                    "Segmentations.@RightGlobus",
                ),
                (
                    "outputspec.outputLabelImageName",
                    "Segmentations.@LabelImageName",
                ),
                ("outputspec.outputCSVFileName", "Segmentations.@CSVFileName"),
            ],
        ),
        # (myLocalSegWF, DataSink, [('outputspec.cleaned_labels', 'Segmentations.@cleaned_labels')])
    ])

    # Collects the 12 binary structure masks for the QC snapshot overlay.
    MergeStage2BinaryVolumesName = "99_MergeStage2BinaryVolumes_" + str(
        sessionid)
    MergeStage2BinaryVolumes = pe.Node(
        interface=Merge(12),
        run_without_submitting=True,
        name=MergeStage2BinaryVolumesName,
    )

    baw200.connect([(
        myLocalSegWF,
        MergeStage2BinaryVolumes,
        [
            ("outputspec.outputBinaryLeftAccumben", "in1"),
            ("outputspec.outputBinaryLeftCaudate", "in2"),
            ("outputspec.outputBinaryLeftPutamen", "in3"),
            ("outputspec.outputBinaryLeftGlobus", "in4"),
            ("outputspec.outputBinaryLeftThalamus", "in5"),
            ("outputspec.outputBinaryLeftHippocampus", "in6"),
            ("outputspec.outputBinaryRightAccumben", "in7"),
            ("outputspec.outputBinaryRightCaudate", "in8"),
            ("outputspec.outputBinaryRightPutamen", "in9"),
            ("outputspec.outputBinaryRightGlobus", "in10"),
            ("outputspec.outputBinaryRightThalamus", "in11"),
            ("outputspec.outputBinaryRightHippocampus", "in12"),
        ],
    )])

    ## SnapShotWriter for Segmented result checking:
    SnapShotWriterNodeName = "SnapShotWriter_" + str(sessionid)
    SnapShotWriter = pe.Node(interface=BRAINSSnapShotWriter(),
                             name=SnapShotWriterNodeName)

    SnapShotWriter.inputs.outputFilename = ("snapShot" + str(sessionid) + ".png")  # output specification
    SnapShotWriter.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0]
    SnapShotWriter.inputs.inputSliceToExtractInPhysicalPoint = [
        -3,
        -7,
        -3,
        5,
        7,
        22,
        -22,
    ]

    baw200.connect([
        (MergeStage2AverageImages, SnapShotWriter, [("out", "inputVolumes")]),
        (MergeStage2BinaryVolumes, SnapShotWriter, [("out", "inputBinaryVolumes")]),
        (
            SnapShotWriter,
            DataSink,
            [("outputFilename", "Segmentations.@outputSnapShot")],
        ),
    ])

    # currentAntsLabelWarpToSubject = 'AntsLabelWarpToSubject' + str(subjectid) + "_" + str(sessionid)
    # AntsLabelWarpToSubject = pe.Node(interface=ants.ApplyTransforms(), name=currentAntsLabelWarpToSubject)
    #
    # AntsLabelWarpToSubject.inputs.num_threads = -1
    # AntsLabelWarpToSubject.inputs.dimension = 3
    # AntsLabelWarpToSubject.inputs.output_image = 'warped_hncma_atlas_seg.nii.gz'
    # AntsLabelWarpToSubject.inputs.interpolation = "MultiLabel"
    #
    # baw200.connect([(A2SantsRegistrationPostABCSyN, AntsLabelWarpToSubject, [('composite_transform', 'transforms')]),
    #                 (inputsSpec, AntsLabelWarpToSubject, [('t1_average', 'reference_image'),
    #                                                       ('hncma_atlas', 'input_image')])
    #                 ])
    # #####
    # ### Now define where the final organized outputs should go.
    # AntsLabelWarpedToSubject_DSName = "AntsLabelWarpedToSubject_DS_" + str(sessionid)
    # AntsLabelWarpedToSubject_DS = pe.Node(nio.DataSink(), name=AntsLabelWarpedToSubject_DSName)
    # AntsLabelWarpedToSubject_DS.overwrite = master_config['ds_overwrite']
    # AntsLabelWarpedToSubject_DS.inputs.base_directory = master_config['resultdir']
    # AntsLabelWarpedToSubject_DS.inputs.substitutions = [('AntsLabelWarpedToSubject', os.path.join(projectid, subjectid, sessionid, 'AntsLabelWarpedToSubject'))]
    #
    # baw200.connect([(AntsLabelWarpToSubject, AntsLabelWarpedToSubject_DS, [('output_image', 'AntsLabelWarpedToSubject')])])

    # Everything to warp back into atlas space: 12 binaries + posteriors + averages.
    MergeSessionSubjectToAtlasName = "99_MergeSessionSubjectToAtlas_" + str(
        sessionid)
    MergeSessionSubjectToAtlas = pe.Node(
        interface=Merge(file_count),
        run_without_submitting=True,
        name=MergeSessionSubjectToAtlasName,
    )

    baw200.connect([
        (
            myLocalSegWF,
            MergeSessionSubjectToAtlas,
            [
                ("outputspec.outputBinaryLeftAccumben", "in1"),
                ("outputspec.outputBinaryLeftCaudate", "in2"),
                ("outputspec.outputBinaryLeftPutamen", "in3"),
                ("outputspec.outputBinaryLeftGlobus", "in4"),
                ("outputspec.outputBinaryLeftThalamus", "in5"),
                ("outputspec.outputBinaryLeftHippocampus", "in6"),
                ("outputspec.outputBinaryRightAccumben", "in7"),
                ("outputspec.outputBinaryRightCaudate", "in8"),
                ("outputspec.outputBinaryRightPutamen", "in9"),
                ("outputspec.outputBinaryRightGlobus", "in10"),
                ("outputspec.outputBinaryRightThalamus", "in11"),
                ("outputspec.outputBinaryRightHippocampus", "in12"),
            ],
        ),
        # (FixWMPartitioningNode, MergeSessionSubjectToAtlas, [('UpdatedPosteriorsList', 'in13')]),
        (
            inputsSpec,
            MergeSessionSubjectToAtlas,
            [("UpdatedPosteriorsList", "in13")],
        ),
        (inputsSpec, MergeSessionSubjectToAtlas, [("t1_average", "in14")]),
    ])

    if not onlyT1:
        assert file_count == 15
        baw200.connect([(inputsSpec, MergeSessionSubjectToAtlas,
                         [("t2_average", "in15")])])

    # Warp the merged session images into atlas space with the inverse SyN transform.
    LinearSubjectToAtlasANTsApplyTransformsName = (
        "LinearSubjectToAtlasANTsApplyTransforms_" + str(sessionid))
    LinearSubjectToAtlasANTsApplyTransforms = pe.MapNode(
        interface=ants.ApplyTransforms(),
        iterfield=["input_image"],
        name=LinearSubjectToAtlasANTsApplyTransformsName,
    )
    LinearSubjectToAtlasANTsApplyTransforms.inputs.num_threads = -1
    LinearSubjectToAtlasANTsApplyTransforms.inputs.interpolation = "Linear"

    baw200.connect([
        (
            A2SantsRegistrationPostABCSyN,
            LinearSubjectToAtlasANTsApplyTransforms,
            [("inverse_composite_transform", "transforms")],
        ),
        (
            inputsSpec,
            LinearSubjectToAtlasANTsApplyTransforms,
            [("template_t1", "reference_image")],
        ),
        (
            MergeSessionSubjectToAtlas,
            LinearSubjectToAtlasANTsApplyTransforms,
            [("out", "input_image")],
        ),
    ])

    MergeMultiLabelSessionSubjectToAtlasName = (
        "99_MergeMultiLabelSessionSubjectToAtlas_" + str(sessionid))
    MergeMultiLabelSessionSubjectToAtlas = pe.Node(
        interface=Merge(2),
        run_without_submitting=True,
        name=MergeMultiLabelSessionSubjectToAtlasName,
    )

    baw200.connect([(
        inputsSpec,
        MergeMultiLabelSessionSubjectToAtlas,
        [("inputLabels", "in1"), ("inputHeadLabels", "in2")],
    )])

    ### This is taking this sessions RF label map back into NAC atlas space.
    # {
    MultiLabelSubjectToAtlasANTsApplyTransformsName = (
        "MultiLabelSubjectToAtlasANTsApplyTransforms_" + str(sessionid) + "_map")
    MultiLabelSubjectToAtlasANTsApplyTransforms = pe.MapNode(
        interface=ants.ApplyTransforms(),
        iterfield=["input_image"],
        name=MultiLabelSubjectToAtlasANTsApplyTransformsName,
    )
    MultiLabelSubjectToAtlasANTsApplyTransforms.inputs.num_threads = -1
    MultiLabelSubjectToAtlasANTsApplyTransforms.inputs.interpolation = "MultiLabel"

    baw200.connect([
        (
            A2SantsRegistrationPostABCSyN,
            MultiLabelSubjectToAtlasANTsApplyTransforms,
            [("inverse_composite_transform", "transforms")],
        ),
        (
            inputsSpec,
            MultiLabelSubjectToAtlasANTsApplyTransforms,
            [("template_t1", "reference_image")],
        ),
        (
            MergeMultiLabelSessionSubjectToAtlas,
            MultiLabelSubjectToAtlasANTsApplyTransforms,
            [("out", "input_image")],
        ),
    ])
    # }
    ### Now we must take the sessions to THIS SUBJECTS personalized atlas.
    # {
    # }

    ### Now define where the final organized outputs should go.
    Subj2Atlas_DSName = "SubjectToAtlas_DS_" + str(sessionid)
    Subj2Atlas_DS = pe.Node(nio.DataSink(), name=Subj2Atlas_DSName)
    Subj2Atlas_DS.overwrite = master_config["ds_overwrite"]
    Subj2Atlas_DS.inputs.base_directory = master_config["resultdir"]
    # Strip the MapNode suffix from output paths and group results per session.
    Subj2Atlas_DS.inputs.regexp_substitutions = [
        (r"_LinearSubjectToAtlasANTsApplyTransforms_[^/]*", r"" + sessionid + "/")
    ]

    baw200.connect([(
        LinearSubjectToAtlasANTsApplyTransforms,
        Subj2Atlas_DS,
        [("output_image", "SubjectToAtlasWarped.@linear_output_images")],
    )])

    Subj2AtlasTransforms_DSName = "SubjectToAtlasTransforms_DS_" + str(
        sessionid)
    Subj2AtlasTransforms_DS = pe.Node(nio.DataSink(),
                                      name=Subj2AtlasTransforms_DSName)
    Subj2AtlasTransforms_DS.overwrite = master_config["ds_overwrite"]
    Subj2AtlasTransforms_DS.inputs.base_directory = master_config["resultdir"]
    Subj2AtlasTransforms_DS.inputs.regexp_substitutions = [
        (r"SubjectToAtlasWarped", r"SubjectToAtlasWarped/" + sessionid + "/")
    ]

    baw200.connect([(
        A2SantsRegistrationPostABCSyN,
        Subj2AtlasTransforms_DS,
        [
            (
                "composite_transform",
                "SubjectToAtlasWarped.@composite_transform",
            ),
            (
                "inverse_composite_transform",
                "SubjectToAtlasWarped.@inverse_composite_transform",
            ),
        ],
    )])
    # baw200.connect([(MultiLabelSubjectToAtlasANTsApplyTransforms, Subj2Atlas_DS, [('output_image', 'SubjectToAtlasWarped.@multilabel_output_images')])])

    if master_config["plugin_name"].startswith(
            "SGE"
    ):  # for some nodes, the qsub call needs to be modified on the cluster
        A2SantsRegistrationPostABCSyN.plugin_args = {
            "template": master_config["plugin_args"]["template"],
            "overwrite": True,
            "qsub_args": modify_qsub_args(master_config["queue"], 8, 8, 24),
        }
        SnapShotWriter.plugin_args = {
            "template": master_config["plugin_args"]["template"],
            "overwrite": True,
            "qsub_args": modify_qsub_args(master_config["queue"], 1, 1, 1),
        }
        LinearSubjectToAtlasANTsApplyTransforms.plugin_args = {
            "template": master_config["plugin_args"]["template"],
            "overwrite": True,
            "qsub_args": modify_qsub_args(master_config["queue"], 1, 1, 1),
        }
        MultiLabelSubjectToAtlasANTsApplyTransforms.plugin_args = {
            "template": master_config["plugin_args"]["template"],
            "overwrite": True,
            "qsub_args": modify_qsub_args(master_config["queue"], 1, 1, 1),
        }

    return baw200
def CreateTissueClassifyWorkflow(WFname, master_config, InterpolationMode, UseRegistrationMasking):
    """Build the (legacy) tissue-classification sub-workflow.

    Wires a two-stage atlas-to-subject initialization (an Affine ANTs
    registration followed by a two-level SyN ANTs registration, optionally
    constrained by BRAINSROIAuto masks) into a BRAINSABCext
    tissue-classification node, and exposes the averaged modality images,
    label maps, posteriors, and atlas-to-subject transforms on 'outputspec'.

    :param WFname: name for the returned nipype workflow
    :param master_config: dict providing at least 'queue' and 'long_q' cluster queue names
    :param InterpolationMode: interpolation mode handed through to BRAINSABCext
    :param UseRegistrationMasking: when True, ROIAuto masks constrain both registrations
    :return: the configured ``pe.Workflow``
    """
    from nipype.interfaces import ants

    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    tissueClassifyWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['T1List', 'T2List', 'PDList', 'FLList',
                                                             'OtherList', 'T1_count', 'PrimaryT1',
                                                             'atlasDefinition',
                                                             'atlasToSubjectInitialTransform', 'atlasVolume']),
                         run_without_submitting=True,
                         name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasToSubjectTransform',
                                                              'atlasToSubjectInverseTransform',
                                                              'atlasToSubjectRegistrationState',
                                                              'outputLabels',
                                                              'outputHeadLabels',  # ???
                                                              # 't1_corrected', 't2_corrected',
                                                              't1_average',
                                                              't2_average',
                                                              'pd_average',
                                                              'fl_average',
                                                              'posteriorImages',
                                                              ]),
                          run_without_submitting=True,
                          name='outputspec')

    ########################################################
    # Run BABCext on Multi-modal images
    ########################################################
    makeOutImageList = pe.Node(Function(function=MakeOutFileList,
                                        input_names=['T1List', 'T2List', 'PDList', 'FLList',
                                                     'OtherList', 'postfix', 'PrimaryT1'],
                                        output_names=['inImageList', 'outImageList', 'imageTypeList']),
                               run_without_submitting=True, name="99_makeOutImageList")
    tissueClassifyWF.connect(inputsSpec, 'T1List', makeOutImageList, 'T1List')
    tissueClassifyWF.connect(inputsSpec, 'T2List', makeOutImageList, 'T2List')
    tissueClassifyWF.connect(inputsSpec, 'PDList', makeOutImageList, 'PDList')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', makeOutImageList, 'PrimaryT1')
    makeOutImageList.inputs.FLList = []  # an emptyList HACK
    makeOutImageList.inputs.postfix = "_corrected.nii.gz"
    # HACK tissueClassifyWF.connect( inputsSpec, 'FLList', makeOutImageList, 'FLList' )
    tissueClassifyWF.connect(inputsSpec, 'OtherList', makeOutImageList, 'OtherList')

    ##### Initialize with ANTS Transform For AffineComponentBABC
    currentAtlasToSubjectantsRigidRegistration = 'AtlasToSubjectANTsPreABC_Rigid'
    A2SantsRegistrationPreABCRigid = pe.Node(interface=ants.Registration(),
                                             name=currentAtlasToSubjectantsRigidRegistration)
    many_cpu_ANTsRigid_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 4, 2, 8),
                                             'overwrite': True}
    A2SantsRegistrationPreABCRigid.plugin_args = many_cpu_ANTsRigid_options_dictionary
    A2SantsRegistrationPreABCRigid.inputs.num_threads = -1  # let ITK use all available threads
    A2SantsRegistrationPreABCRigid.inputs.dimension = 3
    A2SantsRegistrationPreABCRigid.inputs.transforms = ["Affine", ]
    A2SantsRegistrationPreABCRigid.inputs.transform_parameters = [[0.1]]
    A2SantsRegistrationPreABCRigid.inputs.metric = ['MI']
    A2SantsRegistrationPreABCRigid.inputs.sampling_strategy = ['Regular']
    A2SantsRegistrationPreABCRigid.inputs.sampling_percentage = [0.5]
    A2SantsRegistrationPreABCRigid.inputs.metric_weight = [1.0]
    A2SantsRegistrationPreABCRigid.inputs.radius_or_number_of_bins = [32]
    A2SantsRegistrationPreABCRigid.inputs.number_of_iterations = [[1000, 1000, 500, 100]]
    A2SantsRegistrationPreABCRigid.inputs.convergence_threshold = [1e-8]
    A2SantsRegistrationPreABCRigid.inputs.convergence_window_size = [10]
    A2SantsRegistrationPreABCRigid.inputs.use_histogram_matching = [True]
    A2SantsRegistrationPreABCRigid.inputs.shrink_factors = [[8, 4, 2, 1]]
    A2SantsRegistrationPreABCRigid.inputs.smoothing_sigmas = [[3, 2, 1, 0]]
    A2SantsRegistrationPreABCRigid.inputs.sigma_units = ["vox"]
    A2SantsRegistrationPreABCRigid.inputs.use_estimate_learning_rate_once = [False]
    A2SantsRegistrationPreABCRigid.inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
    A2SantsRegistrationPreABCRigid.inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
    A2SantsRegistrationPreABCRigid.inputs.initialize_transforms_per_stage = True
    A2SantsRegistrationPreABCRigid.inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_Rigid'
    A2SantsRegistrationPreABCRigid.inputs.winsorize_lower_quantile = 0.01
    A2SantsRegistrationPreABCRigid.inputs.winsorize_upper_quantile = 0.99
    A2SantsRegistrationPreABCRigid.inputs.output_warped_image = 'atlas2subjectRigid.nii.gz'
    A2SantsRegistrationPreABCRigid.inputs.output_inverse_warped_image = 'subject2atlasRigid.nii.gz'
    A2SantsRegistrationPreABCRigid.inputs.float = True

    tissueClassifyWF.connect(inputsSpec, 'atlasToSubjectInitialTransform',
                             A2SantsRegistrationPreABCRigid, 'initial_moving_transform')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1',
                             A2SantsRegistrationPreABCRigid, 'fixed_image')
    tissueClassifyWF.connect(inputsSpec, 'atlasVolume',
                             A2SantsRegistrationPreABCRigid, 'moving_image')

    ##### Initialize with ANTS Transform For SyN component BABC
    currentAtlasToSubjectantsRegistration = 'AtlasToSubjectANTsPreABC_SyN'
    A2SantsRegistrationPreABCSyN = pe.Node(interface=ants.Registration(),
                                           name=currentAtlasToSubjectantsRegistration)
    many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 8, 8, 12),
                                           'overwrite': True}
    A2SantsRegistrationPreABCSyN.plugin_args = many_cpu_ANTsSyN_options_dictionary
    A2SantsRegistrationPreABCSyN.inputs.num_threads = -1  # let ITK use all available threads
    A2SantsRegistrationPreABCSyN.inputs.dimension = 3
    A2SantsRegistrationPreABCSyN.inputs.transforms = ["SyN", "SyN"]
    A2SantsRegistrationPreABCSyN.inputs.transform_parameters = [[0.1, 3, 0], [0.1, 3, 0]]
    A2SantsRegistrationPreABCSyN.inputs.metric = ['CC', 'CC']
    A2SantsRegistrationPreABCSyN.inputs.sampling_strategy = [None, None]
    A2SantsRegistrationPreABCSyN.inputs.sampling_percentage = [1.0, 1.0]
    A2SantsRegistrationPreABCSyN.inputs.metric_weight = [1.0, 1.0]
    A2SantsRegistrationPreABCSyN.inputs.radius_or_number_of_bins = [4, 4]
    A2SantsRegistrationPreABCSyN.inputs.number_of_iterations = [[500, 500], [500, 70]]
    A2SantsRegistrationPreABCSyN.inputs.convergence_threshold = [1e-8, 1e-6]
    A2SantsRegistrationPreABCSyN.inputs.convergence_window_size = [12]
    A2SantsRegistrationPreABCSyN.inputs.use_histogram_matching = [True, True]
    A2SantsRegistrationPreABCSyN.inputs.shrink_factors = [[8, 4], [2, 1]]
    A2SantsRegistrationPreABCSyN.inputs.smoothing_sigmas = [[3, 2], [1, 0]]
    A2SantsRegistrationPreABCSyN.inputs.sigma_units = ["vox", "vox"]
    A2SantsRegistrationPreABCSyN.inputs.use_estimate_learning_rate_once = [False, False]
    A2SantsRegistrationPreABCSyN.inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
    A2SantsRegistrationPreABCSyN.inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
    A2SantsRegistrationPreABCSyN.inputs.initialize_transforms_per_stage = True
    A2SantsRegistrationPreABCSyN.inputs.save_state = 'SavedInternalSyNState.h5'
    A2SantsRegistrationPreABCSyN.inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_SyN'
    A2SantsRegistrationPreABCSyN.inputs.winsorize_lower_quantile = 0.01
    A2SantsRegistrationPreABCSyN.inputs.winsorize_upper_quantile = 0.99
    A2SantsRegistrationPreABCSyN.inputs.output_warped_image = 'atlas2subject.nii.gz'
    A2SantsRegistrationPreABCSyN.inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'
    A2SantsRegistrationPreABCSyN.inputs.float = True

    ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
    if UseRegistrationMasking == True:
        from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto

        fixedROIAuto = pe.Node(interface=BRAINSROIAuto(), name="fixedImageROIAUTOMask")
        fixedROIAuto.inputs.ROIAutoDilateSize = 10
        fixedROIAuto.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"
        movingROIAuto = pe.Node(interface=BRAINSROIAuto(), name="movingImageROIAUTOMask")
        # BUGFIX: was `fixedROIAuto.inputs.ROIAutoDilateSize = 10` (copy/paste),
        # which re-set the fixed mask and left the moving mask at its default dilation.
        movingROIAuto.inputs.ROIAutoDilateSize = 10
        movingROIAuto.inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz"
        tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', fixedROIAuto, 'inputVolume')
        tissueClassifyWF.connect(inputsSpec, 'atlasVolume', movingROIAuto, 'inputVolume')
        tissueClassifyWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCRigid, 'fixed_image_mask')
        tissueClassifyWF.connect(movingROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCRigid, 'moving_image_mask')
        tissueClassifyWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCSyN, 'fixed_image_mask')
        tissueClassifyWF.connect(movingROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCSyN, 'moving_image_mask')

    # Chain the affine result into the SyN stage regardless of masking.
    tissueClassifyWF.connect(A2SantsRegistrationPreABCRigid, 'composite_transform',
                             A2SantsRegistrationPreABCSyN, 'initial_moving_transform')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', A2SantsRegistrationPreABCSyN, 'fixed_image')
    tissueClassifyWF.connect(inputsSpec, 'atlasVolume', A2SantsRegistrationPreABCSyN, 'moving_image')

    BABCext = pe.Node(interface=BRAINSABCext(), name="BABC")
    many_cpu_BABC_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 2, 4),
                                        'overwrite': True}
    BABCext.plugin_args = many_cpu_BABC_options_dictionary
    tissueClassifyWF.connect(makeOutImageList, 'inImageList', BABCext, 'inputVolumes')
    tissueClassifyWF.connect(makeOutImageList, 'imageTypeList', BABCext, 'inputVolumeTypes')
    tissueClassifyWF.connect(makeOutImageList, 'outImageList', BABCext, 'outputVolumes')
    BABCext.inputs.debuglevel = 0
    BABCext.inputs.useKNN = True
    BABCext.inputs.maxIterations = 3
    BABCext.inputs.maxBiasDegree = 4
    BABCext.inputs.filterIteration = 3
    BABCext.inputs.filterMethod = 'GradientAnisotropicDiffusion'
    BABCext.inputs.atlasToSubjectTransformType = 'SyN'
    BABCext.inputs.gridSize = [10, 10, 10]
    BABCext.inputs.outputFormat = "NIFTI"
    BABCext.inputs.outputLabels = "brain_label_seg.nii.gz"
    BABCext.inputs.outputDirtyLabels = "volume_label_seg.nii.gz"
    BABCext.inputs.posteriorTemplate = "POSTERIOR_%s.nii.gz"
    BABCext.inputs.atlasToSubjectTransform = "atlas_to_subject.h5"
    # BABCext.inputs.implicitOutputs = ['t1_average_BRAINSABC.nii.gz', 't2_average_BRAINSABC.nii.gz']
    BABCext.inputs.interpolationMode = InterpolationMode
    BABCext.inputs.outputDir = './'
    BABCext.inputs.saveState = 'SavedBABCInternalSyNState.h5'

    tissueClassifyWF.connect(inputsSpec, 'atlasDefinition', BABCext, 'atlasDefinition')
    # NOTE: MUTUALLY EXCLUSIVE with restoreState
    # tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN,
    #                          'composite_transform',
    #                          BABCext, 'atlasToSubjectInitialTransform')
    tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN, 'save_state',
                             BABCext, 'restoreState')

    """
    Get the first T1 and T2 corrected images from BABCext
    """
    """ HACK:  THIS IS NOT NEEDED!  We should use the averged t1 and averaged t2 images instead!
    def get_first_T1_and_T2(in_files,T1_count):
        '''
        Returns the first T1 and T2 file in in_files, based on offset in T1_count.
        '''
        return in_files[0],in_files[T1_count]
    bfc_files = pe.Node(Function(input_names=['in_files','T1_count'],
                               output_names=['t1_corrected','t2_corrected'],
                               function=get_first_T1_and_T2),
                  run_without_submitting=True,
                  name='99_bfc_files' )
    tissueClassifyWF.connect( inputsSpec, 'T1_count', bfc_files, 'T1_count')
    tissueClassifyWF.connect(BABCext,'outputVolumes',bfc_files, 'in_files')

    tissueClassifyWF.connect(bfc_files,'t1_corrected',outputsSpec,'t1_corrected')
    tissueClassifyWF.connect(bfc_files,'t2_corrected',outputsSpec,'t2_corrected')
    #tissueClassifyWF.connect(bfc_files,'pd_corrected',outputsSpec,'pd_corrected')
    #tissueClassifyWF.connect(bfc_files,'fl_corrected',outputsSpec,'fl_corrected')
    """
    #############
    tissueClassifyWF.connect(BABCext, 'saveState', outputsSpec, 'atlasToSubjectRegistrationState')
    tissueClassifyWF.connect(BABCext, 'atlasToSubjectTransform', outputsSpec, 'atlasToSubjectTransform')

    def MakeInverseTransformFileName(TransformFileName):
        """### HACK:  This function is to work around a deficiency in BRAINSABCext
        where the inverse transform name is not being computed properly
        in the list outputs"""
        fixed_inverse_name = TransformFileName.replace(".h5", "_Inverse.h5")
        return [fixed_inverse_name]

    tissueClassifyWF.connect([(BABCext, outputsSpec,
                               [(('atlasToSubjectTransform', MakeInverseTransformFileName),
                                 "atlasToSubjectInverseTransform")]), ])
    tissueClassifyWF.connect(BABCext, 'outputLabels', outputsSpec, 'outputLabels')
    tissueClassifyWF.connect(BABCext, 'outputDirtyLabels', outputsSpec, 'outputHeadLabels')
    tissueClassifyWF.connect(BABCext, 'outputT1AverageImage', outputsSpec, 't1_average')
    tissueClassifyWF.connect(BABCext, 'outputT2AverageImage', outputsSpec, 't2_average')
    tissueClassifyWF.connect(BABCext, 'outputPDAverageImage', outputsSpec, 'pd_average')
    tissueClassifyWF.connect(BABCext, 'outputFLAverageImage', outputsSpec, 'fl_average')
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 0 ), "t1_average")] ), ] )
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 1 ), "t2_average")] ), ] )
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 2 ), "pd_average")] ), ] )

    MakePosteriorDictionaryNode = pe.Node(Function(function=MakePosteriorDictionaryFunc,
                                                   input_names=['posteriorImages'],
                                                   output_names=['posteriorDictionary']),
                                          run_without_submitting=True,
                                          name="99_makePosteriorDictionary")
    tissueClassifyWF.connect(BABCext, 'posteriorImages',
                             MakePosteriorDictionaryNode, 'posteriorImages')
    tissueClassifyWF.connect(MakePosteriorDictionaryNode, 'posteriorDictionary',
                             outputsSpec, 'posteriorImages')

    return tissueClassifyWF
def create_tissue_classify_workflow(
    WFname, master_config, InterpolationMode, UseRegistrationMasking
):
    """Build the tissue-classification sub-workflow (modernized variant).

    Wires a two-stage atlas-to-subject initialization (Affine then SyN ANTs
    registrations configured via ``common_ants_registration_settings``,
    optionally masked) into a BRAINSABCext node, and exposes averaged
    modality images, label maps, posteriors and transforms on 'outputspec'.

    :param WFname: name for the returned nipype workflow
    :param master_config: dict providing at least 'queue' and 'long_q' queue names
    :param InterpolationMode: interpolation mode handed through to BRAINSABCext
    :param UseRegistrationMasking: when True, masks constrain both registrations
    :return: the configured ``pe.Workflow``
    """
    from nipype.interfaces import ants

    CLUSTER_QUEUE = master_config["queue"]
    CLUSTER_QUEUE_LONG = master_config["long_q"]

    tissueClassifyWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(
        interface=IdentityInterface(
            fields=[
                "T1List",
                "T2List",
                "PDList",
                "FLList",
                "OTHERList",
                "T1_count",
                "PrimaryT1",
                "atlasDefinition",
                "atlasToSubjectInitialTransform",
                "atlasVolume",
                "atlasheadregion",
            ]
        ),
        run_without_submitting=True,
        name="inputspec",
    )
    outputsSpec = pe.Node(
        interface=IdentityInterface(
            fields=[
                "atlasToSubjectTransform",
                "atlasToSubjectInverseTransform",
                "atlasToSubjectRegistrationState",
                "outputLabels",
                "outputHeadLabels",  # ???
                # 't1_corrected', 't2_corrected',
                "t1_average",
                "t2_average",
                "pd_average",
                "fl_average",
                "posteriorImages",
            ]
        ),
        run_without_submitting=True,
        name="outputspec",
    )

    ########################################################
    # Run BABCext on Multi-modal images
    ########################################################
    makeOutImageList = pe.Node(
        Function(
            function=make_out_from_file,
            input_names=[
                "T1List",
                "T2List",
                "PDList",
                "FLList",
                "OTHERList",
                "postfix",
                "postfixBFC",
                "postfixUnwrapped",
                "PrimaryT1",
                "ListOutType",
            ],
            output_names=[
                "inImageList",
                "outImageList",
                "outBFCImageList",
                "outUnwrappedImageList",
                "imageTypeList",
            ],
        ),
        run_without_submitting=True,
        name="99_makeOutImageList",
    )
    tissueClassifyWF.connect(inputsSpec, "T1List", makeOutImageList, "T1List")
    tissueClassifyWF.connect(inputsSpec, "T2List", makeOutImageList, "T2List")
    tissueClassifyWF.connect(inputsSpec, "PDList", makeOutImageList, "PDList")
    tissueClassifyWF.connect(inputsSpec, "FLList", makeOutImageList, "FLList")
    tissueClassifyWF.connect(inputsSpec, "OTHERList", makeOutImageList, "OTHERList")
    tissueClassifyWF.connect(inputsSpec, "PrimaryT1", makeOutImageList, "PrimaryT1")
    makeOutImageList.inputs.ListOutType = False
    makeOutImageList.inputs.postfix = "_corrected.nii.gz"
    # BFC/unwrapped outputs are not used by this workflow.
    makeOutImageList.inputs.postfixBFC = "_NOT_USED"
    makeOutImageList.inputs.postfixUnwrapped = "_NOT_USED"

    ##### Initialize with ANTS Transform For AffineComponentBABC
    currentAtlasToSubjectantsRigidRegistration = "AtlasToSubjectANTsPreABC_Affine"
    A2SantsRegistrationPreABCAffine = pe.Node(
        interface=ants.Registration(), name=currentAtlasToSubjectantsRigidRegistration
    )
    many_cpu_ANTsRigid_options_dictionary = {
        "qsub_args": modify_qsub_args(CLUSTER_QUEUE, 4, 2, 8),
        "overwrite": True,
    }
    A2SantsRegistrationPreABCAffine.plugin_args = many_cpu_ANTsRigid_options_dictionary
    # All registration parameters come from the shared helper so every
    # BAW registration stage is configured consistently.
    common_ants_registration_settings(
        antsRegistrationNode=A2SantsRegistrationPreABCAffine,
        registrationTypeDescription="AtlasToSubjectANTsPreABC_Affine",
        output_transform_prefix="AtlasToSubjectPreBABC_Rigid",
        output_warped_image="atlas2subjectRigid.nii.gz",
        output_inverse_warped_image="subject2atlasRigid.nii.gz",
        save_state=None,
        invert_initial_moving_transform=False,
        initial_moving_transform=None,
    )

    tissueClassifyWF.connect(
        inputsSpec,
        "atlasToSubjectInitialTransform",
        A2SantsRegistrationPreABCAffine,
        "initial_moving_transform",
    )
    tissueClassifyWF.connect(
        inputsSpec, "PrimaryT1", A2SantsRegistrationPreABCAffine, "fixed_image"
    )
    tissueClassifyWF.connect(
        inputsSpec, "atlasVolume", A2SantsRegistrationPreABCAffine, "moving_image"
    )

    ##### Initialize with ANTS Transform For SyN component BABC
    currentAtlasToSubjectantsRegistration = "AtlasToSubjectANTsPreABC_SyN"
    A2SantsRegistrationPreABCSyN = pe.Node(
        interface=ants.Registration(), name=currentAtlasToSubjectantsRegistration
    )
    many_cpu_ANTsSyN_options_dictionary = {
        "qsub_args": modify_qsub_args(CLUSTER_QUEUE_LONG, 8, 8, 16),
        "overwrite": True,
    }
    A2SantsRegistrationPreABCSyN.plugin_args = many_cpu_ANTsSyN_options_dictionary
    common_ants_registration_settings(
        antsRegistrationNode=A2SantsRegistrationPreABCSyN,
        registrationTypeDescription="AtlasToSubjectANTsPreABC_SyN",
        output_transform_prefix="AtlasToSubjectPreBABC_SyN",
        output_warped_image="atlas2subject.nii.gz",
        output_inverse_warped_image="subject2atlas.nii.gz",
        save_state="SavedInternalSyNState.h5",
        invert_initial_moving_transform=False,
        initial_moving_transform=None,
    )

    ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
    if UseRegistrationMasking == True:
        from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto

        fixedROIAuto = pe.Node(interface=BRAINSROIAuto(), name="fixedImageROIAUTOMask")
        fixedROIAuto.inputs.ROIAutoDilateSize = (
            15
        )  ## NOTE Very large to include some skull in bad cases of bias where back of head is very dark
        fixedROIAuto.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"
        tissueClassifyWF.connect(inputsSpec, "PrimaryT1", fixedROIAuto, "inputVolume")
        tissueClassifyWF.connect(
            fixedROIAuto,
            "outputROIMaskVolume",
            A2SantsRegistrationPreABCAffine,
            "fixed_image_masks",
        )
        tissueClassifyWF.connect(
            fixedROIAuto,
            "outputROIMaskVolume",
            A2SantsRegistrationPreABCSyN,
            "fixed_image_masks",
        )

        ## NOTE: Always use atlas head region to avoid computing this every time.
        tissueClassifyWF.connect(
            inputsSpec,
            "atlasheadregion",
            A2SantsRegistrationPreABCAffine,
            "moving_image_masks",
        )
        tissueClassifyWF.connect(
            inputsSpec,
            "atlasheadregion",
            A2SantsRegistrationPreABCSyN,
            "moving_image_masks",
        )

    # Chain the affine result into the SyN stage regardless of masking.
    tissueClassifyWF.connect(
        A2SantsRegistrationPreABCAffine,
        "composite_transform",
        A2SantsRegistrationPreABCSyN,
        "initial_moving_transform",
    )
    tissueClassifyWF.connect(
        inputsSpec, "PrimaryT1", A2SantsRegistrationPreABCSyN, "fixed_image"
    )
    tissueClassifyWF.connect(
        inputsSpec, "atlasVolume", A2SantsRegistrationPreABCSyN, "moving_image"
    )

    BABCext = pe.Node(interface=BRAINSABCext(), name="BABC")
    many_cpu_BABC_options_dictionary = {
        "qsub_args": modify_qsub_args(CLUSTER_QUEUE, 13, 8, 16),
        "overwrite": True,
    }
    BABCext.plugin_args = many_cpu_BABC_options_dictionary
    tissueClassifyWF.connect(makeOutImageList, "inImageList", BABCext, "inputVolumes")
    tissueClassifyWF.connect(
        makeOutImageList, "imageTypeList", BABCext, "inputVolumeTypes"
    )
    tissueClassifyWF.connect(makeOutImageList, "outImageList", BABCext, "outputVolumes")
    BABCext.inputs.debuglevel = 0
    BABCext.inputs.useKNN = True
    BABCext.inputs.purePlugsThreshold = (
        0.1
    )  # New feature to allow for pure plug processing and improvements.
    BABCext.inputs.maxIterations = 2
    BABCext.inputs.maxBiasDegree = 0
    BABCext.inputs.filterIteration = 3
    # BABCext.inputs.filterMethod = 'GradientAnisotropicDiffusion' ## If inputs are denoised, we don't need this
    BABCext.inputs.filterMethod = "None"
    BABCext.inputs.atlasToSubjectTransformType = "SyN"  # Using SyN, so no bsplines here
    BABCext.inputs.gridSize = [10, 10, 10]
    BABCext.inputs.outputFormat = "NIFTI"
    BABCext.inputs.outputLabels = "brain_label_seg.nii.gz"
    BABCext.inputs.outputDirtyLabels = "volume_label_seg.nii.gz"
    BABCext.inputs.posteriorTemplate = "POSTERIOR_%s.nii.gz"
    BABCext.inputs.atlasToSubjectTransform = "atlas_to_subject.h5"
    # BABCext.inputs.implicitOutputs = ['t1_average_BRAINSABC.nii.gz', 't2_average_BRAINSABC.nii.gz']
    BABCext.inputs.interpolationMode = InterpolationMode
    BABCext.inputs.outputDir = "./"
    BABCext.inputs.saveState = "SavedBABCInternalSyNState.h5"

    tissueClassifyWF.connect(inputsSpec, "atlasDefinition", BABCext, "atlasDefinition")
    # NOTE: MUTUALLY EXCLUSIVE with restoreState
    # tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN,
    #                         'composite_transform',
    #                         BABCext, 'atlasToSubjectInitialTransform')
    tissueClassifyWF.connect(
        A2SantsRegistrationPreABCSyN, "save_state", BABCext, "restoreState"
    )

    """
    Get the first T1 and T2 corrected images from BABCext
    """
    """ HACK:  THIS IS NOT NEEDED!  We should use the averged t1 and averaged t2 images instead!
    def get_first_T1_and_T2(in_files,T1_count):
        '''
        Returns the first T1 and T2 file in in_files, based on offset in T1_count.
        '''
        return in_files[0],in_files[T1_count]
    bfc_files = pe.Node(Function(input_names=['in_files','T1_count'],
                               output_names=['t1_corrected','t2_corrected'],
                               function=get_first_T1_and_T2),
                  run_without_submitting=True,
                  name='99_bfc_files' )
    tissueClassifyWF.connect( inputsSpec, 'T1_count', bfc_files, 'T1_count')
    tissueClassifyWF.connect(BABCext,'outputVolumes',bfc_files, 'in_files')

    tissueClassifyWF.connect(bfc_files,'t1_corrected',outputsSpec,'t1_corrected')
    tissueClassifyWF.connect(bfc_files,'t2_corrected',outputsSpec,'t2_corrected')
    #tissueClassifyWF.connect(bfc_files,'pd_corrected',outputsSpec,'pd_corrected')
    #tissueClassifyWF.connect(bfc_files,'fl_corrected',outputsSpec,'fl_corrected')
    """
    #############
    tissueClassifyWF.connect(
        BABCext, "saveState", outputsSpec, "atlasToSubjectRegistrationState"
    )
    tissueClassifyWF.connect(
        BABCext, "atlasToSubjectTransform", outputsSpec, "atlasToSubjectTransform"
    )

    def make_inverse_transform_filename(TransformFileName):
        """### HACK:  This function is to work around a deficiency in BRAINSABCext
        where the inverse transform name is not being computed properly
        in the list outputs

        :param TransformFileName: forward transform filename ending in '.h5'
        :return: single-element list holding the derived inverse filename
        """
        fixed_inverse_name = TransformFileName.replace(".h5", "_Inverse.h5")
        return [fixed_inverse_name]

    tissueClassifyWF.connect(
        [
            (
                BABCext,
                outputsSpec,
                [
                    (
                        ("atlasToSubjectTransform", make_inverse_transform_filename),
                        "atlasToSubjectInverseTransform",
                    )
                ],
            )
        ]
    )
    tissueClassifyWF.connect(BABCext, "outputLabels", outputsSpec, "outputLabels")
    tissueClassifyWF.connect(
        BABCext, "outputDirtyLabels", outputsSpec, "outputHeadLabels"
    )
    tissueClassifyWF.connect(BABCext, "outputT1AverageImage", outputsSpec, "t1_average")
    tissueClassifyWF.connect(BABCext, "outputT2AverageImage", outputsSpec, "t2_average")
    tissueClassifyWF.connect(BABCext, "outputPDAverageImage", outputsSpec, "pd_average")
    tissueClassifyWF.connect(BABCext, "outputFLAverageImage", outputsSpec, "fl_average")
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', get_list_index_or_none_if_out_of_range, 0 ), "t1_average")] ), ] )
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', get_list_index_or_none_if_out_of_range, 1 ), "t2_average")] ), ] )
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', get_list_index_or_none_if_out_of_range, 2 ), "pd_average")] ), ] )

    MakePosteriorListOfTuplesNode = pe.Node(
        Function(
            function=make_posteriour_list_of_tuplefunc,
            input_names=["posteriorImages"],
            output_names=["posteriorDictionary"],
        ),
        run_without_submitting=True,
        name="99_makePosteriorDictionary",
    )
    tissueClassifyWF.connect(
        BABCext, "posteriorImages", MakePosteriorListOfTuplesNode, "posteriorImages"
    )
    tissueClassifyWF.connect(
        MakePosteriorListOfTuplesNode,
        "posteriorDictionary",
        outputsSpec,
        "posteriorImages",
    )
    return tissueClassifyWF
def CreateMALFWorkflow(WFname, master_config, good_subjects, BASE_DATA_GRABBER_DIR):
    """Build the multi-atlas label-fusion (MALF) sub-workflow.

    For each atlas subject in ``good_subjects``: grab the atlas T1, label map
    and landmarks; initialize an affine transform from landmarks
    (BRAINSLandmarkInitializer); refine with Affine then SyN ANTs
    registrations (ROIAuto-masked); resample the atlas labels into subject
    space; finally fuse all warped label maps with ``ants.JointFusion``.

    :param WFname: name for the returned nipype workflow
    :param master_config: dict providing at least 'queue' and 'long_q' cluster queue names
    :param good_subjects: iterable of atlas subject ids to use for fusion
    :param BASE_DATA_GRABBER_DIR: root directory the DataGrabber searches under
    :return: the configured ``pe.Workflow``
    """
    from nipype.interfaces import ants

    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    MALFWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(
        interface=IdentityInterface(fields=[
            'subj_t1_image',  # Desired image to create label map for
            'subj_lmks',  # The landmarks corresponding to t1_image
            'atlasWeightFilename'  # The static weights file name
        ]),
        run_without_submitting=True,
        name='inputspec')
    outputsSpec = pe.Node(
        interface=IdentityInterface(fields=['MALF_neuro2012_labelmap']),
        run_without_submitting=True,
        name='outputspec')

    # One node per atlas subject, keyed by subject id.
    BLICreator = dict()
    MALF_DG = dict()
    A2SantsRegistrationPreABCRigid = dict()
    A2SantsRegistrationPreABCSyN = dict()
    fixedROIAuto = dict()
    movingROIAuto = dict()
    labelMapResample = dict()

    warpedAtlasT1MergeNode = pe.Node(interface=Merge(len(good_subjects)),
                                     name="T1sMergeAtlas")
    warpedAtlasLblMergeNode = pe.Node(interface=Merge(len(good_subjects)),
                                      name="LblMergeAtlas")

    malf_atlas_mergeindex = 1  # Merge inputs are 1-based ('in1', 'in2', ...)
    for malf_atlas_subject in good_subjects:
        ## Need DataGrabber Here For the Atlas
        MALF_DG[malf_atlas_subject] = pe.Node(
            interface=nio.DataGrabber(
                infields=['subject'],
                outfields=['malf_atlas_t1', 'malf_atlas_lbls', 'malf_atlas_lmks']),
            run_without_submitting=True,
            name='MALF_DG_' + malf_atlas_subject)
        # MALF_DG[malf_atlas_subject].inputs.base_directory = master_config['previousresult']
        MALF_DG[malf_atlas_subject].inputs.base_directory = BASE_DATA_GRABBER_DIR
        MALF_DG[malf_atlas_subject].inputs.subject = malf_atlas_subject
        MALF_DG[malf_atlas_subject].inputs.field_template = {
            'malf_atlas_t1': '%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
            'malf_atlas_lbls': '%s/TissueClassify/neuro_lbls.nii.gz',
            'malf_atlas_lmks': '%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
        }
        MALF_DG[malf_atlas_subject].inputs.template_args = {
            'malf_atlas_t1': [['subject']],
            'malf_atlas_lbls': [['subject']],
            'malf_atlas_lmks': [['subject']],
        }
        MALF_DG[malf_atlas_subject].inputs.template = '*'
        MALF_DG[malf_atlas_subject].inputs.sort_filelist = True
        MALF_DG[malf_atlas_subject].inputs.raise_on_empty = True

        ## Create BLI first
        ########################################################
        # Run BLI atlas_to_subject
        ########################################################
        BLICreator[malf_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(),
                                                 name="BLI_" + malf_atlas_subject)
        BLICreator[malf_atlas_subject].inputs.outputTransformFilename = \
            "landmarkInitializer_{0}_to_subject_transform.h5".format(malf_atlas_subject)

        MALFWF.connect(inputsSpec, 'atlasWeightFilename',
                       BLICreator[malf_atlas_subject], 'inputWeightFilename')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_lmks',
                       BLICreator[malf_atlas_subject], 'inputMovingLandmarkFilename')
        MALFWF.connect(inputsSpec, 'subj_lmks',
                       BLICreator[malf_atlas_subject], 'inputFixedLandmarkFilename')

        ##### Initialize with ANTS Transform For AffineComponentBABC
        currentAtlasToSubjectantsRigidRegistration = \
            'Rigid_AtlasToSubjectANTsPreABC_' + malf_atlas_subject
        A2SantsRegistrationPreABCRigid[malf_atlas_subject] = pe.Node(
            interface=ants.Registration(),
            name=currentAtlasToSubjectantsRigidRegistration)
        many_cpu_ANTsRigid_options_dictionary = {
            'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 2, 1, 1),
            'overwrite': True
        }
        # Local alias: same node object, shorter configuration below.
        rigid_reg = A2SantsRegistrationPreABCRigid[malf_atlas_subject]
        rigid_reg.plugin_args = many_cpu_ANTsRigid_options_dictionary
        rigid_reg.inputs.num_threads = -1  # let ITK use all available threads
        rigid_reg.inputs.dimension = 3
        rigid_reg.inputs.transforms = ["Affine", ]
        rigid_reg.inputs.transform_parameters = [[0.1]]
        rigid_reg.inputs.metric = ['MI']
        rigid_reg.inputs.sampling_strategy = ['Regular']
        rigid_reg.inputs.sampling_percentage = [0.5]
        rigid_reg.inputs.metric_weight = [1.0]
        rigid_reg.inputs.radius_or_number_of_bins = [32]
        rigid_reg.inputs.number_of_iterations = [[1000, 1000, 500, 100]]
        rigid_reg.inputs.convergence_threshold = [1e-8]
        rigid_reg.inputs.convergence_window_size = [10]
        rigid_reg.inputs.use_histogram_matching = [True]
        rigid_reg.inputs.shrink_factors = [[8, 4, 2, 1]]
        rigid_reg.inputs.smoothing_sigmas = [[3, 2, 1, 0]]
        rigid_reg.inputs.sigma_units = ["vox"]
        rigid_reg.inputs.use_estimate_learning_rate_once = [False]
        rigid_reg.inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        rigid_reg.inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        rigid_reg.inputs.initialize_transforms_per_stage = True
        rigid_reg.inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_Rigid'
        rigid_reg.inputs.winsorize_lower_quantile = 0.01
        rigid_reg.inputs.winsorize_upper_quantile = 0.99
        ## NO NEED FOR THIS A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_warped_image = 'atlas2subjectRigid.nii.gz'
        ## NO NEED FOR THIS A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlasRigid.nii.gz'

        MALFWF.connect(BLICreator[malf_atlas_subject], 'outputTransformFilename',
                       rigid_reg, 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image', rigid_reg, 'fixed_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                       rigid_reg, 'moving_image')

        ##### Initialize with ANTS Transform For SyN component BABC
        currentAtlasToSubjectantsRegistration = \
            'SyN_AtlasToSubjectANTsPreABC_' + malf_atlas_subject
        A2SantsRegistrationPreABCSyN[malf_atlas_subject] = pe.Node(
            interface=ants.Registration(),
            name=currentAtlasToSubjectantsRegistration)
        many_cpu_ANTsSyN_options_dictionary = {
            'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 4),
            'overwrite': True
        }
        syn_reg = A2SantsRegistrationPreABCSyN[malf_atlas_subject]
        syn_reg.plugin_args = many_cpu_ANTsSyN_options_dictionary
        syn_reg.inputs.num_threads = -1  # let ITK use all available threads
        syn_reg.inputs.dimension = 3
        syn_reg.inputs.transforms = ["SyN", "SyN"]
        syn_reg.inputs.transform_parameters = [[0.1, 3, 0], [0.1, 3, 0]]
        syn_reg.inputs.metric = ['MI', 'MI']
        syn_reg.inputs.sampling_strategy = [None, None]
        syn_reg.inputs.sampling_percentage = [1.0, 1.0]
        syn_reg.inputs.metric_weight = [1.0, 1.0]
        syn_reg.inputs.radius_or_number_of_bins = [32, 32]
        syn_reg.inputs.number_of_iterations = [[500, 500, 500, 500], [70]]
        syn_reg.inputs.convergence_threshold = [1e-8, 1e-4]
        syn_reg.inputs.convergence_window_size = [12]
        syn_reg.inputs.use_histogram_matching = [True, True]
        syn_reg.inputs.shrink_factors = [[8, 4, 3, 2], [1]]
        syn_reg.inputs.smoothing_sigmas = [[3, 2, 2, 1], [0]]
        syn_reg.inputs.sigma_units = ["vox", "vox"]
        syn_reg.inputs.use_estimate_learning_rate_once = [False, False]
        syn_reg.inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        syn_reg.inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        syn_reg.inputs.initialize_transforms_per_stage = True
        ## NO NEED FOR THIS A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.save_state = 'SavedInternalSyNState.h5'
        syn_reg.inputs.output_transform_prefix = malf_atlas_subject + '_ToSubjectPreBABC_SyN'
        syn_reg.inputs.winsorize_lower_quantile = 0.01
        syn_reg.inputs.winsorize_upper_quantile = 0.99
        syn_reg.inputs.output_warped_image = malf_atlas_subject + '_2subject.nii.gz'
        ## NO NEED FOR THIS A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'

        ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
        UseRegistrationMasking = True
        if UseRegistrationMasking == True:
            from SEMTools.segmentation.specialized import BRAINSROIAuto

            fixedROIAuto[malf_atlas_subject] = pe.Node(
                interface=BRAINSROIAuto(),
                name="fixedROIAUTOMask_" + malf_atlas_subject)
            fixedROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize = 10
            fixedROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"
            movingROIAuto[malf_atlas_subject] = pe.Node(
                interface=BRAINSROIAuto(),
                name="movingROIAUTOMask_" + malf_atlas_subject)
            # BUGFIX: was `fixedROIAuto[...].inputs.ROIAutoDilateSize = 10` (copy/paste),
            # which re-set the fixed mask and left the moving mask at its default dilation.
            movingROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize = 10
            movingROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz"
            MALFWF.connect(inputsSpec, 'subj_t1_image',
                           fixedROIAuto[malf_atlas_subject], 'inputVolume')
            MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                           movingROIAuto[malf_atlas_subject], 'inputVolume')
            MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           rigid_reg, 'fixed_image_mask')
            MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           rigid_reg, 'moving_image_mask')
            MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           syn_reg, 'fixed_image_mask')
            MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           syn_reg, 'moving_image_mask')

        # NOTE(review): 'composite_transform' is a single filename, not a list;
        # routing it through getListIndexOrNoneIfOutOfRange looks suspicious but is
        # preserved as-is -- confirm against that helper's handling of strings.
        MALFWF.connect(rigid_reg,
                       ('composite_transform', getListIndexOrNoneIfOutOfRange, 0),
                       syn_reg, 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image', syn_reg, 'fixed_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                       syn_reg, 'moving_image')

        # Resample the atlas label map into subject space with the SyN result.
        labelMapResample[malf_atlas_subject] = pe.Node(
            interface=ants.ApplyTransforms(),
            name="WLABEL_" + malf_atlas_subject)
        many_cpu_labelMapResample_options_dictionary = {
            'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1),
            'overwrite': True
        }
        resampler = labelMapResample[malf_atlas_subject]
        resampler.plugin_args = many_cpu_labelMapResample_options_dictionary
        resampler.inputs.dimension = 3
        resampler.inputs.output_image = malf_atlas_subject + '_2_subj_lbl.nii.gz'
        resampler.inputs.interpolation = 'MultiLabel'  # label-safe interpolation
        resampler.inputs.default_value = 0
        resampler.inputs.invert_transform_flags = [False]
        MALFWF.connect(syn_reg, 'composite_transform', resampler, 'transforms')
        MALFWF.connect(inputsSpec, 'subj_t1_image', resampler, 'reference_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_lbls',
                       resampler, 'input_image')

        MALFWF.connect(syn_reg, 'warped_image',
                       warpedAtlasT1MergeNode, 'in' + str(malf_atlas_mergeindex))
        MALFWF.connect(resampler, 'output_image',
                       warpedAtlasLblMergeNode, 'in' + str(malf_atlas_mergeindex))
        malf_atlas_mergeindex += 1

    # Fuse all per-atlas warped intensity/label images into one label map.
    jointFusion = pe.Node(interface=ants.JointFusion(), name="JointFusion")
    many_cpu_JointFusion_options_dictionary = {
        'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 4, 4),
        'overwrite': True
    }
    jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary
    jointFusion.inputs.dimension = 3
    jointFusion.inputs.num_modalities = 1
    jointFusion.inputs.method = 'Joint[0.1,2]'
    jointFusion.inputs.output_label_image = 'fusion_neuro2012_20.nii.gz'

    MALFWF.connect(warpedAtlasT1MergeNode, 'out', jointFusion, 'warped_intensity_images')
    MALFWF.connect(warpedAtlasLblMergeNode, 'out', jointFusion, 'warped_label_images')
    MALFWF.connect(inputsSpec, 'subj_t1_image', jointFusion, 'target_image')
    MALFWF.connect(jointFusion, 'output_label_image',
                   outputsSpec, 'MALF_neuro2012_labelmap')

    return MALFWF
def segmentation(
    projectid, subjectid, sessionid, master_config, onlyT1=True, pipeline_name=""
):
    """
    Build the per-session BRAINSCut sub-cortical segmentation workflow.

    Wires together, in order:
      1. clipping of the T1 average with the BABC brain labels,
      2. an ANTs SyN atlas-to-subject registration node restored from the
         BABC registration state (a pass-through until transform collapsing
         in ANTS is fixed -- see TODO below),
      3. the BRAINSCut workflow itself (``create_brains_cut_workflow``),
      4. DataSinks for the cleaned segmentations plus a BRAINSSnapShotWriter
         QA image,
      5. resampling of all session results back into NAC atlas space and the
         DataSinks for those warped images/transforms.

    :param projectid: project identifier; used only to build output paths
    :param subjectid: subject identifier; used in node names and output paths
    :param sessionid: session identifier; used in node names and output paths
    :param master_config: experiment configuration dict; keys read here:
        'long_q', 'queue', 'ds_overwrite', 'resultdir', 'plugin_name',
        'plugin_args' (the whole dict is also passed to nipype
        ``config.update_config``)
    :param onlyT1: when False, the T2 average is additionally fed to
        BRAINSCut and merged into the snapshot/atlas-space outputs
    :param pipeline_name: name of the returned nipype Workflow
    :return: the fully wired nipype Workflow (``baw200``)
    """
    import os.path
    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces import ants
    from nipype.interfaces.utility import IdentityInterface, Function, Merge

    # Set universal pipeline options
    from nipype import config

    config.update_config(master_config)

    from PipeLineFunctionHelpers import clip_t1_image_with_brain_mask
    from .WorkupT1T2BRAINSCut import create_brains_cut_workflow
    from utilities.distributed import modify_qsub_args
    from nipype.interfaces.semtools import BRAINSSnapShotWriter

    # CLUSTER_QUEUE=master_config['queue']
    CLUSTER_QUEUE_LONG = master_config["long_q"]
    baw200 = pe.Workflow(name=pipeline_name)

    # HACK: print for debugging -- dumps every master_config entry to stdout.
    # (The loop variable 'itme' is a typo for 'item'; harmless, kept as-is.)
    for key, itme in list(master_config.items()):
        print(("-" * 30))
        print((key, ":", itme))
    print(("-" * 30))
    # END HACK

    # Upstream inputs: averaged modalities, BABC labels/posteriors, the saved
    # atlas-to-subject registration state, and the BRAINSCut probability maps
    # and spherical-coordinate images (rho/phi/theta) plus the trained model.
    inputsSpec = pe.Node(
        interface=IdentityInterface(
            fields=[
                "t1_average",
                "t2_average",
                "template_t1",
                "hncma_atlas",
                "LMIatlasToSubject_tx",
                "inputLabels",
                "inputHeadLabels",
                "posteriorImages",
                "UpdatedPosteriorsList",
                "atlasToSubjectRegistrationState",
                "rho",
                "phi",
                "theta",
                "l_caudate_ProbabilityMap",
                "r_caudate_ProbabilityMap",
                "l_hippocampus_ProbabilityMap",
                "r_hippocampus_ProbabilityMap",
                "l_putamen_ProbabilityMap",
                "r_putamen_ProbabilityMap",
                "l_thalamus_ProbabilityMap",
                "r_thalamus_ProbabilityMap",
                "l_accumben_ProbabilityMap",
                "r_accumben_ProbabilityMap",
                "l_globus_ProbabilityMap",
                "r_globus_ProbabilityMap",
                "trainModelFile_txtD0060NT0060_gz",
            ]
        ),
        run_without_submitting=True,
        name="inputspec",
    )

    # outputsSpec = pe.Node(interface=IdentityInterface(fields=[...]),
    #                       run_without_submitting=True, name='outputspec')

    currentClipT1ImageWithBrainMaskName = (
        "ClipT1ImageWithBrainMask_" + str(subjectid) + "_" + str(sessionid)
    )
    # Mask the T1 average with the BABC brain labels before segmentation.
    ClipT1ImageWithBrainMaskNode = pe.Node(
        interface=Function(
            function=clip_t1_image_with_brain_mask,
            input_names=["t1_image", "brain_labels", "clipped_file_name"],
            output_names=["clipped_file"],
        ),
        name=currentClipT1ImageWithBrainMaskName,
    )
    ClipT1ImageWithBrainMaskNode.inputs.clipped_file_name = (
        "clipped_from_BABC_labels_t1.nii.gz"
    )

    baw200.connect(
        [
            (
                inputsSpec,
                ClipT1ImageWithBrainMaskNode,
                [("t1_average", "t1_image"), ("inputLabels", "brain_labels")],
            )
        ]
    )

    currentA2SantsRegistrationPostABCSyN = (
        "A2SantsRegistrationPostABCSyN_" + str(subjectid) + "_" + str(sessionid)
    )
    ## TODO: It would be great to update the BRAINSABC atlasToSubjectTransform at this point, but
    ## That requires more testing, and fixes to ANTS to properly collapse transforms.
    ## For now we are simply creating a dummy node to pass through
    A2SantsRegistrationPostABCSyN = pe.Node(
        interface=ants.Registration(), name=currentA2SantsRegistrationPostABCSyN
    )
    many_cpu_ANTsSyN_options_dictionary = {
        "qsub_args": modify_qsub_args(CLUSTER_QUEUE_LONG, 8, 8, 16),
        "overwrite": True,
    }
    A2SantsRegistrationPostABCSyN.plugin_args = many_cpu_ANTsSyN_options_dictionary
    # NOTE(review): common_ants_registration_settings is neither imported nor
    # defined in this view -- presumably a module-level helper elsewhere in
    # this file that fills in the shared ants.Registration parameters; confirm.
    common_ants_registration_settings(
        antsRegistrationNode=A2SantsRegistrationPostABCSyN,
        registrationTypeDescription="A2SantsRegistrationPostABCSyN",
        output_transform_prefix="AtlasToSubjectPostBABC_SyN",
        output_warped_image="atlas2subjectPostBABC.nii.gz",
        output_inverse_warped_image="subject2atlasPostBABC.nii.gz",
        save_state="SavedInternalSyNStatePostBABC.h5",
        invert_initial_moving_transform=False,
        initial_moving_transform=None,
    )

    ## TODO: Try multi-modal registration here
    # Registration resumes from the BABC-saved state rather than starting over.
    baw200.connect(
        [
            (
                inputsSpec,
                A2SantsRegistrationPostABCSyN,
                [
                    ("atlasToSubjectRegistrationState", "restore_state"),
                    ("t1_average", "fixed_image"),
                    ("template_t1", "moving_image"),
                ],
            )
        ]
    )

    # The BRAINSCut segmentation sub-workflow proper.
    myLocalSegWF = create_brains_cut_workflow(
        projectid,
        subjectid,
        sessionid,
        master_config["queue"],
        master_config["long_q"],
        "Segmentation",
        onlyT1,
    )

    # NOTE: "Averge" is a long-standing typo baked into the node name; fixing
    # the spelling would change the node's cache directory, so it is kept.
    MergeStage2AverageImagesName = "99_mergeAvergeStage2Images_" + str(sessionid)
    MergeStage2AverageImages = pe.Node(
        interface=Merge(2),
        run_without_submitting=True,
        name=MergeStage2AverageImagesName,
    )
    baw200.connect(
        [
            (
                inputsSpec,
                myLocalSegWF,
                [
                    ("t1_average", "inputspec.T1Volume"),
                    ("template_t1", "inputspec.template_t1"),
                    ("posteriorImages", "inputspec.posteriorDictionary"),
                    ("inputLabels", "inputspec.RegistrationROI"),
                ],
            ),
            (inputsSpec, MergeStage2AverageImages, [("t1_average", "in1")]),
            (
                A2SantsRegistrationPostABCSyN,
                myLocalSegWF,
                [("composite_transform", "inputspec.atlasToSubjectTransform")],
            ),
        ]
    )

    # Feed all per-structure probability maps and spherical coordinates to
    # BRAINSCut (names pass straight through to the sub-workflow's inputspec).
    baw200.connect(
        [
            (
                inputsSpec,
                myLocalSegWF,
                [
                    ("rho", "inputspec.rho"),
                    ("phi", "inputspec.phi"),
                    ("theta", "inputspec.theta"),
                    ("l_caudate_ProbabilityMap", "inputspec.l_caudate_ProbabilityMap"),
                    ("r_caudate_ProbabilityMap", "inputspec.r_caudate_ProbabilityMap"),
                    (
                        "l_hippocampus_ProbabilityMap",
                        "inputspec.l_hippocampus_ProbabilityMap",
                    ),
                    (
                        "r_hippocampus_ProbabilityMap",
                        "inputspec.r_hippocampus_ProbabilityMap",
                    ),
                    ("l_putamen_ProbabilityMap", "inputspec.l_putamen_ProbabilityMap"),
                    ("r_putamen_ProbabilityMap", "inputspec.r_putamen_ProbabilityMap"),
                    (
                        "l_thalamus_ProbabilityMap",
                        "inputspec.l_thalamus_ProbabilityMap",
                    ),
                    (
                        "r_thalamus_ProbabilityMap",
                        "inputspec.r_thalamus_ProbabilityMap",
                    ),
                    (
                        "l_accumben_ProbabilityMap",
                        "inputspec.l_accumben_ProbabilityMap",
                    ),
                    (
                        "r_accumben_ProbabilityMap",
                        "inputspec.r_accumben_ProbabilityMap",
                    ),
                    ("l_globus_ProbabilityMap", "inputspec.l_globus_ProbabilityMap"),
                    ("r_globus_ProbabilityMap", "inputspec.r_globus_ProbabilityMap"),
                    (
                        "trainModelFile_txtD0060NT0060_gz",
                        "inputspec.trainModelFile_txtD0060NT0060_gz",
                    ),
                ],
            )
        ]
    )

    if not onlyT1:
        baw200.connect(
            [
                (inputsSpec, myLocalSegWF, [("t2_average", "inputspec.T2Volume")]),
                (inputsSpec, MergeStage2AverageImages, [("t2_average", "in2")]),
            ]
        )
        file_count = 15  # Count of files to merge into MergeSessionSubjectToAtlas
    else:
        file_count = 14  # Count of files to merge into MergeSessionSubjectToAtlas

    ## NOTE: Element 0 of AccumulatePriorsList is the accumulated GM tissue
    # baw200.connect([(AccumulateLikeTissuePosteriorsNode, myLocalSegWF,
    #                  [(('AccumulatePriorsList', get_list_index, 0), "inputspec.TotalGM")]),
    #                 ])

    ### Now define where the final organized outputs should go.
    DataSink = pe.Node(
        nio.DataSink(),
        name="CleanedDenoisedSegmentation_DS_" + str(subjectid) + "_" + str(sessionid),
    )
    DataSink.overwrite = master_config["ds_overwrite"]
    DataSink.inputs.base_directory = master_config["resultdir"]
    # DataSink.inputs.regexp_substitutions = generate_output_patern(projectid, subjectid, sessionid,'BRAINSCut')
    # DataSink.inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid)
    # Substitutions run in list order.  The last three entries together
    # normalize every filename to end in exactly one "_seg.nii.gz":
    #   "_seg.nii.gz" -> "_seg.nii.gz" (no-op placeholder),
    #   ".nii.gz"     -> "_seg.nii.gz" (also turns "x_seg.nii.gz" into "x_seg_seg.nii.gz"),
    #   "_seg_seg"    -> "_seg"        (collapses the doubled suffix back).
    DataSink.inputs.substitutions = [
        (
            "Segmentations",
            os.path.join(
                projectid, subjectid, sessionid, "CleanedDenoisedRFSegmentations"
            ),
        ),
        ("subjectANNLabel_", ""),
        ("ANNContinuousPrediction", ""),
        ("subject.nii.gz", ".nii.gz"),
        ("_seg.nii.gz", "_seg.nii.gz"),
        (".nii.gz", "_seg.nii.gz"),
        ("_seg_seg", "_seg"),
    ]
    # Sink every per-structure binary mask plus the label image and CSV.
    baw200.connect(
        [
            (
                myLocalSegWF,
                DataSink,
                [
                    (
                        "outputspec.outputBinaryLeftCaudate",
                        "Segmentations.@LeftCaudate",
                    ),
                    (
                        "outputspec.outputBinaryRightCaudate",
                        "Segmentations.@RightCaudate",
                    ),
                    (
                        "outputspec.outputBinaryLeftHippocampus",
                        "Segmentations.@LeftHippocampus",
                    ),
                    (
                        "outputspec.outputBinaryRightHippocampus",
                        "Segmentations.@RightHippocampus",
                    ),
                    (
                        "outputspec.outputBinaryLeftPutamen",
                        "Segmentations.@LeftPutamen",
                    ),
                    (
                        "outputspec.outputBinaryRightPutamen",
                        "Segmentations.@RightPutamen",
                    ),
                    (
                        "outputspec.outputBinaryLeftThalamus",
                        "Segmentations.@LeftThalamus",
                    ),
                    (
                        "outputspec.outputBinaryRightThalamus",
                        "Segmentations.@RightThalamus",
                    ),
                    (
                        "outputspec.outputBinaryLeftAccumben",
                        "Segmentations.@LeftAccumben",
                    ),
                    (
                        "outputspec.outputBinaryRightAccumben",
                        "Segmentations.@RightAccumben",
                    ),
                    ("outputspec.outputBinaryLeftGlobus", "Segmentations.@LeftGlobus"),
                    (
                        "outputspec.outputBinaryRightGlobus",
                        "Segmentations.@RightGlobus",
                    ),
                    (
                        "outputspec.outputLabelImageName",
                        "Segmentations.@LabelImageName",
                    ),
                    ("outputspec.outputCSVFileName", "Segmentations.@CSVFileName"),
                ],
            ),
            # (myLocalSegWF, DataSink, [('outputspec.cleaned_labels', 'Segmentations.@cleaned_labels')])
        ]
    )

    # Merge the 12 binary structure masks for the QA snapshot (order matters:
    # it must line up with inputPlaneDirection/inputSliceToExtract... below).
    MergeStage2BinaryVolumesName = "99_MergeStage2BinaryVolumes_" + str(sessionid)
    MergeStage2BinaryVolumes = pe.Node(
        interface=Merge(12),
        run_without_submitting=True,
        name=MergeStage2BinaryVolumesName,
    )
    baw200.connect(
        [
            (
                myLocalSegWF,
                MergeStage2BinaryVolumes,
                [
                    ("outputspec.outputBinaryLeftAccumben", "in1"),
                    ("outputspec.outputBinaryLeftCaudate", "in2"),
                    ("outputspec.outputBinaryLeftPutamen", "in3"),
                    ("outputspec.outputBinaryLeftGlobus", "in4"),
                    ("outputspec.outputBinaryLeftThalamus", "in5"),
                    ("outputspec.outputBinaryLeftHippocampus", "in6"),
                    ("outputspec.outputBinaryRightAccumben", "in7"),
                    ("outputspec.outputBinaryRightCaudate", "in8"),
                    ("outputspec.outputBinaryRightPutamen", "in9"),
                    ("outputspec.outputBinaryRightGlobus", "in10"),
                    ("outputspec.outputBinaryRightThalamus", "in11"),
                    ("outputspec.outputBinaryRightHippocampus", "in12"),
                ],
            )
        ]
    )

    ## SnapShotWriter for Segmented result checking:
    SnapShotWriterNodeName = "SnapShotWriter_" + str(sessionid)
    SnapShotWriter = pe.Node(
        interface=BRAINSSnapShotWriter(), name=SnapShotWriterNodeName
    )
    SnapShotWriter.inputs.outputFilename = (
        "snapShot" + str(sessionid) + ".png"
    )  # output specification
    # 7 plane directions paired 1:1 with 7 physical-point slice locations.
    SnapShotWriter.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0]
    SnapShotWriter.inputs.inputSliceToExtractInPhysicalPoint = [
        -3,
        -7,
        -3,
        5,
        7,
        22,
        -22,
    ]

    baw200.connect(
        [
            (MergeStage2AverageImages, SnapShotWriter, [("out", "inputVolumes")]),
            (MergeStage2BinaryVolumes, SnapShotWriter, [("out", "inputBinaryVolumes")]),
            (
                SnapShotWriter,
                DataSink,
                [("outputFilename", "Segmentations.@outputSnapShot")],
            ),
        ]
    )

    # currentAntsLabelWarpToSubject = 'AntsLabelWarpToSubject' + str(subjectid) + "_" + str(sessionid)
    # AntsLabelWarpToSubject = pe.Node(interface=ants.ApplyTransforms(), name=currentAntsLabelWarpToSubject)
    #
    # AntsLabelWarpToSubject.inputs.num_threads = -1
    # AntsLabelWarpToSubject.inputs.dimension = 3
    # AntsLabelWarpToSubject.inputs.output_image = 'warped_hncma_atlas_seg.nii.gz'
    # AntsLabelWarpToSubject.inputs.interpolation = "MultiLabel"
    #
    # baw200.connect([(A2SantsRegistrationPostABCSyN, AntsLabelWarpToSubject, [('composite_transform', 'transforms')]),
    #                 (inputsSpec, AntsLabelWarpToSubject, [('t1_average', 'reference_image'),
    #                                                       ('hncma_atlas', 'input_image')])
    #                 ])
    # #####
    # ### Now define where the final organized outputs should go.
    # AntsLabelWarpedToSubject_DSName = "AntsLabelWarpedToSubject_DS_" + str(sessionid)
    # AntsLabelWarpedToSubject_DS = pe.Node(nio.DataSink(), name=AntsLabelWarpedToSubject_DSName)
    # AntsLabelWarpedToSubject_DS.overwrite = master_config['ds_overwrite']
    # AntsLabelWarpedToSubject_DS.inputs.base_directory = master_config['resultdir']
    # AntsLabelWarpedToSubject_DS.inputs.substitutions = [('AntsLabelWarpedToSubject', os.path.join(projectid, subjectid, sessionid, 'AntsLabelWarpedToSubject'))]
    #
    # baw200.connect([(AntsLabelWarpToSubject, AntsLabelWarpedToSubject_DS, [('output_image', 'AntsLabelWarpedToSubject')])])

    # Collect everything (masks + posteriors + averages) that will be warped
    # back to atlas space; file_count is 14 (T1-only) or 15 (with T2), set above.
    MergeSessionSubjectToAtlasName = "99_MergeSessionSubjectToAtlas_" + str(sessionid)
    MergeSessionSubjectToAtlas = pe.Node(
        interface=Merge(file_count),
        run_without_submitting=True,
        name=MergeSessionSubjectToAtlasName,
    )
    baw200.connect(
        [
            (
                myLocalSegWF,
                MergeSessionSubjectToAtlas,
                [
                    ("outputspec.outputBinaryLeftAccumben", "in1"),
                    ("outputspec.outputBinaryLeftCaudate", "in2"),
                    ("outputspec.outputBinaryLeftPutamen", "in3"),
                    ("outputspec.outputBinaryLeftGlobus", "in4"),
                    ("outputspec.outputBinaryLeftThalamus", "in5"),
                    ("outputspec.outputBinaryLeftHippocampus", "in6"),
                    ("outputspec.outputBinaryRightAccumben", "in7"),
                    ("outputspec.outputBinaryRightCaudate", "in8"),
                    ("outputspec.outputBinaryRightPutamen", "in9"),
                    ("outputspec.outputBinaryRightGlobus", "in10"),
                    ("outputspec.outputBinaryRightThalamus", "in11"),
                    ("outputspec.outputBinaryRightHippocampus", "in12"),
                ],
            ),
            # (FixWMPartitioningNode, MergeSessionSubjectToAtlas, [('UpdatedPosteriorsList', 'in13')]),
            (
                inputsSpec,
                MergeSessionSubjectToAtlas,
                [("UpdatedPosteriorsList", "in13")],
            ),
            (inputsSpec, MergeSessionSubjectToAtlas, [("t1_average", "in14")]),
        ]
    )

    if not onlyT1:
        assert file_count == 15
        baw200.connect(
            [(inputsSpec, MergeSessionSubjectToAtlas, [("t2_average", "in15")])]
        )

    # Warp intensity-like images to atlas space with linear interpolation,
    # driven by the inverse of the post-ABC SyN transform.
    LinearSubjectToAtlasANTsApplyTransformsName = (
        "LinearSubjectToAtlasANTsApplyTransforms_" + str(sessionid)
    )
    LinearSubjectToAtlasANTsApplyTransforms = pe.MapNode(
        interface=ants.ApplyTransforms(),
        iterfield=["input_image"],
        name=LinearSubjectToAtlasANTsApplyTransformsName,
    )
    LinearSubjectToAtlasANTsApplyTransforms.inputs.num_threads = -1
    LinearSubjectToAtlasANTsApplyTransforms.inputs.interpolation = "Linear"

    baw200.connect(
        [
            (
                A2SantsRegistrationPostABCSyN,
                LinearSubjectToAtlasANTsApplyTransforms,
                [("inverse_composite_transform", "transforms")],
            ),
            (
                inputsSpec,
                LinearSubjectToAtlasANTsApplyTransforms,
                [("template_t1", "reference_image")],
            ),
            (
                MergeSessionSubjectToAtlas,
                LinearSubjectToAtlasANTsApplyTransforms,
                [("out", "input_image")],
            ),
        ]
    )

    # Label maps get their own merge + MultiLabel-interpolated warp.
    MergeMultiLabelSessionSubjectToAtlasName = (
        "99_MergeMultiLabelSessionSubjectToAtlas_" + str(sessionid)
    )
    MergeMultiLabelSessionSubjectToAtlas = pe.Node(
        interface=Merge(2),
        run_without_submitting=True,
        name=MergeMultiLabelSessionSubjectToAtlasName,
    )
    baw200.connect(
        [
            (
                inputsSpec,
                MergeMultiLabelSessionSubjectToAtlas,
                [("inputLabels", "in1"), ("inputHeadLabels", "in2")],
            )
        ]
    )

    ### This is taking this sessions RF label map back into NAC atlas space.
    # {
    MultiLabelSubjectToAtlasANTsApplyTransformsName = (
        "MultiLabelSubjectToAtlasANTsApplyTransforms_" + str(sessionid) + "_map"
    )
    MultiLabelSubjectToAtlasANTsApplyTransforms = pe.MapNode(
        interface=ants.ApplyTransforms(),
        iterfield=["input_image"],
        name=MultiLabelSubjectToAtlasANTsApplyTransformsName,
    )
    MultiLabelSubjectToAtlasANTsApplyTransforms.inputs.num_threads = -1
    MultiLabelSubjectToAtlasANTsApplyTransforms.inputs.interpolation = "MultiLabel"

    baw200.connect(
        [
            (
                A2SantsRegistrationPostABCSyN,
                MultiLabelSubjectToAtlasANTsApplyTransforms,
                [("inverse_composite_transform", "transforms")],
            ),
            (
                inputsSpec,
                MultiLabelSubjectToAtlasANTsApplyTransforms,
                [("template_t1", "reference_image")],
            ),
            (
                MergeMultiLabelSessionSubjectToAtlas,
                MultiLabelSubjectToAtlasANTsApplyTransforms,
                [("out", "input_image")],
            ),
        ]
    )
    # }
    ### Now we must take the sessions to THIS SUBJECTS personalized atlas.
    # {
    # }

    ### Now define where the final organized outputs should go.
    Subj2Atlas_DSName = "SubjectToAtlas_DS_" + str(sessionid)
    Subj2Atlas_DS = pe.Node(nio.DataSink(), name=Subj2Atlas_DSName)
    Subj2Atlas_DS.overwrite = master_config["ds_overwrite"]
    Subj2Atlas_DS.inputs.base_directory = master_config["resultdir"]
    # Strip the MapNode iteration suffix from paths, replacing with session id.
    Subj2Atlas_DS.inputs.regexp_substitutions = [
        (r"_LinearSubjectToAtlasANTsApplyTransforms_[^/]*", r"" + sessionid + "/")
    ]
    baw200.connect(
        [
            (
                LinearSubjectToAtlasANTsApplyTransforms,
                Subj2Atlas_DS,
                [("output_image", "SubjectToAtlasWarped.@linear_output_images")],
            )
        ]
    )

    Subj2AtlasTransforms_DSName = "SubjectToAtlasTransforms_DS_" + str(sessionid)
    Subj2AtlasTransforms_DS = pe.Node(nio.DataSink(), name=Subj2AtlasTransforms_DSName)
    Subj2AtlasTransforms_DS.overwrite = master_config["ds_overwrite"]
    Subj2AtlasTransforms_DS.inputs.base_directory = master_config["resultdir"]
    Subj2AtlasTransforms_DS.inputs.regexp_substitutions = [
        (r"SubjectToAtlasWarped", r"SubjectToAtlasWarped/" + sessionid + "/")
    ]
    baw200.connect(
        [
            (
                A2SantsRegistrationPostABCSyN,
                Subj2AtlasTransforms_DS,
                [
                    (
                        "composite_transform",
                        "SubjectToAtlasWarped.@composite_transform",
                    ),
                    (
                        "inverse_composite_transform",
                        "SubjectToAtlasWarped.@inverse_composite_transform",
                    ),
                ],
            )
        ]
    )
    # baw200.connect([(MultiLabelSubjectToAtlasANTsApplyTransforms, Subj2Atlas_DS, [('output_image', 'SubjectToAtlasWarped.@multilabel_output_images')])])

    if master_config["plugin_name"].startswith(
        "SGE"
    ):  # for some nodes, the qsub call needs to be modified on the cluster
        A2SantsRegistrationPostABCSyN.plugin_args = {
            "template": master_config["plugin_args"]["template"],
            "overwrite": True,
            "qsub_args": modify_qsub_args(master_config["queue"], 8, 8, 24),
        }
        SnapShotWriter.plugin_args = {
            "template": master_config["plugin_args"]["template"],
            "overwrite": True,
            "qsub_args": modify_qsub_args(master_config["queue"], 1, 1, 1),
        }
        LinearSubjectToAtlasANTsApplyTransforms.plugin_args = {
            "template": master_config["plugin_args"]["template"],
            "overwrite": True,
            "qsub_args": modify_qsub_args(master_config["queue"], 1, 1, 1),
        }
        MultiLabelSubjectToAtlasANTsApplyTransforms.plugin_args = {
            "template": master_config["plugin_args"]["template"],
            "overwrite": True,
            "qsub_args": modify_qsub_args(master_config["queue"], 1, 1, 1),
        }

    return baw200
def CreateMALFWorkflow(WFname, onlyT1, master_config,BASE_DATA_GRABBER_DIR=None, runFixFusionLabelMap=True): from nipype.interfaces import ants if onlyT1: n_modality = 1 else: n_modality = 2 CLUSTER_QUEUE=master_config['queue'] CLUSTER_QUEUE_LONG=master_config['long_q'] MALFWF = pe.Workflow(name=WFname) inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image', #Desired image to create label map for 'subj_t2_image', #Desired image to create label map for 'subj_lmks', #The landmarks corresponding to t1_image 'subj_fixed_head_labels', #The fixed head labels from BABC 'subj_left_hemisphere', #The warped left hemisphere mask 'atlasWeightFilename', #The static weights file name 'labelBaseFilename' #Atlas label base name ex) neuro_lbls.nii.gz ]), run_without_submitting=True, name='inputspec') outputsSpec = pe.Node(interface=IdentityInterface(fields=['MALF_HDAtlas20_2015_label', 'MALF_HDAtlas20_2015_CSFVBInjected_label', 'MALF_HDAtlas20_2015_fs_standard_label', 'MALF_HDAtlas20_2015_lobar_label', 'MALF_extended_snapshot']), run_without_submitting=True, name='outputspec') BLICreator = dict() A2SantsRegistrationPreMALF_SyN = dict() fixedROIAuto = dict() movingROIAuto = dict() labelMapResample = dict() NewlabelMapResample = dict() malf_atlas_mergeindex = 0 merge_input_offset = 1 #Merge nodes are indexed from 1, not zero! 
""" multimodal ants registration if t2 exists """ sessionMakeMultimodalInput = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2'], output_names=['outFNs']), run_without_submitting=True, name="sessionMakeMultimodalInput") MALFWF.connect(inputsSpec, 'subj_t1_image', sessionMakeMultimodalInput, 'inFN1') if not onlyT1: MALFWF.connect(inputsSpec, 'subj_t2_image', sessionMakeMultimodalInput, 'inFN2') else: pass #print('malf_atlas_db_base') #print(master_config['malf_atlas_db_base']) malfAtlasDict = readMalfAtlasDbBase( master_config['malf_atlas_db_base'] ) number_of_atlas_sources = len(malfAtlasDict) malfAtlases = dict() atlasMakeMultimodalInput = dict() t2Resample = dict() warpedAtlasLblMergeNode = pe.Node(interface=Merge(number_of_atlas_sources),name="LblMergeAtlas") NewwarpedAtlasLblMergeNode = pe.Node(interface=Merge(number_of_atlas_sources),name="fswmLblMergeAtlas") warpedAtlasesMergeNode = pe.Node(interface=Merge(number_of_atlas_sources*n_modality),name="MergeAtlases") for malf_atlas_subject in list(malfAtlasDict.keys()): ## Need DataGrabber Here For the Atlas malfAtlases[malf_atlas_subject] = pe.Node(interface = IdentityInterface( fields=['t1', 't2', 'label', 'lmks']), name='malfAtlasInput'+malf_atlas_subject) malfAtlases[malf_atlas_subject].inputs.t1 = malfAtlasDict[malf_atlas_subject]['t1'] malfAtlases[malf_atlas_subject].inputs.t2 = malfAtlasDict[malf_atlas_subject]['t2'] malfAtlases[malf_atlas_subject].inputs.label = malfAtlasDict[malf_atlas_subject]['label'] malfAtlases[malf_atlas_subject].inputs.lmks = malfAtlasDict[malf_atlas_subject]['lmks'] ## Create BLI first ######################################################## # Run BLI atlas_to_subject ######################################################## BLICreator[malf_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI_"+malf_atlas_subject) BLICreator[malf_atlas_subject].inputs.outputTransformFilename = 
"landmarkInitializer_{0}_to_subject_transform.h5".format(malf_atlas_subject) MALFWF.connect(inputsSpec, 'atlasWeightFilename', BLICreator[malf_atlas_subject], 'inputWeightFilename') MALFWF.connect(malfAtlases[malf_atlas_subject], 'lmks', BLICreator[malf_atlas_subject], 'inputMovingLandmarkFilename') MALFWF.connect(inputsSpec, 'subj_lmks', BLICreator[malf_atlas_subject], 'inputFixedLandmarkFilename') ##### Initialize with ANTS Transform For SyN currentAtlasToSubjectantsRegistration = 'SyN_AtlasToSubjectANTsPreMALF_'+malf_atlas_subject A2SantsRegistrationPreMALF_SyN[malf_atlas_subject] = pe.Node(interface=ants.Registration(), name=currentAtlasToSubjectantsRegistration) many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG,4,2,16), 'overwrite': True} A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].plugin_args = many_cpu_ANTsSyN_options_dictionary A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.num_threads = -1 A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.dimension = 3 #### DEBUGGIN A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.transforms = ["Affine","Affine","SyN","SyN"] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.transform_parameters = [[0.1],[0.1],[0.1, 3, 0],[0.1, 3, 0]] if onlyT1: A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.metric = ['MI','MI','CC','CC'] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.metric_weight = [1.0,1.0,1.0,1.0] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sampling_percentage = [.5,.5,1.0,1.0] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.radius_or_number_of_bins = [32,32,4,4] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sampling_strategy = ['Regular','Regular',None,None] else: A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.metric = ['MI',['MI','MI'],'CC',['CC','CC']] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.metric_weight = [1.0,[1.0,1.0],1.0,[1.0,1.0]] 
A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sampling_percentage = [.5,[.5,0.5],1.0,[1.0,1.0]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.radius_or_number_of_bins = [32,[32,32],4,[4,4]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sampling_strategy = ['Regular',['Regular','Regular'],None,[None,None]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.number_of_iterations = [[1000,1000,500],[500,500],[500,500],[500,70]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.convergence_threshold = [1e-8,1e-6,1e-8,1e-6] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.convergence_window_size = [12] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.use_histogram_matching = [True,True,True,True] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.shrink_factors = [[8, 4, 2],[2, 1],[8, 4],[2, 1]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.smoothing_sigmas = [[3, 2, 1],[1, 0],[3, 2],[1, 0]] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.sigma_units = ["vox","vox","vox","vox"] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.use_estimate_learning_rate_once = [False,False,False,False] A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.write_composite_transform = True # Required for initialize_transforms_per_stage A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.collapse_output_transforms = False # Mutually Exclusive with initialize_transforms_per_stage A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.initialize_transforms_per_stage = True ## NO NEED FOR THIS A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.save_state = 'SavedInternalSyNState.h5' A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.output_transform_prefix = malf_atlas_subject+'_ToSubjectPreMALF_SyN' A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.winsorize_lower_quantile = 0.01 
A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.winsorize_upper_quantile = 0.99 A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.output_warped_image = malf_atlas_subject + '_2subject.nii.gz' ## NO NEED FOR THIS A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlas.nii.gz' A2SantsRegistrationPreMALF_SyN[malf_atlas_subject].inputs.float = True ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons UseRegistrationMasking = True if UseRegistrationMasking == True: from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto fixedROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(), name="fixedROIAUTOMask_"+malf_atlas_subject) fixedROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize=10 fixedROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz" movingROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(), name="movingROIAUTOMask_"+malf_atlas_subject) fixedROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize=10 movingROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz" MALFWF.connect(inputsSpec, 'subj_t1_image',fixedROIAuto[malf_atlas_subject],'inputVolume') MALFWF.connect(malfAtlases[malf_atlas_subject], 't1', movingROIAuto[malf_atlas_subject],'inputVolume') MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'fixed_image_mask') MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'moving_image_mask') MALFWF.connect(BLICreator[malf_atlas_subject],'outputTransformFilename', A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'initial_moving_transform') """ make multimodal input for atlases """ atlasMakeMultimodalInput[malf_atlas_subject] = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2'], 
output_names=['outFNs']), run_without_submitting=True, name="atlasMakeMultimodalInput"+malf_atlas_subject) MALFWF.connect(malfAtlases[malf_atlas_subject], 't1', atlasMakeMultimodalInput[malf_atlas_subject], 'inFN1') if not onlyT1: MALFWF.connect(malfAtlases[malf_atlas_subject], 't2', atlasMakeMultimodalInput[malf_atlas_subject], 'inFN2') else: pass MALFWF.connect(sessionMakeMultimodalInput, 'outFNs', A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'fixed_image') MALFWF.connect(atlasMakeMultimodalInput[malf_atlas_subject], 'outFNs', A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'moving_image') MALFWF.connect(A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'warped_image', warpedAtlasesMergeNode,'in'+str(merge_input_offset + malf_atlas_mergeindex*n_modality) ) """ Original t2 resampling """ for modality_index in range(1,n_modality): t2Resample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),name="resampledT2"+malf_atlas_subject) many_cpu_t2Resample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,1,1,1), 'overwrite': True} t2Resample[malf_atlas_subject].plugin_args = many_cpu_t2Resample_options_dictionary t2Resample[malf_atlas_subject].inputs.dimension=3 t2Resample[malf_atlas_subject].inputs.output_image=malf_atlas_subject+'_t2.nii.gz' t2Resample[malf_atlas_subject].inputs.interpolation='BSpline' t2Resample[malf_atlas_subject].inputs.default_value=0 t2Resample[malf_atlas_subject].inputs.invert_transform_flags=[False] MALFWF.connect( A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'composite_transform', t2Resample[malf_atlas_subject],'transforms') MALFWF.connect( inputsSpec, 'subj_t1_image', t2Resample[malf_atlas_subject],'reference_image') MALFWF.connect( malfAtlases[malf_atlas_subject], 't2', t2Resample[malf_atlas_subject],'input_image') MALFWF.connect(t2Resample[malf_atlas_subject],'output_image', warpedAtlasesMergeNode,'in'+str(merge_input_offset + malf_atlas_mergeindex*n_modality+modality_index) ) """ Original labelmap 
resampling """ labelMapResample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),name="resampledLabel"+malf_atlas_subject) many_cpu_labelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,1,1,1), 'overwrite': True} labelMapResample[malf_atlas_subject].plugin_args = many_cpu_labelMapResample_options_dictionary labelMapResample[malf_atlas_subject].inputs.dimension=3 labelMapResample[malf_atlas_subject].inputs.output_image=malf_atlas_subject+'_2_subj_lbl.nii.gz' labelMapResample[malf_atlas_subject].inputs.interpolation='MultiLabel' labelMapResample[malf_atlas_subject].inputs.default_value=0 labelMapResample[malf_atlas_subject].inputs.invert_transform_flags=[False] MALFWF.connect( A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'composite_transform', labelMapResample[malf_atlas_subject],'transforms') MALFWF.connect( inputsSpec, 'subj_t1_image', labelMapResample[malf_atlas_subject],'reference_image') MALFWF.connect( malfAtlases[malf_atlas_subject], 'label', labelMapResample[malf_atlas_subject],'input_image') MALFWF.connect(labelMapResample[malf_atlas_subject],'output_image',warpedAtlasLblMergeNode,'in'+str(merge_input_offset + malf_atlas_mergeindex) ) ### New labelmap resampling NewlabelMapResample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),name="FSWM_WLABEL_"+malf_atlas_subject) many_cpu_NewlabelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,1,1,1), 'overwrite': True} NewlabelMapResample[malf_atlas_subject].plugin_args = many_cpu_NewlabelMapResample_options_dictionary NewlabelMapResample[malf_atlas_subject].inputs.dimension=3 NewlabelMapResample[malf_atlas_subject].inputs.output_image=malf_atlas_subject+'fswm_2_subj_lbl.nii.gz' NewlabelMapResample[malf_atlas_subject].inputs.interpolation='MultiLabel' NewlabelMapResample[malf_atlas_subject].inputs.default_value=0 NewlabelMapResample[malf_atlas_subject].inputs.invert_transform_flags=[False] MALFWF.connect( 
A2SantsRegistrationPreMALF_SyN[malf_atlas_subject],'composite_transform', NewlabelMapResample[malf_atlas_subject],'transforms') MALFWF.connect( inputsSpec, 'subj_t1_image', NewlabelMapResample[malf_atlas_subject],'reference_image') MALFWF.connect( malfAtlases[malf_atlas_subject], 'label', NewlabelMapResample[malf_atlas_subject],'input_image') MALFWF.connect(NewlabelMapResample[malf_atlas_subject],'output_image',NewwarpedAtlasLblMergeNode,'in'+str(merge_input_offset + malf_atlas_mergeindex) ) malf_atlas_mergeindex += 1 ## Now work on cleaning up the label maps from .FixLabelMapsTools import FixLabelMapFromNeuromorphemetrics2012 from .FixLabelMapsTools import RecodeLabelMap ### Original NeuroMorphometrica merged fusion jointFusion = pe.Node(interface=ants.JointFusion(),name="JointFusion") many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,8,4,4), 'overwrite': True} jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary jointFusion.inputs.dimension=3 jointFusion.inputs.method='Joint[0.1,2]' jointFusion.inputs.output_label_image='MALF_HDAtlas20_2015_label.nii.gz' MALFWF.connect(warpedAtlasesMergeNode,'out',jointFusion,'warped_intensity_images') MALFWF.connect(warpedAtlasLblMergeNode,'out',jointFusion,'warped_label_images') #MALFWF.connect(inputsSpec, 'subj_t1_image',jointFusion,'target_image') MALFWF.connect(sessionMakeMultimodalInput, 'outFNs',jointFusion,'target_image') MALFWF.connect(jointFusion, 'output_label_image', outputsSpec,'MALF_HDAtlas20_2015_label') if onlyT1: jointFusion.inputs.modalities=1 else: jointFusion.inputs.modalities=2 ## We need to recode values to ensure that the labels match FreeSurer as close as possible by merging ## some labels together to standard FreeSurfer confenventions (i.e. 
for WMQL) RECODE_LABELS_2_Standard_FSWM = [ (15071,47),(15072,47),(15073,47),(15145,1011),(15157,1011),(15161,1011), (15179,1012),(15141,1014),(15151,1017),(15163,1018),(15165,1019),(15143,1027), (15191,1028),(15193,1028),(15185,1030),(15201,1030),(15175,1031),(15195,1031), (15173,1035),(15144,2011),(15156,2011),(15160,2011),(15178,2012),(15140,2014), (15150,2017),(15162,2018),(15164,2019),(15142,2027),(15190,2028),(15192,2028), (15184,2030),(15174,2031),(15194,2031),(15172,2035),(15200,2030)] ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE): RecodeToStandardFSWM = pe.Node(Function(function=RecodeLabelMap, input_names=['InputFileName','OutputFileName','RECODE_TABLE'], output_names=['OutputFileName']), name="RecodeToStandardFSWM") RecodeToStandardFSWM.inputs.RECODE_TABLE = RECODE_LABELS_2_Standard_FSWM RecodeToStandardFSWM.inputs.OutputFileName = 'MALF_HDAtlas20_2015_fs_standard_label.nii.gz' MALFWF.connect(RecodeToStandardFSWM,'OutputFileName',outputsSpec,'MALF_HDAtlas20_2015_fs_standard_label') ## MALF_SNAPSHOT_WRITER for Segmented result checking: # MALF_SNAPSHOT_WRITERNodeName = "MALF_ExtendedMALF_SNAPSHOT_WRITER" # MALF_SNAPSHOT_WRITER = pe.Node(interface=BRAINSSnapShotWriter(), name=MALF_SNAPSHOT_WRITERNodeName) # MALF_SNAPSHOT_WRITER.inputs.outputFilename = 'MALF_HDAtlas20_2015_CSFVBInjected_label.png' # output specification # MALF_SNAPSHOT_WRITER.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0] # MALF_SNAPSHOT_WRITER.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22] # MALFWF.connect(MALF_SNAPSHOT_WRITER,'outputFilename',outputsSpec,'MALF_extended_snapshot') if runFixFusionLabelMap: ## post processing of jointfusion injectSurfaceCSFandVBIntoLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012, input_names=['fusionFN', 'FixedHeadFN', 'LeftHemisphereFN', 'outFN', 'OUT_DICT'], output_names=['fixedFusionLabelFN']), name="injectSurfaceCSFandVBIntoLabelMap") injectSurfaceCSFandVBIntoLabelMap.inputs.outFN 
= 'MALF_HDAtlas20_2015_CSFVBInjected_label.nii.gz' FREESURFER_DICT = { 'BRAINSTEM': 16, 'RH_CSF':24, 'LH_CSF':24, 'BLOOD': 15000, 'UNKNOWN': 999, 'CONNECTED': [11,12,13,9,17,26,50,51,52,48,53,58] } injectSurfaceCSFandVBIntoLabelMap.inputs.OUT_DICT = FREESURFER_DICT MALFWF.connect(jointFusion, 'output_label_image', injectSurfaceCSFandVBIntoLabelMap, 'fusionFN') MALFWF.connect(inputsSpec, 'subj_fixed_head_labels', injectSurfaceCSFandVBIntoLabelMap, 'FixedHeadFN') MALFWF.connect(inputsSpec, 'subj_left_hemisphere', injectSurfaceCSFandVBIntoLabelMap, 'LeftHemisphereFN') MALFWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN', RecodeToStandardFSWM,'InputFileName') MALFWF.connect(injectSurfaceCSFandVBIntoLabelMap,'fixedFusionLabelFN', outputsSpec,'MALF_HDAtlas20_2015_CSFVBInjected_label') # MALFWF.connect([(inputsSpec, MALF_SNAPSHOT_WRITER, [( 'subj_t1_image','inputVolumes')]), # (injectSurfaceCSFandVBIntoLabelMap, MALF_SNAPSHOT_WRITER, # [('fixedFusionLabelFN', 'inputBinaryVolumes')]) # ]) else: MALFWF.connect(jointFusion, 'output_label_image', RecodeToStandardFSWM,'InputFileName') MALFWF.connect(jointFusion, 'output_label_image', outputsSpec,'MALF_HDAtlas20_2015_CSFVBInjected_label') # MALFWF.connect([(inputsSpec, MALF_SNAPSHOT_WRITER, [( 'subj_t1_image','inputVolumes')]), # (jointFusion, MALF_SNAPSHOT_WRITER, # [('output_label_image', 'inputBinaryVolumes')]) # ]) ## Lobar Pacellation by recoding if master_config['relabel2lobes_filename'] != None: #print("Generate relabeled version based on {0}".format(master_config['relabel2lobes_filename'])) RECODE_LABELS_2_LobarPacellation = readRecodingList( master_config['relabel2lobes_filename'] ) RecordToFSLobes = pe.Node(Function(function=RecodeLabelMap, input_names=['InputFileName','OutputFileName','RECODE_TABLE'], output_names=['OutputFileName']), name="RecordToFSLobes") RecordToFSLobes.inputs.RECODE_TABLE = RECODE_LABELS_2_LobarPacellation RecordToFSLobes.inputs.OutputFileName = 
'MALF_HDAtlas20_2015_lobar_label.nii.gz' MALFWF.connect(RecodeToStandardFSWM, 'OutputFileName',RecordToFSLobes,'InputFileName') MALFWF.connect(RecordToFSLobes,'OutputFileName',outputsSpec,'MALF_HDAtlas20_2015_lobar_label') return MALFWF
def CreateMALFWorkflow(WFname, master_config,good_subjects,BASE_DATA_GRABBER_DIR, runFixFusionLabelMap=True):
    """Build a Multi-Atlas Label Fusion (MALF) nipype workflow.

    For every atlas subject in ``good_subjects`` this workflow:
      1. grabs the atlas T1, label maps, and landmarks with a DataGrabber,
      2. registers the atlas to the session T1 (BRAINSLandmarkInitializer ->
         ANTs Affine -> two-stage ANTs SyN, optionally masked by BRAINSROIAuto),
      3. resamples the atlas label maps into the subject space (MultiLabel),
      4. merges all warped T1s / labels and fuses them with ants.JointFusion,
      5. cleans up and recodes the fused maps into extended, standard-FreeSurfer,
         and lobar label conventions, and writes a QA snapshot.

    Parameters
    ----------
    WFname : str
        Name of the returned nipype Workflow.
    master_config : dict
        Experiment configuration; only 'queue' and 'long_q' are read here
        (cluster submission arguments via modify_qsub_args).
    good_subjects : iterable of str
        Atlas subject IDs to fuse over; also sized the Merge nodes.
    BASE_DATA_GRABBER_DIR : str
        Base directory for the per-atlas DataGrabbers.
    runFixFusionLabelMap : bool
        When True (default), post-process the first JointFusion output with
        FixLabelMapFromNeuromorphemetrics2012 before exposing it as
        'MALF_neuro2012_labelmap'; otherwise expose the raw fusion.

    Returns
    -------
    pe.Workflow
        The assembled workflow; see the inputspec/outputspec fields below
        for its external interface.
    """
    from nipype.interfaces import ants
    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    MALFWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image',          # Desired image to create label map for
                                                             'subj_lmks',              # The landmarks corresponding to t1_image
                                                             'subj_fixed_head_labels', # The fixed head labels from BABC
                                                             'subj_left_hemisphere',   # The warped left hemisphere mask
                                                             'atlasWeightFilename',    # The static weights file name
                                                             'labelBaseFilename'       # Atlas label base name ex) neuro_lbls.nii.gz
                                                             ]),
                         run_without_submitting=True, name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['MALF_neuro2012_labelmap',
                                                              'MALF_fswm_extended_neuro2012_labelmap',
                                                              'MALF_fswm_standard_neuro2012_labelmap',
                                                              'MALF_fswm_lobar_neuro2012_labelmap',
                                                              'MALF_extended_snapshot']),
                          run_without_submitting=True, name='outputspec')

    # Per-atlas nodes live in dictionaries keyed by the atlas subject ID.
    BLICreator = dict()
    MALF_DG = dict()
    A2SantsRegistrationPreABCRigid = dict()
    A2SantsRegistrationPreABCSyN = dict()
    fixedROIAuto = dict()
    movingROIAuto = dict()
    labelMapResample = dict()
    NewlabelMapResample = dict()

    # One Merge slot per atlas; Merge inputs are 1-indexed ('in1', 'in2', ...).
    warpedAtlasT1MergeNode = pe.Node(interface=Merge(len(good_subjects)), name="T1sMergeAtlas")
    warpedAtlasLblMergeNode = pe.Node(interface=Merge(len(good_subjects)), name="LblMergeAtlas")
    NewwarpedAtlasLblMergeNode = pe.Node(interface=Merge(len(good_subjects)), name="fswmLblMergeAtlas")

    malf_atlas_mergeindex = 1
    for malf_atlas_subject in good_subjects:
        ## Need DataGrabber Here For the Atlas
        MALF_DG[malf_atlas_subject] = pe.Node(interface=nio.DataGrabber(infields=['subject', 'labelBaseFilename'],
                                                                        outfields=['malf_atlas_t1',
                                                                                   'malf_atlas_lbls',
                                                                                   'malf_fswm_atlas_lbls',
                                                                                   'malf_atlas_lmks'
                                                                                   ]),
                                              run_without_submitting=True, name='MALF_DG_' + malf_atlas_subject)
        #MALF_DG[malf_atlas_subject].inputs.base_directory = master_config['previousresult']
        MALF_DG[malf_atlas_subject].inputs.base_directory = BASE_DATA_GRABBER_DIR
        MALF_DG[malf_atlas_subject].inputs.subject = malf_atlas_subject
        MALFWF.connect(inputsSpec, 'labelBaseFilename', MALF_DG[malf_atlas_subject], 'labelBaseFilename')
        # NOTE(review): outfield 'malf_fswm_atlas_lbls' has no field_template /
        # template_args entry, so it falls back to the generic template '*' —
        # verify the intended fs_wmparcellation label path is supplied upstream.
        MALF_DG[malf_atlas_subject].inputs.field_template = {
            'malf_atlas_t1': '%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
            'malf_atlas_lbls': '%s/TissueClassify/%s',
            'malf_atlas_lmks': '%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
        }
        MALF_DG[malf_atlas_subject].inputs.template_args = {
            'malf_atlas_t1': [['subject']],
            'malf_atlas_lbls': [['subject', 'labelBaseFilename']],
            'malf_atlas_lmks': [['subject']],
        }
        MALF_DG[malf_atlas_subject].inputs.template = '*'
        MALF_DG[malf_atlas_subject].inputs.sort_filelist = True
        MALF_DG[malf_atlas_subject].inputs.raise_on_empty = True

        ## Create BLI first
        ########################################################
        # Run BLI atlas_to_subject
        ########################################################
        BLICreator[malf_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI_" + malf_atlas_subject)
        BLICreator[malf_atlas_subject].inputs.outputTransformFilename = "landmarkInitializer_{0}_to_subject_transform.h5".format(malf_atlas_subject)
        MALFWF.connect(inputsSpec, 'atlasWeightFilename', BLICreator[malf_atlas_subject], 'inputWeightFilename')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_lmks', BLICreator[malf_atlas_subject], 'inputMovingLandmarkFilename')
        MALFWF.connect(inputsSpec, 'subj_lmks', BLICreator[malf_atlas_subject], 'inputFixedLandmarkFilename')

        ##### Initialize with ANTS Transform For AffineComponentBABC
        currentAtlasToSubjectantsRigidRegistration = 'Rigid_AtlasToSubjectANTsPreABC_' + malf_atlas_subject
        A2SantsRegistrationPreABCRigid[malf_atlas_subject] = pe.Node(interface=ants.Registration(),
                                                                     name=currentAtlasToSubjectantsRigidRegistration)
        many_cpu_ANTsRigid_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 2, 1, 1), 'overwrite': True}
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].plugin_args = many_cpu_ANTsRigid_options_dictionary
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.num_threads = -1
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.dimension = 3
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.transforms = ["Affine", ]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.transform_parameters = [[0.1]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.metric = ['MI']
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sampling_strategy = ['Regular']
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sampling_percentage = [0.5]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.metric_weight = [1.0]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.radius_or_number_of_bins = [32]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.number_of_iterations = [[1000, 1000, 500, 100]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.convergence_threshold = [1e-8]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.convergence_window_size = [10]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.use_histogram_matching = [True]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.shrink_factors = [[8, 4, 2, 1]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.smoothing_sigmas = [[3, 2, 1, 0]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sigma_units = ["vox"]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.use_estimate_learning_rate_once = [False]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.initialize_transforms_per_stage = True
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_Rigid'
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.winsorize_lower_quantile = 0.01
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.winsorize_upper_quantile = 0.99
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.float = True
        ## NO NEED FOR THIS A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_warped_image
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_warped_image = 'atlas2subjectRigid.nii.gz'
        ## NO NEED FOR THIS A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_inverse_warped_image
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlasRigid.nii.gz'

        MALFWF.connect(BLICreator[malf_atlas_subject], 'outputTransformFilename',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'fixed_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'moving_image')

        ##### Initialize with ANTS Transform For SyN component BABC
        currentAtlasToSubjectantsRegistration = 'SyN_AtlasToSubjectANTsPreABC_' + malf_atlas_subject
        A2SantsRegistrationPreABCSyN[malf_atlas_subject] = pe.Node(interface=ants.Registration(),
                                                                   name=currentAtlasToSubjectantsRegistration)
        many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 16), 'overwrite': True}
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].plugin_args = many_cpu_ANTsSyN_options_dictionary
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.num_threads = -1
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.dimension = 3
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.transforms = ["SyN", "SyN"]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.transform_parameters = [[0.1, 3, 0], [0.1, 3, 0]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.metric = ['MI', 'MI']
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sampling_strategy = [None, None]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sampling_percentage = [1.0, 1.0]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.metric_weight = [1.0, 1.0]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.radius_or_number_of_bins = [32, 32]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.number_of_iterations = [[500, 500, 500, 500], [70]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.convergence_threshold = [1e-8, 1e-4]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.convergence_window_size = [12]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.use_histogram_matching = [True, True]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.shrink_factors = [[8, 4, 3, 2], [1]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.smoothing_sigmas = [[3, 2, 2, 1], [0]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sigma_units = ["vox", "vox"]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.use_estimate_learning_rate_once = [False, False]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.initialize_transforms_per_stage = True
        ## NO NEED FOR THIS A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.save_state
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.save_state = 'SavedInternalSyNState.h5'
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_transform_prefix = malf_atlas_subject + '_ToSubjectPreBABC_SyN'
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.winsorize_lower_quantile = 0.01
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.winsorize_upper_quantile = 0.99
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_warped_image = malf_atlas_subject + '_2subject.nii.gz'
        ## NO NEED FOR THIS A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_inverse_warped_image
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.float = True

        ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
        UseRegistrationMasking = True
        if UseRegistrationMasking:
            from SEMTools.segmentation.specialized import BRAINSROIAuto

            fixedROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(),
                                                       name="fixedROIAUTOMask_" + malf_atlas_subject)
            fixedROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize = 10
            fixedROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"

            movingROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(),
                                                        name="movingROIAUTOMask_" + malf_atlas_subject)
            # BUGFIX: the dilate size was previously set on fixedROIAuto twice
            # (copy/paste), leaving the moving mask at its default dilation.
            movingROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize = 10
            movingROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz"

            MALFWF.connect(inputsSpec, 'subj_t1_image', fixedROIAuto[malf_atlas_subject], 'inputVolume')
            MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1', movingROIAuto[malf_atlas_subject], 'inputVolume')
            MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'fixed_image_mask')
            MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'moving_image_mask')
            MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'fixed_image_mask')
            MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'moving_image_mask')

        # Chain the Affine composite transform in as the SyN initialization.
        MALFWF.connect(A2SantsRegistrationPreABCRigid[malf_atlas_subject],
                       ('composite_transform', getListIndexOrNoneIfOutOfRange, 0),
                       A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image', A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'fixed_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1', A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'moving_image')
        MALFWF.connect(A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'warped_image',
                       warpedAtlasT1MergeNode, 'in' + str(malf_atlas_mergeindex))

        ### Original labelmap resampling
        labelMapResample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),
                                                       name="WLABEL_" + malf_atlas_subject)
        many_cpu_labelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1), 'overwrite': True}
        labelMapResample[malf_atlas_subject].plugin_args = many_cpu_labelMapResample_options_dictionary
        labelMapResample[malf_atlas_subject].inputs.dimension = 3
        labelMapResample[malf_atlas_subject].inputs.output_image = malf_atlas_subject + '_2_subj_lbl.nii.gz'
        labelMapResample[malf_atlas_subject].inputs.interpolation = 'MultiLabel'  # preserve integer labels
        labelMapResample[malf_atlas_subject].inputs.default_value = 0
        labelMapResample[malf_atlas_subject].inputs.invert_transform_flags = [False]
        MALFWF.connect(A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'composite_transform',
                       labelMapResample[malf_atlas_subject], 'transforms')
        MALFWF.connect(inputsSpec, 'subj_t1_image', labelMapResample[malf_atlas_subject], 'reference_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_lbls', labelMapResample[malf_atlas_subject], 'input_image')
        MALFWF.connect(labelMapResample[malf_atlas_subject], 'output_image',
                       warpedAtlasLblMergeNode, 'in' + str(malf_atlas_mergeindex))

        ### New labelmap resampling (fs_wmparcellation_improved labels)
        NewlabelMapResample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),
                                                          name="FSWM_WLABEL_" + malf_atlas_subject)
        many_cpu_NewlabelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1), 'overwrite': True}
        NewlabelMapResample[malf_atlas_subject].plugin_args = many_cpu_NewlabelMapResample_options_dictionary
        NewlabelMapResample[malf_atlas_subject].inputs.dimension = 3
        NewlabelMapResample[malf_atlas_subject].inputs.output_image = malf_atlas_subject + 'fswm_2_subj_lbl.nii.gz'
        NewlabelMapResample[malf_atlas_subject].inputs.interpolation = 'MultiLabel'
        NewlabelMapResample[malf_atlas_subject].inputs.default_value = 0
        NewlabelMapResample[malf_atlas_subject].inputs.invert_transform_flags = [False]
        MALFWF.connect(A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'composite_transform',
                       NewlabelMapResample[malf_atlas_subject], 'transforms')
        MALFWF.connect(inputsSpec, 'subj_t1_image', NewlabelMapResample[malf_atlas_subject], 'reference_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_fswm_atlas_lbls', NewlabelMapResample[malf_atlas_subject], 'input_image')
        MALFWF.connect(NewlabelMapResample[malf_atlas_subject], 'output_image',
                       NewwarpedAtlasLblMergeNode, 'in' + str(malf_atlas_mergeindex))

        malf_atlas_mergeindex += 1

    ## Now work on cleaning up the label maps
    from FixLabelMapsTools import FixLabelMapFromNeuromorphemetrics2012
    from FixLabelMapsTools import RecodeLabelMap

    ### Original NeuroMorphometrica merged fusion
    jointFusion = pe.Node(interface=ants.JointFusion(), name="JointFusion")
    many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 4, 4), 'overwrite': True}
    jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary
    jointFusion.inputs.dimension = 3
    jointFusion.inputs.modalities = 1
    jointFusion.inputs.method = 'Joint[0.1,2]'
    jointFusion.inputs.output_label_image = 'fusion_neuro2012_20.nii.gz'
    # BUGFIX: these three connections previously appeared a second time after
    # the if/else below; duplicate connections to the same input raise
    # "already connected" in nipype, so they are made exactly once here.
    MALFWF.connect(warpedAtlasT1MergeNode, 'out', jointFusion, 'warped_intensity_images')
    MALFWF.connect(warpedAtlasLblMergeNode, 'out', jointFusion, 'warped_label_images')
    MALFWF.connect(inputsSpec, 'subj_t1_image', jointFusion, 'target_image')

    if runFixFusionLabelMap:
        # Post-process the fusion to repair known NeuroMorphometrics-2012 issues.
        fixFusionLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012,
                                             input_names=['fusionFN', 'FixedHeadFN', 'LeftHemisphereFN', 'outFN'],
                                             output_names=['fixedFusionLabelFN']),
                                    name="FixedFusionLabelmap")
        fixFusionLabelMap.inputs.outFN = 'neuro2012_20fusion_merge_seg.nii.gz'
        MALFWF.connect(jointFusion, 'output_label_image', fixFusionLabelMap, 'fusionFN')
        MALFWF.connect(inputsSpec, 'subj_fixed_head_labels', fixFusionLabelMap, 'FixedHeadFN')
        MALFWF.connect(inputsSpec, 'subj_left_hemisphere', fixFusionLabelMap, 'LeftHemisphereFN')
        MALFWF.connect(fixFusionLabelMap, 'fixedFusionLabelFN', outputsSpec, 'MALF_neuro2012_labelmap')
    else:
        MALFWF.connect(jointFusion, 'output_label_image', outputsSpec, 'MALF_neuro2012_labelmap')

    ## 2014-02-19 Updated fs_wmparcelation_improved malf
    newJointFusion = pe.Node(interface=ants.JointFusion(), name="FSWM_JointFusion")
    many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 4, 4), 'overwrite': True}
    newJointFusion.plugin_args = many_cpu_JointFusion_options_dictionary
    newJointFusion.inputs.dimension = 3
    newJointFusion.inputs.modalities = 1
    newJointFusion.inputs.method = 'Joint[0.1,2]'
    newJointFusion.inputs.output_label_image = 'fswm_neuro2012_20.nii.gz'
    MALFWF.connect(warpedAtlasT1MergeNode, 'out', newJointFusion, 'warped_intensity_images')
    MALFWF.connect(NewwarpedAtlasLblMergeNode, 'out', newJointFusion, 'warped_label_images')
    MALFWF.connect(inputsSpec, 'subj_t1_image', newJointFusion, 'target_image')

    FREESURFER_DICT = {'BRAINSTEM': 16, 'RH_CSF': 24, 'LH_CSF': 24, 'BLOOD': 15000, 'UNKNOWN': 999,
                       'CONNECTED': [11, 12, 13, 9, 17, 26, 50, 51, 52, 48, 53, 58]}
    injectSurfaceCSFandVBIntoLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012,
                                                         input_names=['fusionFN', 'FixedHeadFN', 'LeftHemisphereFN', 'outFN', 'OUT_DICT'],
                                                         output_names=['fixedFusionLabelFN']),
                                                name="injectSurfaceCSFandVBIntoLabelMap")
    injectSurfaceCSFandVBIntoLabelMap.inputs.outFN = 'fswm_neuro2012_20_merge_seg.nii.gz'
    injectSurfaceCSFandVBIntoLabelMap.inputs.OUT_DICT = FREESURFER_DICT
    MALFWF.connect(newJointFusion, 'output_label_image', injectSurfaceCSFandVBIntoLabelMap, 'fusionFN')
    MALFWF.connect(inputsSpec, 'subj_fixed_head_labels', injectSurfaceCSFandVBIntoLabelMap, 'FixedHeadFN')
    MALFWF.connect(inputsSpec, 'subj_left_hemisphere', injectSurfaceCSFandVBIntoLabelMap, 'LeftHemisphereFN')

    ## We need to recode values to ensure that there are no conflicts in the future
    RECODE_LABELS_2_Extended_FSWM = [
        (7071, 15071), (7072, 15072), (7073, 15073), (7145, 15145), (7157, 15157),
        (7161, 15161), (7179, 15179), (7141, 15141), (7151, 15151), (7163, 15163),
        (7165, 15165), (7143, 15143), (7191, 15191), (7193, 15193), (7185, 15185),
        (7201, 15201), (7175, 15175), (7195, 15195), (7173, 15173), (7144, 15144),
        (7156, 15156), (7160, 15160), (7178, 15178), (7140, 15140), (7150, 15150),
        (7162, 15162), (7164, 15164), (7142, 15142), (7190, 15190), (7192, 15192),
        (7184, 15184), (7174, 15174), (7194, 15194), (7172, 15172)]
    ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE):
    # NOTE: node name keeps the historical spelling "RecodeToExteneded" so that
    # cached working directories remain valid.
    RecodeToExtended = pe.Node(Function(function=RecodeLabelMap,
                                        input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'],
                                        output_names=['OutputFileName']),
                               name="RecodeToExteneded")
    RecodeToExtended.inputs.RECODE_TABLE = RECODE_LABELS_2_Extended_FSWM
    RecodeToExtended.inputs.OutputFileName = 'fswm_extended_neuro2012_20_merge_seg.nii.gz'
    MALFWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN', RecodeToExtended, 'InputFileName')

    ## We need to recode values to ensure that the labels match FreeSurer as close as possible by merging
    ## some labels together to standard FreeSurfer confenventions (i.e. for WMQL)
    RECODE_LABELS_2_Standard_FSWM = [
        (15071, 47), (15072, 47), (15073, 47), (15145, 1011), (15157, 1011), (15161, 1011),
        (15179, 1012), (15141, 1014), (15151, 1017), (15163, 1018), (15165, 1019), (15143, 1027),
        (15191, 1028), (15193, 1028), (15185, 1030), (15201, 1030), (15175, 1031), (15195, 1031),
        (15173, 1035), (15144, 2011), (15156, 2011), (15160, 2011), (15178, 2012), (15140, 2014),
        (15150, 2017), (15162, 2018), (15164, 2019), (15142, 2027), (15190, 2028), (15192, 2028),
        (15184, 2030), (15174, 2031), (15194, 2031), (15172, 2035)]
    ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE):
    RecodeToStandardFSWM = pe.Node(Function(function=RecodeLabelMap,
                                            input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'],
                                            output_names=['OutputFileName']),
                                   name="RecodeToStandardFSWM")
    RecodeToStandardFSWM.inputs.RECODE_TABLE = RECODE_LABELS_2_Standard_FSWM
    RecodeToStandardFSWM.inputs.OutputFileName = 'fswm_standard_neuro2012_20_merge_seg.nii.gz'
    MALFWF.connect(RecodeToExtended, 'OutputFileName', RecodeToStandardFSWM, 'InputFileName')

    MALFWF.connect(RecodeToExtended, 'OutputFileName', outputsSpec, 'MALF_fswm_extended_neuro2012_labelmap')
    MALFWF.connect(RecodeToStandardFSWM, 'OutputFileName', outputsSpec, 'MALF_fswm_standard_neuro2012_labelmap')

    ## MALF_SNAPSHOT_WRITER for Segmented result checking:
    MALF_SNAPSHOT_WRITERNodeName = "MALF_ExtendedMALF_SNAPSHOT_WRITER"
    MALF_SNAPSHOT_WRITER = pe.Node(interface=BRAINSSnapShotWriter(), name=MALF_SNAPSHOT_WRITERNodeName)
    MALF_SNAPSHOT_WRITER.inputs.outputFilename = 'fswm_extended_neuro2012_labelmap.png'  # output specification
    MALF_SNAPSHOT_WRITER.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0]
    MALF_SNAPSHOT_WRITER.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22]
    MALFWF.connect([(inputsSpec, MALF_SNAPSHOT_WRITER, [('subj_t1_image', 'inputVolumes')]),
                    (RecodeToExtended, MALF_SNAPSHOT_WRITER, [('OutputFileName', 'inputBinaryVolumes')])
                    ])
    MALFWF.connect(MALF_SNAPSHOT_WRITER, 'outputFilename', outputsSpec, 'MALF_extended_snapshot')

    ## Lobar Pacellation by recoding
    #### HACK:
    #### LAbel 2001 in FS standard is WRONG. It supposed to be, 2030,ctx-rh-superiortemporal in FS
    #### which is from 200 Right_STG_superior_temporal_gyrus in neuromorphometric.
    #### 20 Atlas has to be properly changed/considered
    RECODE_LABELS_2_LobarPacellation = [(4, 4), (5, 5), (7, 7), (8, 8), (10, 10),
        (11, 11), (12, 12), (13, 13), (14, 14), (15, 15), (16, 16), (17, 17), (18, 18), (24, 24), (26, 26), (28, 28),
        (30, 30), (31, 31), (43, 43), (44, 44), (46, 46), (47, 47), (49, 49), (50, 50), (51, 51), (52, 52), (53, 53),
        (54, 54), (58, 58), (60, 60), (62, 62), (63, 63), (85, 85),
        (251, 251), (252, 252), (253, 253), (254, 254), (255, 255), (1000, 1000),
        (1002, 1002), (1005, 1005), (1006, 1004), (1007, 1004), (1008, 1006), (1009, 1004), (1010, 1010), (1011, 1005), (1012, 1001),
        (1013, 1005), (1014, 1001), (1015, 1004), (1016, 1016), (1017, 1001), (1018, 1001), (1019, 1001), (1020, 1001), (1021, 1005),
        (1022, 1006), (1024, 1001), (1025, 1006), (1026, 1026), (1027, 1001), (1028, 1001), (1029, 1006), (1030, 1004), (1031, 1006),
        (1032, 1001), (1033, 1004), (1034, 1004), (1035, 1035), (1116, 1005), (1129, 1129), (2000, 2000), (2001, 1004), (2002, 2002),
        (2005, 2005), (2006, 2004), (2007, 2004), (2008, 2006), (2009, 2004), (2010, 2010), (2011, 2005), (2012, 2001), (2013, 2005),
        (2014, 2001), (2015, 2004), (2016, 2016), (2017, 2001), (2018, 2001), (2019, 2001), (2020, 2001), (2021, 2005), (2022, 2006),
        (2024, 2001), (2025, 2006), (2026, 2026), (2027, 2001), (2028, 2001), (2029, 2006), (2030, 2004), (2031, 2006), (2032, 2001),
        (2033, 2004), (2034, 2004), (2035, 2035), (2116, 2005), (2129, 2129), (3001, 3001), (3002, 3002), (3003, 3001), (3005, 3005),
        (3006, 3004), (3007, 3004), (3008, 3006), (3009, 3004), (3010, 3010), (3011, 3005), (3012, 3001), (3013, 3005), (3014, 3001),
        (3015, 3004), (3016, 3016), (3017, 3001), (3018, 3001), (3019, 3001), (3020, 3001), (3021, 3005), (3022, 3006), (3023, 3023),
        (3024, 3001), (3025, 3006), (3026, 3026), (3027, 3001), (3028, 3001), (3029, 3006), (3030, 3004), (3031, 3006), (3032, 3001),
        (3033, 3004), (3034, 3004), (3035, 3035), (4001, 4001), (4002, 4002), (4003, 4001), (4005, 4005), (4006, 4004), (4007, 4004),
        (4008, 4006), (4009, 4004), (4010, 4010), (4011, 4005), (4012, 4001), (4013, 4005), (4014, 4001), (4015, 4004), (4016, 4016),
        (4017, 4001), (4018, 4001), (4019, 4001), (4020, 4001), (4021, 4005), (4022, 4006), (4023, 4023), (4024, 4001), (4025, 4006),
        (4026, 4026), (4027, 4001), (4028, 4001), (4029, 4006), (4030, 4004), (4031, 4006), (4032, 4001), (4033, 4004), (4034, 4004),
        (4035, 4035), (5001, 5001), (5002, 5002)]
    RecordToFSLobes = pe.Node(Function(function=RecodeLabelMap,
                                       input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'],
                                       output_names=['OutputFileName']),
                              name="RecordToFSLobes")
    RecordToFSLobes.inputs.RECODE_TABLE = RECODE_LABELS_2_LobarPacellation
    RecordToFSLobes.inputs.OutputFileName = 'fswm_standard_neuro2012_20_lobar_seg.nii.gz'
    MALFWF.connect(RecodeToStandardFSWM, 'OutputFileName', RecordToFSLobes, 'InputFileName')
    MALFWF.connect(RecordToFSLobes, 'OutputFileName', outputsSpec, 'MALF_fswm_lobar_neuro2012_labelmap')

    return MALFWF
def segmentation(projectid, subjectid, sessionid, master_config, BAtlas, onlyT1=True, pipeline_name=''):
    """
    Build and return the per-session segmentation workflow ('baw200').

    The workflow:
      * clips the session T1 with the BABC brain labels,
      * registers the atlas to this session's T1 with ANTs (Affine + SyN),
      * runs the BRAINSCut sub-workflow to produce binary subcortical masks,
      * writes a QA snapshot of the segmentations (BRAINSSnapShotWriter),
      * warps the 'hncma-atlas' label map into subject space, and
      * transforms session images/labels back into atlas space,
    sinking all final products under master_config['resultdir'].

    :param projectid: project identifier (used in output paths)
    :param subjectid: subject identifier (used in node names and output paths)
    :param sessionid: session identifier (used in node names and output paths)
    :param master_config: experiment-wide configuration dict; this function
        reads 'execution', 'queue', 'long_q', 'ds_overwrite', 'resultdir',
        and (under SGE) 'plugin_args'
    :param BAtlas: atlas data-source node providing at least 'template_t1'
        and 'hncma-atlas' outputs
    :param onlyT1: when True only T1 is used (14 files warped back to atlas
        space); when False the T2 average participates as well (15 files)
    :param pipeline_name: name for the returned nipype Workflow
    :return: the fully wired nipype Workflow object
    """
    import os.path
    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces import ants
    from nipype.interfaces.utility import IdentityInterface, Function, Merge
    # Set universal pipeline options
    from nipype import config
    config.update_config(master_config)
    # Sanity check: the nipype global config must agree with the experiment config.
    assert config.get('execution', 'plugin') == master_config['execution']['plugin']

    from PipeLineFunctionHelpers import ClipT1ImageWithBrainMask
    from WorkupT1T2BRAINSCut import CreateBRAINSCutWorkflow
    from utilities.distributed import modify_qsub_args
    from SEMTools import BRAINSSnapShotWriter

    baw200 = pe.Workflow(name=pipeline_name)

    # HACK: print for debugging
    for key, itme in master_config.items():
        print "-" * 30
        print key, ":", itme
    print "-" * 30
    #END HACK

    # External inputs of this sub-workflow (fed by the caller's baseline results).
    inputsSpec = pe.Node(interface=IdentityInterface(fields=['t1_average', 't2_average', 'LMIatlasToSubject_tx',
                                                             'inputLabels', 'inputHeadLabels', 'posteriorImages',
                                                             'TissueClassifyatlasToSubjectInverseTransform',
                                                             'UpdatedPosteriorsList']),
                         run_without_submitting=True, name='inputspec')

    # outputsSpec = pe.Node(interface=IdentityInterface(fields=[...]),
    #                       run_without_submitting=True, name='outputspec')

    # Clip the T1 average with the BABC brain label mask (node name embeds
    # subject/session so cache directories stay unique per session).
    currentClipT1ImageWithBrainMaskName = 'ClipT1ImageWithBrainMask_' + str(subjectid) + "_" + str(sessionid)
    ClipT1ImageWithBrainMaskNode = pe.Node(interface=Function(function=ClipT1ImageWithBrainMask,
                                                              input_names=['t1_image', 'brain_labels',
                                                                           'clipped_file_name'],
                                                              output_names=['clipped_file']),
                                           name=currentClipT1ImageWithBrainMaskName)
    ClipT1ImageWithBrainMaskNode.inputs.clipped_file_name = 'clipped_from_BABC_labels_t1.nii.gz'

    baw200.connect([(inputsSpec, ClipT1ImageWithBrainMaskNode, [('t1_average', 't1_image'),
                                                                ('inputLabels', 'brain_labels')])])

    # Two-stage ANTs registration (Affine then SyN) taking the atlas template
    # into this session's T1 space; initialized with the landmark transform.
    currentAtlasToSubjectantsRegistration = 'AtlasToSubjectANTsRegistration_' + str(subjectid) + "_" + str(sessionid)
    AtlasToSubjectantsRegistration = pe.Node(interface=ants.Registration(),
                                             name=currentAtlasToSubjectantsRegistration)
    AtlasToSubjectantsRegistration.inputs.dimension = 3
    AtlasToSubjectantsRegistration.inputs.transforms = ["Affine", "SyN"]
    AtlasToSubjectantsRegistration.inputs.transform_parameters = [[0.1], [0.15, 3.0, 0.0]]
    AtlasToSubjectantsRegistration.inputs.metric = ['Mattes', 'CC']
    AtlasToSubjectantsRegistration.inputs.sampling_strategy = ['Regular', None]
    AtlasToSubjectantsRegistration.inputs.sampling_percentage = [1.0, 1.0]
    AtlasToSubjectantsRegistration.inputs.metric_weight = [1.0, 1.0]
    AtlasToSubjectantsRegistration.inputs.radius_or_number_of_bins = [32, 4]
    AtlasToSubjectantsRegistration.inputs.number_of_iterations = [[1000, 1000, 1000], [10000, 500, 500, 200]]
    AtlasToSubjectantsRegistration.inputs.convergence_threshold = [5e-7, 5e-7]
    AtlasToSubjectantsRegistration.inputs.convergence_window_size = [25, 25]
    AtlasToSubjectantsRegistration.inputs.use_histogram_matching = [True, True]
    AtlasToSubjectantsRegistration.inputs.shrink_factors = [[4, 2, 1], [5, 4, 2, 1]]
    AtlasToSubjectantsRegistration.inputs.smoothing_sigmas = [[4, 2, 0], [5, 4, 2, 0]]
    AtlasToSubjectantsRegistration.inputs.sigma_units = ["vox", "vox"]
    AtlasToSubjectantsRegistration.inputs.use_estimate_learning_rate_once = [False, False]
    # Composite transforms so downstream ApplyTransforms gets a single file.
    AtlasToSubjectantsRegistration.inputs.write_composite_transform = True
    AtlasToSubjectantsRegistration.inputs.collapse_output_transforms = True
    AtlasToSubjectantsRegistration.inputs.output_transform_prefix = 'AtlasToSubject_'
    AtlasToSubjectantsRegistration.inputs.winsorize_lower_quantile = 0.025
    AtlasToSubjectantsRegistration.inputs.winsorize_upper_quantile = 0.975
    AtlasToSubjectantsRegistration.inputs.collapse_linear_transforms_to_fixed_image_header = False
    AtlasToSubjectantsRegistration.inputs.output_warped_image = 'atlas2subject.nii.gz'
    AtlasToSubjectantsRegistration.inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'

    baw200.connect([(inputsSpec, AtlasToSubjectantsRegistration, [('LMIatlasToSubject_tx', 'initial_moving_transform'),
                                                                  ('t1_average', 'fixed_image')]),
                    (BAtlas, AtlasToSubjectantsRegistration, [('template_t1', 'moving_image')])])

    # BRAINSCut machine-learning segmentation of subcortical structures.
    myLocalSegWF = CreateBRAINSCutWorkflow(projectid, subjectid, sessionid, 'Segmentation',
                                           master_config['queue'], master_config['long_q'], BAtlas, onlyT1)

    # Merge of the average images (T1 and, optionally, T2) for the snapshot.
    MergeStage2AverageImagesName = "99_mergeAvergeStage2Images_" + str(sessionid)
    MergeStage2AverageImages = pe.Node(interface=Merge(2), run_without_submitting=True,
                                       name=MergeStage2AverageImagesName)

    baw200.connect([(inputsSpec, myLocalSegWF, [('t1_average', 'inputspec.T1Volume'),
                                                ('posteriorImages', "inputspec.posteriorDictionary"),
                                                ('inputLabels', 'inputspec.RegistrationROI'),
                                                ]),
                    (inputsSpec, MergeStage2AverageImages, [('t1_average', 'in1')]),
                    (AtlasToSubjectantsRegistration, myLocalSegWF,
                     [('composite_transform', 'inputspec.atlasToSubjectTransform')])])

    if not onlyT1:
        baw200.connect([(inputsSpec, myLocalSegWF, [('t2_average', 'inputspec.T2Volume')]),
                        (inputsSpec, MergeStage2AverageImages, [('t2_average', 'in2')])])
        file_count = 15  # Count of files to merge into MergeSessionSubjectToAtlas
    else:
        file_count = 14  # Count of files to merge into MergeSessionSubjectToAtlas

    ## NOTE: Element 0 of AccumulatePriorsList is the accumulated GM tissue
    # baw200.connect([(AccumulateLikeTissuePosteriorsNode, myLocalSegWF,
    #                  [(('AccumulatePriorsList', getListIndex, 0), "inputspec.TotalGM")]),
    #                 ])

    ### Now define where the final organized outputs should go.
    DataSink = pe.Node(nio.DataSink(), name="CleanedDenoisedSegmentation_DS_" + str(subjectid) + "_" + str(sessionid))
    DataSink.overwrite = master_config['ds_overwrite']
    DataSink.inputs.base_directory = master_config['resultdir']
    # DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'BRAINSCut')
    # DataSink.inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid)
    # Substitutions are applied in order; the last three rewrite file names so
    # every sunk image ends in exactly one '_seg.nii.gz' suffix.
    DataSink.inputs.substitutions = [('Segmentations', os.path.join(projectid, subjectid, sessionid,
                                                                    'CleanedDenoisedRFSegmentations')),
                                     ('subjectANNLabel_', ''),
                                     ('ANNContinuousPrediction', ''),
                                     ('subject.nii.gz', '.nii.gz'),
                                     ('_seg.nii.gz', '_seg.nii.gz'),
                                     ('.nii.gz', '_seg.nii.gz'),
                                     ('_seg_seg', '_seg')]

    baw200.connect([(myLocalSegWF, DataSink, [('outputspec.outputBinaryLeftCaudate', 'Segmentations.@LeftCaudate'),
                                              ('outputspec.outputBinaryRightCaudate', 'Segmentations.@RightCaudate'),
                                              ('outputspec.outputBinaryLeftHippocampus', 'Segmentations.@LeftHippocampus'),
                                              ('outputspec.outputBinaryRightHippocampus', 'Segmentations.@RightHippocampus'),
                                              ('outputspec.outputBinaryLeftPutamen', 'Segmentations.@LeftPutamen'),
                                              ('outputspec.outputBinaryRightPutamen', 'Segmentations.@RightPutamen'),
                                              ('outputspec.outputBinaryLeftThalamus', 'Segmentations.@LeftThalamus'),
                                              ('outputspec.outputBinaryRightThalamus', 'Segmentations.@RightThalamus'),
                                              ('outputspec.outputBinaryLeftAccumben', 'Segmentations.@LeftAccumben'),
                                              ('outputspec.outputBinaryRightAccumben', 'Segmentations.@RightAccumben'),
                                              ('outputspec.outputBinaryLeftGlobus', 'Segmentations.@LeftGlobus'),
                                              ('outputspec.outputBinaryRightGlobus', 'Segmentations.@RightGlobus'),
                                              ('outputspec.outputLabelImageName', 'Segmentations.@LabelImageName'),
                                              ('outputspec.outputCSVFileName', 'Segmentations.@CSVFileName')]),
                    # (myLocalSegWF, DataSink, [('outputspec.cleaned_labels', 'Segmentations.@cleaned_labels')])
                    ])

    # All twelve binary structure masks gathered for the snapshot overlay.
    MergeStage2BinaryVolumesName = "99_MergeStage2BinaryVolumes_" + str(sessionid)
    MergeStage2BinaryVolumes = pe.Node(interface=Merge(12), run_without_submitting=True,
                                       name=MergeStage2BinaryVolumesName)
    baw200.connect([(myLocalSegWF, MergeStage2BinaryVolumes, [('outputspec.outputBinaryLeftAccumben', 'in1'),
                                                              ('outputspec.outputBinaryLeftCaudate', 'in2'),
                                                              ('outputspec.outputBinaryLeftPutamen', 'in3'),
                                                              ('outputspec.outputBinaryLeftGlobus', 'in4'),
                                                              ('outputspec.outputBinaryLeftThalamus', 'in5'),
                                                              ('outputspec.outputBinaryLeftHippocampus', 'in6'),
                                                              ('outputspec.outputBinaryRightAccumben', 'in7'),
                                                              ('outputspec.outputBinaryRightCaudate', 'in8'),
                                                              ('outputspec.outputBinaryRightPutamen', 'in9'),
                                                              ('outputspec.outputBinaryRightGlobus', 'in10'),
                                                              ('outputspec.outputBinaryRightThalamus', 'in11'),
                                                              ('outputspec.outputBinaryRightHippocampus', 'in12')])])

    ## SnapShotWriter for Segmented result checking:
    SnapShotWriterNodeName = "SnapShotWriter_" + str(sessionid)
    SnapShotWriter = pe.Node(interface=BRAINSSnapShotWriter(), name=SnapShotWriterNodeName)
    SnapShotWriter.inputs.outputFilename = 'snapShot' + str(sessionid) + '.png'  # output specification
    SnapShotWriter.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0]
    SnapShotWriter.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22]

    baw200.connect([(MergeStage2AverageImages, SnapShotWriter, [('out', 'inputVolumes')]),
                    (MergeStage2BinaryVolumes, SnapShotWriter, [('out', 'inputBinaryVolumes')]),
                    (SnapShotWriter, DataSink, [('outputFilename', 'Segmentations.@outputSnapShot')])])

    # Warp the hncma atlas label map into this subject's space with the
    # composite (Affine+SyN) transform; MultiLabel keeps labels discrete.
    currentAntsLabelWarpToSubject = 'AntsLabelWarpToSubject' + str(subjectid) + "_" + str(sessionid)
    AntsLabelWarpToSubject = pe.Node(interface=ants.ApplyTransforms(), name=currentAntsLabelWarpToSubject)
    AntsLabelWarpToSubject.inputs.dimension = 3
    AntsLabelWarpToSubject.inputs.output_image = 'warped_hncma_atlas_seg.nii.gz'
    AntsLabelWarpToSubject.inputs.interpolation = "MultiLabel"

    baw200.connect([(AtlasToSubjectantsRegistration, AntsLabelWarpToSubject, [('composite_transform', 'transforms')]),
                    (inputsSpec, AntsLabelWarpToSubject, [('t1_average', 'reference_image')]),
                    (BAtlas, AntsLabelWarpToSubject, [('hncma-atlas', 'input_image')])])
    #####
    ### Now define where the final organized outputs should go.
    AntsLabelWarpedToSubject_DSName = "AntsLabelWarpedToSubject_DS_" + str(sessionid)
    AntsLabelWarpedToSubject_DS = pe.Node(nio.DataSink(), name=AntsLabelWarpedToSubject_DSName)
    AntsLabelWarpedToSubject_DS.overwrite = master_config['ds_overwrite']
    AntsLabelWarpedToSubject_DS.inputs.base_directory = master_config['resultdir']
    AntsLabelWarpedToSubject_DS.inputs.substitutions = [('AntsLabelWarpedToSubject',
                                                         os.path.join(projectid, subjectid, sessionid,
                                                                      'AntsLabelWarpedToSubject'))]

    baw200.connect([(AntsLabelWarpToSubject, AntsLabelWarpedToSubject_DS,
                     [('output_image', 'AntsLabelWarpedToSubject')])])

    # Gather every session-space image (12 masks + posteriors + averages) to
    # be taken back into atlas space; file_count is 14 (T1-only) or 15.
    MergeSessionSubjectToAtlasName = "99_MergeSessionSubjectToAtlas_" + str(sessionid)
    MergeSessionSubjectToAtlas = pe.Node(interface=Merge(file_count), run_without_submitting=True,
                                         name=MergeSessionSubjectToAtlasName)

    baw200.connect([(myLocalSegWF, MergeSessionSubjectToAtlas, [('outputspec.outputBinaryLeftAccumben', 'in1'),
                                                                ('outputspec.outputBinaryLeftCaudate', 'in2'),
                                                                ('outputspec.outputBinaryLeftPutamen', 'in3'),
                                                                ('outputspec.outputBinaryLeftGlobus', 'in4'),
                                                                ('outputspec.outputBinaryLeftThalamus', 'in5'),
                                                                ('outputspec.outputBinaryLeftHippocampus', 'in6'),
                                                                ('outputspec.outputBinaryRightAccumben', 'in7'),
                                                                ('outputspec.outputBinaryRightCaudate', 'in8'),
                                                                ('outputspec.outputBinaryRightPutamen', 'in9'),
                                                                ('outputspec.outputBinaryRightGlobus', 'in10'),
                                                                ('outputspec.outputBinaryRightThalamus', 'in11'),
                                                                ('outputspec.outputBinaryRightHippocampus', 'in12')]),
                    # (FixWMPartitioningNode, MergeSessionSubjectToAtlas, [('UpdatedPosteriorsList', 'in13')]),
                    (inputsSpec, MergeSessionSubjectToAtlas, [('UpdatedPosteriorsList', 'in13')]),
                    (inputsSpec, MergeSessionSubjectToAtlas, [('t1_average', 'in14')])
                    ])

    if not onlyT1:
        assert file_count == 15
        baw200.connect([(inputsSpec, MergeSessionSubjectToAtlas, [('t2_average', 'in15')])])

    # Resample the merged continuous images into atlas space with the inverse
    # composite transform (Linear interpolation for intensity images).
    LinearSubjectToAtlasANTsApplyTransformsName = 'LinearSubjectToAtlasANTsApplyTransforms_' + str(sessionid)
    LinearSubjectToAtlasANTsApplyTransforms = pe.MapNode(interface=ants.ApplyTransforms(),
                                                         iterfield=['input_image'],
                                                         name=LinearSubjectToAtlasANTsApplyTransformsName)
    LinearSubjectToAtlasANTsApplyTransforms.inputs.interpolation = 'Linear'

    baw200.connect([(AtlasToSubjectantsRegistration, LinearSubjectToAtlasANTsApplyTransforms,
                     [('inverse_composite_transform', 'transforms')]),
                    (BAtlas, LinearSubjectToAtlasANTsApplyTransforms, [('template_t1', 'reference_image')]),
                    (MergeSessionSubjectToAtlas, LinearSubjectToAtlasANTsApplyTransforms, [('out', 'input_image')])
                    ])

    MergeMultiLabelSessionSubjectToAtlasName = "99_MergeMultiLabelSessionSubjectToAtlas_" + str(sessionid)
    MergeMultiLabelSessionSubjectToAtlas = pe.Node(interface=Merge(2), run_without_submitting=True,
                                                   name=MergeMultiLabelSessionSubjectToAtlasName)

    baw200.connect([(inputsSpec, MergeMultiLabelSessionSubjectToAtlas, [('inputLabels', 'in1'),
                                                                        ('inputHeadLabels', 'in2')])])

    ### This is taking this sessions RF label map back into NAC atlas space.
    #{
    MultiLabelSubjectToAtlasANTsApplyTransformsName = 'MultiLabelSubjectToAtlasANTsApplyTransforms_' + str(sessionid)
    MultiLabelSubjectToAtlasANTsApplyTransforms = pe.MapNode(interface=ants.ApplyTransforms(),
                                                             iterfield=['input_image'],
                                                             name=MultiLabelSubjectToAtlasANTsApplyTransformsName)
    MultiLabelSubjectToAtlasANTsApplyTransforms.inputs.interpolation = 'MultiLabel'

    baw200.connect([(AtlasToSubjectantsRegistration, MultiLabelSubjectToAtlasANTsApplyTransforms,
                     [('inverse_composite_transform', 'transforms')]),
                    (BAtlas, MultiLabelSubjectToAtlasANTsApplyTransforms, [('template_t1', 'reference_image')]),
                    (MergeMultiLabelSessionSubjectToAtlas, MultiLabelSubjectToAtlasANTsApplyTransforms,
                     [('out', 'input_image')])
                    ])
    #}
    ### Now we must take the sessions to THIS SUBJECTS personalized atlas.
    #{
    #}

    ### Now define where the final organized outputs should go.
    Subj2Atlas_DSName = "SubjectToAtlas_DS_" + str(sessionid)
    Subj2Atlas_DS = pe.Node(nio.DataSink(), name=Subj2Atlas_DSName)
    Subj2Atlas_DS.overwrite = master_config['ds_overwrite']
    Subj2Atlas_DS.inputs.base_directory = master_config['resultdir']
    # Strip the MapNode iteration directory and replace it with the session id.
    Subj2Atlas_DS.inputs.regexp_substitutions = [(r'_LinearSubjectToAtlasANTsApplyTransforms_[^/]*',
                                                  r'' + sessionid + '/')]

    baw200.connect([(LinearSubjectToAtlasANTsApplyTransforms, Subj2Atlas_DS,
                     [('output_image', 'SubjectToAtlasWarped.@linear_output_images')])])

    Subj2AtlasTransforms_DSName = "SubjectToAtlasTransforms_DS_" + str(sessionid)
    Subj2AtlasTransforms_DS = pe.Node(nio.DataSink(), name=Subj2AtlasTransforms_DSName)
    Subj2AtlasTransforms_DS.overwrite = master_config['ds_overwrite']
    Subj2AtlasTransforms_DS.inputs.base_directory = master_config['resultdir']
    Subj2AtlasTransforms_DS.inputs.regexp_substitutions = [(r'SubjectToAtlasWarped',
                                                            r'SubjectToAtlasWarped/' + sessionid + '/')]

    baw200.connect([(AtlasToSubjectantsRegistration, Subj2AtlasTransforms_DS,
                     [('composite_transform', 'SubjectToAtlasWarped.@composite_transform'),
                      ('inverse_composite_transform', 'SubjectToAtlasWarped.@inverse_composite_transform')])])
    # baw200.connect([(MultiLabelSubjectToAtlasANTsApplyTransforms, Subj2Atlas_DS, [('output_image', 'SubjectToAtlasWarped.@multilabel_output_images')])])

    if master_config['execution']['plugin'] == 'SGE':  # for some nodes, the qsub call needs to be modified on the cluster
        AtlasToSubjectantsRegistration.plugin_args = {'template': master_config['plugin_args']['template'],
                                                      'overwrite': True,
                                                      'qsub_args': modify_qsub_args(master_config['queue'], '9000M',
                                                                                    4, hard=False)}
        SnapShotWriter.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                      'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1, 1, hard=False)}
        LinearSubjectToAtlasANTsApplyTransforms.plugin_args = {'template': master_config['plugin_args']['template'],
                                                               'overwrite': True,
                                                               'qsub_args': modify_qsub_args(master_config['queue'],
                                                                                             '1000M', 1, hard=True)}
        MultiLabelSubjectToAtlasANTsApplyTransforms.plugin_args = {'template': master_config['plugin_args']['template'],
                                                                   'overwrite': True,
                                                                   'qsub_args': modify_qsub_args(master_config['queue'],
                                                                                                 '1000M', 1, hard=True)}

    return baw200
def CreateTissueClassifyWorkflow(WFname, master_config, InterpolationMode, UseRegistrationMasking):
    """
    Build and return the tissue-classification sub-workflow.

    Pipeline: two pre-alignment ANTs registrations (Affine 'Rigid' stage, then
    a two-level SyN stage whose saved internal state seeds BRAINSABC), then
    BRAINSABCext multi-modal tissue classification producing average images,
    label maps, posteriors, and the atlas-to-subject transform.

    :param WFname: name for the returned nipype Workflow
    :param master_config: experiment configuration dict; 'queue' and 'long_q'
        are read for cluster submission arguments
    :param InterpolationMode: interpolation mode handed to BRAINSABCext
    :param UseRegistrationMasking: when truthy, BRAINSROIAuto brain masks are
        computed for both fixed (subject T1) and moving (atlas) images and
        attached to both pre-alignment registrations
    :return: the fully wired nipype Workflow object

    BUGFIX: the moving-image ROIAuto node's ROIAutoDilateSize was never set
    (fixedROIAuto's was assigned twice); the second assignment now targets
    movingROIAuto as clearly intended.
    """
    from nipype.interfaces import ants

    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    tissueClassifyWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['T1List', 'T2List', 'PDList', 'FLList',
                                                             'OtherList', 'T1_count', 'PrimaryT1',
                                                             'atlasDefinition',
                                                             'atlasToSubjectInitialTransform', 'atlasVolume']),
                         run_without_submitting=True,
                         name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasToSubjectTransform',
                                                              'atlasToSubjectInverseTransform',
                                                              'atlasToSubjectRegistrationState',
                                                              'outputLabels',
                                                              'outputHeadLabels',  # ???
                                                              #'t1_corrected', 't2_corrected',
                                                              't1_average',
                                                              't2_average',
                                                              'pd_average',
                                                              'fl_average',
                                                              'posteriorImages',
                                                              ]),
                          run_without_submitting=True,
                          name='outputspec')

    ########################################################
    # Run BABCext on Multi-modal images
    ########################################################
    makeOutImageList = pe.Node(Function(function=MakeOutFileList,
                                        input_names=['T1List', 'T2List', 'PDList', 'FLList',
                                                     'OtherList', 'postfix', 'PrimaryT1'],
                                        output_names=['inImageList', 'outImageList', 'imageTypeList']),
                               run_without_submitting=True, name="99_makeOutImageList")
    tissueClassifyWF.connect(inputsSpec, 'T1List', makeOutImageList, 'T1List')
    tissueClassifyWF.connect(inputsSpec, 'T2List', makeOutImageList, 'T2List')
    tissueClassifyWF.connect(inputsSpec, 'PDList', makeOutImageList, 'PDList')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', makeOutImageList, 'PrimaryT1')
    makeOutImageList.inputs.FLList = []  # an emptyList HACK
    makeOutImageList.inputs.postfix = "_corrected.nii.gz"
    # HACK tissueClassifyWF.connect( inputsSpec, 'FLList', makeOutImageList, 'FLList' )
    tissueClassifyWF.connect(inputsSpec, 'OtherList', makeOutImageList, 'OtherList')

    ##### Initialize with ANTS Transform For AffineComponentBABC
    # Stage 1: single Affine stage seeded by the landmark initial transform.
    currentAtlasToSubjectantsRigidRegistration = 'AtlasToSubjectANTsPreABC_Rigid'
    A2SantsRegistrationPreABCRigid = pe.Node(interface=ants.Registration(),
                                             name=currentAtlasToSubjectantsRigidRegistration)
    many_cpu_ANTsRigid_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 4, 2, 8),
                                             'overwrite': True}
    A2SantsRegistrationPreABCRigid.plugin_args = many_cpu_ANTsRigid_options_dictionary
    A2SantsRegistrationPreABCRigid.inputs.num_threads = -1
    A2SantsRegistrationPreABCRigid.inputs.dimension = 3
    A2SantsRegistrationPreABCRigid.inputs.transforms = ["Affine", ]
    A2SantsRegistrationPreABCRigid.inputs.transform_parameters = [[0.1]]
    A2SantsRegistrationPreABCRigid.inputs.metric = ['MI']
    A2SantsRegistrationPreABCRigid.inputs.sampling_strategy = ['Regular']
    A2SantsRegistrationPreABCRigid.inputs.sampling_percentage = [0.5]
    A2SantsRegistrationPreABCRigid.inputs.metric_weight = [1.0]
    A2SantsRegistrationPreABCRigid.inputs.radius_or_number_of_bins = [32]
    A2SantsRegistrationPreABCRigid.inputs.number_of_iterations = [[1000, 1000, 500, 100]]
    A2SantsRegistrationPreABCRigid.inputs.convergence_threshold = [1e-8]
    A2SantsRegistrationPreABCRigid.inputs.convergence_window_size = [10]
    A2SantsRegistrationPreABCRigid.inputs.use_histogram_matching = [True]
    A2SantsRegistrationPreABCRigid.inputs.shrink_factors = [[8, 4, 2, 1]]
    A2SantsRegistrationPreABCRigid.inputs.smoothing_sigmas = [[3, 2, 1, 0]]
    A2SantsRegistrationPreABCRigid.inputs.sigma_units = ["vox"]
    A2SantsRegistrationPreABCRigid.inputs.use_estimate_learning_rate_once = [False]
    A2SantsRegistrationPreABCRigid.inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
    A2SantsRegistrationPreABCRigid.inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
    A2SantsRegistrationPreABCRigid.inputs.initialize_transforms_per_stage = True
    A2SantsRegistrationPreABCRigid.inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_Rigid'
    A2SantsRegistrationPreABCRigid.inputs.winsorize_lower_quantile = 0.01
    A2SantsRegistrationPreABCRigid.inputs.winsorize_upper_quantile = 0.99
    A2SantsRegistrationPreABCRigid.inputs.output_warped_image = 'atlas2subjectRigid.nii.gz'
    A2SantsRegistrationPreABCRigid.inputs.output_inverse_warped_image = 'subject2atlasRigid.nii.gz'
    A2SantsRegistrationPreABCRigid.inputs.float = True

    tissueClassifyWF.connect(inputsSpec, 'atlasToSubjectInitialTransform',
                             A2SantsRegistrationPreABCRigid, 'initial_moving_transform')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', A2SantsRegistrationPreABCRigid, 'fixed_image')
    tissueClassifyWF.connect(inputsSpec, 'atlasVolume', A2SantsRegistrationPreABCRigid, 'moving_image')

    ##### Initialize with ANTS Transform For SyN component BABC
    # Stage 2: two SyN stages; save_state is fed into BABC's restoreState.
    currentAtlasToSubjectantsRegistration = 'AtlasToSubjectANTsPreABC_SyN'
    A2SantsRegistrationPreABCSyN = pe.Node(interface=ants.Registration(),
                                           name=currentAtlasToSubjectantsRegistration)
    many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 8, 8, 12),
                                           'overwrite': True}
    A2SantsRegistrationPreABCSyN.plugin_args = many_cpu_ANTsSyN_options_dictionary
    A2SantsRegistrationPreABCSyN.inputs.num_threads = -1
    A2SantsRegistrationPreABCSyN.inputs.dimension = 3
    A2SantsRegistrationPreABCSyN.inputs.transforms = ["SyN", "SyN"]
    A2SantsRegistrationPreABCSyN.inputs.transform_parameters = [[0.1, 3, 0], [0.1, 3, 0]]
    A2SantsRegistrationPreABCSyN.inputs.metric = ['CC', 'CC']
    A2SantsRegistrationPreABCSyN.inputs.sampling_strategy = [None, None]
    A2SantsRegistrationPreABCSyN.inputs.sampling_percentage = [1.0, 1.0]
    A2SantsRegistrationPreABCSyN.inputs.metric_weight = [1.0, 1.0]
    A2SantsRegistrationPreABCSyN.inputs.radius_or_number_of_bins = [4, 4]
    A2SantsRegistrationPreABCSyN.inputs.number_of_iterations = [[500, 500], [500, 70]]
    A2SantsRegistrationPreABCSyN.inputs.convergence_threshold = [1e-8, 1e-6]
    # NOTE(review): only one window size for two stages -- ANTs appears to
    # tolerate this, but [12, 12] would be symmetric with the other settings.
    A2SantsRegistrationPreABCSyN.inputs.convergence_window_size = [12]
    A2SantsRegistrationPreABCSyN.inputs.use_histogram_matching = [True, True]
    A2SantsRegistrationPreABCSyN.inputs.shrink_factors = [[8, 4], [2, 1]]
    A2SantsRegistrationPreABCSyN.inputs.smoothing_sigmas = [[3, 2], [1, 0]]
    A2SantsRegistrationPreABCSyN.inputs.sigma_units = ["vox", "vox"]
    A2SantsRegistrationPreABCSyN.inputs.use_estimate_learning_rate_once = [False, False]
    A2SantsRegistrationPreABCSyN.inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
    A2SantsRegistrationPreABCSyN.inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
    A2SantsRegistrationPreABCSyN.inputs.initialize_transforms_per_stage = True
    A2SantsRegistrationPreABCSyN.inputs.save_state = 'SavedInternalSyNState.h5'
    A2SantsRegistrationPreABCSyN.inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_SyN'
    A2SantsRegistrationPreABCSyN.inputs.winsorize_lower_quantile = 0.01
    A2SantsRegistrationPreABCSyN.inputs.winsorize_upper_quantile = 0.99
    A2SantsRegistrationPreABCSyN.inputs.output_warped_image = 'atlas2subject.nii.gz'
    A2SantsRegistrationPreABCSyN.inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'
    A2SantsRegistrationPreABCSyN.inputs.float = True

    ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
    if UseRegistrationMasking:
        from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto

        fixedROIAuto = pe.Node(interface=BRAINSROIAuto(), name="fixedImageROIAUTOMask")
        fixedROIAuto.inputs.ROIAutoDilateSize = 10
        fixedROIAuto.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"

        movingROIAuto = pe.Node(interface=BRAINSROIAuto(), name="movingImageROIAUTOMask")
        # BUGFIX: this previously re-assigned fixedROIAuto.inputs.ROIAutoDilateSize,
        # leaving the moving-image mask with the interface default dilation.
        movingROIAuto.inputs.ROIAutoDilateSize = 10
        movingROIAuto.inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz"

        tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', fixedROIAuto, 'inputVolume')
        tissueClassifyWF.connect(inputsSpec, 'atlasVolume', movingROIAuto, 'inputVolume')

        tissueClassifyWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCRigid, 'fixed_image_mask')
        tissueClassifyWF.connect(movingROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCRigid, 'moving_image_mask')

        tissueClassifyWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCSyN, 'fixed_image_mask')
        tissueClassifyWF.connect(movingROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCSyN, 'moving_image_mask')

    # Chain the stages: the Affine result initializes the SyN registration.
    tissueClassifyWF.connect(A2SantsRegistrationPreABCRigid, 'composite_transform',
                             A2SantsRegistrationPreABCSyN, 'initial_moving_transform')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', A2SantsRegistrationPreABCSyN, 'fixed_image')
    tissueClassifyWF.connect(inputsSpec, 'atlasVolume', A2SantsRegistrationPreABCSyN, 'moving_image')

    # BRAINSABCext: the actual multi-modal tissue classification.
    BABCext = pe.Node(interface=BRAINSABCext(), name="BABC")
    many_cpu_BABC_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 2, 4), 'overwrite': True}
    BABCext.plugin_args = many_cpu_BABC_options_dictionary
    tissueClassifyWF.connect(makeOutImageList, 'inImageList', BABCext, 'inputVolumes')
    tissueClassifyWF.connect(makeOutImageList, 'imageTypeList', BABCext, 'inputVolumeTypes')
    tissueClassifyWF.connect(makeOutImageList, 'outImageList', BABCext, 'outputVolumes')
    BABCext.inputs.debuglevel = 0
    BABCext.inputs.useKNN = True
    BABCext.inputs.maxIterations = 3
    BABCext.inputs.maxBiasDegree = 4
    BABCext.inputs.filterIteration = 3
    BABCext.inputs.filterMethod = 'GradientAnisotropicDiffusion'
    BABCext.inputs.atlasToSubjectTransformType = 'SyN'
    BABCext.inputs.gridSize = [10, 10, 10]
    BABCext.inputs.outputFormat = "NIFTI"
    BABCext.inputs.outputLabels = "brain_label_seg.nii.gz"
    BABCext.inputs.outputDirtyLabels = "volume_label_seg.nii.gz"
    BABCext.inputs.posteriorTemplate = "POSTERIOR_%s.nii.gz"
    BABCext.inputs.atlasToSubjectTransform = "atlas_to_subject.h5"
    # BABCext.inputs.implicitOutputs = ['t1_average_BRAINSABC.nii.gz', 't2_average_BRAINSABC.nii.gz']
    BABCext.inputs.interpolationMode = InterpolationMode
    BABCext.inputs.outputDir = './'
    BABCext.inputs.saveState = 'SavedBABCInternalSyNState.h5'

    tissueClassifyWF.connect(inputsSpec, 'atlasDefinition', BABCext, 'atlasDefinition')
    # NOTE: MUTUALLY EXCLUSIVE with restoreState
    #tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN,
    #                         'composite_transform',
    #                         BABCext, 'atlasToSubjectInitialTransform')
    tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN, 'save_state',
                             BABCext, 'restoreState')

    """
    Get the first T1 and T2 corrected images from BABCext
    """

    """ HACK:  THIS IS NOT NEEDED!  We should use the averged t1 and averaged t2 images instead!
    def get_first_T1_and_T2(in_files,T1_count):
        '''
        Returns the first T1 and T2 file in in_files, based on offset in T1_count.
        '''
        return in_files[0],in_files[T1_count]
    bfc_files = pe.Node(Function(input_names=['in_files','T1_count'],
                               output_names=['t1_corrected','t2_corrected'],
                               function=get_first_T1_and_T2), run_without_submitting=True, name='99_bfc_files' )
    tissueClassifyWF.connect( inputsSpec, 'T1_count', bfc_files, 'T1_count')
    tissueClassifyWF.connect(BABCext,'outputVolumes',bfc_files, 'in_files')


    tissueClassifyWF.connect(bfc_files,'t1_corrected',outputsSpec,'t1_corrected')
    tissueClassifyWF.connect(bfc_files,'t2_corrected',outputsSpec,'t2_corrected')
    #tissueClassifyWF.connect(bfc_files,'pd_corrected',outputsSpec,'pd_corrected')
    #tissueClassifyWF.connect(bfc_files,'fl_corrected',outputsSpec,'fl_corrected')

    """

    #############
    tissueClassifyWF.connect(BABCext, 'saveState', outputsSpec, 'atlasToSubjectRegistrationState')

    tissueClassifyWF.connect(BABCext, 'atlasToSubjectTransform', outputsSpec, 'atlasToSubjectTransform')

    def MakeInverseTransformFileName(TransformFileName):
        """### HACK:  This function is to work around a deficiency in BRAINSABCext where the
        inverse transform name is not being computed properly in the list outputs"""
        fixed_inverse_name = TransformFileName.replace(".h5", "_Inverse.h5")
        return [fixed_inverse_name]

    tissueClassifyWF.connect([(BABCext, outputsSpec,
                               [(('atlasToSubjectTransform', MakeInverseTransformFileName),
                                 "atlasToSubjectInverseTransform")]), ])
    tissueClassifyWF.connect(BABCext, 'outputLabels', outputsSpec, 'outputLabels')
    tissueClassifyWF.connect(BABCext, 'outputDirtyLabels', outputsSpec, 'outputHeadLabels')

    tissueClassifyWF.connect(BABCext, 'outputT1AverageImage', outputsSpec, 't1_average')
    tissueClassifyWF.connect(BABCext, 'outputT2AverageImage', outputsSpec, 't2_average')
    tissueClassifyWF.connect(BABCext, 'outputPDAverageImage', outputsSpec, 'pd_average')
    tissueClassifyWF.connect(BABCext, 'outputFLAverageImage', outputsSpec, 'fl_average')

    # Re-key the posterior image list into a {tissue_name: file} dictionary.
    MakePosteriorDictionaryNode = pe.Node(Function(function=MakePosteriorDictionaryFunc,
                                                   input_names=['posteriorImages'],
                                                   output_names=['posteriorDictionary']),
                                          run_without_submitting=True, name="99_makePosteriorDictionary")
    tissueClassifyWF.connect(BABCext, 'posteriorImages', MakePosteriorDictionaryNode, 'posteriorImages')

    tissueClassifyWF.connect(MakePosteriorDictionaryNode, 'posteriorDictionary', outputsSpec, 'posteriorImages')

    return tissueClassifyWF
def main(args):
    """
    Build and run the subject-specific atlas (template-building) workflow.

    Grabs each subject's baseline averages/labels/posteriors, averages the T1s
    for an initial template, runs two ANTs template-build iterations, creates
    an updated atlas definition, and sinks the atlas and per-subject outputs.

    :param args: a (subjects, master_config) pair; subjects is the iterable of
        subject ids to iterate over, master_config the experiment dict
    :return: the result of utils.run_workflow(...) for the assembled workflow
    """
    subjects, master_config = args

    import os
    import sys
    import traceback

    # Set universal pipeline options
    from nipype import config
    config.update_config(master_config)
    # Sanity check: the nipype global config must agree with the experiment config.
    assert config.get('execution', 'plugin') == master_config['execution']['plugin']

    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.ants as ants

    from template import MergeByExtendListElements, xml_filename
    from PipeLineFunctionHelpers import mapPosteriorList
    from atlasNode import GetAtlasNode, MakeNewAtlasTemplate
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.distributed import modify_qsub_args

    template = pe.Workflow(name='SubjectAtlas_Template')
    template.base_dir = master_config['logging']['log_directory']

    if 'previouscache' in master_config:
        # Running off previous baseline experiment
        BAtlas = GetAtlasNode(master_config['previouscache'], 'BAtlas')
    else:
        # Running after previous baseline experiment
        BAtlas = GetAtlasNode(os.path.dirname(master_config['atlascache']), 'BAtlas')

    # One iteration of the whole graph per subject id.
    inputspec = pe.Node(interface=IdentityInterface(fields=['subject']), name='inputspec')
    inputspec.iterables = ('subject', subjects)

    # Grab each subject's baseline results (averages, labels, posteriors)
    # from the previous experiment's result tree.
    baselineDG = pe.Node(nio.DataGrabber(infields=['subject'],
                                         outfields=['t1_average', 't2_average', 'pd_average', 'fl_average',
                                                    'outputLabels', 'posteriorImages']),
                         name='Baseline_DG')
    if 'previousresult' in master_config:
        baselineDG.inputs.base_directory = master_config['previousresult']
    else:
        baselineDG.inputs.base_directory = master_config['resultdir']
    baselineDG.inputs.sort_filelist = True
    # Missing modalities (e.g. no PD/FL for a subject) yield empty lists, not errors.
    baselineDG.inputs.raise_on_empty = False
    baselineDG.inputs.template = '*/%s/*/Baseline/%s.nii.gz'
    baselineDG.inputs.template_args['t1_average'] = [['subject', 't1_average_BRAINSABC']]
    baselineDG.inputs.template_args['t2_average'] = [['subject', 't2_average_BRAINSABC']]
    baselineDG.inputs.template_args['pd_average'] = [['subject', 'pd_average_BRAINSABC']]
    baselineDG.inputs.template_args['fl_average'] = [['subject', 'fl_average_BRAINSABC']]
    baselineDG.inputs.template_args['outputLabels'] = [['subject', 'brain_label_seg']]
    baselineDG.inputs.field_template = {'posteriorImages': '*/%s/*/TissueClassify/POSTERIOR_%s.nii.gz'}
    # Tissue posterior names produced by the TissueClassify stage.
    posterior_files = ['AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS', 'NOTCSF', 'NOTGM',
                       'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB', 'WM']
    baselineDG.inputs.template_args['posteriorImages'] = [['subject', posterior_files]]

    # Fold all grabbed images into the dictionary structures that the
    # template-build iterations expect.
    MergeByExtendListElementsNode = pe.Node(Function(function=MergeByExtendListElements,
                                                     input_names=['t1s', 't2s', 'pds', 'fls', 'labels', 'posteriors'],
                                                     output_names=['ListOfImagesDictionaries',
                                                                   'registrationImageTypes',
                                                                   'interpolationMapping']),
                                            run_without_submitting=True, name="99_MergeByExtendListElements")

    from PipeLineFunctionHelpers import WrapPosteriorImagesFromDictionaryFunction as wrapfunc
    template.connect([(inputspec, baselineDG, [('subject', 'subject')]),
                      (baselineDG, MergeByExtendListElementsNode, [('t1_average', 't1s'),
                                                                   ('t2_average', 't2s'),
                                                                   ('pd_average', 'pds'),
                                                                   ('fl_average', 'fls'),
                                                                   ('outputLabels', 'labels'),
                                                                   (('posteriorImages', wrapfunc), 'posteriors')])
                      ])

    # Simple normalized average of the subject T1s seeds iteration 1.
    myInitAvgWF = pe.Node(interface=ants.AverageImages(), name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
    myInitAvgWF.inputs.dimension = 3
    myInitAvgWF.inputs.normalize = True
    template.connect(baselineDG, 't1_average', myInitAvgWF, "images")
    ####################################################################################################
    # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
    # if numSessions == 1:
    #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
    ####################################################################################################
    from BAWantsRegistrationBuildTemplate import BAWantsRegistrationTemplateBuildSingleIterationWF as registrationWF
    buildTemplateIteration1 = registrationWF('iteration01')
    # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
    buildTemplateIteration2 = registrationWF('Iteration02')

    MakeNewAtlasTemplateNode = pe.Node(interface=Function(function=MakeNewAtlasTemplate,
                                                          input_names=['t1_image', 'deformed_list',
                                                                       'AtlasTemplate', 'outDefinition'],
                                                          output_names=['outAtlasFullPath', 'clean_deformed_list']),
                                       # This is a lot of work, so submit it run_without_submitting=True,
                                       run_without_submitting=True,  # HACK:  THIS NODE REALLY SHOULD RUN ON THE CLUSTER!
                                       name='99_MakeNewAtlasTemplate')

    if master_config['execution']['plugin'] == 'SGE':  # for some nodes, the qsub call needs to be modified on the cluster
        MakeNewAtlasTemplateNode.plugin_args = {'template': master_config['plugin_args']['template'],
                                                'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1, 1),
                                                'overwrite': True}
        for bt in [buildTemplateIteration1, buildTemplateIteration2]:
            ##################################################
            # *** Hans, is this TODO already addressed? *** #
            # ----> # TODO: Change these parameters  <---- #
            ##################################################
            BeginANTS = bt.get_node("BeginANTS")
            BeginANTS.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                     'qsub_args': modify_qsub_args(master_config['queue'], '9000M', 4, hard=False)}
            wimtdeformed = bt.get_node("wimtdeformed")
            wimtdeformed.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                        'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)}
            AvgAffineTransform = bt.get_node("AvgAffineTransform")
            AvgAffineTransform.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                              'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1)}
            wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
            wimtPassivedeformed.plugin_args = {'template': master_config['plugin_args']['template'],
                                               'overwrite': True,
                                               'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)}

    # Chain: initial average -> iteration 1 -> iteration 2 -> new atlas definition.
    template.connect([(myInitAvgWF, buildTemplateIteration1, [('output_average_image', 'inputspec.fixed_image')]),
                      (MergeByExtendListElementsNode, buildTemplateIteration1,
                       [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
                        ('registrationImageTypes', 'inputspec.registrationImageTypes'),
                        ('interpolationMapping', 'inputspec.interpolationMapping')]),
                      (buildTemplateIteration1, buildTemplateIteration2,
                       [('outputspec.template', 'inputspec.fixed_image')]),
                      (MergeByExtendListElementsNode, buildTemplateIteration2,
                       [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
                        ('registrationImageTypes', 'inputspec.registrationImageTypes'),
                        ('interpolationMapping', 'inputspec.interpolationMapping')]),
                      (inputspec, MakeNewAtlasTemplateNode, [(('subject', xml_filename), 'outDefinition')]),
                      (BAtlas, MakeNewAtlasTemplateNode, [('ExtendedAtlasDefinition_xml_in', 'AtlasTemplate')]),
                      (buildTemplateIteration2, MakeNewAtlasTemplateNode,
                       [('outputspec.template', 't1_image'),
                        ('outputspec.passive_deformed_templates', 'deformed_list')]),
                      ])

    # Create DataSinks
    Atlas_DataSink = pe.Node(nio.DataSink(), name="Atlas_DS")
    Atlas_DataSink.overwrite = master_config['ds_overwrite']
    Atlas_DataSink.inputs.base_directory = master_config['resultdir']

    Subject_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
    Subject_DataSink.overwrite = master_config['ds_overwrite']
    Subject_DataSink.inputs.base_directory = master_config['resultdir']

    template.connect([(inputspec, Atlas_DataSink, [('subject', 'container')]),
                      (buildTemplateIteration1, Atlas_DataSink,
                       [('outputspec.template', 'Atlas.iteration1')]),  # Unnecessary
                      (MakeNewAtlasTemplateNode, Atlas_DataSink, [('outAtlasFullPath', 'Atlas.definitions')]),
                      (BAtlas, Atlas_DataSink, [('template_landmarks_50Lmks_fcsv', 'Atlas.20111119_BCD.@fcsv'),
                                                ('template_weights_50Lmks_wts', 'Atlas.20111119_BCD.@wts'),
                                                ('LLSModel_50Lmks_hdf5', 'Atlas.20111119_BCD.@hdf5'),
                                                ('T1_50Lmks_mdl', 'Atlas.20111119_BCD.@mdl')]),
                      (inputspec, Subject_DataSink, [(('subject', outputPattern), 'regexp_substitutions')]),
                      (buildTemplateIteration2, Subject_DataSink,
                       [('outputspec.template', 'ANTSTemplate.@template')]),
                      (MakeNewAtlasTemplateNode, Subject_DataSink,
                       [('clean_deformed_list', 'ANTSTemplate.@passive_deformed_templates')]),
                      ])

    from utils import run_workflow, print_workflow
    # Dead debug branch kept for manual graph inspection.
    if False:
        print_workflow(template, plugin=master_config['execution']['plugin'], dotfilename='template')
    return run_workflow(template, plugin=master_config['execution']['plugin'],
                        plugin_args=master_config['plugin_args'])
def CreateTissueClassifyWorkflow(WFname, master_config, InterpolationMode, UseRegistrationMasking):
    """Build the tissue-classification sub-workflow.

    Registers the atlas to the subject's primary T1 (Affine followed by a
    two-stage SyN, optionally masked with ROIAuto / atlas head-region masks)
    and then runs BRAINSABCext for multi-modal tissue classification.

    Parameters:
        WFname -- name for the returned nipype Workflow
        master_config -- experiment dictionary; 'queue' and 'long_q' are read here
        InterpolationMode -- interpolation mode handed to BRAINSABCext
        UseRegistrationMasking -- when True, ROIAuto/head-region masks are fed
            to both pre-ABC registrations

    Returns:
        the assembled tissueClassifyWF nipype Workflow
    """
    from nipype.interfaces import ants
    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    tissueClassifyWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['T1List', 'T2List', 'PDList', 'FLList',
                                                             'OTHERList', 'T1_count', 'PrimaryT1',
                                                             'atlasDefinition',
                                                             'atlasToSubjectInitialTransform',
                                                             'atlasVolume', 'atlasheadregion']),
                         run_without_submitting=True,
                         name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasToSubjectTransform',
                                                              'atlasToSubjectInverseTransform',
                                                              'atlasToSubjectRegistrationState',
                                                              'outputLabels',
                                                              'outputHeadLabels',  # ???
                                                              # 't1_corrected', 't2_corrected',
                                                              't1_average', 't2_average',
                                                              'pd_average', 'fl_average',
                                                              'posteriorImages',
                                                              ]),
                          run_without_submitting=True,
                          name='outputspec')

    ########################################################
    # Run BABCext on Multi-modal images
    ########################################################
    # Build the (input, output, type) file lists that BRAINSABCext consumes.
    makeOutImageList = pe.Node(Function(function=MakeOutFileList,
                                        input_names=['T1List', 'T2List', 'PDList', 'FLList',
                                                     'OTHERList', 'postfix', 'postfixBFC',
                                                     'postfixUnwrapped', 'PrimaryT1', 'ListOutType'],
                                        output_names=['inImageList', 'outImageList',
                                                      'outBFCImageList', 'outUnwrappedImageList',
                                                      'imageTypeList']),
                               run_without_submitting=True,
                               name="99_makeOutImageList")
    tissueClassifyWF.connect(inputsSpec, 'T1List', makeOutImageList, 'T1List')
    tissueClassifyWF.connect(inputsSpec, 'T2List', makeOutImageList, 'T2List')
    tissueClassifyWF.connect(inputsSpec, 'PDList', makeOutImageList, 'PDList')
    tissueClassifyWF.connect(inputsSpec, 'FLList', makeOutImageList, 'FLList')
    tissueClassifyWF.connect(inputsSpec, 'OTHERList', makeOutImageList, 'OTHERList')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', makeOutImageList, 'PrimaryT1')
    makeOutImageList.inputs.ListOutType = False
    makeOutImageList.inputs.postfix = "_corrected.nii.gz"
    makeOutImageList.inputs.postfixBFC = "_NOT_USED"
    makeOutImageList.inputs.postfixUnwrapped = "_NOT_USED"

    ##### Initialize with ANTS Transform For AffineComponentBABC
    currentAtlasToSubjectantsRigidRegistration = 'AtlasToSubjectANTsPreABC_Affine'
    A2SantsRegistrationPreABCAffine = pe.Node(
        interface=ants.Registration(),
        name=currentAtlasToSubjectantsRigidRegistration)
    many_cpu_ANTsRigid_options_dictionary = {
        'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 4, 2, 8),
        'overwrite': True
    }
    A2SantsRegistrationPreABCAffine.plugin_args = many_cpu_ANTsRigid_options_dictionary
    CommonANTsRegistrationSettings(
        antsRegistrationNode=A2SantsRegistrationPreABCAffine,
        registrationTypeDescription='AtlasToSubjectANTsPreABC_Affine',
        output_transform_prefix='AtlasToSubjectPreBABC_Rigid',
        output_warped_image='atlas2subjectRigid.nii.gz',
        output_inverse_warped_image='subject2atlasRigid.nii.gz',
        save_state=None,
        invert_initial_moving_transform=False)
    tissueClassifyWF.connect(inputsSpec, 'atlasToSubjectInitialTransform',
                             A2SantsRegistrationPreABCAffine, 'initial_moving_transform')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1',
                             A2SantsRegistrationPreABCAffine, 'fixed_image')
    tissueClassifyWF.connect(inputsSpec, 'atlasVolume',
                             A2SantsRegistrationPreABCAffine, 'moving_image')

    ##### Initialize with ANTS Transform For SyN component BABC
    currentAtlasToSubjectantsRegistration = 'AtlasToSubjectANTsPreABC_SyN'
    A2SantsRegistrationPreABCSyN = pe.Node(
        interface=ants.Registration(),
        name=currentAtlasToSubjectantsRegistration)
    many_cpu_ANTsSyN_options_dictionary = {
        'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 8, 8, 16),
        'overwrite': True
    }
    A2SantsRegistrationPreABCSyN.plugin_args = many_cpu_ANTsSyN_options_dictionary
    CommonANTsRegistrationSettings(
        antsRegistrationNode=A2SantsRegistrationPreABCSyN,
        registrationTypeDescription='AtlasToSubjectANTsPreABC_SyN',
        output_transform_prefix='AtlasToSubjectPreBABC_SyN',
        output_warped_image='atlas2subject.nii.gz',
        output_inverse_warped_image='subject2atlas.nii.gz',
        save_state='SavedInternalSyNState.h5',
        invert_initial_moving_transform=False)

    ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
    if UseRegistrationMasking:
        from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto
        fixedROIAuto = pe.Node(interface=BRAINSROIAuto(), name="fixedImageROIAUTOMask")
        ## NOTE Very large to include some skull in bad cases of bias where back of head is very dark
        fixedROIAuto.inputs.ROIAutoDilateSize = 15
        fixedROIAuto.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"
        tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', fixedROIAuto, 'inputVolume')
        tissueClassifyWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCAffine, 'fixed_image_mask')
        tissueClassifyWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCSyN, 'fixed_image_mask')
        ## NOTE: Always use atlas head region to avoid computing this every time.
        tissueClassifyWF.connect(inputsSpec, 'atlasheadregion',
                                 A2SantsRegistrationPreABCAffine, 'moving_image_mask')
        tissueClassifyWF.connect(inputsSpec, 'atlasheadregion',
                                 A2SantsRegistrationPreABCSyN, 'moving_image_mask')

    # Chain the Affine result into the SyN stage as its initialization.
    tissueClassifyWF.connect(A2SantsRegistrationPreABCAffine, 'composite_transform',
                             A2SantsRegistrationPreABCSyN, 'initial_moving_transform')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1',
                             A2SantsRegistrationPreABCSyN, 'fixed_image')
    tissueClassifyWF.connect(inputsSpec, 'atlasVolume',
                             A2SantsRegistrationPreABCSyN, 'moving_image')

    BABCext = pe.Node(interface=BRAINSABCext(), name="BABC")
    many_cpu_BABC_options_dictionary = {
        'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 13, 8, 16),
        'overwrite': True
    }
    BABCext.plugin_args = many_cpu_BABC_options_dictionary
    tissueClassifyWF.connect(makeOutImageList, 'inImageList', BABCext, 'inputVolumes')
    tissueClassifyWF.connect(makeOutImageList, 'imageTypeList', BABCext, 'inputVolumeTypes')
    tissueClassifyWF.connect(makeOutImageList, 'outImageList', BABCext, 'outputVolumes')
    BABCext.inputs.debuglevel = 0
    BABCext.inputs.useKNN = True
    BABCext.inputs.purePlugsThreshold = 0.1  # New feature to allow for pure plug processing and improvements.
    BABCext.inputs.maxIterations = 2
    BABCext.inputs.maxBiasDegree = 0
    BABCext.inputs.filterIteration = 3
    #BABCext.inputs.filterMethod = 'GradientAnisotropicDiffusion' ## If inputs are denoised, we don't need this
    BABCext.inputs.filterMethod = 'None'
    BABCext.inputs.atlasToSubjectTransformType = 'SyN'
    # Using SyN, so no bsplines here
    BABCext.inputs.gridSize = [10, 10, 10]
    BABCext.inputs.outputFormat = "NIFTI"
    BABCext.inputs.outputLabels = "brain_label_seg.nii.gz"
    BABCext.inputs.outputDirtyLabels = "volume_label_seg.nii.gz"
    BABCext.inputs.posteriorTemplate = "POSTERIOR_%s.nii.gz"
    BABCext.inputs.atlasToSubjectTransform = "atlas_to_subject.h5"
    # BABCext.inputs.implicitOutputs = ['t1_average_BRAINSABC.nii.gz', 't2_average_BRAINSABC.nii.gz']
    BABCext.inputs.interpolationMode = InterpolationMode
    BABCext.inputs.outputDir = './'
    BABCext.inputs.saveState = 'SavedBABCInternalSyNState.h5'

    tissueClassifyWF.connect(inputsSpec, 'atlasDefinition', BABCext, 'atlasDefinition')
    # NOTE: MUTUALLY EXCLUSIVE with restoreState
    #tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN,
    #                         'composite_transform',
    #                         BABCext, 'atlasToSubjectInitialTransform')
    tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN, 'save_state',
                             BABCext, 'restoreState')

    """
    Get the first T1 and T2 corrected images from BABCext
    """

    """ HACK: THIS IS NOT NEEDED! We should use the averged t1 and averaged t2 images instead!
    def get_first_T1_and_T2(in_files,T1_count):
        '''
        Returns the first T1 and T2 file in in_files, based on offset in T1_count.
        '''
        return in_files[0],in_files[T1_count]
    bfc_files = pe.Node(Function(input_names=['in_files','T1_count'],
                               output_names=['t1_corrected','t2_corrected'],
                               function=get_first_T1_and_T2),
                  run_without_submitting=True,
                  name='99_bfc_files' )
    tissueClassifyWF.connect( inputsSpec, 'T1_count', bfc_files, 'T1_count')
    tissueClassifyWF.connect(BABCext,'outputVolumes',bfc_files, 'in_files')
    tissueClassifyWF.connect(bfc_files,'t1_corrected',outputsSpec,'t1_corrected')
    tissueClassifyWF.connect(bfc_files,'t2_corrected',outputsSpec,'t2_corrected')
    #tissueClassifyWF.connect(bfc_files,'pd_corrected',outputsSpec,'pd_corrected')
    #tissueClassifyWF.connect(bfc_files,'fl_corrected',outputsSpec,'fl_corrected')
    """

    #############
    tissueClassifyWF.connect(BABCext, 'saveState', outputsSpec, 'atlasToSubjectRegistrationState')
    tissueClassifyWF.connect(BABCext, 'atlasToSubjectTransform', outputsSpec, 'atlasToSubjectTransform')

    def MakeInverseTransformFileName(TransformFileName):
        """### HACK: This function is to work around a deficiency in BRAINSABCext where the inverse transform name is not being computed properly in the list outputs"""
        fixed_inverse_name = TransformFileName.replace(".h5", "_Inverse.h5")
        return [fixed_inverse_name]

    tissueClassifyWF.connect([(BABCext, outputsSpec,
                               [(('atlasToSubjectTransform', MakeInverseTransformFileName),
                                 "atlasToSubjectInverseTransform")]), ])

    tissueClassifyWF.connect(BABCext, 'outputLabels', outputsSpec, 'outputLabels')
    tissueClassifyWF.connect(BABCext, 'outputDirtyLabels', outputsSpec, 'outputHeadLabels')
    tissueClassifyWF.connect(BABCext, 'outputT1AverageImage', outputsSpec, 't1_average')
    tissueClassifyWF.connect(BABCext, 'outputT2AverageImage', outputsSpec, 't2_average')
    tissueClassifyWF.connect(BABCext, 'outputPDAverageImage', outputsSpec, 'pd_average')
    tissueClassifyWF.connect(BABCext, 'outputFLAverageImage', outputsSpec, 'fl_average')
    # NOTE: The legacy '## remove' connections that wired
    # ('outputAverageImages', getListIndexOrNoneIfOutOfRange, 0/1/2) to
    # t1_average/t2_average/pd_average were deleted: those outputsSpec inputs
    # are already connected from the explicit output*AverageImage fields above,
    # and nipype raises on a second connection to an already-connected input.

    MakePosteriorDictionaryNode = pe.Node(Function(function=MakePosteriorDictionaryFunc,
                                                   input_names=['posteriorImages'],
                                                   output_names=['posteriorDictionary']),
                                          run_without_submitting=True,
                                          name="99_makePosteriorDictionary")
    tissueClassifyWF.connect(BABCext, 'posteriorImages',
                             MakePosteriorDictionaryNode, 'posteriorImages')
    tissueClassifyWF.connect(MakePosteriorDictionaryNode, 'posteriorDictionary',
                             outputsSpec, 'posteriorImages')

    return tissueClassifyWF
def CreateMALFWorkflow(WFname, master_config, good_subjects, BASE_DATA_GRABBER_DIR):
    """Build the Multi-Atlas Label Fusion (MALF) sub-workflow.

    For each atlas subject in good_subjects the workflow grabs its T1,
    label map and landmarks, registers it to the target subject
    (BLI -> Affine -> two-stage SyN), warps its label map, then fuses all
    warped labels with ants.JointFusion and post-processes the fused map
    with Neuromorphometrics-2012 specific fixups.

    Parameters:
        WFname -- name for the returned nipype Workflow
        master_config -- experiment dictionary; 'queue' and 'long_q' are read here
        good_subjects -- iterable of atlas subject ids used for fusion
        BASE_DATA_GRABBER_DIR -- base directory for the per-atlas DataGrabbers

    Returns:
        the assembled MALFWF nipype Workflow (output: MALF_neuro2012_labelmap)
    """
    from nipype.interfaces import ants
    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    MALFWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image',  # Desired image to create label map for
                                                             'subj_lmks',  # The landmarks corresponding to t1_image
                                                             'subj_fixed_head_labels',  # The fixed head labels from BABC
                                                             'subj_left_hemisphere',  # The warped left hemisphere mask
                                                             'atlasWeightFilename'  # The static weights file name
                                                             ]),
                         run_without_submitting=True,
                         name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['MALF_neuro2012_labelmap']),
                          run_without_submitting=True,
                          name='outputspec')

    # Per-atlas-subject node registries.
    BLICreator = dict()
    MALF_DG = dict()
    A2SantsRegistrationPreABCRigid = dict()
    A2SantsRegistrationPreABCSyN = dict()
    fixedROIAuto = dict()
    movingROIAuto = dict()
    labelMapResample = dict()

    warpedAtlasT1MergeNode = pe.Node(interface=Merge(len(good_subjects)), name="T1sMergeAtlas")
    warpedAtlasLblMergeNode = pe.Node(interface=Merge(len(good_subjects)), name="LblMergeAtlas")
    malf_atlas_mergeindex = 1  # Merge inputs are 1-based ('in1', 'in2', ...)
    for malf_atlas_subject in good_subjects:
        ## Need DataGrabber Here For the Atlas
        MALF_DG[malf_atlas_subject] = pe.Node(interface=nio.DataGrabber(infields=['subject'],
                                                                        outfields=['malf_atlas_t1',
                                                                                   'malf_atlas_lbls',
                                                                                   'malf_atlas_lmks'
                                                                                   ]),
                                              run_without_submitting=True,
                                              name='MALF_DG_' + malf_atlas_subject)
        #MALF_DG[malf_atlas_subject].inputs.base_directory = master_config['previousresult']
        MALF_DG[malf_atlas_subject].inputs.base_directory = BASE_DATA_GRABBER_DIR
        MALF_DG[malf_atlas_subject].inputs.subject = malf_atlas_subject
        MALF_DG[malf_atlas_subject].inputs.field_template = {
            'malf_atlas_t1': '%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
            'malf_atlas_lbls': '%s/TissueClassify/neuro_lbls.nii.gz',
            'malf_atlas_lmks': '%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
        }
        MALF_DG[malf_atlas_subject].inputs.template_args = {
            'malf_atlas_t1': [['subject']],
            'malf_atlas_lbls': [['subject']],
            'malf_atlas_lmks': [['subject']],
        }
        MALF_DG[malf_atlas_subject].inputs.template = '*'
        MALF_DG[malf_atlas_subject].inputs.sort_filelist = True
        MALF_DG[malf_atlas_subject].inputs.raise_on_empty = True

        ## Create BLI first
        ########################################################
        # Run BLI atlas_to_subject
        ########################################################
        BLICreator[malf_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(),
                                                 name="BLI_" + malf_atlas_subject)
        BLICreator[malf_atlas_subject].inputs.outputTransformFilename = \
            "landmarkInitializer_{0}_to_subject_transform.h5".format(malf_atlas_subject)
        MALFWF.connect(inputsSpec, 'atlasWeightFilename',
                       BLICreator[malf_atlas_subject], 'inputWeightFilename')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_lmks',
                       BLICreator[malf_atlas_subject], 'inputMovingLandmarkFilename')
        MALFWF.connect(inputsSpec, 'subj_lmks',
                       BLICreator[malf_atlas_subject], 'inputFixedLandmarkFilename')

        ##### Initialize with ANTS Transform For AffineComponentBABC
        currentAtlasToSubjectantsRigidRegistration = 'Rigid_AtlasToSubjectANTsPreABC_' + malf_atlas_subject
        A2SantsRegistrationPreABCRigid[malf_atlas_subject] = pe.Node(interface=ants.Registration(),
                                                                     name=currentAtlasToSubjectantsRigidRegistration)
        many_cpu_ANTsRigid_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 2, 1, 1),
                                                 'overwrite': True}
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].plugin_args = many_cpu_ANTsRigid_options_dictionary

        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.num_threads = -1
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.dimension = 3
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.transforms = ["Affine", ]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.transform_parameters = [[0.1]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.metric = ['MI']
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sampling_strategy = ['Regular']
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sampling_percentage = [0.5]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.metric_weight = [1.0]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.radius_or_number_of_bins = [32]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.number_of_iterations = [[1000, 1000, 500, 100]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.convergence_threshold = [1e-8]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.convergence_window_size = [10]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.use_histogram_matching = [True]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.shrink_factors = [[8, 4, 2, 1]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.smoothing_sigmas = [[3, 2, 1, 0]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sigma_units = ["vox"]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.use_estimate_learning_rate_once = [False]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.initialize_transforms_per_stage = True
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_Rigid'
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.winsorize_lower_quantile = 0.01
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.winsorize_upper_quantile = 0.99
        ## NO NEED FOR THIS
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_warped_image = 'atlas2subjectRigid.nii.gz'
        ## NO NEED FOR THIS
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlasRigid.nii.gz'

        MALFWF.connect(BLICreator[malf_atlas_subject], 'outputTransformFilename',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'fixed_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'moving_image')

        ##### Initialize with ANTS Transform For SyN component BABC
        currentAtlasToSubjectantsRegistration = 'SyN_AtlasToSubjectANTsPreABC_' + malf_atlas_subject
        A2SantsRegistrationPreABCSyN[malf_atlas_subject] = pe.Node(interface=ants.Registration(),
                                                                   name=currentAtlasToSubjectantsRegistration)
        many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 4),
                                               'overwrite': True}
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].plugin_args = many_cpu_ANTsSyN_options_dictionary

        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.num_threads = -1
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.dimension = 3
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.transforms = ["SyN", "SyN"]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.transform_parameters = [[0.1, 3, 0], [0.1, 3, 0]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.metric = ['MI', 'MI']
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sampling_strategy = [None, None]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sampling_percentage = [1.0, 1.0]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.metric_weight = [1.0, 1.0]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.radius_or_number_of_bins = [32, 32]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.number_of_iterations = [[500, 500, 500, 500], [70]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.convergence_threshold = [1e-8, 1e-4]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.convergence_window_size = [12]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.use_histogram_matching = [True, True]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.shrink_factors = [[8, 4, 3, 2], [1]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.smoothing_sigmas = [[3, 2, 2, 1], [0]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sigma_units = ["vox", "vox"]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.use_estimate_learning_rate_once = [False, False]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.initialize_transforms_per_stage = True
        ## NO NEED FOR THIS
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.save_state = 'SavedInternalSyNState.h5'
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_transform_prefix = malf_atlas_subject + '_ToSubjectPreBABC_SyN'
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.winsorize_lower_quantile = 0.01
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.winsorize_upper_quantile = 0.99
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_warped_image = malf_atlas_subject + '_2subject.nii.gz'
        ## NO NEED FOR THIS
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'

        ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
        UseRegistrationMasking = True
        if UseRegistrationMasking:
            from SEMTools.segmentation.specialized import BRAINSROIAuto

            fixedROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(),
                                                       name="fixedROIAUTOMask_" + malf_atlas_subject)
            fixedROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize = 10
            fixedROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"

            movingROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(),
                                                        name="movingROIAUTOMask_" + malf_atlas_subject)
            # BUGFIX: the dilate size was previously (re)assigned to fixedROIAuto
            # here, leaving the moving mask's ROIAutoDilateSize unset.
            movingROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize = 10
            movingROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz"

            MALFWF.connect(inputsSpec, 'subj_t1_image',
                           fixedROIAuto[malf_atlas_subject], 'inputVolume')
            MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                           movingROIAuto[malf_atlas_subject], 'inputVolume')
            MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'fixed_image_mask')
            MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'moving_image_mask')
            MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'fixed_image_mask')
            MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'moving_image_mask')

        # Chain Affine output into SyN initialization.
        MALFWF.connect(A2SantsRegistrationPreABCRigid[malf_atlas_subject],
                       ('composite_transform', getListIndexOrNoneIfOutOfRange, 0),
                       A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image',
                       A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'fixed_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                       A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'moving_image')

        # Warp the atlas label map into subject space with MultiLabel interpolation.
        labelMapResample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),
                                                       name="WLABEL_" + malf_atlas_subject)
        many_cpu_labelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1),
                                                        'overwrite': True}
        labelMapResample[malf_atlas_subject].plugin_args = many_cpu_labelMapResample_options_dictionary
        labelMapResample[malf_atlas_subject].inputs.dimension = 3
        labelMapResample[malf_atlas_subject].inputs.output_image = malf_atlas_subject + '_2_subj_lbl.nii.gz'
        labelMapResample[malf_atlas_subject].inputs.interpolation = 'MultiLabel'
        labelMapResample[malf_atlas_subject].inputs.default_value = 0
        labelMapResample[malf_atlas_subject].inputs.invert_transform_flags = [False]
        MALFWF.connect(A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'composite_transform',
                       labelMapResample[malf_atlas_subject], 'transforms')
        MALFWF.connect(inputsSpec, 'subj_t1_image',
                       labelMapResample[malf_atlas_subject], 'reference_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_lbls',
                       labelMapResample[malf_atlas_subject], 'input_image')

        MALFWF.connect(A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'warped_image',
                       warpedAtlasT1MergeNode, 'in' + str(malf_atlas_mergeindex))
        MALFWF.connect(labelMapResample[malf_atlas_subject], 'output_image',
                       warpedAtlasLblMergeNode, 'in' + str(malf_atlas_mergeindex))
        malf_atlas_mergeindex += 1

    jointFusion = pe.Node(interface=ants.JointFusion(), name="JointFusion")
    many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 4, 4),
                                               'overwrite': True}
    jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary
    jointFusion.inputs.dimension = 3
    jointFusion.inputs.num_modalities = 1
    jointFusion.inputs.method = 'Joint[0.1,2]'
    jointFusion.inputs.output_label_image = 'fusion_neuro2012_20.nii.gz'

    def FixLabelMapFromNeuromorphemetrics2012(fusionFN, FixedHeadFN, LeftHemisphereFN, outFN):
        """Post-process the fused Neuromorphometrics-2012 label map.

        Inserts CSF/blood/brainstem codes from the BABC head labels, splits
        CSF into left/right hemispheres, masks to the brain, enforces
        connectivity of selected subcortical labels, and fills holes with an
        UNKNOWN code.  Runs as a nipype Function node, so all imports and
        helpers must be defined inside.
        """
        import SimpleITK as sitk
        import os

        def ForceMaskInsert(inlabels, newmask, newmaskvalue):
            # Overwrite inlabels with newmaskvalue wherever newmask is non-zero.
            newmask = sitk.Cast((newmask > 0), sitk.sitkUInt8)
            outlabels = inlabels * sitk.Cast((1 - newmask), sitk.sitkUInt8)
            outlabels = outlabels + newmask * newmaskvalue
            return sitk.Cast(outlabels, sitk.sitkUInt8)

        ## TODO: GetLargestLabel is copied from elsewhere
        def GetLargestLabel(inputMask, UseErosionCleaning):
            # Largest connected component of inputMask, optionally cleaned
            # with a 1-voxel erode/dilate pass.
            LargestComponentCode = 1
            if UseErosionCleaning:
                erosionMask = sitk.ErodeObjectMorphology(inputMask, 1)
            else:
                erosionMask = inputMask
            CC = sitk.ConnectedComponent(erosionMask)
            Rlabel = sitk.RelabelComponent(CC)
            largestMask = (Rlabel == LargestComponentCode)
            if UseErosionCleaning:
                dilateMask = sitk.DilateObjectMorphology(largestMask, 1)
            else:
                dilateMask = largestMask
            return (largestMask * dilateMask > 0)

        def RecodeNonLargest(outlabels, keepCode, UNKNOWN_LABEL_CODE):
            # Keep only the largest connected region of keepCode; stray
            # islands are recoded to UNKNOWN_LABEL_CODE.
            orig_mask = (outlabels == keepCode)
            connected_mask = GetLargestLabel(orig_mask, False)
            small_regions = (orig_mask - connected_mask)
            outlabels = ForceMaskInsert(outlabels, connected_mask, keepCode)
            outlabels = ForceMaskInsert(outlabels, small_regions, UNKNOWN_LABEL_CODE)
            return outlabels

        fusionIm = sitk.Cast(sitk.ReadImage(fusionFN), sitk.sitkUInt8)
        FixedHead = sitk.Cast(sitk.ReadImage(FixedHeadFN), sitk.sitkUInt8)
        LeftHemisphereIm = sitk.Cast(sitk.ReadImage(LeftHemisphereFN), sitk.sitkUInt8)

        csf_labels = (FixedHead == 4)
        outlabels = ForceMaskInsert(fusionIm, csf_labels, 51)
        blood_labels = (FixedHead == 5)
        BLOOD_CODE = 230
        outlabels = ForceMaskInsert(outlabels, blood_labels, BLOOD_CODE)  ## Add blood as value 230
        left_hemi_pre = (outlabels == 52)
        outlabels = ForceMaskInsert(outlabels, left_hemi_pre, 51)  ## Make all CSF Right hemisphere
        left_hemi_post = (LeftHemisphereIm * sitk.Cast((outlabels == 51), sitk.sitkUInt8) > 0)
        outlabels = ForceMaskInsert(outlabels, left_hemi_post, 52)  ## Make all CSF Right hemisphere
        ## Now extend brainstem lower
        brain_stem = (FixedHead == 30) * (outlabels == 0)  ## Only extend to areas where there is not already a label
        outlabels = ForceMaskInsert(outlabels, brain_stem, 35)  ## Make all CSF Right hemisphere
        BRAIN_MASK = sitk.Cast((FixedHead > 0), sitk.sitkUInt8)
        outlabels = outlabels * BRAIN_MASK
        ## Caudate = 36 37
        ## Putamen = 57 58
        ## Pallidus = 55,56
        ## Thalamus = 59,60
        ## Hippocampus = 47,48
        ## Accumbens = 23,30
        UNKNOWN_LABEL_CODE = 255
        labels_to_ensure_connected = [36, 37, 57, 58, 55, 56, 59, 60, 47, 48, 23, 30]
        for keepCode in labels_to_ensure_connected:
            outlabels = RecodeNonLargest(outlabels, keepCode, UNKNOWN_LABEL_CODE)
        ## FILL IN HOLES
        unkown_holes = (BRAIN_MASK > 0) * (outlabels == 0)
        outlabels = ForceMaskInsert(outlabels, unkown_holes, UNKNOWN_LABEL_CODE)  ## Fill unkown regeions with unkown code
        fixedFusionLabelFN = os.path.realpath(outFN)
        sitk.WriteImage(outlabels, fixedFusionLabelFN)
        #print("\n\n\n\n\n\n{0}\n\n\n\nXXXXXXXX".format(fixedFusionLabelFN))
        return fixedFusionLabelFN

    fixFusionLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012,
                                         input_names=['fusionFN', 'FixedHeadFN',
                                                      'LeftHemisphereFN', 'outFN'],
                                         output_names=['fixedFusionLabelFN']),
                                name="FixedFusionLabelmap")
    fixFusionLabelMap.inputs.outFN = 'neuro2012_20fusion_merge_seg.nii.gz'
    MALFWF.connect(jointFusion, 'output_label_image', fixFusionLabelMap, 'fusionFN')
    MALFWF.connect(inputsSpec, 'subj_fixed_head_labels', fixFusionLabelMap, 'FixedHeadFN')
    MALFWF.connect(inputsSpec, 'subj_left_hemisphere', fixFusionLabelMap, 'LeftHemisphereFN')

    MALFWF.connect(warpedAtlasT1MergeNode, 'out', jointFusion, 'warped_intensity_images')
    MALFWF.connect(warpedAtlasLblMergeNode, 'out', jointFusion, 'warped_label_images')
    MALFWF.connect(inputsSpec, 'subj_t1_image', jointFusion, 'target_image')
    MALFWF.connect(fixFusionLabelMap, 'fixedFusionLabelFN',
                   outputsSpec, 'MALF_neuro2012_labelmap')

    return MALFWF
def _template_runner(argv, environment, experiment, pipeline_options, cluster):
    """Build and run (or only print) one subject-atlas template workflow per subject.

    For every subject found in the session database this constructs a Nipype
    workflow that grabs the per-session baseline results from a previous
    experiment, averages the T1s into an initial template, runs two ANTs
    template-building iterations, writes an updated atlas XML definition, and
    sinks the results.

    :param argv: parsed command-line arguments (docopt-style dict; uses
        'SUBJECTS', '--use-sentinal', '--use-shuffle', '--dotfilename')
    :param environment: environment settings (uses 'prefix')
    :param experiment: experiment settings (uses 'cachedir', 'resultdir',
        'dbfile', 'previousresult', 'atlascache')
    :param pipeline_options: base Nipype pipeline options
    :param cluster: cluster/queue settings (uses 'queue', 'long_q')
    """
    print("Getting subjects from database...")
    # subjects = argv["--subjects"].split(',')
    subjects, subjects_sessions_dictionary = get_subjects_sessions_dictionary(
        argv['SUBJECTS'], experiment['cachedir'], experiment['resultdir'],
        environment['prefix'], experiment['dbfile'], argv['--use-sentinal'],
        argv['--use-shuffle'])  # Build database before parallel section
    # NOTE(review): the former local alias `useSentinal = argv['--use-sentinal']`
    # was never read after this point and has been removed; the flag is already
    # consumed by get_subjects_sessions_dictionary() above.

    # Quick preliminary sanity check
    for thisSubject in subjects:
        if len(subjects_sessions_dictionary[thisSubject]) == 0:
            print(
                "ERROR: subject {0} has no sessions found. Did you supply a valid subject id on the command line?"
                .format(thisSubject))
            sys.exit(-1)

    for thisSubject in subjects:
        print("Processing atlas generation for this subject: {0}".format(thisSubject))
        print("=" * 80)
        print("Copying Atlas directory and determining appropriate Nipype options...")
        subj_pipeline_options = nipype_options(
            argv, pipeline_options, cluster, experiment, environment)  # Generate Nipype options
        print("Dispatching jobs to the system...")
        ######
        ###### Now start workflow construction
        ######
        # Set universal pipeline options
        nipype_config.update_config(subj_pipeline_options)

        # Skip this subject unless every session has exactly one baseline
        # t1_average image available from the previous experiment.
        ready_for_template_building = True
        for thisSession in subjects_sessions_dictionary[thisSubject]:
            path_test = os.path.join(
                experiment['previousresult'],
                '*/{0}/{1}/TissueClassify/t1_average_BRAINSABC.nii.gz'.format(
                    thisSubject, thisSession))
            t1_file_result = glob.glob(path_test)
            if len(t1_file_result) != 1:
                print("Incorrect number of t1 images found for data grabber {0}".format(t1_file_result))
                print(" at path {0}".format(path_test))
                ready_for_template_building = False
        if not ready_for_template_building:
            print("TEMPORARY SKIPPING: Not ready to process {0}".format(thisSubject))
            continue

        base_output_directory = os.path.join(
            subj_pipeline_options['logging']['log_directory'], thisSubject)
        template = pe.Workflow(name='SubjectAtlas_Template_' + thisSubject)
        template.base_dir = base_output_directory

        # Single-subject identity source feeding all downstream grabbers/sinks.
        subjectNode = pe.Node(interface=IdentityInterface(fields=['subject']),
                              run_without_submitting=True,
                              name='99_subjectIterator')
        subjectNode.inputs.subject = thisSubject

        sessionsExtractorNode = pe.Node(Function(
            function=getSessionsFromSubjectDictionary,
            input_names=['subject_session_dictionary', 'subject'],
            output_names=['sessions']),
            run_without_submitting=True,
            name="99_sessionsExtractor")
        sessionsExtractorNode.inputs.subject_session_dictionary = subjects_sessions_dictionary

        # Optional modalities: missing files are tolerated (raise_on_empty=False).
        # NOTE(review): run_without_submitting was previously ALSO passed into the
        # DataGrabber interface constructor; it is a Node-level execution flag, so
        # it is kept only on the MapNode here (same effective behavior).
        baselineOptionalDG = pe.MapNode(
            nio.DataGrabber(infields=['subject', 'session'],
                            outfields=['t2_average', 'pd_average', 'fl_average']),
            run_without_submitting=True,
            iterfield=['session'],
            name='BaselineOptional_DG')
        baselineOptionalDG.inputs.base_directory = experiment['previousresult']
        baselineOptionalDG.inputs.sort_filelist = True
        baselineOptionalDG.inputs.raise_on_empty = False
        baselineOptionalDG.inputs.template = '*'
        baselineOptionalDG.inputs.field_template = {
            't2_average': '*/%s/%s/TissueClassify/t2_average_BRAINSABC.nii.gz',
            'pd_average': '*/%s/%s/TissueClassify/pd_average_BRAINSABC.nii.gz',
            'fl_average': '*/%s/%s/TissueClassify/fl_average_BRAINSABC.nii.gz'
        }
        baselineOptionalDG.inputs.template_args = {
            't2_average': [['subject', 'session']],
            'pd_average': [['subject', 'session']],
            'fl_average': [['subject', 'session']]
        }

        # Required inputs: any missing file is an error (raise_on_empty=True).
        baselineRequiredDG = pe.MapNode(
            nio.DataGrabber(infields=['subject', 'session'],
                            outfields=['t1_average', 'brainMaskLabels',
                                       'posteriorImages', 'passive_intensities',
                                       'passive_masks', 'BCD_ACPC_Landmarks_fcsv']),
            run_without_submitting=True,
            iterfield=['session'],
            name='Baseline_DG')
        baselineRequiredDG.inputs.base_directory = experiment['previousresult']
        baselineRequiredDG.inputs.sort_filelist = True
        baselineRequiredDG.inputs.raise_on_empty = True
        baselineRequiredDG.inputs.template = '*'
        posterior_files = [
            'AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS',
            'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB', 'WM'
        ]
        passive_intensities_files = [
            'rho.nii.gz', 'phi.nii.gz', 'theta.nii.gz',
            'l_thalamus_ProbabilityMap.nii.gz', 'r_accumben_ProbabilityMap.nii.gz',
            'l_globus_ProbabilityMap.nii.gz', 'l_accumben_ProbabilityMap.nii.gz',
            'l_caudate_ProbabilityMap.nii.gz', 'l_putamen_ProbabilityMap.nii.gz',
            'r_thalamus_ProbabilityMap.nii.gz', 'r_putamen_ProbabilityMap.nii.gz',
            'r_caudate_ProbabilityMap.nii.gz', 'r_hippocampus_ProbabilityMap.nii.gz',
            'r_globus_ProbabilityMap.nii.gz', 'l_hippocampus_ProbabilityMap.nii.gz'
        ]
        passive_mask_files = [
            'template_WMPM2_labels.nii.gz', 'hncma_atlas.nii.gz',
            'template_nac_labels.nii.gz', 'template_leftHemisphere.nii.gz',
            'template_rightHemisphere.nii.gz', 'template_ventricles.nii.gz',
            'template_headregion.nii.gz'
        ]
        baselineRequiredDG.inputs.field_template = {
            't1_average': '*/%s/%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
            'brainMaskLabels': '*/%s/%s/TissueClassify/complete_brainlabels_seg.nii.gz',
            'BCD_ACPC_Landmarks_fcsv': '*/%s/%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
            'posteriorImages': '*/%s/%s/TissueClassify/POSTERIOR_%s.nii.gz',
            'passive_intensities': '*/%s/%s/WarpedAtlas2Subject/%s',
            'passive_masks': '*/%s/%s/WarpedAtlas2Subject/%s',
        }
        baselineRequiredDG.inputs.template_args = {
            't1_average': [['subject', 'session']],
            'brainMaskLabels': [['subject', 'session']],
            'BCD_ACPC_Landmarks_fcsv': [['subject', 'session']],
            'posteriorImages': [['subject', 'session', posterior_files]],
            'passive_intensities': [['subject', 'session', passive_intensities_files]],
            'passive_masks': [['subject', 'session', passive_mask_files]]
        }

        # Collate per-session images into the dictionary structure expected by
        # the ANTs template-building iterations.
        MergeByExtendListElementsNode = pe.Node(
            Function(function=MergeByExtendListElements,
                     input_names=['t1s', 't2s', 'pds', 'fls', 'labels',
                                  'posteriors', 'passive_intensities', 'passive_masks'],
                     output_names=['ListOfImagesDictionaries',
                                   'registrationImageTypes',
                                   'interpolationMapping']),
            run_without_submitting=True,
            name="99_MergeByExtendListElements")

        template.connect([
            (subjectNode, baselineRequiredDG, [('subject', 'subject')]),
            (subjectNode, baselineOptionalDG, [('subject', 'subject')]),
            (subjectNode, sessionsExtractorNode, [('subject', 'subject')]),
            (sessionsExtractorNode, baselineRequiredDG, [('sessions', 'session')]),
            (sessionsExtractorNode, baselineOptionalDG, [('sessions', 'session')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [('t1_average', 't1s'),
              ('brainMaskLabels', 'labels'),
              (('posteriorImages',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'posteriors')]),
            (baselineOptionalDG, MergeByExtendListElementsNode,
             [('t2_average', 't2s'),
              ('pd_average', 'pds'),
              ('fl_average', 'fls')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [(('passive_intensities',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'passive_intensities')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [(('passive_masks',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'passive_masks')])
        ])

        # Initial template: normalized average of all session T1s.
        myInitAvgWF = pe.Node(
            interface=ants.AverageImages(),
            name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
        myInitAvgWF.inputs.dimension = 3
        myInitAvgWF.inputs.normalize = True
        myInitAvgWF.inputs.num_threads = -1
        template.connect(baselineRequiredDG, 't1_average', myInitAvgWF, "images")

        ####################################################################################################
        # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
        # if numSessions == 1:
        #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
        ####################################################################################################
        CLUSTER_QUEUE = cluster['queue']
        CLUSTER_QUEUE_LONG = cluster['long_q']
        buildTemplateIteration1 = BAWantsRegistrationTemplateBuildSingleIterationWF(
            'iteration01', CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)
        # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
        buildTemplateIteration2 = BAWantsRegistrationTemplateBuildSingleIterationWF(
            'Iteration02', CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)

        CreateAtlasXMLAndCleanedDeformedAveragesNode = pe.Node(
            interface=Function(
                function=CreateAtlasXMLAndCleanedDeformedAverages,
                input_names=['t1_image', 'deformed_list', 'AtlasTemplate', 'outDefinition'],
                output_names=['outAtlasFullPath', 'clean_deformed_list']),
            # This is a lot of work, so submit it run_without_submitting=True,
            run_without_submitting=True,  # HACK: THIS NODE REALLY SHOULD RUN ON THE CLUSTER!
            name='99_CreateAtlasXMLAndCleanedDeformedAverages')

        if subj_pipeline_options['plugin_name'].startswith('SGE'):
            # for some nodes, the qsub call needs to be modified on the cluster
            CreateAtlasXMLAndCleanedDeformedAveragesNode.plugin_args = {
                'template': subj_pipeline_options['plugin_args']['template'],
                'qsub_args': modify_qsub_args(cluster['queue'], 1, 1, 1),
                'overwrite': True
            }
            for bt in [buildTemplateIteration1, buildTemplateIteration2]:
                BeginANTS = bt.get_node("BeginANTS")
                BeginANTS.plugin_args = {
                    'template': subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 7, 4, 16)
                }
                wimtdeformed = bt.get_node("wimtdeformed")
                wimtdeformed.plugin_args = {
                    'template': subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 2)
                }
                #AvgAffineTransform = bt.get_node("AvgAffineTransform")
                #AvgAffineTransform.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                #                                  'qsub_args': modify_qsub_args(cluster['queue'], 2, 1, 1)}
                wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
                wimtPassivedeformed.plugin_args = {
                    'template': subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 4)
                }

        # Running off previous baseline experiment
        NACCommonAtlas = MakeAtlasNode(
            experiment['atlascache'], 'NACCommonAtlas_{0}'.format('subject'),
            ['S_BRAINSABCSupport'])  ## HACK : replace 'subject' with subject id once this is a loop rather than an iterable.

        template.connect([
            (myInitAvgWF, buildTemplateIteration1,
             [('output_average_image', 'inputspec.fixed_image')]),
            (MergeByExtendListElementsNode, buildTemplateIteration1,
             [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
              ('registrationImageTypes', 'inputspec.registrationImageTypes'),
              ('interpolationMapping', 'inputspec.interpolationMapping')]),
            (buildTemplateIteration1, buildTemplateIteration2,
             [('outputspec.template', 'inputspec.fixed_image')]),
            (MergeByExtendListElementsNode, buildTemplateIteration2,
             [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
              ('registrationImageTypes', 'inputspec.registrationImageTypes'),
              ('interpolationMapping', 'inputspec.interpolationMapping')]),
            (subjectNode, CreateAtlasXMLAndCleanedDeformedAveragesNode,
             [(('subject', xml_filename), 'outDefinition')]),
            (NACCommonAtlas, CreateAtlasXMLAndCleanedDeformedAveragesNode,
             [('ExtendedAtlasDefinition_xml_in', 'AtlasTemplate')]),
            (buildTemplateIteration2, CreateAtlasXMLAndCleanedDeformedAveragesNode,
             [('outputspec.template', 't1_image'),
              ('outputspec.passive_deformed_templates', 'deformed_list')]),
        ])

        ## Genearate an average lmks file.
        myAverageLmk = pe.Node(interface=GenerateAverageLmkFile(), name="myAverageLmk")
        myAverageLmk.inputs.outputLandmarkFile = "AVG_LMKS.fcsv"
        template.connect(baselineRequiredDG, 'BCD_ACPC_Landmarks_fcsv',
                         myAverageLmk, 'inputLandmarkFiles')

        # Create DataSinks
        SubjectAtlas_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
        SubjectAtlas_DataSink.overwrite = subj_pipeline_options['ds_overwrite']
        SubjectAtlas_DataSink.inputs.base_directory = experiment['resultdir']

        template.connect([
            (subjectNode, SubjectAtlas_DataSink, [('subject', 'container')]),
            (CreateAtlasXMLAndCleanedDeformedAveragesNode, SubjectAtlas_DataSink,
             [('outAtlasFullPath', 'Atlas.@definitions')]),
            (CreateAtlasXMLAndCleanedDeformedAveragesNode, SubjectAtlas_DataSink,
             [('clean_deformed_list', 'Atlas.@passive_deformed_templates')]),
            (subjectNode, SubjectAtlas_DataSink,
             [(('subject', outputPattern), 'regexp_substitutions')]),
            (buildTemplateIteration2, SubjectAtlas_DataSink,
             [('outputspec.template', 'Atlas.@template')]),
            (myAverageLmk, SubjectAtlas_DataSink,
             [('outputLandmarkFile', 'Atlas.@outputLandmarkFile')]),
        ])

        # Either dump a graph (debugging) or actually execute the workflow.
        dotfilename = argv['--dotfilename']
        if dotfilename is not None:
            print("WARNING: Printing workflow, but not running pipeline")
            print_workflow(template,
                           plugin=subj_pipeline_options['plugin_name'],
                           dotfilename=dotfilename)
        else:
            run_workflow(template,
                         plugin=subj_pipeline_options['plugin_name'],
                         plugin_args=subj_pipeline_options['plugin_args'])
def main(args):
    """Build and run the (legacy) single-subject atlas template workflow.

    :param args: two-tuple ``(subjects, master_config)`` where ``subjects`` is
        the list of subject ids to iterate over and ``master_config`` is the
        master configuration dict (execution, logging, paths, queue settings).
    :return: the result of ``run_workflow`` on the constructed template workflow.
    :raises RuntimeError: if the Nipype execution plugin configuration could
        not be applied.
    """
    subjects, master_config = args

    # Set universal pipeline options
    from nipype import config
    config.update_config(master_config)
    # NOTE(review): this check was previously an `assert`, which is stripped
    # under `python -O`; an explicit raise keeps the validation unconditional.
    if config.get('execution', 'plugin') != master_config['execution']['plugin']:
        raise RuntimeError("Nipype execution plugin configuration was not applied")

    # Function-local imports keep heavy dependencies off module import time.
    # (Unused locals `os`, `sys`, `traceback`, and `mapPosteriorList` removed.)
    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.ants as ants

    from template import MergeByExtendListElements, xml_filename
    from atlasNode import GetAtlasNode, MakeNewAtlasTemplate
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.distributed import modify_qsub_args

    template = pe.Workflow(name='SubjectAtlas_Template')
    template.base_dir = master_config['logging']['log_directory']

    BAtlas = GetAtlasNode(master_config['previouscache'], 'BAtlas')

    # One iteration of the workflow per subject.
    inputspec = pe.Node(interface=IdentityInterface(fields=['subject']),
                        name='inputspec')
    inputspec.iterables = ('subject', subjects)

    # Grab per-subject baseline averages/labels from the previous experiment.
    baselineDG = pe.Node(nio.DataGrabber(
        infields=['subject'],
        outfields=['t1_average', 't2_average', 'pd_average', 'fl_average',
                   'outputLabels', 'posteriorImages']),
        name='Baseline_DG')
    baselineDG.inputs.base_directory = master_config['previousresult']
    baselineDG.inputs.sort_filelist = True
    baselineDG.inputs.raise_on_empty = False
    baselineDG.inputs.template = '*/%s/*/Baseline/%s.nii.gz'
    baselineDG.inputs.template_args['t1_average'] = [['subject', 't1_average_BRAINSABC']]
    baselineDG.inputs.template_args['t2_average'] = [['subject', 't2_average_BRAINSABC']]
    baselineDG.inputs.template_args['pd_average'] = [['subject', 'pd_average_BRAINSABC']]
    baselineDG.inputs.template_args['fl_average'] = [['subject', 'fl_average_BRAINSABC']]
    baselineDG.inputs.template_args['outputLabels'] = [['subject', 'brain_label_seg']]
    baselineDG.inputs.field_template = {
        'posteriorImages': '*/%s/*/TissueClassify/POSTERIOR_%s.nii.gz'
    }
    posterior_files = [
        'AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS',
        'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB', 'WM'
    ]
    baselineDG.inputs.template_args['posteriorImages'] = [['subject', posterior_files]]

    MergeByExtendListElementsNode = pe.Node(
        Function(function=MergeByExtendListElements,
                 input_names=['t1s', 't2s', 'pds', 'fls', 'labels', 'posteriors'],
                 output_names=['ListOfImagesDictionaries',
                               'registrationImageTypes',
                               'interpolationMapping']),
        run_without_submitting=True,
        name="99_MergeByExtendListElements")

    from PipeLineFunctionHelpers import WrapPosteriorImagesFromDictionaryFunction as wrapfunc
    template.connect([
        (inputspec, baselineDG, [('subject', 'subject')]),
        (baselineDG, MergeByExtendListElementsNode,
         [('t1_average', 't1s'),
          ('t2_average', 't2s'),
          ('pd_average', 'pds'),
          ('fl_average', 'fls'),
          ('outputLabels', 'labels'),
          (('posteriorImages', wrapfunc), 'posteriors')])
    ])

    # Initial template: normalized average of the subject's T1s.
    myInitAvgWF = pe.Node(
        interface=ants.AverageImages(),
        name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
    myInitAvgWF.inputs.dimension = 3
    myInitAvgWF.inputs.normalize = True
    template.connect(baselineDG, 't1_average', myInitAvgWF, "images")

    ####################################################################################################
    # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
    # if numSessions == 1:
    #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
    ####################################################################################################
    from BAWantsRegistrationBuildTemplate import BAWantsRegistrationTemplateBuildSingleIterationWF as registrationWF
    buildTemplateIteration1 = registrationWF('iteration01')
    # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
    buildTemplateIteration2 = registrationWF('Iteration02')

    MakeNewAtlasTemplateNode = pe.Node(
        interface=Function(
            function=MakeNewAtlasTemplate,
            input_names=['t1_image', 'deformed_list', 'AtlasTemplate', 'outDefinition'],
            output_names=['outAtlasFullPath', 'clean_deformed_list']),
        # This is a lot of work, so submit it run_without_submitting=True,
        run_without_submitting=True,  # HACK: THIS NODE REALLY SHOULD RUN ON THE CLUSTER!
        name='99_MakeNewAtlasTemplate')

    if master_config['execution']['plugin'] == 'SGE':
        # for some nodes, the qsub call needs to be modified on the cluster
        MakeNewAtlasTemplateNode.plugin_args = {
            'template': master_config['plugin_args']['template'],
            'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1, 1),
            'overwrite': True
        }
        for bt in [buildTemplateIteration1, buildTemplateIteration2]:
            ##################################################
            # *** Hans, is this TODO already addressed? *** #
            #     ----> # TODO: Change these parameters <---- #
            ##################################################
            BeginANTS = bt.get_node("BeginANTS")
            BeginANTS.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '9000M', 4, hard=False)
            }
            wimtdeformed = bt.get_node("wimtdeformed")
            wimtdeformed.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)
            }
            AvgAffineTransform = bt.get_node("AvgAffineTransform")
            AvgAffineTransform.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1)
            }
            wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
            wimtPassivedeformed.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)
            }

    template.connect([
        (myInitAvgWF, buildTemplateIteration1,
         [('output_average_image', 'inputspec.fixed_image')]),
        (MergeByExtendListElementsNode, buildTemplateIteration1,
         [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
          ('registrationImageTypes', 'inputspec.registrationImageTypes'),
          ('interpolationMapping', 'inputspec.interpolationMapping')]),
        (buildTemplateIteration1, buildTemplateIteration2,
         [('outputspec.template', 'inputspec.fixed_image')]),
        (MergeByExtendListElementsNode, buildTemplateIteration2,
         [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
          ('registrationImageTypes', 'inputspec.registrationImageTypes'),
          ('interpolationMapping', 'inputspec.interpolationMapping')]),
        (inputspec, MakeNewAtlasTemplateNode,
         [(('subject', xml_filename), 'outDefinition')]),
        (BAtlas, MakeNewAtlasTemplateNode,
         [('ExtendedAtlasDefinition_xml_in', 'AtlasTemplate')]),
        (buildTemplateIteration2, MakeNewAtlasTemplateNode,
         [('outputspec.template', 't1_image'),
          ('outputspec.passive_deformed_templates', 'deformed_list')]),
    ])

    # Create DataSinks
    Atlas_DataSink = pe.Node(nio.DataSink(), name="Atlas_DS")
    Atlas_DataSink.overwrite = master_config['ds_overwrite']
    Atlas_DataSink.inputs.base_directory = master_config['resultdir']

    Subject_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
    Subject_DataSink.overwrite = master_config['ds_overwrite']
    Subject_DataSink.inputs.base_directory = master_config['resultdir']

    template.connect([
        (inputspec, Atlas_DataSink, [('subject', 'container')]),
        (buildTemplateIteration1, Atlas_DataSink,
         [('outputspec.template', 'Atlas.iteration1')]),  # Unnecessary
        (MakeNewAtlasTemplateNode, Atlas_DataSink,
         [('outAtlasFullPath', 'Atlas.definitions')]),
        (BAtlas, Atlas_DataSink,
         [('template_landmarks_50Lmks_fcsv', 'Atlas.20111119_BCD.@fcsv'),
          ('template_weights_50Lmks_wts', 'Atlas.20111119_BCD.@wts'),
          ('LLSModel_50Lmks_hdf5', 'Atlas.20111119_BCD.@hdf5'),
          ('T1_50Lmks_mdl', 'Atlas.20111119_BCD.@mdl')]),
        (inputspec, Subject_DataSink,
         [(('subject', outputPattern), 'regexp_substitutions')]),
        (buildTemplateIteration2, Subject_DataSink,
         [('outputspec.template', 'ANTSTemplate.@template')]),
        (MakeNewAtlasTemplateNode, Subject_DataSink,
         [('clean_deformed_list', 'ANTSTemplate.@passive_deformed_templates')]),
    ])

    from utils import run_workflow, print_workflow
    # Dead debug branch kept intentionally: flip to True to dump the workflow
    # graph instead of (in addition to) running it.
    if False:
        print_workflow(template,
                       plugin=master_config['execution']['plugin'],
                       dotfilename='template')
    return run_workflow(template,
                        plugin=master_config['execution']['plugin'],
                        plugin_args=master_config['plugin_args'])
def CreateJointFusionWorkflow(WFname, onlyT1, master_config, runFixFusionLabelMap=True): """ This function... :param WFname: :param onlyT1: :param master_config: :param runFixFusionLabelMap: True :return: JointFusionWF """ from nipype.interfaces import ants if onlyT1: n_modality = 1 else: n_modality = 2 CLUSTER_QUEUE = master_config['queue'] CLUSTER_QUEUE_LONG = master_config['long_q'] JointFusionWF = pe.Workflow(name=WFname) inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image', # Desired image to create label map for 'subj_t2_image', # Desired image to create label map for 'subj_lmks', # The landmarks corresponding to t1_image 'subj_fixed_head_labels', # The fixed head labels from BABC 'subj_posteriors', # The BABC posteriors 'subj_left_hemisphere', # The warped left hemisphere mask 'atlasWeightFilename', # The static weights file name 'labelBaseFilename' # Atlas label base name ex) neuro_lbls.nii.gz ]), run_without_submitting=True, name='inputspec') outputsSpec = pe.Node(interface=IdentityInterface(fields=['JointFusion_HDAtlas20_2015_label', 'JointFusion_HDAtlas20_2015_CSFVBInjected_label', 'JointFusion_HDAtlas20_2015_fs_standard_label', 'JointFusion_HDAtlas20_2015_lobe_label', 'JointFusion_extended_snapshot', 'JointFusion_HDAtlas20_2015_dustCleaned_label', 'JointFusion_volumes_csv', 'JointFusion_volumes_json', 'JointFusion_lobe_volumes_csv', 'JointFusion_lobe_volumes_json']), run_without_submitting=True, name='outputspec') from collections import OrderedDict # Need OrderedDict internally to ensure consistent ordering BLICreator = OrderedDict() A2SantsRegistrationPreJointFusion_SyN = OrderedDict() movingROIAuto = OrderedDict() labelMapResample = OrderedDict() NewlabelMapResample = OrderedDict() jointFusion_atlas_mergeindex = 0 merge_input_offset = 1 # Merge nodes are indexed from 1, not zero! 
""" multimodal ants registration if t2 exists """ sessionMakeMultimodalInput = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2', 'jointFusion'], output_names=['outFNs']), run_without_submitting=True, name="sessionMakeMultimodalInput") sessionMakeMultimodalInput.inputs.jointFusion = False JointFusionWF.connect(inputsSpec, 'subj_t1_image', sessionMakeMultimodalInput, 'inFN1') """ T2 resample to T1 average image :: BRAINSABC changed its behavior to retain image's original spacing & origin :: Since antsJointFusion only works for the identical origin images for targets, :: Resampling is placed at this stage """ subjectT2Resample = pe.Node(interface=BRAINSResample(), name="BRAINSResample_T2_forAntsJointFusion") if not onlyT1: subjectT2Resample.plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1), 'overwrite': True} subjectT2Resample.inputs.pixelType = 'short' subjectT2Resample.inputs.interpolationMode = 'Linear' subjectT2Resample.inputs.outputVolume = "t2_resampled_in_t1.nii.gz" # subjectT2Resample.inputs.warpTransform= "Identity" # Default is "Identity" JointFusionWF.connect(inputsSpec, 'subj_t1_image', subjectT2Resample, 'referenceVolume') JointFusionWF.connect(inputsSpec, 'subj_t2_image', subjectT2Resample, 'inputVolume') JointFusionWF.connect(subjectT2Resample, 'outputVolume', sessionMakeMultimodalInput, 'inFN2') else: pass # print('jointFusion_atlas_db_base') print("master_config") print(master_config) print("master_config['jointfusion_atlas_db_base']") print((master_config['jointfusion_atlas_db_base'])) jointFusionAtlasDict = readMalfAtlasDbBase(master_config['jointfusion_atlas_db_base']) number_of_atlas_sources = len(jointFusionAtlasDict) jointFusionAtlases = OrderedDict() atlasMakeMultimodalInput = OrderedDict() t2Resample = OrderedDict() warpedAtlasLblMergeNode = pe.Node(interface=Merge(number_of_atlas_sources), name="LblMergeAtlas") NewwarpedAtlasLblMergeNode = pe.Node(interface=Merge(number_of_atlas_sources), 
name="fswmLblMergeAtlas") # "HACK NOT to use T2 for JointFusion only" # warpedAtlasesMergeNode = pe.Node(interface=Merge(number_of_atlas_sources*n_modality),name="MergeAtlases") warpedAtlasesMergeNode = pe.Node(interface=Merge(number_of_atlas_sources * 1), name="MergeAtlases") ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons UseRegistrationMasking = True if UseRegistrationMasking == True: from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto fixedROIAuto = pe.Node(interface=BRAINSROIAuto(), name="fixedROIAUTOMask") fixedROIAuto.inputs.ROIAutoDilateSize = 10 fixedROIAuto.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz" JointFusionWF.connect(inputsSpec, 'subj_t1_image', fixedROIAuto, 'inputVolume') for jointFusion_atlas_subject in list(jointFusionAtlasDict.keys()): ## Need DataGrabber Here For the Atlas jointFusionAtlases[jointFusion_atlas_subject] = pe.Node(interface=IdentityInterface( fields=['t1', 't2', 'label', 'lmks', 'registration_mask']), name='jointFusionAtlasInput' + jointFusion_atlas_subject) jointFusionAtlases[jointFusion_atlas_subject].inputs.t1 = jointFusionAtlasDict[jointFusion_atlas_subject]['t1'] jointFusionAtlases[jointFusion_atlas_subject].inputs.t2 = jointFusionAtlasDict[jointFusion_atlas_subject]['t2'] jointFusionAtlases[jointFusion_atlas_subject].inputs.label = jointFusionAtlasDict[jointFusion_atlas_subject][ 'label'] jointFusionAtlases[jointFusion_atlas_subject].inputs.lmks = jointFusionAtlasDict[jointFusion_atlas_subject][ 'lmks'] jointFusionAtlases[jointFusion_atlas_subject].inputs.registration_mask = \ jointFusionAtlasDict[jointFusion_atlas_subject]['registration_mask'] ## Create BLI first ######################################################## # Run BLI atlas_to_subject ######################################################## BLICreator[jointFusion_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI_" + 
jointFusion_atlas_subject) BLICreator[ jointFusion_atlas_subject].inputs.outputTransformFilename = "landmarkInitializer_{0}_to_subject_transform.h5".format( jointFusion_atlas_subject) JointFusionWF.connect(inputsSpec, 'atlasWeightFilename', BLICreator[jointFusion_atlas_subject], 'inputWeightFilename') JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'lmks', BLICreator[jointFusion_atlas_subject], 'inputMovingLandmarkFilename') JointFusionWF.connect(inputsSpec, 'subj_lmks', BLICreator[jointFusion_atlas_subject], 'inputFixedLandmarkFilename') ##### Initialize with ANTS Transform For SyN currentAtlasToSubjectantsRegistration = 'SyN_AtlasToSubjectANTsPreJointFusion_' + jointFusion_atlas_subject A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject] = pe.Node(interface=ants.Registration(), name=currentAtlasToSubjectantsRegistration) many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 16), 'overwrite': True} A2SantsRegistrationPreJointFusion_SyN[ jointFusion_atlas_subject].plugin_args = many_cpu_ANTsSyN_options_dictionary if onlyT1: JFregistrationTypeDescription = "FiveStageAntsRegistrationT1Only" else: JFregistrationTypeDescription = "FiveStageAntsRegistrationMultiModal" CommonANTsRegistrationSettings( antsRegistrationNode=A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], registrationTypeDescription=JFregistrationTypeDescription, output_transform_prefix=jointFusion_atlas_subject + '_ToSubjectPreJointFusion_SyN', output_warped_image=jointFusion_atlas_subject + '_2subject.nii.gz', output_inverse_warped_image=None, # NO NEED FOR THIS save_state=None, # NO NEED FOR THIS invert_initial_moving_transform=False, initial_moving_transform=None) ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons if UseRegistrationMasking == True: from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto 
JointFusionWF.connect(fixedROIAuto, 'outputROIMaskVolume', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'fixed_image_masks') # JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels', # A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'fixed_image_masks') # NOTE: Moving image mask can be taken from Atlas directly so that it does not need to be read in # movingROIAuto[jointFusion_atlas_subject] = pe.Node(interface=BRAINSROIAuto(), name="movingROIAUTOMask_"+jointFusion_atlas_subject) # movingROIAuto.inputs.ROIAutoDilateSize=10 # movingROIAuto[jointFusion_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz" # JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't1', movingROIAuto[jointFusion_atlas_subject],'inputVolume') # JointFusionWF.connect(movingROIAuto[jointFusion_atlas_subject], 'outputROIMaskVolume',A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'moving_image_masks') JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'registration_mask', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'moving_image_masks') JointFusionWF.connect(BLICreator[jointFusion_atlas_subject], 'outputTransformFilename', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'initial_moving_transform') """ make multimodal input for atlases """ atlasMakeMultimodalInput[jointFusion_atlas_subject] = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2', 'jointFusion'], output_names=['outFNs']), run_without_submitting=True, name="atlasMakeMultimodalInput" + jointFusion_atlas_subject) atlasMakeMultimodalInput[jointFusion_atlas_subject].inputs.jointFusion = False JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't1', atlasMakeMultimodalInput[jointFusion_atlas_subject], 'inFN1') if not onlyT1: JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't2', atlasMakeMultimodalInput[jointFusion_atlas_subject], 
'inFN2') else: pass JointFusionWF.connect(sessionMakeMultimodalInput, 'outFNs', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'fixed_image') JointFusionWF.connect(atlasMakeMultimodalInput[jointFusion_atlas_subject], 'outFNs', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'moving_image') "HACK NOT to use T2 for JointFusion" # JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'warped_image', # warpedAtlasesMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex*n_modality) ) JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'warped_image', warpedAtlasesMergeNode, 'in' + str(merge_input_offset + jointFusion_atlas_mergeindex * 1)) """ Original t2 resampling """ for modality_index in range(1, n_modality): t2Resample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(), name="resampledT2" + jointFusion_atlas_subject) many_cpu_t2Resample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1), 'overwrite': True} t2Resample[jointFusion_atlas_subject].plugin_args = many_cpu_t2Resample_options_dictionary t2Resample[jointFusion_atlas_subject].inputs.num_threads = -1 t2Resample[jointFusion_atlas_subject].inputs.dimension = 3 t2Resample[jointFusion_atlas_subject].inputs.output_image = jointFusion_atlas_subject + '_t2.nii.gz' t2Resample[jointFusion_atlas_subject].inputs.interpolation = 'BSpline' t2Resample[jointFusion_atlas_subject].inputs.default_value = 0 t2Resample[jointFusion_atlas_subject].inputs.invert_transform_flags = [False] JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'composite_transform', t2Resample[jointFusion_atlas_subject], 'transforms') JointFusionWF.connect(inputsSpec, 'subj_t1_image', t2Resample[jointFusion_atlas_subject], 'reference_image') JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't2', t2Resample[jointFusion_atlas_subject], 
'input_image') "HACK NOT to use T2 for JointFusion only" # JointFusionWF.connect(t2Resample[jointFusion_atlas_subject],'output_image', # warpedAtlasesMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex*n_modality+modality_index) ) """ Original labelmap resampling """ labelMapResample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(), name="resampledLabel" + jointFusion_atlas_subject) many_cpu_labelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1), 'overwrite': True} labelMapResample[jointFusion_atlas_subject].plugin_args = many_cpu_labelMapResample_options_dictionary labelMapResample[jointFusion_atlas_subject].inputs.num_threads = -1 labelMapResample[jointFusion_atlas_subject].inputs.dimension = 3 labelMapResample[ jointFusion_atlas_subject].inputs.output_image = jointFusion_atlas_subject + '_2_subj_lbl.nii.gz' labelMapResample[jointFusion_atlas_subject].inputs.interpolation = 'MultiLabel' labelMapResample[jointFusion_atlas_subject].inputs.default_value = 0 labelMapResample[jointFusion_atlas_subject].inputs.invert_transform_flags = [False] JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'composite_transform', labelMapResample[jointFusion_atlas_subject], 'transforms') JointFusionWF.connect(inputsSpec, 'subj_t1_image', labelMapResample[jointFusion_atlas_subject], 'reference_image') JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'label', labelMapResample[jointFusion_atlas_subject], 'input_image') JointFusionWF.connect(labelMapResample[jointFusion_atlas_subject], 'output_image', warpedAtlasLblMergeNode, 'in' + str(merge_input_offset + jointFusion_atlas_mergeindex)) ### New labelmap resampling NewlabelMapResample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(), name="FSWM_WLABEL_" + jointFusion_atlas_subject) many_cpu_NewlabelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1), 
'overwrite': True} NewlabelMapResample[jointFusion_atlas_subject].plugin_args = many_cpu_NewlabelMapResample_options_dictionary NewlabelMapResample[jointFusion_atlas_subject].inputs.num_threads = -1 NewlabelMapResample[jointFusion_atlas_subject].inputs.dimension = 3 NewlabelMapResample[ jointFusion_atlas_subject].inputs.output_image = jointFusion_atlas_subject + 'fswm_2_subj_lbl.nii.gz' NewlabelMapResample[jointFusion_atlas_subject].inputs.interpolation = 'MultiLabel' NewlabelMapResample[jointFusion_atlas_subject].inputs.default_value = 0 NewlabelMapResample[jointFusion_atlas_subject].inputs.invert_transform_flags = [False] JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'composite_transform', NewlabelMapResample[jointFusion_atlas_subject], 'transforms') JointFusionWF.connect(inputsSpec, 'subj_t1_image', NewlabelMapResample[jointFusion_atlas_subject], 'reference_image') JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'label', NewlabelMapResample[jointFusion_atlas_subject], 'input_image') JointFusionWF.connect(NewlabelMapResample[jointFusion_atlas_subject], 'output_image', NewwarpedAtlasLblMergeNode, 'in' + str(merge_input_offset + jointFusion_atlas_mergeindex)) jointFusion_atlas_mergeindex += 1 ## Now work on cleaning up the label maps from .FixLabelMapsTools import FixLabelMapFromNeuromorphemetrics2012 from .FixLabelMapsTools import RecodeLabelMap ### Original NeuroMorphometrica merged fusion jointFusion = pe.Node(interface=ants.AntsJointFusion(), name="AntsJointFusion") many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 10, 8, 16), 'overwrite': True} jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary jointFusion.inputs.num_threads = -1 jointFusion.inputs.dimension = 3 jointFusion.inputs.search_radius = [3] # jointFusion.inputs.method='Joint[0.1,2]' jointFusion.inputs.out_label_fusion = 'JointFusion_HDAtlas20_2015_label.nii.gz' # 
JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels', jointFusion, 'mask_image') JointFusionWF.connect(fixedROIAuto, 'outputROIMaskVolume', jointFusion, 'mask_image') JointFusionWF.connect(warpedAtlasLblMergeNode, 'out', jointFusion, 'atlas_segmentation_image') AdjustMergeListNode = pe.Node(Function(function=adjustMergeList, input_names=['allList', 'n_modality'], output_names=['out']), name="AdjustMergeListNode") "*** HACK JointFusion only uses T1" # AdjustMergeListNode.inputs.n_modality = n_modality AdjustMergeListNode.inputs.n_modality = 1 JointFusionWF.connect(warpedAtlasesMergeNode, 'out', AdjustMergeListNode, 'allList') JointFusionWF.connect(AdjustMergeListNode, 'out', jointFusion, 'atlas_image') AdjustTargetImageListNode = pe.Node(Function(function=adjustMergeList, input_names=['allList', 'n_modality'], output_names=['out']), name="AdjustTargetImageListNode") AdjustTargetImageListNode.inputs.n_modality = n_modality "*** HACK JointFusion only uses T1" """ Once JointFusion works with T2 properly, delete sessionMakeListSingleModalInput and use sessionMakeMultimodalInput instead """ sessionMakeListSingleModalInput = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2', 'jointFusion'], output_names=['outFNs']), run_without_submitting=True, name="sessionMakeListSingleModalInput") sessionMakeListSingleModalInput.inputs.jointFusion = False JointFusionWF.connect(inputsSpec, 'subj_t1_image', sessionMakeListSingleModalInput, 'inFN1') JointFusionWF.connect(sessionMakeListSingleModalInput, 'outFNs', jointFusion, 'target_image') JointFusionWF.connect(jointFusion, 'out_label_fusion', outputsSpec, 'JointFusion_HDAtlas20_2015_label') ## We need to recode values to ensure that the labels match FreeSurer as close as possible by merging ## some labels together to standard FreeSurfer confenventions (i.e. 
for WMQL) RECODE_LABELS_2_Standard_FSWM = [ (15071, 47), (15072, 47), (15073, 47), (15145, 1011), (15157, 1011), (15161, 1011), (15179, 1012), (15141, 1014), (15151, 1017), (15163, 1018), (15165, 1019), (15143, 1027), (15191, 1028), (15193, 1028), (15185, 1030), (15201, 1030), (15175, 1031), (15195, 1031), (15173, 1035), (15144, 2011), (15156, 2011), (15160, 2011), (15178, 2012), (15140, 2014), (15150, 2017), (15162, 2018), (15164, 2019), (15142, 2027), (15190, 2028), (15192, 2028), (15184, 2030), (15174, 2031), (15194, 2031), (15172, 2035), (15200, 2030)] ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE): RecodeToStandardFSWM = pe.Node(Function(function=RecodeLabelMap, input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'], output_names=['OutputFileName']), name="RecodeToStandardFSWM") RecodeToStandardFSWM.inputs.RECODE_TABLE = RECODE_LABELS_2_Standard_FSWM RecodeToStandardFSWM.inputs.OutputFileName = 'JointFusion_HDAtlas20_2015_fs_standard_label.nii.gz' JointFusionWF.connect(RecodeToStandardFSWM, 'OutputFileName', outputsSpec, 'JointFusion_HDAtlas20_2015_fs_standard_label') ## JointFusion_SNAPSHOT_WRITER for Segmented result checking: # JointFusion_SNAPSHOT_WRITERNodeName = "JointFusion_ExtendedJointFusion_SNAPSHOT_WRITER" # JointFusion_SNAPSHOT_WRITER = pe.Node(interface=BRAINSSnapShotWriter(), name=JointFusion_SNAPSHOT_WRITERNodeName) # JointFusion_SNAPSHOT_WRITER.inputs.outputFilename = 'JointFusion_HDAtlas20_2015_CSFVBInjected_label.png' # output specification # JointFusion_SNAPSHOT_WRITER.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0] # JointFusion_SNAPSHOT_WRITER.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22] # JointFusionWF.connect(JointFusion_SNAPSHOT_WRITER,'outputFilename',outputsSpec,'JointFusion_extended_snapshot') myLocalDustCleanup = CreateDustCleanupWorkflow("DUST_CLEANUP", onlyT1, master_config) JointFusionWF.connect(inputsSpec, 'subj_t1_image', myLocalDustCleanup, 'inputspec.subj_t1_image') if 
not onlyT1: JointFusionWF.connect(subjectT2Resample, 'outputVolume', myLocalDustCleanup, 'inputspec.subj_t2_image') if runFixFusionLabelMap: ## post processing of jointfusion injectSurfaceCSFandVBIntoLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012, input_names=['fusionFN', 'FixedHeadFN', 'posteriorListOfTuples', 'LeftHemisphereFN', 'outFN', 'OUT_DICT'], output_names=['fixedFusionLabelFN']), name="injectSurfaceCSFandVBIntoLabelMap") injectSurfaceCSFandVBIntoLabelMap.inputs.outFN = 'JointFusion_HDAtlas20_2015_CSFVBInjected_label.nii.gz' from collections import OrderedDict # Need OrderedDict internally to ensure consistent ordering FREESURFER_DICT = OrderedDict({'BRAINSTEM': 16, 'RH_CSF': 24, 'LH_CSF': 24, 'BLOOD': 15000, 'UNKNOWN': 999, 'CONNECTED': [11, 12, 13, 9, 17, 26, 50, 51, 52, 48, 53, 58] }) injectSurfaceCSFandVBIntoLabelMap.inputs.OUT_DICT = FREESURFER_DICT JointFusionWF.connect(jointFusion, 'out_label_fusion', injectSurfaceCSFandVBIntoLabelMap, 'fusionFN') JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels', injectSurfaceCSFandVBIntoLabelMap, 'FixedHeadFN') JointFusionWF.connect(inputsSpec, 'subj_posteriors', injectSurfaceCSFandVBIntoLabelMap, 'posteriorListOfTuples') JointFusionWF.connect(inputsSpec, 'subj_left_hemisphere', injectSurfaceCSFandVBIntoLabelMap, 'LeftHemisphereFN') JointFusionWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN', myLocalDustCleanup, 'inputspec.subj_label_atlas') JointFusionWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN', outputsSpec, 'JointFusion_HDAtlas20_2015_CSFVBInjected_label') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', RecodeToStandardFSWM, 'InputFileName') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', outputsSpec, 'JointFusion_HDAtlas20_2015_dustCleaned_label') # JointFusionWF.connect([(inputsSpec, JointFusion_SNAPSHOT_WRITER, [( 
'subj_t1_image','inputVolumes')]), # (injectSurfaceCSFandVBIntoLabelMap, JointFusion_SNAPSHOT_WRITER, # [('fixedFusionLabelFN', 'inputBinaryVolumes')]) # ]) else: JointFusionWF.connect(jointFusion, 'output_label_image', myLocalDustCleanup, 'inputspec.subj_label_atlas') JointFusionWF.connect(jointFusion, 'output_label_image', outputsSpec, 'JointFusion_HDAtlas20_2015_CSFVBInjected_label') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', RecodeToStandardFSWM, 'InputFileName') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', outputsSpec, 'JointFusion_HDAtlas20_2015_dustCleaned_label') # JointFusionWF.connect([(inputsSpec, JointFusion_SNAPSHOT_WRITER, [( 'subj_t1_image','inputVolumes')]), # (jointFusion, JointFusion_SNAPSHOT_WRITER, # [('output_label_image', 'inputBinaryVolumes')]) # ]) """ Compute label volumes """ computeLabelVolumes = CreateVolumeMeasureWorkflow("LabelVolume", master_config) JointFusionWF.connect(inputsSpec, 'subj_t1_image', computeLabelVolumes, 'inputspec.subj_t1_image') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', computeLabelVolumes, 'inputspec.subj_label_image') JointFusionWF.connect(computeLabelVolumes, 'outputspec.csvFilename', outputsSpec, 'JointFusion_volumes_csv') JointFusionWF.connect(computeLabelVolumes, 'outputspec.jsonFilename', outputsSpec, 'JointFusion_volumes_json') ## Lobe Pacellation by recoding if master_config['relabel2lobes_filename'] != None: # print("Generate relabeled version based on {0}".format(master_config['relabel2lobes_filename'])) RECODE_LABELS_2_LobePacellation = readRecodingList(master_config['relabel2lobes_filename']) RecordToFSLobes = pe.Node(Function(function=RecodeLabelMap, input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'], output_names=['OutputFileName']), name="RecordToFSLobes") RecordToFSLobes.inputs.RECODE_TABLE = 
RECODE_LABELS_2_LobePacellation RecordToFSLobes.inputs.OutputFileName = 'JointFusion_HDAtlas20_2015_lobe_label.nii.gz' JointFusionWF.connect(RecodeToStandardFSWM, 'OutputFileName', RecordToFSLobes, 'InputFileName') JointFusionWF.connect(RecordToFSLobes, 'OutputFileName', outputsSpec, 'JointFusion_HDAtlas20_2015_lobe_label') """ Compute lobe volumes """ computeLobeVolumes = CreateVolumeMeasureWorkflow("LobeVolume", master_config) JointFusionWF.connect(inputsSpec, 'subj_t1_image', computeLobeVolumes, 'inputspec.subj_t1_image') JointFusionWF.connect(RecordToFSLobes, 'OutputFileName', computeLobeVolumes, 'inputspec.subj_label_image') JointFusionWF.connect(computeLobeVolumes, 'outputspec.csvFilename', outputsSpec, 'JointFusion_lobe_volumes_csv') JointFusionWF.connect(computeLobeVolumes, 'outputspec.jsonFilename', outputsSpec, 'JointFusion_lobe_volumes_json') return JointFusionWF
def segmentation(projectid, subjectid, sessionid, master_config, onlyT1=True, pipeline_name=''):
    """Build the per-session sub-cortical segmentation workflow.

    Constructs a nipype Workflow ("baw200") that:
      1. clips the session T1 with the BRAINSABC brain labels,
      2. refines the atlas-to-subject SyN registration (restored from the
         saved BRAINSABC registration state),
      3. runs the BRAINSCut random-forest sub-cortical segmentation
         (left/right accumben, caudate, putamen, globus, thalamus,
         hippocampus),
      4. writes snapshots and cleaned segmentations to a DataSink, and
      5. resamples the session images/labels back into the atlas space.

    Parameters
    ----------
    projectid, subjectid, sessionid :
        Identifiers used to build node names and output directory paths.
    master_config : dict
        Pipeline configuration; keys read here include 'long_q', 'queue',
        'ds_overwrite', 'resultdir', 'plugin_name', 'plugin_args'.
    onlyT1 : bool
        When False, the T2 average is also fed to BRAINSCut and merged
        into the atlas-space resampling list (15 files instead of 14).
    pipeline_name : str
        Name given to the returned pe.Workflow.

    Returns
    -------
    pe.Workflow
        The fully wired (but not yet run) segmentation workflow.
    """
    import os.path
    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces import ants
    from nipype.interfaces.utility import IdentityInterface, Function, Merge

    # Set universal pipeline options
    from nipype import config
    config.update_config(master_config)

    from PipeLineFunctionHelpers import ClipT1ImageWithBrainMask
    from .WorkupT1T2BRAINSCut import CreateBRAINSCutWorkflow
    from utilities.distributed import modify_qsub_args
    from nipype.interfaces.semtools import BRAINSSnapShotWriter

    # CLUSTER_QUEUE=master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    baw200 = pe.Workflow(name=pipeline_name)

    # HACK: print the whole master_config for debugging
    for key, itme in list(master_config.items()):
        print(("-" * 30))
        print((key, ":", itme))
    print(("-" * 30))
    # END HACK

    # External inputs: averaged modalities, atlas references, BABC labels /
    # posteriors, the saved registration state, and the BRAINSCut model inputs
    # (spherical coordinate images rho/phi/theta plus per-structure
    # probability maps and the trained model file).
    inputsSpec = pe.Node(interface=IdentityInterface(fields=['t1_average', 't2_average', 'template_t1',
                                                             'hncma_atlas',
                                                             'LMIatlasToSubject_tx',
                                                             'inputLabels', 'inputHeadLabels',
                                                             'posteriorImages',
                                                             'UpdatedPosteriorsList',
                                                             'atlasToSubjectRegistrationState',
                                                             'rho', 'phi', 'theta',
                                                             'l_caudate_ProbabilityMap', 'r_caudate_ProbabilityMap',
                                                             'l_hippocampus_ProbabilityMap', 'r_hippocampus_ProbabilityMap',
                                                             'l_putamen_ProbabilityMap', 'r_putamen_ProbabilityMap',
                                                             'l_thalamus_ProbabilityMap', 'r_thalamus_ProbabilityMap',
                                                             'l_accumben_ProbabilityMap', 'r_accumben_ProbabilityMap',
                                                             'l_globus_ProbabilityMap', 'r_globus_ProbabilityMap',
                                                             'trainModelFile_txtD0060NT0060_gz',
                                                             ]),
                         run_without_submitting=True, name='inputspec')

    # outputsSpec = pe.Node(interface=IdentityInterface(fields=[...]),
    #                       run_without_submitting=True, name='outputspec')

    # Clip the T1 average with the BRAINSABC brain labels so later steps see
    # a skull-stripped image.
    currentClipT1ImageWithBrainMaskName = 'ClipT1ImageWithBrainMask_' + str(subjectid) + "_" + str(sessionid)
    ClipT1ImageWithBrainMaskNode = pe.Node(interface=Function(function=ClipT1ImageWithBrainMask,
                                                              input_names=['t1_image', 'brain_labels',
                                                                           'clipped_file_name'],
                                                              output_names=['clipped_file']),
                                           name=currentClipT1ImageWithBrainMaskName)
    ClipT1ImageWithBrainMaskNode.inputs.clipped_file_name = 'clipped_from_BABC_labels_t1.nii.gz'

    baw200.connect([(inputsSpec, ClipT1ImageWithBrainMaskNode, [('t1_average', 't1_image'),
                                                                ('inputLabels', 'brain_labels')])])

    currentA2SantsRegistrationPostABCSyN = 'A2SantsRegistrationPostABCSyN_' + str(subjectid) + "_" + str(sessionid)
    ## TODO: It would be great to update the BRAINSABC atlasToSubjectTransform at this point, but
    ## That requires more testing, and fixes to ANTS to properly collapse transforms.
    ## For now we are simply creating a dummy node to pass through
    A2SantsRegistrationPostABCSyN = pe.Node(interface=ants.Registration(), name=currentA2SantsRegistrationPostABCSyN)

    many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 8, 8, 16),
                                           'overwrite': True}
    A2SantsRegistrationPostABCSyN.plugin_args = many_cpu_ANTsSyN_options_dictionary
    # Shared ANTs registration settings (helper defined elsewhere in this
    # module); the registration resumes from the saved BABC SyN state below.
    CommonANTsRegistrationSettings(
        antsRegistrationNode=A2SantsRegistrationPostABCSyN,
        registrationTypeDescription="A2SantsRegistrationPostABCSyN",
        output_transform_prefix='AtlasToSubjectPostBABC_SyN',
        output_warped_image='atlas2subjectPostBABC.nii.gz',
        output_inverse_warped_image='subject2atlasPostBABC.nii.gz',
        save_state='SavedInternalSyNStatePostBABC.h5',
        invert_initial_moving_transform=False,
        initial_moving_transform=None)

    ## TODO: Try multi-modal registration here
    baw200.connect([(inputsSpec, A2SantsRegistrationPostABCSyN, [('atlasToSubjectRegistrationState', 'restore_state'),
                                                                 ('t1_average', 'fixed_image'),
                                                                 ('template_t1', 'moving_image')])
                    ])

    # BRAINSCut random-forest segmentation sub-workflow.
    myLocalSegWF = CreateBRAINSCutWorkflow(projectid, subjectid, sessionid, master_config['queue'],
                                           master_config['long_q'], "Segmentation", onlyT1)

    # Average images merged for the snapshot writer (T1, and T2 when present).
    MergeStage2AverageImagesName = "99_mergeAvergeStage2Images_" + str(sessionid)
    MergeStage2AverageImages = pe.Node(interface=Merge(2), run_without_submitting=True,
                                       name=MergeStage2AverageImagesName)

    baw200.connect([(inputsSpec, myLocalSegWF,
                     [('t1_average', 'inputspec.T1Volume'),
                      ('template_t1', 'inputspec.template_t1'),
                      ('posteriorImages', "inputspec.posteriorDictionary"),
                      ('inputLabels', 'inputspec.RegistrationROI'),
                      ]),
                    (inputsSpec, MergeStage2AverageImages, [('t1_average', 'in1')]),
                    (A2SantsRegistrationPostABCSyN, myLocalSegWF,
                     [('composite_transform', 'inputspec.atlasToSubjectTransform')])
                    ])

    # Pass through all BRAINSCut model inputs (spherical coordinates,
    # per-structure probability maps, and the trained model file).
    baw200.connect([(inputsSpec, myLocalSegWF,
                     [
                         ('rho', 'inputspec.rho'),
                         ('phi', 'inputspec.phi'),
                         ('theta', 'inputspec.theta'),
                         ('l_caudate_ProbabilityMap', 'inputspec.l_caudate_ProbabilityMap'),
                         ('r_caudate_ProbabilityMap', 'inputspec.r_caudate_ProbabilityMap'),
                         ('l_hippocampus_ProbabilityMap', 'inputspec.l_hippocampus_ProbabilityMap'),
                         ('r_hippocampus_ProbabilityMap', 'inputspec.r_hippocampus_ProbabilityMap'),
                         ('l_putamen_ProbabilityMap', 'inputspec.l_putamen_ProbabilityMap'),
                         ('r_putamen_ProbabilityMap', 'inputspec.r_putamen_ProbabilityMap'),
                         ('l_thalamus_ProbabilityMap', 'inputspec.l_thalamus_ProbabilityMap'),
                         ('r_thalamus_ProbabilityMap', 'inputspec.r_thalamus_ProbabilityMap'),
                         ('l_accumben_ProbabilityMap', 'inputspec.l_accumben_ProbabilityMap'),
                         ('r_accumben_ProbabilityMap', 'inputspec.r_accumben_ProbabilityMap'),
                         ('l_globus_ProbabilityMap', 'inputspec.l_globus_ProbabilityMap'),
                         ('r_globus_ProbabilityMap', 'inputspec.r_globus_ProbabilityMap'),
                         ('trainModelFile_txtD0060NT0060_gz', 'inputspec.trainModelFile_txtD0060NT0060_gz')
                     ]
                     )]
                   )

    if not onlyT1:
        baw200.connect([(inputsSpec, myLocalSegWF, [('t2_average', 'inputspec.T2Volume')]),
                        (inputsSpec, MergeStage2AverageImages, [('t2_average', 'in2')])])
        file_count = 15  # Count of files to merge into MergeSessionSubjectToAtlas
    else:
        file_count = 14  # Count of files to merge into MergeSessionSubjectToAtlas

    ## NOTE: Element 0 of AccumulatePriorsList is the accumulated GM tissue
    # baw200.connect([(AccumulateLikeTissuePosteriorsNode, myLocalSegWF,
    #                  [(('AccumulatePriorsList', getListIndex, 0), "inputspec.TotalGM")]),
    #                 ])

    ### Now define where the final organized outputs should go.
    DataSink = pe.Node(nio.DataSink(), name="CleanedDenoisedSegmentation_DS_" + str(subjectid) + "_" + str(sessionid))
    DataSink.overwrite = master_config['ds_overwrite']
    DataSink.inputs.base_directory = master_config['resultdir']
    # DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'BRAINSCut')
    # DataSink.inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid)
    # NOTE(review): substitutions are applied in list order, so the final
    # '_seg_seg' -> '_seg' pair appears intended to undo a double-appended
    # suffix from the '.nii.gz' -> '_seg.nii.gz' rule; confirm before editing.
    DataSink.inputs.substitutions = [('Segmentations',
                                      os.path.join(projectid, subjectid, sessionid, 'CleanedDenoisedRFSegmentations')),
                                     ('subjectANNLabel_', ''),
                                     ('ANNContinuousPrediction', ''),
                                     ('subject.nii.gz', '.nii.gz'),
                                     ('_seg.nii.gz', '_seg.nii.gz'),
                                     ('.nii.gz', '_seg.nii.gz'),
                                     ('_seg_seg', '_seg')]

    baw200.connect([(myLocalSegWF, DataSink, [('outputspec.outputBinaryLeftCaudate', 'Segmentations.@LeftCaudate'),
                                              ('outputspec.outputBinaryRightCaudate', 'Segmentations.@RightCaudate'),
                                              ('outputspec.outputBinaryLeftHippocampus', 'Segmentations.@LeftHippocampus'),
                                              ('outputspec.outputBinaryRightHippocampus', 'Segmentations.@RightHippocampus'),
                                              ('outputspec.outputBinaryLeftPutamen', 'Segmentations.@LeftPutamen'),
                                              ('outputspec.outputBinaryRightPutamen', 'Segmentations.@RightPutamen'),
                                              ('outputspec.outputBinaryLeftThalamus', 'Segmentations.@LeftThalamus'),
                                              ('outputspec.outputBinaryRightThalamus', 'Segmentations.@RightThalamus'),
                                              ('outputspec.outputBinaryLeftAccumben', 'Segmentations.@LeftAccumben'),
                                              ('outputspec.outputBinaryRightAccumben', 'Segmentations.@RightAccumben'),
                                              ('outputspec.outputBinaryLeftGlobus', 'Segmentations.@LeftGlobus'),
                                              ('outputspec.outputBinaryRightGlobus', 'Segmentations.@RightGlobus'),
                                              ('outputspec.outputLabelImageName', 'Segmentations.@LabelImageName'),
                                              ('outputspec.outputCSVFileName', 'Segmentations.@CSVFileName')]),
                    # (myLocalSegWF, DataSink, [('outputspec.cleaned_labels', 'Segmentations.@cleaned_labels')])
                    ])

    # All twelve binary structure masks, merged for the snapshot writer.
    MergeStage2BinaryVolumesName = "99_MergeStage2BinaryVolumes_" + str(sessionid)
    MergeStage2BinaryVolumes = pe.Node(interface=Merge(12), run_without_submitting=True,
                                       name=MergeStage2BinaryVolumesName)

    baw200.connect([(myLocalSegWF, MergeStage2BinaryVolumes, [('outputspec.outputBinaryLeftAccumben', 'in1'),
                                                              ('outputspec.outputBinaryLeftCaudate', 'in2'),
                                                              ('outputspec.outputBinaryLeftPutamen', 'in3'),
                                                              ('outputspec.outputBinaryLeftGlobus', 'in4'),
                                                              ('outputspec.outputBinaryLeftThalamus', 'in5'),
                                                              ('outputspec.outputBinaryLeftHippocampus', 'in6'),
                                                              ('outputspec.outputBinaryRightAccumben', 'in7'),
                                                              ('outputspec.outputBinaryRightCaudate', 'in8'),
                                                              ('outputspec.outputBinaryRightPutamen', 'in9'),
                                                              ('outputspec.outputBinaryRightGlobus', 'in10'),
                                                              ('outputspec.outputBinaryRightThalamus', 'in11'),
                                                              ('outputspec.outputBinaryRightHippocampus', 'in12')])
                    ])

    ## SnapShotWriter for Segmented result checking:
    SnapShotWriterNodeName = "SnapShotWriter_" + str(sessionid)
    SnapShotWriter = pe.Node(interface=BRAINSSnapShotWriter(), name=SnapShotWriterNodeName)

    SnapShotWriter.inputs.outputFilename = 'snapShot' + str(sessionid) + '.png'  # output specification
    SnapShotWriter.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0]
    SnapShotWriter.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22]

    baw200.connect([(MergeStage2AverageImages, SnapShotWriter, [('out', 'inputVolumes')]),
                    (MergeStage2BinaryVolumes, SnapShotWriter, [('out', 'inputBinaryVolumes')]),
                    (SnapShotWriter, DataSink, [('outputFilename', 'Segmentations.@outputSnapShot')])
                    ])

    # currentAntsLabelWarpToSubject = 'AntsLabelWarpToSubject' + str(subjectid) + "_" + str(sessionid)
    # AntsLabelWarpToSubject = pe.Node(interface=ants.ApplyTransforms(), name=currentAntsLabelWarpToSubject)
    #
    # AntsLabelWarpToSubject.inputs.num_threads = -1
    # AntsLabelWarpToSubject.inputs.dimension = 3
    # AntsLabelWarpToSubject.inputs.output_image = 'warped_hncma_atlas_seg.nii.gz'
    # AntsLabelWarpToSubject.inputs.interpolation = "MultiLabel"
    #
    # baw200.connect([(A2SantsRegistrationPostABCSyN, AntsLabelWarpToSubject,
    #                  [('composite_transform', 'transforms')]),
    #                 (inputsSpec, AntsLabelWarpToSubject, [('t1_average', 'reference_image'),
    #                                                       ('hncma_atlas', 'input_image')])
    #                 ])
    # #####
    # ### Now define where the final organized outputs should go.
    # AntsLabelWarpedToSubject_DSName = "AntsLabelWarpedToSubject_DS_" + str(sessionid)
    # AntsLabelWarpedToSubject_DS = pe.Node(nio.DataSink(), name=AntsLabelWarpedToSubject_DSName)
    # AntsLabelWarpedToSubject_DS.overwrite = master_config['ds_overwrite']
    # AntsLabelWarpedToSubject_DS.inputs.base_directory = master_config['resultdir']
    # AntsLabelWarpedToSubject_DS.inputs.substitutions = [('AntsLabelWarpedToSubject', os.path.join(projectid, subjectid, sessionid, 'AntsLabelWarpedToSubject'))]
    #
    # baw200.connect([(AntsLabelWarpToSubject, AntsLabelWarpedToSubject_DS, [('output_image', 'AntsLabelWarpedToSubject')])])

    # Everything that should be resampled back into atlas space: the twelve
    # binary masks, the updated posteriors, the T1 average, and (when
    # available) the T2 average; file_count set above matches this list.
    MergeSessionSubjectToAtlasName = "99_MergeSessionSubjectToAtlas_" + str(sessionid)
    MergeSessionSubjectToAtlas = pe.Node(interface=Merge(file_count), run_without_submitting=True,
                                         name=MergeSessionSubjectToAtlasName)

    baw200.connect([(myLocalSegWF, MergeSessionSubjectToAtlas, [('outputspec.outputBinaryLeftAccumben', 'in1'),
                                                                ('outputspec.outputBinaryLeftCaudate', 'in2'),
                                                                ('outputspec.outputBinaryLeftPutamen', 'in3'),
                                                                ('outputspec.outputBinaryLeftGlobus', 'in4'),
                                                                ('outputspec.outputBinaryLeftThalamus', 'in5'),
                                                                ('outputspec.outputBinaryLeftHippocampus', 'in6'),
                                                                ('outputspec.outputBinaryRightAccumben', 'in7'),
                                                                ('outputspec.outputBinaryRightCaudate', 'in8'),
                                                                ('outputspec.outputBinaryRightPutamen', 'in9'),
                                                                ('outputspec.outputBinaryRightGlobus', 'in10'),
                                                                ('outputspec.outputBinaryRightThalamus', 'in11'),
                                                                ('outputspec.outputBinaryRightHippocampus', 'in12')]),
                    # (FixWMPartitioningNode, MergeSessionSubjectToAtlas, [('UpdatedPosteriorsList', 'in13')]),
                    (inputsSpec, MergeSessionSubjectToAtlas, [('UpdatedPosteriorsList', 'in13')]),
                    (inputsSpec, MergeSessionSubjectToAtlas, [('t1_average', 'in14')])
                    ])

    if not onlyT1:
        assert file_count == 15
        baw200.connect([(inputsSpec, MergeSessionSubjectToAtlas, [('t2_average', 'in15')])])

    # Resample the merged images into atlas space with the inverse of the
    # refined atlas-to-subject transform (one ApplyTransforms per image).
    LinearSubjectToAtlasANTsApplyTransformsName = 'LinearSubjectToAtlasANTsApplyTransforms_' + str(sessionid)
    LinearSubjectToAtlasANTsApplyTransforms = pe.MapNode(interface=ants.ApplyTransforms(), iterfield=['input_image'],
                                                         name=LinearSubjectToAtlasANTsApplyTransformsName)
    LinearSubjectToAtlasANTsApplyTransforms.inputs.num_threads = -1
    LinearSubjectToAtlasANTsApplyTransforms.inputs.interpolation = 'Linear'

    baw200.connect([(A2SantsRegistrationPostABCSyN, LinearSubjectToAtlasANTsApplyTransforms,
                     [('inverse_composite_transform', 'transforms')]),
                    (inputsSpec, LinearSubjectToAtlasANTsApplyTransforms, [('template_t1', 'reference_image')]),
                    (MergeSessionSubjectToAtlas, LinearSubjectToAtlasANTsApplyTransforms, [('out', 'input_image')])
                    ])

    # Label images need nearest/multi-label resampling, so they are merged
    # and warped separately from the intensity images above.
    MergeMultiLabelSessionSubjectToAtlasName = "99_MergeMultiLabelSessionSubjectToAtlas_" + str(sessionid)
    MergeMultiLabelSessionSubjectToAtlas = pe.Node(interface=Merge(2), run_without_submitting=True,
                                                   name=MergeMultiLabelSessionSubjectToAtlasName)

    baw200.connect([(inputsSpec, MergeMultiLabelSessionSubjectToAtlas, [('inputLabels', 'in1'),
                                                                        ('inputHeadLabels', 'in2')])
                    ])

    ### This is taking this sessions RF label map back into NAC atlas space.
    # {
    MultiLabelSubjectToAtlasANTsApplyTransformsName = 'MultiLabelSubjectToAtlasANTsApplyTransforms_' + str(
        sessionid) + '_map'
    MultiLabelSubjectToAtlasANTsApplyTransforms = pe.MapNode(interface=ants.ApplyTransforms(),
                                                             iterfield=['input_image'],
                                                             name=MultiLabelSubjectToAtlasANTsApplyTransformsName)
    MultiLabelSubjectToAtlasANTsApplyTransforms.inputs.num_threads = -1
    MultiLabelSubjectToAtlasANTsApplyTransforms.inputs.interpolation = 'MultiLabel'

    baw200.connect([(A2SantsRegistrationPostABCSyN, MultiLabelSubjectToAtlasANTsApplyTransforms,
                     [('inverse_composite_transform', 'transforms')]),
                    (inputsSpec, MultiLabelSubjectToAtlasANTsApplyTransforms, [('template_t1', 'reference_image')]),
                    (MergeMultiLabelSessionSubjectToAtlas, MultiLabelSubjectToAtlasANTsApplyTransforms,
                     [('out', 'input_image')])
                    ])
    # }
    ### Now we must take the sessions to THIS SUBJECTS personalized atlas.
    # {
    # }

    ### Now define where the final organized outputs should go.
    Subj2Atlas_DSName = "SubjectToAtlas_DS_" + str(sessionid)
    Subj2Atlas_DS = pe.Node(nio.DataSink(), name=Subj2Atlas_DSName)
    Subj2Atlas_DS.overwrite = master_config['ds_overwrite']
    Subj2Atlas_DS.inputs.base_directory = master_config['resultdir']
    # Strip the MapNode iteration suffix from output paths, replacing it
    # with the session id.
    Subj2Atlas_DS.inputs.regexp_substitutions = [(r'_LinearSubjectToAtlasANTsApplyTransforms_[^/]*',
                                                  r'' + sessionid + '/')]

    baw200.connect([(LinearSubjectToAtlasANTsApplyTransforms, Subj2Atlas_DS,
                     [('output_image', 'SubjectToAtlasWarped.@linear_output_images')])])

    Subj2AtlasTransforms_DSName = "SubjectToAtlasTransforms_DS_" + str(sessionid)
    Subj2AtlasTransforms_DS = pe.Node(nio.DataSink(), name=Subj2AtlasTransforms_DSName)
    Subj2AtlasTransforms_DS.overwrite = master_config['ds_overwrite']
    Subj2AtlasTransforms_DS.inputs.base_directory = master_config['resultdir']
    Subj2AtlasTransforms_DS.inputs.regexp_substitutions = [(r'SubjectToAtlasWarped',
                                                            r'SubjectToAtlasWarped/' + sessionid + '/')]

    baw200.connect([(A2SantsRegistrationPostABCSyN, Subj2AtlasTransforms_DS,
                     [('composite_transform', 'SubjectToAtlasWarped.@composite_transform'),
                      ('inverse_composite_transform', 'SubjectToAtlasWarped.@inverse_composite_transform')])])
    # baw200.connect([(MultiLabelSubjectToAtlasANTsApplyTransforms, Subj2Atlas_DS, [('output_image', 'SubjectToAtlasWarped.@multilabel_output_images')])])

    if master_config['plugin_name'].startswith(
            'SGE'):  # for some nodes, the qsub call needs to be modified on the cluster
        A2SantsRegistrationPostABCSyN.plugin_args = {'template': master_config['plugin_args']['template'],
                                                     'overwrite': True,
                                                     'qsub_args': modify_qsub_args(master_config['queue'], 8, 8, 24)}
        SnapShotWriter.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                      'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1)}
        LinearSubjectToAtlasANTsApplyTransforms.plugin_args = {'template': master_config['plugin_args']['template'],
                                                               'overwrite': True,
                                                               'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1)}
        MultiLabelSubjectToAtlasANTsApplyTransforms.plugin_args = {'template': master_config['plugin_args']['template'],
                                                                   'overwrite': True,
                                                                   'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1)}

    return baw200
def CreateMALFWorkflow(WFname, master_config, good_subjects, BASE_DATA_GRABBER_DIR):
    """Build a MALF (Multi-Atlas Label Fusion) nipype workflow.

    For each atlas subject in ``good_subjects`` this workflow:
      1. grabs that subject's T1 average, neuro label map, and ACPC landmarks,
      2. computes a landmark-based initializer (BLI), then an affine ANTs
         registration, then a two-stage SyN ANTs registration onto the input
         subject's T1,
      3. resamples the atlas label map into subject space through the SyN
         composite transform (MultiLabel interpolation),
    and finally fuses all warped label maps with ANTs JointFusion.

    :param WFname: name for the returned ``pe.Workflow``
    :param master_config: dict providing at least 'queue' and 'long_q'
        cluster-queue names (used to build qsub plugin args)
    :param good_subjects: iterable of atlas subject IDs used as fusion atlases
    :param BASE_DATA_GRABBER_DIR: base directory for the per-atlas DataGrabber
    :return: the assembled workflow; inputs on node 'inputspec'
        (subj_t1_image, subj_lmks, atlasWeightFilename), output on node
        'outputspec' (MALF_neuro2012_labelmap)
    """
    from nipype.interfaces import ants

    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    MALFWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image',       # Desired image to create label map for
                                                             'subj_lmks',           # The landmarks corresponding to t1_image
                                                             'atlasWeightFilename'  # The static weights file name
                                                             ]),
                         run_without_submitting=True,
                         name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['MALF_neuro2012_labelmap']),
                          run_without_submitting=True,
                          name='outputspec')

    # Per-atlas-subject node collections, keyed by atlas subject ID.
    BLICreator = dict()
    MALF_DG = dict()
    A2SantsRegistrationPreABCRigid = dict()
    A2SantsRegistrationPreABCSyN = dict()
    fixedROIAuto = dict()
    movingROIAuto = dict()
    labelMapResample = dict()

    # One warped T1 and one warped label map per atlas subject feed JointFusion.
    warpedAtlasT1MergeNode = pe.Node(interface=Merge(len(good_subjects)), name="T1sMergeAtlas")
    warpedAtlasLblMergeNode = pe.Node(interface=Merge(len(good_subjects)), name="LblMergeAtlas")

    malf_atlas_mergeindex = 1  # Merge node inputs are 1-indexed ('in1', 'in2', ...)
    for malf_atlas_subject in good_subjects:
        ## DataGrabber for this atlas subject's T1, label map, and landmarks.
        atlas_dg = pe.Node(interface=nio.DataGrabber(infields=['subject'],
                                                     outfields=['malf_atlas_t1',
                                                                'malf_atlas_lbls',
                                                                'malf_atlas_lmks']),
                           run_without_submitting=True,
                           name='MALF_DG_' + malf_atlas_subject)
        # MALF_DG[malf_atlas_subject].inputs.base_directory = master_config['previousresult']
        atlas_dg.inputs.base_directory = BASE_DATA_GRABBER_DIR
        atlas_dg.inputs.subject = malf_atlas_subject
        atlas_dg.inputs.field_template = {
            'malf_atlas_t1': '%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
            'malf_atlas_lbls': '%s/TissueClassify/neuro_lbls.nii.gz',
            'malf_atlas_lmks': '%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
        }
        atlas_dg.inputs.template_args = {
            'malf_atlas_t1': [['subject']],
            'malf_atlas_lbls': [['subject']],
            'malf_atlas_lmks': [['subject']],
        }
        atlas_dg.inputs.template = '*'
        atlas_dg.inputs.sort_filelist = True
        atlas_dg.inputs.raise_on_empty = True
        MALF_DG[malf_atlas_subject] = atlas_dg

        ########################################################
        # Run BLI atlas_to_subject (landmark-based initializer)
        ########################################################
        bli = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI_" + malf_atlas_subject)
        bli.inputs.outputTransformFilename = "landmarkInitializer_{0}_to_subject_transform.h5".format(malf_atlas_subject)
        BLICreator[malf_atlas_subject] = bli

        MALFWF.connect(inputsSpec, 'atlasWeightFilename', bli, 'inputWeightFilename')
        MALFWF.connect(atlas_dg, 'malf_atlas_lmks', bli, 'inputMovingLandmarkFilename')
        MALFWF.connect(inputsSpec, 'subj_lmks', bli, 'inputFixedLandmarkFilename')

        ##### Initialize with ANTS Transform For AffineComponentBABC
        rigid_reg = pe.Node(interface=ants.Registration(),
                            name='Rigid_AtlasToSubjectANTsPreABC_' + malf_atlas_subject)
        rigid_reg.plugin_args = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 2, 1, 1),
                                 'overwrite': True}
        rigid_reg.inputs.num_threads = -1
        rigid_reg.inputs.dimension = 3
        rigid_reg.inputs.transforms = ["Affine", ]
        rigid_reg.inputs.transform_parameters = [[0.1]]
        rigid_reg.inputs.metric = ['MI']
        rigid_reg.inputs.sampling_strategy = ['Regular']
        rigid_reg.inputs.sampling_percentage = [0.5]
        rigid_reg.inputs.metric_weight = [1.0]
        rigid_reg.inputs.radius_or_number_of_bins = [32]
        rigid_reg.inputs.number_of_iterations = [[1000, 1000, 500, 100]]
        rigid_reg.inputs.convergence_threshold = [1e-8]
        rigid_reg.inputs.convergence_window_size = [10]
        rigid_reg.inputs.use_histogram_matching = [True]
        rigid_reg.inputs.shrink_factors = [[8, 4, 2, 1]]
        rigid_reg.inputs.smoothing_sigmas = [[3, 2, 1, 0]]
        rigid_reg.inputs.sigma_units = ["vox"]
        rigid_reg.inputs.use_estimate_learning_rate_once = [False]
        rigid_reg.inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        rigid_reg.inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        rigid_reg.inputs.initialize_transforms_per_stage = True
        rigid_reg.inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_Rigid'
        rigid_reg.inputs.winsorize_lower_quantile = 0.01
        rigid_reg.inputs.winsorize_upper_quantile = 0.99
        rigid_reg.inputs.output_warped_image = 'atlas2subjectRigid.nii.gz'  ## NO NEED FOR THIS
        rigid_reg.inputs.output_inverse_warped_image = 'subject2atlasRigid.nii.gz'  ## NO NEED FOR THIS
        A2SantsRegistrationPreABCRigid[malf_atlas_subject] = rigid_reg

        MALFWF.connect(bli, 'outputTransformFilename', rigid_reg, 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image', rigid_reg, 'fixed_image')
        MALFWF.connect(atlas_dg, 'malf_atlas_t1', rigid_reg, 'moving_image')

        ##### Initialize with ANTS Transform For SyN component BABC
        syn_reg = pe.Node(interface=ants.Registration(),
                          name='SyN_AtlasToSubjectANTsPreABC_' + malf_atlas_subject)
        syn_reg.plugin_args = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 4),
                               'overwrite': True}
        syn_reg.inputs.num_threads = -1
        syn_reg.inputs.dimension = 3
        syn_reg.inputs.transforms = ["SyN", "SyN"]
        syn_reg.inputs.transform_parameters = [[0.1, 3, 0], [0.1, 3, 0]]
        syn_reg.inputs.metric = ['MI', 'MI']
        syn_reg.inputs.sampling_strategy = [None, None]
        syn_reg.inputs.sampling_percentage = [1.0, 1.0]
        syn_reg.inputs.metric_weight = [1.0, 1.0]
        syn_reg.inputs.radius_or_number_of_bins = [32, 32]
        syn_reg.inputs.number_of_iterations = [[500, 500, 500, 500], [70]]
        syn_reg.inputs.convergence_threshold = [1e-8, 1e-4]
        syn_reg.inputs.convergence_window_size = [12]
        syn_reg.inputs.use_histogram_matching = [True, True]
        syn_reg.inputs.shrink_factors = [[8, 4, 3, 2], [1]]
        syn_reg.inputs.smoothing_sigmas = [[3, 2, 2, 1], [0]]
        syn_reg.inputs.sigma_units = ["vox", "vox"]
        syn_reg.inputs.use_estimate_learning_rate_once = [False, False]
        syn_reg.inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        syn_reg.inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        syn_reg.inputs.initialize_transforms_per_stage = True
        syn_reg.inputs.save_state = 'SavedInternalSyNState.h5'  ## NO NEED FOR THIS
        syn_reg.inputs.output_transform_prefix = malf_atlas_subject + '_ToSubjectPreBABC_SyN'
        syn_reg.inputs.winsorize_lower_quantile = 0.01
        syn_reg.inputs.winsorize_upper_quantile = 0.99
        syn_reg.inputs.output_warped_image = malf_atlas_subject + '_2subject.nii.gz'
        syn_reg.inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'  ## NO NEED FOR THIS
        A2SantsRegistrationPreABCSyN[malf_atlas_subject] = syn_reg

        ## if using Registration masking, then do ROIAuto on fixed and moving
        ## images and connect the masks to both registrations
        UseRegistrationMasking = True
        if UseRegistrationMasking:
            # NOTE(review): sibling code in this file imports BRAINSROIAuto from
            # nipype.interfaces.semtools.segmentation.specialized — confirm which
            # package is actually installed before unifying the import paths.
            from SEMTools.segmentation.specialized import BRAINSROIAuto

            fixed_mask = pe.Node(interface=BRAINSROIAuto(), name="fixedROIAUTOMask_" + malf_atlas_subject)
            fixed_mask.inputs.ROIAutoDilateSize = 10
            fixed_mask.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"
            fixedROIAuto[malf_atlas_subject] = fixed_mask

            moving_mask = pe.Node(interface=BRAINSROIAuto(), name="movingROIAUTOMask_" + malf_atlas_subject)
            # BUGFIX: the original set fixedROIAuto's ROIAutoDilateSize a second
            # time here and never dilated the moving mask; dilate the moving
            # mask instead so both registration masks get the same treatment.
            moving_mask.inputs.ROIAutoDilateSize = 10
            moving_mask.inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz"
            movingROIAuto[malf_atlas_subject] = moving_mask

            MALFWF.connect(inputsSpec, 'subj_t1_image', fixed_mask, 'inputVolume')
            MALFWF.connect(atlas_dg, 'malf_atlas_t1', moving_mask, 'inputVolume')
            MALFWF.connect(fixed_mask, 'outputROIMaskVolume', rigid_reg, 'fixed_image_mask')
            MALFWF.connect(moving_mask, 'outputROIMaskVolume', rigid_reg, 'moving_image_mask')
            MALFWF.connect(fixed_mask, 'outputROIMaskVolume', syn_reg, 'fixed_image_mask')
            MALFWF.connect(moving_mask, 'outputROIMaskVolume', syn_reg, 'moving_image_mask')

        # Chain rigid -> SyN: element 0 of the rigid composite transform (or
        # None when out of range) seeds the SyN registration.
        MALFWF.connect(rigid_reg, ('composite_transform', getListIndexOrNoneIfOutOfRange, 0),
                       syn_reg, 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image', syn_reg, 'fixed_image')
        MALFWF.connect(atlas_dg, 'malf_atlas_t1', syn_reg, 'moving_image')

        # Resample the atlas label map into subject space (MultiLabel keeps
        # discrete label values intact).
        lbl_resample = pe.Node(interface=ants.ApplyTransforms(), name="WLABEL_" + malf_atlas_subject)
        lbl_resample.plugin_args = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1),
                                    'overwrite': True}
        lbl_resample.inputs.dimension = 3
        lbl_resample.inputs.output_image = malf_atlas_subject + '_2_subj_lbl.nii.gz'
        lbl_resample.inputs.interpolation = 'MultiLabel'
        lbl_resample.inputs.default_value = 0
        lbl_resample.inputs.invert_transform_flags = [False]
        labelMapResample[malf_atlas_subject] = lbl_resample

        MALFWF.connect(syn_reg, 'composite_transform', lbl_resample, 'transforms')
        MALFWF.connect(inputsSpec, 'subj_t1_image', lbl_resample, 'reference_image')
        MALFWF.connect(atlas_dg, 'malf_atlas_lbls', lbl_resample, 'input_image')

        MALFWF.connect(syn_reg, 'warped_image', warpedAtlasT1MergeNode, 'in' + str(malf_atlas_mergeindex))
        MALFWF.connect(lbl_resample, 'output_image', warpedAtlasLblMergeNode, 'in' + str(malf_atlas_mergeindex))
        malf_atlas_mergeindex += 1

    # Fuse all warped atlas label maps into a single subject-space label map.
    jointFusion = pe.Node(interface=ants.JointFusion(), name="JointFusion")
    jointFusion.plugin_args = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 4, 4),
                               'overwrite': True}
    jointFusion.inputs.dimension = 3
    jointFusion.inputs.num_modalities = 1
    jointFusion.inputs.method = 'Joint[0.1,2]'
    jointFusion.inputs.output_label_image = 'fusion_neuro2012_20.nii.gz'

    MALFWF.connect(warpedAtlasT1MergeNode, 'out', jointFusion, 'warped_intensity_images')
    MALFWF.connect(warpedAtlasLblMergeNode, 'out', jointFusion, 'warped_label_images')
    MALFWF.connect(inputsSpec, 'subj_t1_image', jointFusion, 'target_image')

    MALFWF.connect(jointFusion, 'output_label_image', outputsSpec, 'MALF_neuro2012_labelmap')

    return MALFWF
def CreateJointFusionWorkflow(WFname, onlyT1, master_config, runFixFusionLabelMap=True): from nipype.interfaces import ants if onlyT1: n_modality = 1 else: n_modality = 2 CLUSTER_QUEUE=master_config['queue'] CLUSTER_QUEUE_LONG=master_config['long_q'] JointFusionWF = pe.Workflow(name=WFname) inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image', #Desired image to create label map for 'subj_t2_image', #Desired image to create label map for 'subj_lmks', #The landmarks corresponding to t1_image 'subj_fixed_head_labels', #The fixed head labels from BABC 'subj_posteriors', #The BABC posteriors 'subj_left_hemisphere', #The warped left hemisphere mask 'atlasWeightFilename', #The static weights file name 'labelBaseFilename' #Atlas label base name ex) neuro_lbls.nii.gz ]), run_without_submitting=True, name='inputspec') outputsSpec = pe.Node(interface=IdentityInterface(fields=['JointFusion_HDAtlas20_2015_label', 'JointFusion_HDAtlas20_2015_CSFVBInjected_label', 'JointFusion_HDAtlas20_2015_fs_standard_label', 'JointFusion_HDAtlas20_2015_lobe_label', 'JointFusion_extended_snapshot', 'JointFusion_HDAtlas20_2015_dustCleaned_label', 'JointFusion_volumes_csv', 'JointFusion_volumes_json', 'JointFusion_lobe_volumes_csv', 'JointFusion_lobe_volumes_json']), run_without_submitting=True, name='outputspec') BLICreator = dict() A2SantsRegistrationPreJointFusion_SyN = dict() movingROIAuto = dict() labelMapResample = dict() NewlabelMapResample = dict() jointFusion_atlas_mergeindex = 0 merge_input_offset = 1 #Merge nodes are indexed from 1, not zero! 
""" multimodal ants registration if t2 exists """ sessionMakeMultimodalInput = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2', 'jointFusion'], output_names=['outFNs']), run_without_submitting=True, name="sessionMakeMultimodalInput") sessionMakeMultimodalInput.inputs.jointFusion = False JointFusionWF.connect(inputsSpec, 'subj_t1_image', sessionMakeMultimodalInput, 'inFN1') """ T2 resample to T1 average image :: BRAINSABC changed its behavior to retain image's original spacing & origin :: Since antsJointFusion only works for the identical origin images for targets, :: Resampling is placed at this stage """ subjectT2Resample = pe.Node(interface=BRAINSResample(), name="BRAINSResample_T2_forAntsJointFusion") if not onlyT1: subjectT2Resample.plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1), 'overwrite': True} subjectT2Resample.inputs.pixelType = 'short' subjectT2Resample.inputs.interpolationMode = 'Linear' subjectT2Resample.inputs.outputVolume = "t2_resampled_in_t1.nii.gz" #subjectT2Resample.inputs.warpTransform= "Identity" # Default is "Identity" JointFusionWF.connect(inputsSpec, 'subj_t1_image', subjectT2Resample, 'referenceVolume') JointFusionWF.connect(inputsSpec, 'subj_t2_image', subjectT2Resample, 'inputVolume') JointFusionWF.connect(subjectT2Resample, 'outputVolume', sessionMakeMultimodalInput, 'inFN2') else: pass #print('jointFusion_atlas_db_base') print("master_config") print(master_config) print("master_config['jointfusion_atlas_db_base']") print(master_config['jointfusion_atlas_db_base']) jointFusionAtlasDict = readMalfAtlasDbBase( master_config['jointfusion_atlas_db_base'] ) number_of_atlas_sources = len(jointFusionAtlasDict) jointFusionAtlases = dict() atlasMakeMultimodalInput = dict() t2Resample = dict() warpedAtlasLblMergeNode = pe.Node(interface=Merge(number_of_atlas_sources),name="LblMergeAtlas") NewwarpedAtlasLblMergeNode = pe.Node(interface=Merge(number_of_atlas_sources),name="fswmLblMergeAtlas") # 
"HACK NOT to use T2 for JointFusion only" #warpedAtlasesMergeNode = pe.Node(interface=Merge(number_of_atlas_sources*n_modality),name="MergeAtlases") warpedAtlasesMergeNode = pe.Node(interface=Merge(number_of_atlas_sources*1),name="MergeAtlases") ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons UseRegistrationMasking = True if UseRegistrationMasking == True: from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto fixedROIAuto = pe.Node(interface=BRAINSROIAuto(), name="fixedROIAUTOMask") fixedROIAuto.inputs.ROIAutoDilateSize=10 fixedROIAuto.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz" JointFusionWF.connect(inputsSpec, 'subj_t1_image',fixedROIAuto,'inputVolume') for jointFusion_atlas_subject in list(jointFusionAtlasDict.keys()): ## Need DataGrabber Here For the Atlas jointFusionAtlases[jointFusion_atlas_subject] = pe.Node(interface = IdentityInterface( fields=['t1','t2','label','lmks','regisration_mask']), name='jointFusionAtlasInput'+jointFusion_atlas_subject) jointFusionAtlases[jointFusion_atlas_subject].inputs.t1 = jointFusionAtlasDict[jointFusion_atlas_subject]['t1'] jointFusionAtlases[jointFusion_atlas_subject].inputs.t2 = jointFusionAtlasDict[jointFusion_atlas_subject]['t2'] jointFusionAtlases[jointFusion_atlas_subject].inputs.label = jointFusionAtlasDict[jointFusion_atlas_subject]['label'] jointFusionAtlases[jointFusion_atlas_subject].inputs.lmks = jointFusionAtlasDict[jointFusion_atlas_subject]['lmks'] jointFusionAtlases[jointFusion_atlas_subject].inputs.regisration_mask = jointFusionAtlasDict[jointFusion_atlas_subject]['regisration_mask'] ## Create BLI first ######################################################## # Run BLI atlas_to_subject ######################################################## BLICreator[jointFusion_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI_"+jointFusion_atlas_subject) 
BLICreator[jointFusion_atlas_subject].inputs.outputTransformFilename = "landmarkInitializer_{0}_to_subject_transform.h5".format(jointFusion_atlas_subject) JointFusionWF.connect(inputsSpec, 'atlasWeightFilename', BLICreator[jointFusion_atlas_subject], 'inputWeightFilename') JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'lmks', BLICreator[jointFusion_atlas_subject], 'inputMovingLandmarkFilename') JointFusionWF.connect(inputsSpec, 'subj_lmks', BLICreator[jointFusion_atlas_subject], 'inputFixedLandmarkFilename') ##### Initialize with ANTS Transform For SyN currentAtlasToSubjectantsRegistration = 'SyN_AtlasToSubjectANTsPreJointFusion_'+jointFusion_atlas_subject A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject] = pe.Node(interface=ants.Registration(), name=currentAtlasToSubjectantsRegistration) many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG,4,2,16), 'overwrite': True} A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject].plugin_args = many_cpu_ANTsSyN_options_dictionary if onlyT1: JFregistrationTypeDescription="FiveStageAntsRegistrationT1Only" else: JFregistrationTypeDescription="FiveStageAntsRegistrationMultiModal" CommonANTsRegistrationSettings( antsRegistrationNode=A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], registrationTypeDescription=JFregistrationTypeDescription, output_transform_prefix=jointFusion_atlas_subject+'_ToSubjectPreJointFusion_SyN', output_warped_image=jointFusion_atlas_subject + '_2subject.nii.gz', output_inverse_warped_image=None, #NO NEED FOR THIS save_state=None, #NO NEED FOR THIS invert_initial_moving_transform=False) ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons if UseRegistrationMasking == True: from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto JointFusionWF.connect(fixedROIAuto, 
'outputROIMaskVolume',A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'fixed_image_mask') # JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels', # A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'fixed_image_mask') # NOTE: Moving image mask can be taken from Atlas directly so that it does not need to be read in #movingROIAuto[jointFusion_atlas_subject] = pe.Node(interface=BRAINSROIAuto(), name="movingROIAUTOMask_"+jointFusion_atlas_subject) #movingROIAuto.inputs.ROIAutoDilateSize=10 #movingROIAuto[jointFusion_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz" #JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't1', movingROIAuto[jointFusion_atlas_subject],'inputVolume') #JointFusionWF.connect(movingROIAuto[jointFusion_atlas_subject], 'outputROIMaskVolume',A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'moving_image_mask') JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'regisration_mask', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'moving_image_mask') JointFusionWF.connect(BLICreator[jointFusion_atlas_subject],'outputTransformFilename', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'initial_moving_transform') """ make multimodal input for atlases """ atlasMakeMultimodalInput[jointFusion_atlas_subject] = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2','jointFusion'], output_names=['outFNs']), run_without_submitting=True, name="atlasMakeMultimodalInput"+jointFusion_atlas_subject) atlasMakeMultimodalInput[jointFusion_atlas_subject].inputs.jointFusion = False JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't1', atlasMakeMultimodalInput[jointFusion_atlas_subject], 'inFN1') if not onlyT1: JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't2', atlasMakeMultimodalInput[jointFusion_atlas_subject], 'inFN2') else: pass 
JointFusionWF.connect(sessionMakeMultimodalInput, 'outFNs', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'fixed_image') JointFusionWF.connect(atlasMakeMultimodalInput[jointFusion_atlas_subject], 'outFNs', A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'moving_image') "HACK NOT to use T2 for JointFusion" #JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'warped_image', # warpedAtlasesMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex*n_modality) ) JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'warped_image', warpedAtlasesMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex*1) ) """ Original t2 resampling """ for modality_index in range(1,n_modality): t2Resample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),name="resampledT2"+jointFusion_atlas_subject) many_cpu_t2Resample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,1,1,1), 'overwrite': True} t2Resample[jointFusion_atlas_subject].plugin_args = many_cpu_t2Resample_options_dictionary t2Resample[jointFusion_atlas_subject].inputs.num_threads=-1 t2Resample[jointFusion_atlas_subject].inputs.dimension=3 t2Resample[jointFusion_atlas_subject].inputs.output_image=jointFusion_atlas_subject+'_t2.nii.gz' t2Resample[jointFusion_atlas_subject].inputs.interpolation='BSpline' t2Resample[jointFusion_atlas_subject].inputs.default_value=0 t2Resample[jointFusion_atlas_subject].inputs.invert_transform_flags=[False] JointFusionWF.connect( A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'composite_transform', t2Resample[jointFusion_atlas_subject],'transforms') JointFusionWF.connect( inputsSpec, 'subj_t1_image', t2Resample[jointFusion_atlas_subject],'reference_image') JointFusionWF.connect( jointFusionAtlases[jointFusion_atlas_subject], 't2', t2Resample[jointFusion_atlas_subject],'input_image') "HACK NOT to use T2 for JointFusion 
only" #JointFusionWF.connect(t2Resample[jointFusion_atlas_subject],'output_image', # warpedAtlasesMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex*n_modality+modality_index) ) """ Original labelmap resampling """ labelMapResample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),name="resampledLabel"+jointFusion_atlas_subject) many_cpu_labelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,1,1,1), 'overwrite': True} labelMapResample[jointFusion_atlas_subject].plugin_args = many_cpu_labelMapResample_options_dictionary labelMapResample[jointFusion_atlas_subject].inputs.num_threads=-1 labelMapResample[jointFusion_atlas_subject].inputs.dimension=3 labelMapResample[jointFusion_atlas_subject].inputs.output_image=jointFusion_atlas_subject+'_2_subj_lbl.nii.gz' labelMapResample[jointFusion_atlas_subject].inputs.interpolation='MultiLabel' labelMapResample[jointFusion_atlas_subject].inputs.default_value=0 labelMapResample[jointFusion_atlas_subject].inputs.invert_transform_flags=[False] JointFusionWF.connect( A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'composite_transform', labelMapResample[jointFusion_atlas_subject],'transforms') JointFusionWF.connect( inputsSpec, 'subj_t1_image', labelMapResample[jointFusion_atlas_subject],'reference_image') JointFusionWF.connect( jointFusionAtlases[jointFusion_atlas_subject], 'label', labelMapResample[jointFusion_atlas_subject],'input_image') JointFusionWF.connect(labelMapResample[jointFusion_atlas_subject],'output_image',warpedAtlasLblMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex) ) ### New labelmap resampling NewlabelMapResample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),name="FSWM_WLABEL_"+jointFusion_atlas_subject) many_cpu_NewlabelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,1,1,1), 'overwrite': True} NewlabelMapResample[jointFusion_atlas_subject].plugin_args = 
many_cpu_NewlabelMapResample_options_dictionary NewlabelMapResample[jointFusion_atlas_subject].inputs.num_threads=-1 NewlabelMapResample[jointFusion_atlas_subject].inputs.dimension=3 NewlabelMapResample[jointFusion_atlas_subject].inputs.output_image=jointFusion_atlas_subject+'fswm_2_subj_lbl.nii.gz' NewlabelMapResample[jointFusion_atlas_subject].inputs.interpolation='MultiLabel' NewlabelMapResample[jointFusion_atlas_subject].inputs.default_value=0 NewlabelMapResample[jointFusion_atlas_subject].inputs.invert_transform_flags=[False] JointFusionWF.connect( A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'composite_transform', NewlabelMapResample[jointFusion_atlas_subject],'transforms') JointFusionWF.connect( inputsSpec, 'subj_t1_image', NewlabelMapResample[jointFusion_atlas_subject],'reference_image') JointFusionWF.connect( jointFusionAtlases[jointFusion_atlas_subject], 'label', NewlabelMapResample[jointFusion_atlas_subject],'input_image') JointFusionWF.connect(NewlabelMapResample[jointFusion_atlas_subject],'output_image',NewwarpedAtlasLblMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex) ) jointFusion_atlas_mergeindex += 1 ## Now work on cleaning up the label maps from .FixLabelMapsTools import FixLabelMapFromNeuromorphemetrics2012 from .FixLabelMapsTools import RecodeLabelMap ### Original NeuroMorphometrica merged fusion jointFusion = pe.Node(interface=ants.AntsJointFusion(),name="AntsJointFusion") many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE,10,8,16), 'overwrite': True} jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary jointFusion.inputs.num_threads = -1 jointFusion.inputs.dimension=3 jointFusion.inputs.search_radius=[3] #jointFusion.inputs.method='Joint[0.1,2]' jointFusion.inputs.out_label_fusion='JointFusion_HDAtlas20_2015_label.nii.gz' #JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels', jointFusion, 'mask_image') JointFusionWF.connect(fixedROIAuto, 
'outputROIMaskVolume', jointFusion, 'mask_image') JointFusionWF.connect(warpedAtlasLblMergeNode,'out', jointFusion,'atlas_segmentation_image') AdjustMergeListNode = pe.Node(Function(function=adjustMergeList, input_names=['allList','n_modality'], output_names=['out']), name="AdjustMergeListNode") "*** HACK JointFusion only uses T1" #AdjustMergeListNode.inputs.n_modality = n_modality AdjustMergeListNode.inputs.n_modality = 1 JointFusionWF.connect(warpedAtlasesMergeNode,'out',AdjustMergeListNode,'allList') JointFusionWF.connect(AdjustMergeListNode,'out',jointFusion,'atlas_image') AdjustTargetImageListNode = pe.Node(Function(function=adjustMergeList, input_names=['allList','n_modality'], output_names=['out']), name="AdjustTargetImageListNode") AdjustTargetImageListNode.inputs.n_modality = n_modality "*** HACK JointFusion only uses T1" """ Once JointFusion works with T2 properly, delete sessionMakeListSingleModalInput and use sessionMakeMultimodalInput instead """ sessionMakeListSingleModalInput = pe.Node(Function(function=MakeVector, input_names=['inFN1', 'inFN2', 'jointFusion'], output_names=['outFNs']), run_without_submitting=True, name="sessionMakeListSingleModalInput") sessionMakeListSingleModalInput.inputs.jointFusion = False JointFusionWF.connect(inputsSpec, 'subj_t1_image', sessionMakeListSingleModalInput, 'inFN1') JointFusionWF.connect(sessionMakeListSingleModalInput, 'outFNs', jointFusion,'target_image') JointFusionWF.connect(jointFusion, 'out_label_fusion', outputsSpec,'JointFusion_HDAtlas20_2015_label') ## We need to recode values to ensure that the labels match FreeSurer as close as possible by merging ## some labels together to standard FreeSurfer confenventions (i.e. 
for WMQL) RECODE_LABELS_2_Standard_FSWM = [ (15071,47),(15072,47),(15073,47),(15145,1011),(15157,1011),(15161,1011), (15179,1012),(15141,1014),(15151,1017),(15163,1018),(15165,1019),(15143,1027), (15191,1028),(15193,1028),(15185,1030),(15201,1030),(15175,1031),(15195,1031), (15173,1035),(15144,2011),(15156,2011),(15160,2011),(15178,2012),(15140,2014), (15150,2017),(15162,2018),(15164,2019),(15142,2027),(15190,2028),(15192,2028), (15184,2030),(15174,2031),(15194,2031),(15172,2035),(15200,2030)] ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE): RecodeToStandardFSWM = pe.Node(Function(function=RecodeLabelMap, input_names=['InputFileName','OutputFileName','RECODE_TABLE'], output_names=['OutputFileName']), name="RecodeToStandardFSWM") RecodeToStandardFSWM.inputs.RECODE_TABLE = RECODE_LABELS_2_Standard_FSWM RecodeToStandardFSWM.inputs.OutputFileName = 'JointFusion_HDAtlas20_2015_fs_standard_label.nii.gz' JointFusionWF.connect(RecodeToStandardFSWM,'OutputFileName',outputsSpec,'JointFusion_HDAtlas20_2015_fs_standard_label') ## JointFusion_SNAPSHOT_WRITER for Segmented result checking: # JointFusion_SNAPSHOT_WRITERNodeName = "JointFusion_ExtendedJointFusion_SNAPSHOT_WRITER" # JointFusion_SNAPSHOT_WRITER = pe.Node(interface=BRAINSSnapShotWriter(), name=JointFusion_SNAPSHOT_WRITERNodeName) # JointFusion_SNAPSHOT_WRITER.inputs.outputFilename = 'JointFusion_HDAtlas20_2015_CSFVBInjected_label.png' # output specification # JointFusion_SNAPSHOT_WRITER.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0] # JointFusion_SNAPSHOT_WRITER.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22] # JointFusionWF.connect(JointFusion_SNAPSHOT_WRITER,'outputFilename',outputsSpec,'JointFusion_extended_snapshot') myLocalDustCleanup = CreateDustCleanupWorkflow("DUST_CLEANUP", onlyT1, master_config) JointFusionWF.connect(inputsSpec, 'subj_t1_image', myLocalDustCleanup, 'inputspec.subj_t1_image') if not onlyT1: JointFusionWF.connect(subjectT2Resample, 'outputVolume', 
myLocalDustCleanup, 'inputspec.subj_t2_image') if runFixFusionLabelMap: ## post processing of jointfusion injectSurfaceCSFandVBIntoLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012, input_names=['fusionFN', 'FixedHeadFN', 'posterior_dict', 'LeftHemisphereFN', 'outFN', 'OUT_DICT'], output_names=['fixedFusionLabelFN']), name="injectSurfaceCSFandVBIntoLabelMap") injectSurfaceCSFandVBIntoLabelMap.inputs.outFN = 'JointFusion_HDAtlas20_2015_CSFVBInjected_label.nii.gz' FREESURFER_DICT = { 'BRAINSTEM': 16, 'RH_CSF':24, 'LH_CSF':24, 'BLOOD': 15000, 'UNKNOWN': 999, 'CONNECTED': [11,12,13,9,17,26,50,51,52,48,53,58] } injectSurfaceCSFandVBIntoLabelMap.inputs.OUT_DICT = FREESURFER_DICT JointFusionWF.connect(jointFusion, 'out_label_fusion', injectSurfaceCSFandVBIntoLabelMap, 'fusionFN') JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels', injectSurfaceCSFandVBIntoLabelMap, 'FixedHeadFN') JointFusionWF.connect(inputsSpec, 'subj_posteriors', injectSurfaceCSFandVBIntoLabelMap, 'posterior_dict') JointFusionWF.connect(inputsSpec, 'subj_left_hemisphere', injectSurfaceCSFandVBIntoLabelMap, 'LeftHemisphereFN') JointFusionWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN', myLocalDustCleanup, 'inputspec.subj_label_atlas') JointFusionWF.connect(injectSurfaceCSFandVBIntoLabelMap,'fixedFusionLabelFN', outputsSpec,'JointFusion_HDAtlas20_2015_CSFVBInjected_label') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', RecodeToStandardFSWM,'InputFileName') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', outputsSpec, 'JointFusion_HDAtlas20_2015_dustCleaned_label') # JointFusionWF.connect([(inputsSpec, JointFusion_SNAPSHOT_WRITER, [( 'subj_t1_image','inputVolumes')]), # (injectSurfaceCSFandVBIntoLabelMap, JointFusion_SNAPSHOT_WRITER, # [('fixedFusionLabelFN', 'inputBinaryVolumes')]) # ]) else: JointFusionWF.connect(jointFusion, 'output_label_image', 
myLocalDustCleanup, 'inputspec.subj_label_atlas') JointFusionWF.connect(jointFusion, 'output_label_image', outputsSpec,'JointFusion_HDAtlas20_2015_CSFVBInjected_label') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', RecodeToStandardFSWM,'InputFileName') JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', outputsSpec, 'JointFusion_HDAtlas20_2015_dustCleaned_label') # JointFusionWF.connect([(inputsSpec, JointFusion_SNAPSHOT_WRITER, [( 'subj_t1_image','inputVolumes')]), # (jointFusion, JointFusion_SNAPSHOT_WRITER, # [('output_label_image', 'inputBinaryVolumes')]) # ]) """ Compute label volumes """ computeLabelVolumes = CreateVolumeMeasureWorkflow("LabelVolume", master_config) JointFusionWF.connect( inputsSpec, 'subj_t1_image', computeLabelVolumes, 'inputspec.subj_t1_image') JointFusionWF.connect( myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label', computeLabelVolumes, 'inputspec.subj_label_image') JointFusionWF.connect( computeLabelVolumes, 'outputspec.csvFilename', outputsSpec, 'JointFusion_volumes_csv') JointFusionWF.connect( computeLabelVolumes, 'outputspec.jsonFilename', outputsSpec, 'JointFusion_volumes_json') ## Lobe Pacellation by recoding if master_config['relabel2lobes_filename'] != None: #print("Generate relabeled version based on {0}".format(master_config['relabel2lobes_filename'])) RECODE_LABELS_2_LobePacellation = readRecodingList( master_config['relabel2lobes_filename'] ) RecordToFSLobes = pe.Node(Function(function=RecodeLabelMap, input_names=['InputFileName','OutputFileName','RECODE_TABLE'], output_names=['OutputFileName']), name="RecordToFSLobes") RecordToFSLobes.inputs.RECODE_TABLE = RECODE_LABELS_2_LobePacellation RecordToFSLobes.inputs.OutputFileName = 'JointFusion_HDAtlas20_2015_lobe_label.nii.gz' JointFusionWF.connect(RecodeToStandardFSWM, 'OutputFileName',RecordToFSLobes,'InputFileName') 
JointFusionWF.connect(RecordToFSLobes,'OutputFileName',outputsSpec,'JointFusion_HDAtlas20_2015_lobe_label') """ Compute lobe volumes """ computeLobeVolumes = CreateVolumeMeasureWorkflow("LobeVolume", master_config) JointFusionWF.connect( inputsSpec, 'subj_t1_image', computeLobeVolumes, 'inputspec.subj_t1_image') JointFusionWF.connect( RecordToFSLobes, 'OutputFileName', computeLobeVolumes, 'inputspec.subj_label_image') JointFusionWF.connect( computeLobeVolumes, 'outputspec.csvFilename', outputsSpec, 'JointFusion_lobe_volumes_csv') JointFusionWF.connect( computeLobeVolumes, 'outputspec.jsonFilename', outputsSpec, 'JointFusion_lobe_volumes_json') return JointFusionWF
def CreateTissueClassifyWorkflow(WFname, master_config, InterpolationMode, UseRegistrationMasking):
    """Build the BRAINSABC-based tissue-classification sub-workflow.

    The returned nipype Workflow:
      1. assembles the multi-modal input image list (T1/T2/PD/FL/other),
      2. computes an atlas-to-subject initialization with two chained ANTs
         registrations (Affine, then SyN seeded from the Affine result),
      3. runs BRAINSABCext (restored from the saved SyN registration state)
         to produce tissue labels, posteriors, and averaged modality images.

    Parameters:
        WFname -- name given to the returned pe.Workflow.
        master_config -- dict; this function reads the 'queue' and 'long_q'
            entries to build qsub plugin arguments.
        InterpolationMode -- passed through to BRAINSABCext.inputs.interpolationMode.
        UseRegistrationMasking -- when True, a BRAINSROIAuto mask of the subject
            T1 and the pre-computed atlas head region are attached as fixed/moving
            registration masks on both ANTs stages.

    Returns:
        pe.Workflow with an 'inputspec' IdentityInterface (T1List, T2List, PDList,
        FLList, OTHERList, T1_count, PrimaryT1, atlasDefinition,
        atlasToSubjectInitialTransform, atlasVolume, atlasheadregion) and an
        'outputspec' IdentityInterface (transforms, registration state, labels,
        averaged images, posteriorImages).

    NOTE(review): relies on module-level names (pe, IdentityInterface, Function,
    MakeOutFileList, modify_qsub_args, CommonANTsRegistrationSettings,
    BRAINSABCext, MakePosteriorListOfTuplesFunc) imported elsewhere in this file.
    """
    from nipype.interfaces import ants

    # Queue names for short- and long-running cluster jobs.
    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    tissueClassifyWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['T1List', 'T2List', 'PDList', 'FLList',
                                                             'OTHERList', 'T1_count', 'PrimaryT1',
                                                             'atlasDefinition',
                                                             'atlasToSubjectInitialTransform',
                                                             'atlasVolume',
                                                             'atlasheadregion'
                                                             ]),
                         run_without_submitting=True,
                         name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasToSubjectTransform',
                                                              'atlasToSubjectInverseTransform',
                                                              'atlasToSubjectRegistrationState',
                                                              'outputLabels',
                                                              'outputHeadLabels',  # ???
                                                              # 't1_corrected', 't2_corrected',
                                                              't1_average',
                                                              't2_average',
                                                              'pd_average',
                                                              'fl_average',
                                                              'posteriorImages',
                                                              ]),
                          run_without_submitting=True,
                          name='outputspec')

    ########################################################
    # Run BABCext on Multi-modal images
    ########################################################
    # Build parallel lists of input images, corrected-output filenames, and
    # image-type tags to feed BRAINSABCext.
    makeOutImageList = pe.Node(Function(function=MakeOutFileList,
                                        input_names=['T1List', 'T2List', 'PDList', 'FLList',
                                                     'OTHERList', 'postfix', 'postfixBFC',
                                                     'postfixUnwrapped', 'PrimaryT1', 'ListOutType'],
                                        output_names=['inImageList', 'outImageList', 'outBFCImageList',
                                                      'outUnwrappedImageList', 'imageTypeList']),
                               run_without_submitting=True, name="99_makeOutImageList")
    tissueClassifyWF.connect(inputsSpec, 'T1List', makeOutImageList, 'T1List')
    tissueClassifyWF.connect(inputsSpec, 'T2List', makeOutImageList, 'T2List')
    tissueClassifyWF.connect(inputsSpec, 'PDList', makeOutImageList, 'PDList')
    tissueClassifyWF.connect(inputsSpec, 'FLList', makeOutImageList, 'FLList')
    tissueClassifyWF.connect(inputsSpec, 'OTHERList', makeOutImageList, 'OTHERList')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', makeOutImageList, 'PrimaryT1')
    makeOutImageList.inputs.ListOutType = False
    makeOutImageList.inputs.postfix = "_corrected.nii.gz"
    # BFC/unwrapped output name variants are not produced by this workflow.
    makeOutImageList.inputs.postfixBFC = "_NOT_USED"
    makeOutImageList.inputs.postfixUnwrapped = "_NOT_USED"

    ##### Initialize with ANTS Transform For AffineComponentBABC
    currentAtlasToSubjectantsRigidRegistration = 'AtlasToSubjectANTsPreABC_Affine'
    A2SantsRegistrationPreABCAffine = pe.Node(interface=ants.Registration(),
                                              name=currentAtlasToSubjectantsRigidRegistration)
    many_cpu_ANTsRigid_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 4, 2, 8),
                                             'overwrite': True}
    A2SantsRegistrationPreABCAffine.plugin_args = many_cpu_ANTsRigid_options_dictionary
    # Shared helper that fills in the project-standard ants.Registration settings.
    CommonANTsRegistrationSettings(
        antsRegistrationNode=A2SantsRegistrationPreABCAffine,
        registrationTypeDescription='AtlasToSubjectANTsPreABC_Affine',
        output_transform_prefix='AtlasToSubjectPreBABC_Rigid',
        output_warped_image='atlas2subjectRigid.nii.gz',
        output_inverse_warped_image='subject2atlasRigid.nii.gz',
        save_state=None,
        invert_initial_moving_transform=False,
        initial_moving_transform=None)
    tissueClassifyWF.connect(inputsSpec, 'atlasToSubjectInitialTransform',
                             A2SantsRegistrationPreABCAffine, 'initial_moving_transform')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', A2SantsRegistrationPreABCAffine, 'fixed_image')
    tissueClassifyWF.connect(inputsSpec, 'atlasVolume', A2SantsRegistrationPreABCAffine, 'moving_image')

    ##### Initialize with ANTS Transform For SyN component BABC
    currentAtlasToSubjectantsRegistration = 'AtlasToSubjectANTsPreABC_SyN'
    A2SantsRegistrationPreABCSyN = pe.Node(interface=ants.Registration(),
                                           name=currentAtlasToSubjectantsRegistration)
    # SyN is the expensive stage: long queue, more cores/memory.
    many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 8, 8, 16),
                                           'overwrite': True}
    A2SantsRegistrationPreABCSyN.plugin_args = many_cpu_ANTsSyN_options_dictionary
    CommonANTsRegistrationSettings(
        antsRegistrationNode=A2SantsRegistrationPreABCSyN,
        registrationTypeDescription='AtlasToSubjectANTsPreABC_SyN',
        output_transform_prefix='AtlasToSubjectPreBABC_SyN',
        output_warped_image='atlas2subject.nii.gz',
        output_inverse_warped_image='subject2atlas.nii.gz',
        # The saved registration state is what BABCext restores below.
        save_state='SavedInternalSyNState.h5',
        invert_initial_moving_transform=False,
        initial_moving_transform=None)

    ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
    if UseRegistrationMasking == True:
        from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto

        fixedROIAuto = pe.Node(interface=BRAINSROIAuto(), name="fixedImageROIAUTOMask")
        fixedROIAuto.inputs.ROIAutoDilateSize = 15  ## NOTE Very large to include some skull in bad cases of bias where back of head is very dark
        fixedROIAuto.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"
        tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', fixedROIAuto, 'inputVolume')
        # Same subject-side mask drives both registration stages.
        tissueClassifyWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCAffine, 'fixed_image_masks')
        tissueClassifyWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                 A2SantsRegistrationPreABCSyN, 'fixed_image_masks')

        ## NOTE: Always use atlas head region to avoid computing this every time.
        tissueClassifyWF.connect(inputsSpec, 'atlasheadregion',
                                 A2SantsRegistrationPreABCAffine, 'moving_image_masks')
        tissueClassifyWF.connect(inputsSpec, 'atlasheadregion',
                                 A2SantsRegistrationPreABCSyN, 'moving_image_masks')

    # Chain the two registrations: SyN starts from the Affine composite result.
    tissueClassifyWF.connect(A2SantsRegistrationPreABCAffine, 'composite_transform',
                             A2SantsRegistrationPreABCSyN, 'initial_moving_transform')
    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', A2SantsRegistrationPreABCSyN, 'fixed_image')
    tissueClassifyWF.connect(inputsSpec, 'atlasVolume', A2SantsRegistrationPreABCSyN, 'moving_image')

    # BRAINSABC (extended) node: tissue classification + bias correction.
    BABCext = pe.Node(interface=BRAINSABCext(), name="BABC")
    many_cpu_BABC_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 13, 8, 16),
                                        'overwrite': True}
    BABCext.plugin_args = many_cpu_BABC_options_dictionary
    tissueClassifyWF.connect(makeOutImageList, 'inImageList', BABCext, 'inputVolumes')
    tissueClassifyWF.connect(makeOutImageList, 'imageTypeList', BABCext, 'inputVolumeTypes')
    tissueClassifyWF.connect(makeOutImageList, 'outImageList', BABCext, 'outputVolumes')
    BABCext.inputs.debuglevel = 0
    BABCext.inputs.useKNN = True
    BABCext.inputs.purePlugsThreshold = 0.1  # New feature to allow for pure plug processing and improvements.
    BABCext.inputs.maxIterations = 2
    BABCext.inputs.maxBiasDegree = 0
    BABCext.inputs.filterIteration = 3
    # BABCext.inputs.filterMethod = 'GradientAnisotropicDiffusion' ## If inputs are denoised, we don't need this
    BABCext.inputs.filterMethod = 'None'
    BABCext.inputs.atlasToSubjectTransformType = 'SyN'  # Using SyN, so no bsplines here
    # NOTE(review): gridSize looks BSpline-specific; presumably inert with the
    # SyN transform type above -- confirm against BRAINSABC docs.
    BABCext.inputs.gridSize = [10, 10, 10]
    BABCext.inputs.outputFormat = "NIFTI"
    BABCext.inputs.outputLabels = "brain_label_seg.nii.gz"
    BABCext.inputs.outputDirtyLabels = "volume_label_seg.nii.gz"
    BABCext.inputs.posteriorTemplate = "POSTERIOR_%s.nii.gz"
    BABCext.inputs.atlasToSubjectTransform = "atlas_to_subject.h5"
    # BABCext.inputs.implicitOutputs = ['t1_average_BRAINSABC.nii.gz', 't2_average_BRAINSABC.nii.gz']
    BABCext.inputs.interpolationMode = InterpolationMode
    BABCext.inputs.outputDir = './'
    BABCext.inputs.saveState = 'SavedBABCInternalSyNState.h5'

    tissueClassifyWF.connect(inputsSpec, 'atlasDefinition', BABCext, 'atlasDefinition')
    # NOTE: MUTUALLY EXCLUSIVE with restoreState
    # tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN,
    #                          'composite_transform',
    #                          BABCext, 'atlasToSubjectInitialTransform')
    # Restore the SyN registration state saved above instead of passing a
    # composite transform (the two initialization paths are mutually exclusive).
    tissueClassifyWF.connect(A2SantsRegistrationPreABCSyN,
                             'save_state',
                             BABCext, 'restoreState')

    """
    Get the first T1 and T2 corrected images from BABCext
    """

    """ HACK: THIS IS NOT NEEDED!  We should use the averged t1 and averaged t2 images instead!
    def get_first_T1_and_T2(in_files,T1_count):
        '''
        Returns the first T1 and T2 file in in_files, based on offset in T1_count.
        '''
        return in_files[0],in_files[T1_count]
    bfc_files = pe.Node(Function(input_names=['in_files','T1_count'],
                                 output_names=['t1_corrected','t2_corrected'],
                                 function=get_first_T1_and_T2),
                        run_without_submitting=True,
                        name='99_bfc_files' )
    tissueClassifyWF.connect( inputsSpec, 'T1_count', bfc_files, 'T1_count')
    tissueClassifyWF.connect(BABCext,'outputVolumes',bfc_files, 'in_files')
    tissueClassifyWF.connect(bfc_files,'t1_corrected',outputsSpec,'t1_corrected')
    tissueClassifyWF.connect(bfc_files,'t2_corrected',outputsSpec,'t2_corrected')
    #tissueClassifyWF.connect(bfc_files,'pd_corrected',outputsSpec,'pd_corrected')
    #tissueClassifyWF.connect(bfc_files,'fl_corrected',outputsSpec,'fl_corrected')
    """
    #############
    # Fan BABCext results out to the workflow output spec.
    tissueClassifyWF.connect(BABCext, 'saveState', outputsSpec, 'atlasToSubjectRegistrationState')
    tissueClassifyWF.connect(BABCext, 'atlasToSubjectTransform', outputsSpec, 'atlasToSubjectTransform')

    def MakeInverseTransformFileName(TransformFileName):
        """### HACK:  This function is to work around a deficiency in BRAINSABCext where the inverse transform name is not being computed properly in the list outputs"""
        fixed_inverse_name = TransformFileName.replace(".h5", "_Inverse.h5")
        return [fixed_inverse_name]

    # Derive the inverse-transform filename from the forward transform name.
    tissueClassifyWF.connect([(BABCext, outputsSpec,
                               [(('atlasToSubjectTransform', MakeInverseTransformFileName),
                                 "atlasToSubjectInverseTransform")]), ])
    tissueClassifyWF.connect(BABCext, 'outputLabels', outputsSpec, 'outputLabels')
    tissueClassifyWF.connect(BABCext, 'outputDirtyLabels', outputsSpec, 'outputHeadLabels')
    tissueClassifyWF.connect(BABCext, 'outputT1AverageImage', outputsSpec, 't1_average')
    tissueClassifyWF.connect(BABCext, 'outputT2AverageImage', outputsSpec, 't2_average')
    tissueClassifyWF.connect(BABCext, 'outputPDAverageImage', outputsSpec, 'pd_average')
    tissueClassifyWF.connect(BABCext, 'outputFLAverageImage', outputsSpec, 'fl_average')
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 0 ), "t1_average")] ), ] )
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 1 ), "t2_average")] ), ] )
    ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 2 ), "pd_average")] ), ] )

    # Repackage the posterior image list as a dictionary for downstream consumers.
    MakePosteriorListOfTuplesNode = pe.Node(Function(function=MakePosteriorListOfTuplesFunc,
                                                     input_names=['posteriorImages'],
                                                     output_names=['posteriorDictionary']),
                                            run_without_submitting=True,
                                            name="99_makePosteriorDictionary")
    tissueClassifyWF.connect(BABCext, 'posteriorImages', MakePosteriorListOfTuplesNode, 'posteriorImages')
    tissueClassifyWF.connect(MakePosteriorListOfTuplesNode, 'posteriorDictionary',
                             outputsSpec, 'posteriorImages')

    return tissueClassifyWF
def generate_single_session_template_WF(projectid, subjectid, sessionid, onlyT1, master_config, phase, interpMode, pipeline_name, doDenoise=True): """ Run autoworkup on a single sessionid This is the main function to call when processing a data set with T1 & T2 data. ExperimentBaseDirectoryPrefix is the base of the directory to place results, T1Images & T2Images are the lists of images to be used in the auto-workup. atlas_fname_wpath is the path and filename of the atlas to use. """ #if not 'landmark' in master_config['components'] or not 'auxlmk' in master_config['components'] or not 'tissue_classify' in master_config['components']: # print "Baseline DataSink requires 'AUXLMK' and/or 'TISSUE_CLASSIFY'!!!" # raise NotImplementedError # master_config['components'].append('auxlmk') # master_config['components'].append('tissue_classify') assert phase in ['atlas-based-reference', 'subject-based-reference'], "Unknown phase! Valid entries: 'atlas-based-reference', 'subject-based-reference'" if 'tissue_classify' in master_config['components']: assert ('landmark' in master_config['components'] ), "tissue_classify Requires landmark step!" if 'landmark' in master_config['components']: assert 'denoise' in master_config['components'], "landmark Requires denoise step!" 
from workflows.atlasNode import MakeAtlasNode baw201 = pe.Workflow(name=pipeline_name) inputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasLandmarkFilename', 'atlasWeightFilename', 'LLSModel', 'inputTemplateModel', 'template_t1', 'atlasDefinition', 'T1s', 'T2s', 'PDs', 'FLs', 'OTHERs', 'hncma_atlas', 'template_rightHemisphere', 'template_leftHemisphere', 'template_WMPM2_labels', 'template_nac_labels', 'template_ventricles']), run_without_submitting=True, name='inputspec') outputsSpec = pe.Node(interface=IdentityInterface(fields=['t1_average', 't2_average', 'pd_average', 'fl_average', 'posteriorImages', 'outputLabels', 'outputHeadLabels', 'atlasToSubjectTransform', 'atlasToSubjectInverseTransform', 'atlasToSubjectRegistrationState', 'BCD_ACPC_T1_CROPPED', 'outputLandmarksInACPCAlignedSpace', 'outputLandmarksInInputSpace', 'output_tx', 'LMIatlasToSubject_tx', 'writeBranded2DImage', 'brainStemMask', 'UpdatedPosteriorsList' # Longitudinal ]), run_without_submitting=True, name='outputspec') dsName = "{0}_ds_{1}".format(phase, sessionid) DataSink = pe.Node(name=dsName, interface=nio.DataSink()) DataSink.overwrite = master_config['ds_overwrite'] DataSink.inputs.container = '{0}/{1}/{2}'.format(projectid, subjectid, sessionid) DataSink.inputs.base_directory = master_config['resultdir'] atlas_static_directory = master_config['atlascache'] if master_config['workflow_phase'] == 'atlas-based-reference': atlas_warped_directory = master_config['atlascache'] atlasABCNode_XML = MakeAtlasNode(atlas_warped_directory, 'BABCXMLAtlas_{0}'.format(sessionid), ['W_BRAINSABCSupport']) baw201.connect(atlasABCNode_XML, 'ExtendedAtlasDefinition_xml', inputsSpec, 'atlasDefinition') atlasABCNode_W = MakeAtlasNode(atlas_warped_directory, 'BABCAtlas_W{0}'.format(sessionid), ['W_BRAINSABCSupport', 'W_LabelMapsSupport']) baw201.connect([( atlasABCNode_W, inputsSpec, [ ('hncma_atlas', 'hncma_atlas'), ('template_leftHemisphere', 'template_leftHemisphere'), ('template_rightHemisphere', 
'template_rightHemisphere'), ('template_WMPM2_labels', 'template_WMPM2_labels'), ('template_nac_labels', 'template_nac_labels'), ('template_ventricles', 'template_ventricles')] )] ) ## These landmarks are only relevant for the atlas-based-reference case atlasBCDNode_W = MakeAtlasNode(atlas_warped_directory, 'BBCDAtlas_W{0}'.format(sessionid), ['W_BCDSupport']) baw201.connect([(atlasBCDNode_W, inputsSpec, [('template_t1', 'template_t1'), ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'), ]), ]) ## Needed for both segmentation and template building prep atlasBCUTNode_W = MakeAtlasNode(atlas_warped_directory, 'BBCUTAtlas_W{0}'.format(sessionid), ['W_BRAINSCutSupport']) elif master_config['workflow_phase'] == 'subject-based-reference': print master_config['previousresult'] atlas_warped_directory = os.path.join(master_config['previousresult'], subjectid, 'Atlas') template_DG = pe.Node(interface=nio.DataGrabber(infields=['subject'], outfields=['outAtlasXMLFullPath', 'hncma_atlas', 'template_leftHemisphere', 'template_rightHemisphere', 'template_WMPM2_labels', 'template_nac_labels', 'template_ventricles', 'template_t1', 'template_landmarks_50Lmks_fcsv' ]), name='Template_DG') template_DG.inputs.base_directory = master_config['previousresult'] template_DG.inputs.subject = subjectid template_DG.inputs.field_template = {'outAtlasXMLFullPath': '%s/Atlas/AtlasDefinition_%s.xml', 'hncma_atlas': '%s/Atlas/AVG_hncma_atlas.nii.gz', 'template_leftHemisphere': '%s/Atlas/AVG_template_leftHemisphere.nii.gz', 'template_rightHemisphere': '%s/Atlas/AVG_template_rightHemisphere.nii.gz', 'template_WMPM2_labels': '%s/Atlas/AVG_template_WMPM2_labels.nii.gz', 'template_nac_labels': '%s/Atlas/AVG_template_nac_labels.nii.gz', 'template_ventricles': '%s/Atlas/AVG_template_ventricles.nii.gz', 'template_t1': '%s/Atlas/AVG_T1.nii.gz', 'template_landmarks_50Lmks_fcsv': '%s/Atlas/AVG_LMKS.fcsv', } template_DG.inputs.template_args = {'outAtlasXMLFullPath': [['subject', 'subject']], 
'hncma_atlas': [['subject']], 'template_leftHemisphere': [['subject']], 'template_rightHemisphere': [['subject']], 'template_WMPM2_labels': [['subject']], 'template_nac_labels': [['subject']], 'template_ventricles': [['subject']], 'template_t1': [['subject']], 'template_landmarks_50Lmks_fcsv': [['subject']] } template_DG.inputs.template = '*' template_DG.inputs.sort_filelist = True template_DG.inputs.raise_on_empty = True baw201.connect(template_DG, 'outAtlasXMLFullPath', inputsSpec, 'atlasDefinition') baw201.connect([(template_DG, inputsSpec, [ ## Already connected ('template_t1','template_t1'), ('hncma_atlas', 'hncma_atlas'), ('template_leftHemisphere', 'template_leftHemisphere'), ('template_rightHemisphere', 'template_rightHemisphere'), ('template_WMPM2_labels', 'template_WMPM2_labels'), ('template_nac_labels', 'template_nac_labels'), ('template_ventricles', 'template_ventricles')] )] ) ## These landmarks are only relevant for the atlas-based-reference case baw201.connect([(template_DG, inputsSpec, [('template_t1', 'template_t1'), ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'), ]), ]) else: assert 0 == 1, "Invalid workflow type specified for singleSession" atlasBCDNode_S = MakeAtlasNode(atlas_static_directory, 'BBCDAtlas_S{0}'.format(sessionid), ['S_BCDSupport']) baw201.connect([(atlasBCDNode_S, inputsSpec, [('template_weights_50Lmks_wts', 'atlasWeightFilename'), ('LLSModel_50Lmks_h5', 'LLSModel'), ('T1_50Lmks_mdl', 'inputTemplateModel') ]), ]) if doDenoise: print("\ndenoise image filter\n") makeDenoiseInImageList = pe.Node(Function(function=MakeOutFileList, input_names=['T1List', 'T2List', 'PDList', 'FLList', 'OtherList', 'postfix', 'PrimaryT1'], output_names=['inImageList', 'outImageList', 'imageTypeList']), run_without_submitting=True, name="99_makeDenoiseInImageList") baw201.connect(inputsSpec, 'T1s', makeDenoiseInImageList, 'T1List') baw201.connect(inputsSpec, 'T2s', makeDenoiseInImageList, 'T2List') baw201.connect(inputsSpec, 'PDs', 
makeDenoiseInImageList, 'PDList') makeDenoiseInImageList.inputs.FLList = [] # an emptyList HACK makeDenoiseInImageList.inputs.PrimaryT1 = None # an emptyList HACK makeDenoiseInImageList.inputs.postfix = "_UNM_denoised.nii.gz" # HACK baw201.connect( inputsSpec, 'FLList', makeDenoiseInImageList, 'FLList' ) baw201.connect(inputsSpec, 'OTHERs', makeDenoiseInImageList, 'OtherList') print("\nDenoise:\n") DenoiseInputImgs = pe.MapNode(interface=UnbiasedNonLocalMeans(), name='denoiseInputImgs', iterfield=['inputVolume', 'outputVolume']) DenoiseInputImgs.inputs.rc = [1, 1, 1] DenoiseInputImgs.inputs.rs = [4, 4, 4] DenoiseInputImgs.plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], .2, 1, 1), 'overwrite': True} baw201.connect([(makeDenoiseInImageList, DenoiseInputImgs, [('inImageList', 'inputVolume')]), (makeDenoiseInImageList, DenoiseInputImgs, [('outImageList', 'outputVolume')]) ]) print("\nMerge all T1 and T2 List\n") makePreprocessingOutList = pe.Node(Function(function=GenerateSeparateImageTypeList, input_names=['inFileList', 'inTypeList'], output_names=['T1s', 'T2s', 'PDs', 'FLs', 'OtherList']), run_without_submitting=True, name="99_makePreprocessingOutList") baw201.connect(DenoiseInputImgs, 'outputVolume', makePreprocessingOutList, 'inFileList') baw201.connect(makeDenoiseInImageList, 'imageTypeList', makePreprocessingOutList, 'inTypeList') else: makePreprocessingOutList = inputsSpec if 'landmark' in master_config['components']: DoReverseMapping = False # Set to true for debugging outputs if 'auxlmk' in master_config['components']: DoReverseMapping = True myLocalLMIWF = CreateLandmarkInitializeWorkflow("LandmarkInitialize", interpMode, DoReverseMapping) baw201.connect([(makePreprocessingOutList, myLocalLMIWF, [(('T1s', get_list_element, 0), 'inputspec.inputVolume' )]), (inputsSpec, myLocalLMIWF, [('atlasLandmarkFilename', 'inputspec.atlasLandmarkFilename'), ('atlasWeightFilename', 'inputspec.atlasWeightFilename'), ('LLSModel', 'inputspec.LLSModel'), 
('inputTemplateModel', 'inputspec.inputTemplateModel'), ('template_t1', 'inputspec.atlasVolume')]), (myLocalLMIWF, outputsSpec, [('outputspec.outputResampledCroppedVolume', 'BCD_ACPC_T1_CROPPED'), ('outputspec.outputLandmarksInACPCAlignedSpace', 'outputLandmarksInACPCAlignedSpace'), ('outputspec.outputLandmarksInInputSpace', 'outputLandmarksInInputSpace'), ('outputspec.outputTransform', 'output_tx'), ('outputspec.atlasToSubjectTransform', 'LMIatlasToSubject_tx'), ('outputspec.writeBranded2DImage', 'writeBranded2DImage')]) ]) baw201.connect([(outputsSpec, DataSink, # TODO: change to myLocalLMIWF -> DataSink [('outputLandmarksInACPCAlignedSpace', 'ACPCAlign.@outputLandmarks_ACPC'), ('writeBranded2DImage', 'ACPCAlign.@writeBranded2DImage'), ('BCD_ACPC_T1_CROPPED', 'ACPCAlign.@BCD_ACPC_T1_CROPPED'), ('outputLandmarksInInputSpace', 'ACPCAlign.@outputLandmarks_Input'), ('output_tx', 'ACPCAlign.@output_tx'), ('LMIatlasToSubject_tx', 'ACPCAlign.@LMIatlasToSubject_tx'), ] ) ] ) if 'tissue_classify' in master_config['components']: useRegistrationMask = master_config['use_registration_masking'] myLocalTCWF = CreateTissueClassifyWorkflow("TissueClassify", master_config, interpMode,useRegistrationMask) baw201.connect([(makePreprocessingOutList, myLocalTCWF, [('T1s', 'inputspec.T1List')]), (makePreprocessingOutList, myLocalTCWF, [('T2s', 'inputspec.T2List')]), (inputsSpec, myLocalTCWF, [('atlasDefinition', 'inputspec.atlasDefinition'), ('template_t1', 'inputspec.atlasVolume'), (('T1s', getAllT1sLength), 'inputspec.T1_count'), ('PDs', 'inputspec.PDList'), ('FLs', 'inputspec.FLList'), ('OTHERs', 'inputspec.OtherList') ]), (myLocalLMIWF, myLocalTCWF, [('outputspec.outputResampledCroppedVolume', 'inputspec.PrimaryT1'), ('outputspec.atlasToSubjectTransform', 'inputspec.atlasToSubjectInitialTransform')]), (myLocalTCWF, outputsSpec, [('outputspec.t1_average', 't1_average'), ('outputspec.t2_average', 't2_average'), ('outputspec.pd_average', 'pd_average'), ('outputspec.fl_average', 
'fl_average'), ('outputspec.posteriorImages', 'posteriorImages'), ('outputspec.outputLabels', 'outputLabels'), ('outputspec.outputHeadLabels', 'outputHeadLabels'), ('outputspec.atlasToSubjectTransform', 'atlasToSubjectTransform'), ('outputspec.atlasToSubjectInverseTransform', 'atlasToSubjectInverseTransform'), ('outputspec.atlasToSubjectRegistrationState', 'atlasToSubjectRegistrationState') ]), ]) baw201.connect([(outputsSpec, DataSink, # TODO: change to myLocalTCWF -> DataSink [(('t1_average', convertToList), 'TissueClassify.@t1'), (('t2_average', convertToList), 'TissueClassify.@t2'), (('pd_average', convertToList), 'TissueClassify.@pd'), (('fl_average', convertToList), 'TissueClassify.@fl')]) ]) currentFixWMPartitioningName = "_".join(['FixWMPartitioning', str(subjectid), str(sessionid)]) FixWMNode = pe.Node(interface=Function(function=FixWMPartitioning, input_names=['brainMask', 'PosteriorsList'], output_names=['UpdatedPosteriorsList', 'MatchingFGCodeList', 'MatchingLabelList', 'nonAirRegionMask']), name=currentFixWMPartitioningName) baw201.connect([(myLocalTCWF, FixWMNode, [('outputspec.outputLabels', 'brainMask'), (('outputspec.posteriorImages', flattenDict), 'PosteriorsList')]), (FixWMNode, outputsSpec, [('UpdatedPosteriorsList', 'UpdatedPosteriorsList')]), ]) currentBRAINSCreateLabelMapName = 'BRAINSCreateLabelMapFromProbabilityMaps_' + str(subjectid) + "_" + str( sessionid) BRAINSCreateLabelMapNode = pe.Node(interface=BRAINSCreateLabelMapFromProbabilityMaps(), name=currentBRAINSCreateLabelMapName) ## TODO: Fix the file names BRAINSCreateLabelMapNode.inputs.dirtyLabelVolume = 'fixed_headlabels_seg.nii.gz' BRAINSCreateLabelMapNode.inputs.cleanLabelVolume = 'fixed_brainlabels_seg.nii.gz' baw201.connect([(FixWMNode, BRAINSCreateLabelMapNode, [('UpdatedPosteriorsList', 'inputProbabilityVolume'), ('MatchingFGCodeList', 'foregroundPriors'), ('MatchingLabelList', 'priorLabelCodes'), ('nonAirRegionMask', 'nonAirRegionMask')]), (BRAINSCreateLabelMapNode, DataSink, [ 
# brainstem code below replaces this ('cleanLabelVolume', 'TissueClassify.@outputLabels'), ('dirtyLabelVolume', 'TissueClassify.@outputHeadLabels')]), (myLocalTCWF, DataSink, [('outputspec.atlasToSubjectTransform', 'TissueClassify.@atlas2session_tx'), ('outputspec.atlasToSubjectInverseTransform', 'TissueClassify.@atlas2sessionInverse_tx')]), (FixWMNode, DataSink, [('UpdatedPosteriorsList', 'TissueClassify.@posteriors')]), ]) currentAccumulateLikeTissuePosteriorsName = 'AccumulateLikeTissuePosteriors_' + str(subjectid) + "_" + str( sessionid) AccumulateLikeTissuePosteriorsNode = pe.Node(interface=Function(function=AccumulateLikeTissuePosteriors, input_names=['posteriorImages'], output_names=['AccumulatePriorsList', 'AccumulatePriorsNames']), name=currentAccumulateLikeTissuePosteriorsName) baw201.connect([(FixWMNode, AccumulateLikeTissuePosteriorsNode, [('UpdatedPosteriorsList', 'posteriorImages')]), (AccumulateLikeTissuePosteriorsNode, DataSink, [('AccumulatePriorsList', 'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir')])]) """ brain stem adds on feature inputs: - landmark (fcsv) file - fixed brainlabels seg.nii.gz output: - complete_brainlabels_seg.nii.gz Segmentation """ myLocalBrainStemWF = CreateBrainstemWorkflow("BrainStem", master_config['queue'], "complete_brainlabels_seg.nii.gz") baw201.connect([(myLocalLMIWF, myLocalBrainStemWF, [('outputspec.outputLandmarksInACPCAlignedSpace', 'inputspec.inputLandmarkFilename')]), (BRAINSCreateLabelMapNode, myLocalBrainStemWF, [('cleanLabelVolume', 'inputspec.inputTissueLabelFilename')]) ]) baw201.connect(myLocalBrainStemWF, 'outputspec.ouputTissuelLabelFilename', DataSink, 'TissueClassify.@complete_brainlabels_seg') ########################### do_BRAINSCut_Segmentation = DetermineIfSegmentationShouldBeDone(master_config) if do_BRAINSCut_Segmentation: from workflows.segmentation import segmentation from workflows.WorkupT1T2BRAINSCut import GenerateWFName sname = 'segmentation' segWF = 
segmentation(projectid, subjectid, sessionid, master_config, onlyT1, pipeline_name=sname) baw201.connect([(inputsSpec, segWF, [ ('template_t1', 'inputspec.template_t1') ]) ]) atlasBCUTNode_W = pe.Node(interface=nio.DataGrabber(infields=['subject'], outfields=[ "l_accumben_ProbabilityMap", "r_accumben_ProbabilityMap", "l_caudate_ProbabilityMap", "r_caudate_ProbabilityMap", "l_globus_ProbabilityMap", "r_globus_ProbabilityMap", "l_hippocampus_ProbabilityMap", "r_hippocampus_ProbabilityMap", "l_putamen_ProbabilityMap", "r_putamen_ProbabilityMap", "l_thalamus_ProbabilityMap", "r_thalamus_ProbabilityMap", "phi", "rho", "theta" ]), name='PerSubject_atlasBCUTNode_W') atlasBCUTNode_W.inputs.base_directory = master_config['previousresult'] atlasBCUTNode_W.inputs.subject = subjectid atlasBCUTNode_W.inputs.field_template = { 'l_accumben_ProbabilityMap': '%s/Atlas/AVG_l_accumben_ProbabilityMap.nii.gz', 'r_accumben_ProbabilityMap': '%s/Atlas/AVG_r_accumben_ProbabilityMap.nii.gz', 'l_caudate_ProbabilityMap': '%s/Atlas/AVG_l_caudate_ProbabilityMap.nii.gz', 'r_caudate_ProbabilityMap': '%s/Atlas/AVG_r_caudate_ProbabilityMap.nii.gz', 'l_globus_ProbabilityMap': '%s/Atlas/AVG_l_globus_ProbabilityMap.nii.gz', 'r_globus_ProbabilityMap': '%s/Atlas/AVG_r_globus_ProbabilityMap.nii.gz', 'l_hippocampus_ProbabilityMap': '%s/Atlas/AVG_l_hippocampus_ProbabilityMap.nii.gz', 'r_hippocampus_ProbabilityMap': '%s/Atlas/AVG_r_hippocampus_ProbabilityMap.nii.gz', 'l_putamen_ProbabilityMap': '%s/Atlas/AVG_l_putamen_ProbabilityMap.nii.gz', 'r_putamen_ProbabilityMap': '%s/Atlas/AVG_r_putamen_ProbabilityMap.nii.gz', 'l_thalamus_ProbabilityMap': '%s/Atlas/AVG_l_thalamus_ProbabilityMap.nii.gz', 'r_thalamus_ProbabilityMap': '%s/Atlas/AVG_r_thalamus_ProbabilityMap.nii.gz', 'phi': '%s/Atlas/AVG_phi.nii.gz', 'rho': '%s/Atlas/AVG_rho.nii.gz', 'theta': '%s/Atlas/AVG_theta.nii.gz' } atlasBCUTNode_W.inputs.template_args = { 'l_accumben_ProbabilityMap': [['subject']], 'r_accumben_ProbabilityMap': [['subject']], 
'l_caudate_ProbabilityMap': [['subject']], 'r_caudate_ProbabilityMap': [['subject']], 'l_globus_ProbabilityMap': [['subject']], 'r_globus_ProbabilityMap': [['subject']], 'l_hippocampus_ProbabilityMap': [['subject']], 'r_hippocampus_ProbabilityMap': [['subject']], 'l_putamen_ProbabilityMap': [['subject']], 'r_putamen_ProbabilityMap': [['subject']], 'l_thalamus_ProbabilityMap': [['subject']], 'r_thalamus_ProbabilityMap': [['subject']], 'phi': [['subject']], 'rho': [['subject']], 'theta': [['subject']] } atlasBCUTNode_W.inputs.template = '*' atlasBCUTNode_W.inputs.sort_filelist = True atlasBCUTNode_W.inputs.raise_on_empty = True baw201.connect([(atlasBCUTNode_W, segWF, [ ('rho', 'inputspec.rho'), ('phi', 'inputspec.phi'), ('theta', 'inputspec.theta'), ('l_caudate_ProbabilityMap', 'inputspec.l_caudate_ProbabilityMap'), ('r_caudate_ProbabilityMap', 'inputspec.r_caudate_ProbabilityMap'), ('l_hippocampus_ProbabilityMap', 'inputspec.l_hippocampus_ProbabilityMap'), ('r_hippocampus_ProbabilityMap', 'inputspec.r_hippocampus_ProbabilityMap'), ('l_putamen_ProbabilityMap', 'inputspec.l_putamen_ProbabilityMap'), ('r_putamen_ProbabilityMap', 'inputspec.r_putamen_ProbabilityMap'), ('l_thalamus_ProbabilityMap', 'inputspec.l_thalamus_ProbabilityMap'), ('r_thalamus_ProbabilityMap', 'inputspec.r_thalamus_ProbabilityMap'), ('l_accumben_ProbabilityMap', 'inputspec.l_accumben_ProbabilityMap'), ('r_accumben_ProbabilityMap', 'inputspec.r_accumben_ProbabilityMap'), ('l_globus_ProbabilityMap', 'inputspec.l_globus_ProbabilityMap'), ('r_globus_ProbabilityMap', 'inputspec.r_globus_ProbabilityMap') ] )]) atlasBCUTNode_S = MakeAtlasNode(atlas_static_directory, 'BBCUTAtlas_S{0}'.format(sessionid), ['S_BRAINSCutSupport']) baw201.connect(atlasBCUTNode_S, 'trainModelFile_txtD0060NT0060_gz', segWF, 'inputspec.trainModelFile_txtD0060NT0060_gz') ## baw201_outputspec = baw201.get_node('outputspec') baw201.connect([(myLocalTCWF, segWF, [('outputspec.t1_average', 'inputspec.t1_average'), 
('outputspec.atlasToSubjectRegistrationState', 'inputspec.atlasToSubjectRegistrationState'), ('outputspec.outputLabels', 'inputspec.inputLabels'), ('outputspec.posteriorImages', 'inputspec.posteriorImages'), ('outputspec.outputHeadLabels', 'inputspec.inputHeadLabels') ] ), (myLocalLMIWF, segWF, [('outputspec.atlasToSubjectTransform', 'inputspec.LMIatlasToSubject_tx') ] ), (FixWMNode, segWF, [('UpdatedPosteriorsList', 'inputspec.UpdatedPosteriorsList') ] ), ]) if not onlyT1: baw201.connect([(myLocalTCWF, segWF, [('outputspec.t2_average', 'inputspec.t2_average')])]) if 'warp_atlas_to_subject' in master_config['components']: ## ##~/src/NEP-build/bin/BRAINSResample # --warpTransform AtlasToSubjectPreBABC_Composite.h5 # --inputVolume /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/Atlas/hncma-atlas.nii.gz # --referenceVolume /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/singleSession_KID1_KT1/LandmarkInitialize/BROIAuto_cropped/Cropped_BCD_ACPC_Aligned.nii.gz # !--outputVolume hncma.nii.gz # !--interpolationMode NearestNeighbor # !--pixelType short ## ## ## TODO : SHOULD USE BRAINSCut transform that was refined even further! 
BResample = dict() AtlasLabelMapsToResample = [ 'hncma_atlas', 'template_WMPM2_labels', 'template_nac_labels', ] for atlasImage in AtlasLabelMapsToResample: BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BRAINSResample_" + atlasImage) BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1), 'overwrite': True} BResample[atlasImage].inputs.pixelType = 'short' BResample[atlasImage].inputs.interpolationMode = 'NearestNeighbor' BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz" baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume') baw201.connect(inputsSpec, atlasImage, BResample[atlasImage], 'inputVolume') baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', BResample[atlasImage], 'warpTransform') baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage) AtlasBinaryMapsToResample = [ 'template_rightHemisphere', 'template_leftHemisphere', 'template_ventricles'] for atlasImage in AtlasBinaryMapsToResample: BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BRAINSResample_" + atlasImage) BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1), 'overwrite': True} BResample[atlasImage].inputs.pixelType = 'binary' BResample[ atlasImage].inputs.interpolationMode = 'Linear' ## Conversion to distance map, so use linear to resample distance map BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz" baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume') baw201.connect(inputsSpec, atlasImage, BResample[atlasImage], 'inputVolume') baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', BResample[atlasImage], 'warpTransform') baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage) BRAINSCutAtlasImages = [ 'rho', 'phi', 'theta', 
'l_caudate_ProbabilityMap', 'r_caudate_ProbabilityMap', 'l_hippocampus_ProbabilityMap', 'r_hippocampus_ProbabilityMap', 'l_putamen_ProbabilityMap', 'r_putamen_ProbabilityMap', 'l_thalamus_ProbabilityMap', 'r_thalamus_ProbabilityMap', 'l_accumben_ProbabilityMap', 'r_accumben_ProbabilityMap', 'l_globus_ProbabilityMap', 'r_globus_ProbabilityMap' ] for atlasImage in BRAINSCutAtlasImages: BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BCUTBRAINSResample_" + atlasImage) BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1), 'overwrite': True} BResample[atlasImage].inputs.pixelType = 'float' BResample[ atlasImage].inputs.interpolationMode = 'Linear' ## Conversion to distance map, so use linear to resample distance map BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz" baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume') baw201.connect(atlasBCUTNode_W, atlasImage, BResample[atlasImage], 'inputVolume') baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', BResample[atlasImage], 'warpTransform') baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage) WhiteMatterHemisphereNode = pe.Node(interface=Function(function=CreateLeftRightWMHemispheres, input_names=['BRAINLABELSFile', 'HDCMARegisteredVentricleMaskFN', 'LeftHemisphereMaskName', 'RightHemisphereMaskName', 'WM_LeftHemisphereFileName', 'WM_RightHemisphereFileName'], output_names=['WM_LeftHemisphereFileName', 'WM_RightHemisphereFileName']), name="WhiteMatterHemisphere") WhiteMatterHemisphereNode.inputs.WM_LeftHemisphereFileName ="left_hemisphere_wm.nii.gz" WhiteMatterHemisphereNode.inputs.WM_RightHemisphereFileName ="right_hemisphere_wm.nii.gz" baw201.connect(myLocalBrainStemWF,'outputspec.ouputTissuelLabelFilename',WhiteMatterHemisphereNode,'BRAINLABELSFile') 
baw201.connect(BResample['hncma_atlas'],'outputVolume',WhiteMatterHemisphereNode,'HDCMARegisteredVentricleMaskFN') baw201.connect(BResample['template_leftHemisphere'],'outputVolume',WhiteMatterHemisphereNode,'LeftHemisphereMaskName') baw201.connect(BResample['template_rightHemisphere'],'outputVolume',WhiteMatterHemisphereNode,'RightHemisphereMaskName') baw201.connect(WhiteMatterHemisphereNode,'WM_LeftHemisphereFileName',DataSink,'WarpedAtlas2Subject.@LeftHemisphereWM') baw201.connect(WhiteMatterHemisphereNode,'WM_RightHemisphereFileName',DataSink,'WarpedAtlas2Subject.@RightHemisphereWM') if 'malf_2012_neuro' in master_config['components']: ## HACK Do MALF labeling good_subjects = [ '1001', '1004', '1005','1011', '1012', '1018', '1019', '1102', '1103', '1104', '1120', '1129', '1009', '1010', '1013', '1014', '1036', '1109', '1117', '1122'] ## HACK FOR NOW SHOULD BE MORE ELEGANT FROM THE .config file BASE_DATA_GRABBER_DIR='/Shared/johnsonhj/HDNI/Neuromorphometrics/20141116_Neuromorphometrics_base_Results/Neuromorphometrics/2012Subscription' myLocalMALF = CreateMALFWorkflow("MALF", master_config,good_subjects,BASE_DATA_GRABBER_DIR) baw201.connect(myLocalTCWF,'outputspec.t1_average',myLocalMALF,'inputspec.subj_t1_image') baw201.connect(myLocalLMIWF, 'outputspec.outputLandmarksInACPCAlignedSpace' ,myLocalMALF,'inputspec.subj_lmks') baw201.connect(atlasBCDNode_S,'template_weights_50Lmks_wts',myLocalMALF,'inputspec.atlasWeightFilename') baw201.connect(myLocalMALF,'outputspec.MALF_neuro2012_labelmap',DataSink,'TissueClassify.@MALF_neuro2012_labelmap') return baw201
def CreateMALFWorkflow(WFname, master_config, good_subjects, BASE_DATA_GRABBER_DIR):
    """Build the MALF (Multi-Atlas Label Fusion) sub-workflow.

    For every atlas subject in good_subjects this workflow:
      1. Grabs that subject's T1, label maps, and landmarks from
         BASE_DATA_GRABBER_DIR with a DataGrabber.
      2. Initializes an atlas->subject transform from landmarks
         (BRAINSLandmarkInitializer), then refines it with an ANTs Affine
         stage followed by a two-stage SyN registration.
      3. Resamples both the original neuromorphometrics label map and the
         FreeSurfer-WM-style label map into subject space (MultiLabel
         interpolation).
    The warped T1s and label maps are merged and fused with ants.JointFusion;
    the fused maps are then cleaned (brainstem/CSF/blood injection) and
    recoded into extended, standard, and lobar FreeSurfer-style label maps
    exposed on 'outputspec'.

    :param WFname: name of the returned nipype workflow
    :param master_config: dict providing the 'queue' and 'long_q' cluster queues
    :param good_subjects: list of atlas subject id strings used for fusion
    :param BASE_DATA_GRABBER_DIR: base directory holding the atlas subjects'
        previously computed results
    :return: the assembled pe.Workflow

    Inputs (inputspec): subj_t1_image, subj_lmks, subj_fixed_head_labels,
        subj_left_hemisphere, atlasWeightFilename
    Outputs (outputspec): MALF_neuro2012_labelmap,
        MALF_fswm_extended_neuro2012_labelmap,
        MALF_fswm_standard_neuro2012_labelmap,
        MALF_fswm_lobar_neuro2012_labelmap, MALF_extended_snapshot
    """
    from nipype.interfaces import ants
    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']
    MALFWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image',           # Desired image to create label map for
                                                             'subj_lmks',               # The landmarks corresponding to t1_image
                                                             'subj_fixed_head_labels',  # The fixed head labels from BABC
                                                             'subj_left_hemisphere',    # The warped left hemisphere mask
                                                             'atlasWeightFilename'      # The static weights file name
                                                             ]),
                         run_without_submitting=True,
                         name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['MALF_neuro2012_labelmap',
                                                              'MALF_fswm_extended_neuro2012_labelmap',
                                                              'MALF_fswm_standard_neuro2012_labelmap',
                                                              'MALF_fswm_lobar_neuro2012_labelmap',
                                                              'MALF_extended_snapshot']),
                          run_without_submitting=True,
                          name='outputspec')

    # Per-atlas-subject node collections, keyed by atlas subject id.
    BLICreator = dict()
    MALF_DG = dict()
    A2SantsRegistrationPreABCRigid = dict()
    A2SantsRegistrationPreABCSyN = dict()
    fixedROIAuto = dict()
    movingROIAuto = dict()
    labelMapResample = dict()
    NewlabelMapResample = dict()

    warpedAtlasT1MergeNode = pe.Node(interface=Merge(len(good_subjects)), name="T1sMergeAtlas")
    warpedAtlasLblMergeNode = pe.Node(interface=Merge(len(good_subjects)), name="LblMergeAtlas")
    NewwarpedAtlasLblMergeNode = pe.Node(interface=Merge(len(good_subjects)), name="fswmLblMergeAtlas")

    malf_atlas_mergeindex = 1  # Merge node inputs are 1-indexed ('in1', 'in2', ...)
    for malf_atlas_subject in good_subjects:
        ## DataGrabber for this atlas subject's previously computed results.
        MALF_DG[malf_atlas_subject] = pe.Node(interface=nio.DataGrabber(infields=['subject'],
                                                                        outfields=['malf_atlas_t1',
                                                                                   'malf_atlas_lbls',
                                                                                   'malf_fswm_atlas_lbls',
                                                                                   'malf_atlas_lmks'
                                                                                   ]),
                                              run_without_submitting=True, name='MALF_DG_' + malf_atlas_subject)
        # MALF_DG[malf_atlas_subject].inputs.base_directory = master_config['previousresult']
        MALF_DG[malf_atlas_subject].inputs.base_directory = BASE_DATA_GRABBER_DIR
        MALF_DG[malf_atlas_subject].inputs.subject = malf_atlas_subject
        MALF_DG[malf_atlas_subject].inputs.field_template = {
            'malf_atlas_t1': '%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
            'malf_atlas_lbls': '%s/TissueClassify/neuro_lbls.nii.gz',
            'malf_fswm_atlas_lbls': '%s/TissueClassify/neuro_lbls_MALF_In_FSLabel.nii.gz',
            'malf_atlas_lmks': '%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
        }
        MALF_DG[malf_atlas_subject].inputs.template_args = {
            'malf_atlas_t1': [['subject']],
            'malf_atlas_lbls': [['subject']],
            'malf_fswm_atlas_lbls': [['subject']],
            'malf_atlas_lmks': [['subject']],
        }
        MALF_DG[malf_atlas_subject].inputs.template = '*'
        MALF_DG[malf_atlas_subject].inputs.sort_filelist = True
        MALF_DG[malf_atlas_subject].inputs.raise_on_empty = True

        ########################################################
        # Run BLI atlas_to_subject: landmark-based initial transform.
        ########################################################
        BLICreator[malf_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(),
                                                 name="BLI_" + malf_atlas_subject)
        BLICreator[malf_atlas_subject].inputs.outputTransformFilename = "landmarkInitializer_{0}_to_subject_transform.h5".format(malf_atlas_subject)

        MALFWF.connect(inputsSpec, 'atlasWeightFilename', BLICreator[malf_atlas_subject], 'inputWeightFilename')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_lmks', BLICreator[malf_atlas_subject], 'inputMovingLandmarkFilename')
        MALFWF.connect(inputsSpec, 'subj_lmks', BLICreator[malf_atlas_subject], 'inputFixedLandmarkFilename')

        ##### Initialize with ANTS Transform For AffineComponentBABC
        currentAtlasToSubjectantsRigidRegistration = 'Rigid_AtlasToSubjectANTsPreABC_' + malf_atlas_subject
        A2SantsRegistrationPreABCRigid[malf_atlas_subject] = pe.Node(interface=ants.Registration(),
                                                                     name=currentAtlasToSubjectantsRigidRegistration)
        many_cpu_ANTsRigid_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 2, 1, 1),
                                                 'overwrite': True}
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].plugin_args = many_cpu_ANTsRigid_options_dictionary
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.num_threads = -1
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.dimension = 3
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.transforms = ["Affine", ]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.transform_parameters = [[0.1]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.metric = ['MI']
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sampling_strategy = ['Regular']
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sampling_percentage = [0.5]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.metric_weight = [1.0]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.radius_or_number_of_bins = [32]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.number_of_iterations = [[1000, 1000, 500, 100]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.convergence_threshold = [1e-8]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.convergence_window_size = [10]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.use_histogram_matching = [True]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.shrink_factors = [[8, 4, 2, 1]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.smoothing_sigmas = [[3, 2, 1, 0]]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.sigma_units = ["vox"]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.use_estimate_learning_rate_once = [False]
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.initialize_transforms_per_stage = True
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_transform_prefix = 'AtlasToSubjectPreBABC_Rigid'
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.winsorize_lower_quantile = 0.01
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.winsorize_upper_quantile = 0.99
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.float = True
        ## NO NEED FOR THIS
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_warped_image = 'atlas2subjectRigid.nii.gz'
        ## NO NEED FOR THIS
        A2SantsRegistrationPreABCRigid[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlasRigid.nii.gz'

        MALFWF.connect(BLICreator[malf_atlas_subject], 'outputTransformFilename',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'fixed_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                       A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'moving_image')

        ##### Initialize with ANTS Transform For SyN component BABC
        currentAtlasToSubjectantsRegistration = 'SyN_AtlasToSubjectANTsPreABC_' + malf_atlas_subject
        A2SantsRegistrationPreABCSyN[malf_atlas_subject] = pe.Node(interface=ants.Registration(),
                                                                   name=currentAtlasToSubjectantsRegistration)
        many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 16),
                                               'overwrite': True}
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].plugin_args = many_cpu_ANTsSyN_options_dictionary
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.num_threads = -1
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.dimension = 3
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.transforms = ["SyN", "SyN"]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.transform_parameters = [[0.1, 3, 0], [0.1, 3, 0]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.metric = ['MI', 'MI']
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sampling_strategy = [None, None]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sampling_percentage = [1.0, 1.0]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.metric_weight = [1.0, 1.0]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.radius_or_number_of_bins = [32, 32]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.number_of_iterations = [[500, 500, 500, 500], [70]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.convergence_threshold = [1e-8, 1e-4]
        # NOTE(review): only one window size is given for two SyN stages -- confirm
        # that nipype/ANTs applies the intended window to both stages.
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.convergence_window_size = [12]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.use_histogram_matching = [True, True]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.shrink_factors = [[8, 4, 3, 2], [1]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.smoothing_sigmas = [[3, 2, 2, 1], [0]]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.sigma_units = ["vox", "vox"]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.use_estimate_learning_rate_once = [False, False]
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.write_composite_transform = True  # Required for initialize_transforms_per_stage
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.collapse_output_transforms = False  # Mutually Exclusive with initialize_transforms_per_stage
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.initialize_transforms_per_stage = True
        ## NO NEED FOR THIS
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.save_state = 'SavedInternalSyNState.h5'
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_transform_prefix = malf_atlas_subject + '_ToSubjectPreBABC_SyN'
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.winsorize_lower_quantile = 0.01
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.winsorize_upper_quantile = 0.99
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_warped_image = malf_atlas_subject + '_2subject.nii.gz'
        ## NO NEED FOR THIS
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'
        A2SantsRegistrationPreABCSyN[malf_atlas_subject].inputs.float = True

        ## If using Registration masking, then do ROIAuto on fixed and moving
        ## images and connect to registrations.
        UseRegistrationMasking = True
        if UseRegistrationMasking:
            from SEMTools.segmentation.specialized import BRAINSROIAuto

            fixedROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(),
                                                       name="fixedROIAUTOMask_" + malf_atlas_subject)
            fixedROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize = 10
            fixedROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"

            movingROIAuto[malf_atlas_subject] = pe.Node(interface=BRAINSROIAuto(),
                                                        name="movingROIAUTOMask_" + malf_atlas_subject)
            # BUGFIX: previously fixedROIAuto's ROIAutoDilateSize was assigned a
            # second time here, leaving the moving mask at its default dilation.
            movingROIAuto[malf_atlas_subject].inputs.ROIAutoDilateSize = 10
            movingROIAuto[malf_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz"

            MALFWF.connect(inputsSpec, 'subj_t1_image', fixedROIAuto[malf_atlas_subject], 'inputVolume')
            MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1', movingROIAuto[malf_atlas_subject], 'inputVolume')

            MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'fixed_image_mask')
            MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCRigid[malf_atlas_subject], 'moving_image_mask')

            MALFWF.connect(fixedROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'fixed_image_mask')
            MALFWF.connect(movingROIAuto[malf_atlas_subject], 'outputROIMaskVolume',
                           A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'moving_image_mask')

        # Seed the SyN stage with the first transform of the rigid/affine result.
        MALFWF.connect(A2SantsRegistrationPreABCRigid[malf_atlas_subject],
                       ('composite_transform', getListIndexOrNoneIfOutOfRange, 0),
                       A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'initial_moving_transform')
        MALFWF.connect(inputsSpec, 'subj_t1_image',
                       A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'fixed_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_t1',
                       A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'moving_image')
        MALFWF.connect(A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'warped_image',
                       warpedAtlasT1MergeNode, 'in' + str(malf_atlas_mergeindex))

        ### Original labelmap resampling
        labelMapResample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),
                                                       name="WLABEL_" + malf_atlas_subject)
        many_cpu_labelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1),
                                                        'overwrite': True}
        labelMapResample[malf_atlas_subject].plugin_args = many_cpu_labelMapResample_options_dictionary
        labelMapResample[malf_atlas_subject].inputs.dimension = 3
        labelMapResample[malf_atlas_subject].inputs.output_image = malf_atlas_subject + '_2_subj_lbl.nii.gz'
        labelMapResample[malf_atlas_subject].inputs.interpolation = 'MultiLabel'
        labelMapResample[malf_atlas_subject].inputs.default_value = 0
        labelMapResample[malf_atlas_subject].inputs.invert_transform_flags = [False]

        MALFWF.connect(A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'composite_transform',
                       labelMapResample[malf_atlas_subject], 'transforms')
        MALFWF.connect(inputsSpec, 'subj_t1_image',
                       labelMapResample[malf_atlas_subject], 'reference_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_atlas_lbls',
                       labelMapResample[malf_atlas_subject], 'input_image')
        MALFWF.connect(labelMapResample[malf_atlas_subject], 'output_image',
                       warpedAtlasLblMergeNode, 'in' + str(malf_atlas_mergeindex))

        ### New labelmap resampling (FreeSurfer-WM-style labels)
        NewlabelMapResample[malf_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),
                                                          name="FSWM_WLABEL_" + malf_atlas_subject)
        many_cpu_NewlabelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1),
                                                           'overwrite': True}
        NewlabelMapResample[malf_atlas_subject].plugin_args = many_cpu_NewlabelMapResample_options_dictionary
        NewlabelMapResample[malf_atlas_subject].inputs.dimension = 3
        NewlabelMapResample[malf_atlas_subject].inputs.output_image = malf_atlas_subject + 'fswm_2_subj_lbl.nii.gz'
        NewlabelMapResample[malf_atlas_subject].inputs.interpolation = 'MultiLabel'
        NewlabelMapResample[malf_atlas_subject].inputs.default_value = 0
        NewlabelMapResample[malf_atlas_subject].inputs.invert_transform_flags = [False]

        MALFWF.connect(A2SantsRegistrationPreABCSyN[malf_atlas_subject], 'composite_transform',
                       NewlabelMapResample[malf_atlas_subject], 'transforms')
        MALFWF.connect(inputsSpec, 'subj_t1_image',
                       NewlabelMapResample[malf_atlas_subject], 'reference_image')
        MALFWF.connect(MALF_DG[malf_atlas_subject], 'malf_fswm_atlas_lbls',
                       NewlabelMapResample[malf_atlas_subject], 'input_image')
        MALFWF.connect(NewlabelMapResample[malf_atlas_subject], 'output_image',
                       NewwarpedAtlasLblMergeNode, 'in' + str(malf_atlas_mergeindex))

        malf_atlas_mergeindex += 1

    ## Now work on cleaning up the label maps
    from FixLabelMapsTools import FixLabelMapFromNeuromorphemetrics2012
    from FixLabelMapsTools import RecodeLabelMap

    ### Original NeuroMorphometrica merged fusion
    jointFusion = pe.Node(interface=ants.JointFusion(), name="JointFusion")
    many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 4, 4),
                                               'overwrite': True}
    jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary
    jointFusion.inputs.dimension = 3
    jointFusion.inputs.modalities = 1
    jointFusion.inputs.method = 'Joint[0.1,2]'
    jointFusion.inputs.output_label_image = 'fusion_neuro2012_20.nii.gz'

    MALFWF.connect(warpedAtlasT1MergeNode, 'out', jointFusion, 'warped_intensity_images')
    MALFWF.connect(warpedAtlasLblMergeNode, 'out', jointFusion, 'warped_label_images')
    MALFWF.connect(inputsSpec, 'subj_t1_image', jointFusion, 'target_image')

    # Neuromorphometrics 2012 label codes used when injecting brainstem,
    # surface CSF, and blood into the fused label map.
    NEUROLABELS_DICT = {'BRAINSTEM': 35, 'RH_CSF': 51, 'LH_CSF': 52, 'BLOOD': 230, 'UNKNOWN': 255,
                        'CONNECTED': [36, 37, 57, 58, 55, 56, 59, 60, 47, 48, 23, 30]}
    fixFusionLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012,
                                         input_names=['fusionFN', 'FixedHeadFN', 'LeftHemisphereFN', 'outFN', 'OUT_DICT'],
                                         output_names=['fixedFusionLabelFN']),
                                name="FixedFusionLabelmap")
    fixFusionLabelMap.inputs.outFN = 'neuro2012_20fusion_merge_seg.nii.gz'
    fixFusionLabelMap.inputs.OUT_DICT = NEUROLABELS_DICT

    MALFWF.connect(jointFusion, 'output_label_image', fixFusionLabelMap, 'fusionFN')
    MALFWF.connect(inputsSpec, 'subj_fixed_head_labels', fixFusionLabelMap, 'FixedHeadFN')
    MALFWF.connect(inputsSpec, 'subj_left_hemisphere', fixFusionLabelMap, 'LeftHemisphereFN')
    MALFWF.connect(fixFusionLabelMap, 'fixedFusionLabelFN', outputsSpec, 'MALF_neuro2012_labelmap')

    ## 2014-02-19 Updated fs_wmparcelation_improved malf
    newJointFusion = pe.Node(interface=ants.JointFusion(), name="FSWM_JointFusion")
    many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 8, 4, 4),
                                               'overwrite': True}
    newJointFusion.plugin_args = many_cpu_JointFusion_options_dictionary
    newJointFusion.inputs.dimension = 3
    newJointFusion.inputs.modalities = 1
    newJointFusion.inputs.method = 'Joint[0.1,2]'
    newJointFusion.inputs.output_label_image = 'fswm_neuro2012_20.nii.gz'

    MALFWF.connect(warpedAtlasT1MergeNode, 'out', newJointFusion, 'warped_intensity_images')
    MALFWF.connect(NewwarpedAtlasLblMergeNode, 'out', newJointFusion, 'warped_label_images')
    MALFWF.connect(inputsSpec, 'subj_t1_image', newJointFusion, 'target_image')

    # FreeSurfer label codes for the same injection step on the FSWM fusion.
    FREESURFER_DICT = {'BRAINSTEM': 16, 'RH_CSF': 24, 'LH_CSF': 24, 'BLOOD': 15000, 'UNKNOWN': 999,
                       'CONNECTED': [11, 12, 13, 9, 17, 26, 50, 51, 52, 48, 53, 58]}
    injectSurfaceCSFandVBIntoLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012,
                                                         input_names=['fusionFN', 'FixedHeadFN', 'LeftHemisphereFN', 'outFN', 'OUT_DICT'],
                                                         output_names=['fixedFusionLabelFN']),
                                                name="injectSurfaceCSFandVBIntoLabelMap")
    injectSurfaceCSFandVBIntoLabelMap.inputs.outFN = 'fswm_neuro2012_20_merge_seg.nii.gz'
    injectSurfaceCSFandVBIntoLabelMap.inputs.OUT_DICT = FREESURFER_DICT

    MALFWF.connect(newJointFusion, 'output_label_image', injectSurfaceCSFandVBIntoLabelMap, 'fusionFN')
    MALFWF.connect(inputsSpec, 'subj_fixed_head_labels', injectSurfaceCSFandVBIntoLabelMap, 'FixedHeadFN')
    MALFWF.connect(inputsSpec, 'subj_left_hemisphere', injectSurfaceCSFandVBIntoLabelMap, 'LeftHemisphereFN')

    ## We need to recode values to ensure that there are no conflicts in the future
    RECODE_LABELS_2_Extended_FSWM = [
        (7071, 15071), (7072, 15072), (7073, 15073), (7145, 15145), (7157, 15157),
        (7161, 15161), (7179, 15179), (7141, 15141), (7151, 15151), (7163, 15163),
        (7165, 15165), (7143, 15143), (7191, 15191), (7193, 15193), (7185, 15185),
        (7201, 15201), (7175, 15175), (7195, 15195), (7173, 15173), (7144, 15144),
        (7156, 15156), (7160, 15160), (7178, 15178), (7140, 15140), (7150, 15150),
        (7162, 15162), (7164, 15164), (7142, 15142), (7190, 15190), (7192, 15192),
        (7184, 15184), (7174, 15174), (7194, 15194), (7172, 15172)]
    ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE):
    # NOTE: node name "RecodeToExteneded" keeps its historical spelling; it is a
    # runtime node name and renaming would change on-disk cache directories.
    RecodeToExtended = pe.Node(Function(function=RecodeLabelMap,
                                        input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'],
                                        output_names=['OutputFileName']),
                               name="RecodeToExteneded")
    RecodeToExtended.inputs.RECODE_TABLE = RECODE_LABELS_2_Extended_FSWM
    RecodeToExtended.inputs.OutputFileName = 'fswm_extended_neuro2012_20_merge_seg.nii.gz'
    MALFWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN', RecodeToExtended, 'InputFileName')

    ## We need to recode values to ensure that the labels match FreeSurer as close as possible by merging
    ## some labels together to standard FreeSurfer confenventions (i.e. for WMQL)
    RECODE_LABELS_2_Standard_FSWM = [
        (15071, 47), (15072, 47), (15073, 47), (15145, 1011), (15157, 1011), (15161, 1011),
        (15179, 1012), (15141, 1014), (15151, 1017), (15163, 1018), (15165, 1019), (15143, 1027),
        (15191, 1028), (15193, 1028), (15185, 1030), (15201, 1030), (15175, 1031), (15195, 1031),
        (15173, 1035), (15144, 2011), (15156, 2011), (15160, 2011), (15178, 2012), (15140, 2014),
        (15150, 2017), (15162, 2018), (15164, 2019), (15142, 2027), (15190, 2028), (15192, 2028),
        (15184, 2030), (15174, 2031), (15194, 2031), (15172, 2035)]
    ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE):
    RecodeToStandardFSWM = pe.Node(Function(function=RecodeLabelMap,
                                            input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'],
                                            output_names=['OutputFileName']),
                                   name="RecodeToStandardFSWM")
    RecodeToStandardFSWM.inputs.RECODE_TABLE = RECODE_LABELS_2_Standard_FSWM
    RecodeToStandardFSWM.inputs.OutputFileName = 'fswm_standard_neuro2012_20_merge_seg.nii.gz'
    MALFWF.connect(RecodeToExtended, 'OutputFileName', RecodeToStandardFSWM, 'InputFileName')

    MALFWF.connect(RecodeToExtended, 'OutputFileName', outputsSpec, 'MALF_fswm_extended_neuro2012_labelmap')
    MALFWF.connect(RecodeToStandardFSWM, 'OutputFileName', outputsSpec, 'MALF_fswm_standard_neuro2012_labelmap')

    ## MALF_SNAPSHOT_WRITER for Segmented result checking:
    MALF_SNAPSHOT_WRITERNodeName = "MALF_ExtendedMALF_SNAPSHOT_WRITER"
    MALF_SNAPSHOT_WRITER = pe.Node(interface=BRAINSSnapShotWriter(), name=MALF_SNAPSHOT_WRITERNodeName)
    MALF_SNAPSHOT_WRITER.inputs.outputFilename = 'fswm_extended_neuro2012_labelmap.png'  # output specification
    MALF_SNAPSHOT_WRITER.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0]
    MALF_SNAPSHOT_WRITER.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22]

    MALFWF.connect([(inputsSpec, MALF_SNAPSHOT_WRITER, [('subj_t1_image', 'inputVolumes')]),
                    (RecodeToExtended, MALF_SNAPSHOT_WRITER, [('OutputFileName', 'inputBinaryVolumes')])
                    ])
    MALFWF.connect(MALF_SNAPSHOT_WRITER, 'outputFilename', outputsSpec, 'MALF_extended_snapshot')

    ## Lobar Pacellation by recoding
    #### HACK:
    #### LAbel 2001 in FS standard is WRONG. It supposed to be, 2030,ctx-rh-superiortemporal in FS
    #### which is from 200 Right_STG_superior_temporal_gyrus in neuromorphometric.
    #### 20 Atlas has to be properly changed/considered
    RECODE_LABELS_2_LobarPacellation = [
        (4, 4), (5, 5), (7, 7), (8, 8), (10, 10),
        (11, 11), (12, 12), (13, 13), (14, 14), (15, 15), (16, 16), (17, 17), (18, 18), (24, 24), (26, 26), (28, 28),
        (30, 30), (31, 31), (43, 43), (44, 44), (46, 46), (47, 47), (49, 49), (50, 50), (51, 51), (52, 52), (53, 53),
        (54, 54), (58, 58), (60, 60), (62, 62), (63, 63), (85, 85),
        (251, 251), (252, 252), (253, 253), (254, 254), (255, 255), (1000, 1000),
        (1002, 1002), (1005, 1005), (1006, 1004), (1007, 1004), (1008, 1006), (1009, 1004), (1010, 1010), (1011, 1005), (1012, 1001),
        (1013, 1005), (1014, 1001), (1015, 1004), (1016, 1016), (1017, 1001), (1018, 1001), (1019, 1001), (1020, 1001), (1021, 1005),
        (1022, 1006), (1024, 1001), (1025, 1006), (1026, 1026), (1027, 1001), (1028, 1001), (1029, 1006), (1030, 1004), (1031, 1006),
        (1032, 1001), (1033, 1004), (1034, 1004), (1035, 1035), (1116, 1005), (1129, 1129), (2000, 2000), (2001, 1004), (2002, 2002),
        (2005, 2005), (2006, 2004), (2007, 2004), (2008, 2006), (2009, 2004), (2010, 2010), (2011, 2005), (2012, 2001), (2013, 2005),
        (2014, 2001), (2015, 2004), (2016, 2016), (2017, 2001), (2018, 2001), (2019, 2001), (2020, 2001), (2021, 2005), (2022, 2006),
        (2024, 2001), (2025, 2006), (2026, 2026), (2027, 2001), (2028, 2001), (2029, 2006), (2030, 2004), (2031, 2006), (2032, 2001),
        (2033, 2004), (2034, 2004), (2035, 2035), (2116, 2005), (2129, 2129), (3001, 3001), (3002, 3002), (3003, 3001), (3005, 3005),
        (3006, 3004), (3007, 3004), (3008, 3006), (3009, 3004), (3010, 3010), (3011, 3005), (3012, 3001), (3013, 3005), (3014, 3001),
        (3015, 3004), (3016, 3016), (3017, 3001), (3018, 3001), (3019, 3001), (3020, 3001), (3021, 3005), (3022, 3006), (3023, 3023),
        (3024, 3001), (3025, 3006), (3026, 3026), (3027, 3001), (3028, 3001), (3029, 3006), (3030, 3004), (3031, 3006), (3032, 3001),
        (3033, 3004), (3034, 3004), (3035, 3035), (4001, 4001), (4002, 4002), (4003, 4001), (4005, 4005), (4006, 4004), (4007, 4004),
        (4008, 4006), (4009, 4004), (4010, 4010), (4011, 4005), (4012, 4001), (4013, 4005), (4014, 4001), (4015, 4004), (4016, 4016),
        (4017, 4001), (4018, 4001), (4019, 4001), (4020, 4001), (4021, 4005), (4022, 4006), (4023, 4023), (4024, 4001), (4025, 4006),
        (4026, 4026), (4027, 4001), (4028, 4001), (4029, 4006), (4030, 4004), (4031, 4006), (4032, 4001), (4033, 4004), (4034, 4004),
        (4035, 4035), (5001, 5001), (5002, 5002)]
    RecordToFSLobes = pe.Node(Function(function=RecodeLabelMap,
                                       input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'],
                                       output_names=['OutputFileName']),
                              name="RecordToFSLobes")
    RecordToFSLobes.inputs.RECODE_TABLE = RECODE_LABELS_2_LobarPacellation
    RecordToFSLobes.inputs.OutputFileName = 'fswm_standard_neuro2012_20_lobar_seg.nii.gz'
    MALFWF.connect(RecodeToStandardFSWM, 'OutputFileName', RecordToFSLobes, 'InputFileName')
    MALFWF.connect(RecordToFSLobes, 'OutputFileName', outputsSpec, 'MALF_fswm_lobar_neuro2012_labelmap')

    return MALFWF