def test_BRAINSResample_inputs():
    """Nose-style generator test.

    Checks that every input trait declared on ``BRAINSResample.input_spec``
    carries the expected metadata (argstr, separators, hashing flags, ...),
    yielding one ``assert_equal`` triple per (trait, metadata-key) pair.
    """
    expected_metadata = {
        'args': {'argstr': '%s'},
        'defaultValue': {'argstr': '--defaultValue %f'},
        'deformationVolume': {'argstr': '--deformationVolume %s'},
        'environ': {'nohash': True, 'usedefault': True},
        'gridSpacing': {'argstr': '--gridSpacing %s', 'sep': ','},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'inputVolume': {'argstr': '--inputVolume %s'},
        'interpolationMode': {'argstr': '--interpolationMode %s'},
        # NOTE: trailing space in the argstr is intentional (boolean flag).
        'inverseTransform': {'argstr': '--inverseTransform '},
        'numberOfThreads': {'argstr': '--numberOfThreads %d'},
        'outputVolume': {'argstr': '--outputVolume %s', 'hash_files': False},
        'pixelType': {'argstr': '--pixelType %s'},
        'referenceVolume': {'argstr': '--referenceVolume %s'},
        'terminal_output': {'nohash': True},
        'warpTransform': {'argstr': '--warpTransform %s'},
    }
    spec_traits = BRAINSResample.input_spec().traits()
    for trait_name, metadata in expected_metadata.items():
        trait = spec_traits[trait_name]
        for meta_key, meta_value in metadata.items():
            yield assert_equal, getattr(trait, meta_key), meta_value
def test_BRAINSResample_outputs():
    """Nose-style generator test for ``BRAINSResample.output_spec``.

    The single declared output (``outputVolume``) has no extra metadata to
    verify, so the inner loop yields nothing; the test still exercises
    construction of the output spec.
    """
    expected_metadata = {'outputVolume': {}}
    spec_traits = BRAINSResample.output_spec().traits()
    for trait_name, metadata in expected_metadata.items():
        for meta_key, meta_value in metadata.items():
            yield assert_equal, getattr(spec_traits[trait_name], meta_key), meta_value
# NOTE(review): this is a token-for-token duplicate of the
# test_BRAINSResample_inputs defined earlier in this file.  At import time
# this second definition rebinds the name, so only this copy is ever
# collected and run by the test runner -- consider removing one of the two.
def test_BRAINSResample_inputs():
    # Expected trait metadata, keyed by input name, for BRAINSResample.input_spec.
    input_map = dict(args=dict(argstr='%s', ),
                     defaultValue=dict(argstr='--defaultValue %f', ),
                     deformationVolume=dict(argstr='--deformationVolume %s', ),
                     environ=dict(nohash=True, usedefault=True, ),
                     gridSpacing=dict(argstr='--gridSpacing %s', sep=',', ),
                     ignore_exception=dict(nohash=True, usedefault=True, ),
                     inputVolume=dict(argstr='--inputVolume %s', ),
                     interpolationMode=dict(argstr='--interpolationMode %s', ),
                     inverseTransform=dict(argstr='--inverseTransform ', ),
                     numberOfThreads=dict(argstr='--numberOfThreads %d', ),
                     outputVolume=dict(argstr='--outputVolume %s', hash_files=False, ),
                     pixelType=dict(argstr='--pixelType %s', ),
                     referenceVolume=dict(argstr='--referenceVolume %s', ),
                     terminal_output=dict(nohash=True, ),
                     warpTransform=dict(argstr='--warpTransform %s', ),
                     )
    inputs = BRAINSResample.input_spec()
    # Nose generator test: yields one assert_equal triple per metadata entry.
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def generate_single_session_template_WF(projectid, subjectid, sessionid, onlyT1, master_config, phase, interpMode,
                                        pipeline_name, doDenoise=True):
    """
    Run autoworkup on a single sessionid

    This is the main function to call when processing a data set with T1 & T2
    data.  ExperimentBaseDirectoryPrefix is the base of the directory to place
    results, T1Images & T2Images are the lists of images to be used in the
    auto-workup.  atlas_fname_wpath is the path and filename of the atlas to
    use.

    Parameters (as read by this body):
      projectid/subjectid/sessionid : identifiers embedded in node names and
          in the DataSink container path.
      onlyT1 : when True the T2-dependent segmentation connection is skipped.
      master_config : dict; keys read here include 'components',
          'ds_overwrite', 'resultdir', 'atlascache', 'workflow_phase',
          'previousresult', 'queue', 'use_registration_masking'.
      phase : must be 'atlas-based-reference' or 'subject-based-reference'.
      interpMode : interpolation mode forwarded to the landmark-initialize
          and tissue-classify sub-workflows.
      pipeline_name : name of the returned nipype Workflow.
      doDenoise : when True, input images are run through
          UnbiasedNonLocalMeans denoising before further processing.

    Returns the assembled nipype Workflow (baw201).
    """
    #if not 'landmark' in master_config['components'] or not 'auxlmk' in master_config['components'] or not 'tissue_classify' in master_config['components']:
    #    print "Baseline DataSink requires 'AUXLMK' and/or 'TISSUE_CLASSIFY'!!!"
    #    raise NotImplementedError
    #    master_config['components'].append('auxlmk')
    #    master_config['components'].append('tissue_classify')

    # --- Sanity-check the requested component combination up front. ---
    assert phase in ['atlas-based-reference',
                     'subject-based-reference'], "Unknown phase! Valid entries: 'atlas-based-reference', 'subject-based-reference'"

    if 'tissue_classify' in master_config['components']:
        assert ('landmark' in master_config['components']), "tissue_classify Requires landmark step!"
    # NOT TRUE if 'landmark' in master_config['components']:
    #    assert 'denoise' in master_config['components'], "landmark Requires denoise step!"
    if 'malf_2015_wholebrain' in master_config['components']:
        assert ('warp_atlas_to_subject' in master_config['components']), "malf_2015_wholebrain requires warp_atlas_to_subject!"

    from workflows.atlasNode import MakeAtlasNode

    baw201 = pe.Workflow(name=pipeline_name)

    # --- Workflow-level input/output identity nodes. ---
    inputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasLandmarkFilename', 'atlasWeightFilename',
                                                             'LLSModel', 'inputTemplateModel', 'template_t1',
                                                             'atlasDefinition', 'T1s', 'T2s', 'PDs', 'FLs',
                                                             'OTHERs', 'hncma_atlas',
                                                             'template_rightHemisphere',
                                                             'template_leftHemisphere',
                                                             'template_WMPM2_labels',
                                                             'template_nac_labels',
                                                             'template_ventricles']),
                         run_without_submitting=True,
                         name='inputspec')

    outputsSpec = pe.Node(interface=IdentityInterface(fields=['t1_average', 't2_average', 'pd_average', 'fl_average',
                                                              'posteriorImages', 'outputLabels', 'outputHeadLabels',
                                                              'atlasToSubjectTransform',
                                                              'atlasToSubjectInverseTransform',
                                                              'atlasToSubjectRegistrationState',
                                                              'BCD_ACPC_T1_CROPPED',
                                                              'outputLandmarksInACPCAlignedSpace',
                                                              'outputLandmarksInInputSpace',
                                                              'output_tx', 'LMIatlasToSubject_tx',
                                                              'writeBranded2DImage',
                                                              'brainStemMask',
                                                              'UpdatedPosteriorsList'  # Longitudinal
                                                              ]),
                          run_without_submitting=True,
                          name='outputspec')

    # --- DataSink: results land under <resultdir>/<projectid>/<subjectid>/<sessionid>. ---
    dsName = "{0}_ds_{1}".format(phase, sessionid)
    DataSink = pe.Node(name=dsName, interface=nio.DataSink())
    DataSink.overwrite = master_config['ds_overwrite']
    DataSink.inputs.container = '{0}/{1}/{2}'.format(projectid, subjectid, sessionid)
    DataSink.inputs.base_directory = master_config['resultdir']

    atlas_static_directory = master_config['atlascache']
    # --- Atlas sources differ per phase: cached atlas vs. previous-run subject template. ---
    if master_config['workflow_phase'] == 'atlas-based-reference':
        atlas_warped_directory = master_config['atlascache']
        atlasABCNode_XML = MakeAtlasNode(atlas_warped_directory, 'BABCXMLAtlas_{0}'.format(sessionid),
                                         ['W_BRAINSABCSupport'])
        baw201.connect(atlasABCNode_XML, 'ExtendedAtlasDefinition_xml',
                       inputsSpec, 'atlasDefinition')

        atlasABCNode_W = MakeAtlasNode(atlas_warped_directory, 'BABCAtlas_W{0}'.format(sessionid),
                                       ['W_BRAINSABCSupport', 'W_LabelMapsSupport'])
        baw201.connect([(atlasABCNode_W, inputsSpec, [
            ('hncma_atlas', 'hncma_atlas'),
            ('template_leftHemisphere', 'template_leftHemisphere'),
            ('template_rightHemisphere', 'template_rightHemisphere'),
            ('template_WMPM2_labels', 'template_WMPM2_labels'),
            ('template_nac_labels', 'template_nac_labels'),
            ('template_ventricles', 'template_ventricles')]
        )]
        )
        ## These landmarks are only relevant for the atlas-based-reference case
        atlasBCDNode_W = MakeAtlasNode(atlas_warped_directory, 'BBCDAtlas_W{0}'.format(sessionid),
                                       ['W_BCDSupport'])
        baw201.connect([(atlasBCDNode_W, inputsSpec,
                         [('template_t1', 'template_t1'),
                          ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'),
                          ]),
                        ])
        ## Needed for both segmentation and template building prep
        atlasBCUTNode_W = MakeAtlasNode(atlas_warped_directory, 'BBCUTAtlas_W{0}'.format(sessionid),
                                        ['W_BRAINSCutSupport'])
    elif master_config['workflow_phase'] == 'subject-based-reference':
        print(master_config['previousresult'])
        atlas_warped_directory = os.path.join(master_config['previousresult'], subjectid, 'Atlas')

        # Probability maps / spherical coordinates come from the previous run's
        # subject-specific Atlas directory via a DataGrabber.
        atlasBCUTNode_W = pe.Node(interface=nio.DataGrabber(infields=['subject'],
                                                            outfields=["l_accumben_ProbabilityMap",
                                                                       "r_accumben_ProbabilityMap",
                                                                       "l_caudate_ProbabilityMap",
                                                                       "r_caudate_ProbabilityMap",
                                                                       "l_globus_ProbabilityMap",
                                                                       "r_globus_ProbabilityMap",
                                                                       "l_hippocampus_ProbabilityMap",
                                                                       "r_hippocampus_ProbabilityMap",
                                                                       "l_putamen_ProbabilityMap",
                                                                       "r_putamen_ProbabilityMap",
                                                                       "l_thalamus_ProbabilityMap",
                                                                       "r_thalamus_ProbabilityMap",
                                                                       "phi", "rho", "theta"
                                                                       ]),
                                  name='PerSubject_atlasBCUTNode_W')
        atlasBCUTNode_W.inputs.base_directory = master_config['previousresult']
        atlasBCUTNode_W.inputs.subject = subjectid
        atlasBCUTNode_W.inputs.field_template = {
            'l_accumben_ProbabilityMap': '%s/Atlas/AVG_l_accumben_ProbabilityMap.nii.gz',
            'r_accumben_ProbabilityMap': '%s/Atlas/AVG_r_accumben_ProbabilityMap.nii.gz',
            'l_caudate_ProbabilityMap': '%s/Atlas/AVG_l_caudate_ProbabilityMap.nii.gz',
            'r_caudate_ProbabilityMap': '%s/Atlas/AVG_r_caudate_ProbabilityMap.nii.gz',
            'l_globus_ProbabilityMap': '%s/Atlas/AVG_l_globus_ProbabilityMap.nii.gz',
            'r_globus_ProbabilityMap': '%s/Atlas/AVG_r_globus_ProbabilityMap.nii.gz',
            'l_hippocampus_ProbabilityMap': '%s/Atlas/AVG_l_hippocampus_ProbabilityMap.nii.gz',
            'r_hippocampus_ProbabilityMap': '%s/Atlas/AVG_r_hippocampus_ProbabilityMap.nii.gz',
            'l_putamen_ProbabilityMap': '%s/Atlas/AVG_l_putamen_ProbabilityMap.nii.gz',
            'r_putamen_ProbabilityMap': '%s/Atlas/AVG_r_putamen_ProbabilityMap.nii.gz',
            'l_thalamus_ProbabilityMap': '%s/Atlas/AVG_l_thalamus_ProbabilityMap.nii.gz',
            'r_thalamus_ProbabilityMap': '%s/Atlas/AVG_r_thalamus_ProbabilityMap.nii.gz',
            'phi': '%s/Atlas/AVG_phi.nii.gz',
            'rho': '%s/Atlas/AVG_rho.nii.gz',
            'theta': '%s/Atlas/AVG_theta.nii.gz'
        }
        atlasBCUTNode_W.inputs.template_args = {
            'l_accumben_ProbabilityMap': [['subject']],
            'r_accumben_ProbabilityMap': [['subject']],
            'l_caudate_ProbabilityMap': [['subject']],
            'r_caudate_ProbabilityMap': [['subject']],
            'l_globus_ProbabilityMap': [['subject']],
            'r_globus_ProbabilityMap': [['subject']],
            'l_hippocampus_ProbabilityMap': [['subject']],
            'r_hippocampus_ProbabilityMap': [['subject']],
            'l_putamen_ProbabilityMap': [['subject']],
            'r_putamen_ProbabilityMap': [['subject']],
            'l_thalamus_ProbabilityMap': [['subject']],
            'r_thalamus_ProbabilityMap': [['subject']],
            'phi': [['subject']],
            'rho': [['subject']],
            'theta': [['subject']]
        }
        atlasBCUTNode_W.inputs.template = '*'
        atlasBCUTNode_W.inputs.sort_filelist = True
        atlasBCUTNode_W.inputs.raise_on_empty = True

        # Subject-average template images from the previous run.
        template_DG = pe.Node(interface=nio.DataGrabber(infields=['subject'],
                                                        outfields=['outAtlasXMLFullPath',
                                                                   'hncma_atlas',
                                                                   'template_leftHemisphere',
                                                                   'template_rightHemisphere',
                                                                   'template_WMPM2_labels',
                                                                   'template_nac_labels',
                                                                   'template_ventricles',
                                                                   'template_t1',
                                                                   'template_landmarks_50Lmks_fcsv'
                                                                   ]),
                              name='Template_DG')
        template_DG.inputs.base_directory = master_config['previousresult']
        template_DG.inputs.subject = subjectid
        template_DG.inputs.field_template = {'outAtlasXMLFullPath': '%s/Atlas/AtlasDefinition_%s.xml',
                                             'hncma_atlas': '%s/Atlas/AVG_hncma_atlas.nii.gz',
                                             'template_leftHemisphere': '%s/Atlas/AVG_template_leftHemisphere.nii.gz',
                                             'template_rightHemisphere': '%s/Atlas/AVG_template_rightHemisphere.nii.gz',
                                             'template_WMPM2_labels': '%s/Atlas/AVG_template_WMPM2_labels.nii.gz',
                                             'template_nac_labels': '%s/Atlas/AVG_template_nac_labels.nii.gz',
                                             'template_ventricles': '%s/Atlas/AVG_template_ventricles.nii.gz',
                                             'template_t1': '%s/Atlas/AVG_T1.nii.gz',
                                             'template_landmarks_50Lmks_fcsv': '%s/Atlas/AVG_LMKS.fcsv',
                                             }
        template_DG.inputs.template_args = {'outAtlasXMLFullPath': [['subject', 'subject']],
                                            'hncma_atlas': [['subject']],
                                            'template_leftHemisphere': [['subject']],
                                            'template_rightHemisphere': [['subject']],
                                            'template_WMPM2_labels': [['subject']],
                                            'template_nac_labels': [['subject']],
                                            'template_ventricles': [['subject']],
                                            'template_t1': [['subject']],
                                            'template_landmarks_50Lmks_fcsv': [['subject']]
                                            }
        template_DG.inputs.template = '*'
        template_DG.inputs.sort_filelist = True
        template_DG.inputs.raise_on_empty = True

        baw201.connect(template_DG, 'outAtlasXMLFullPath', inputsSpec, 'atlasDefinition')
        baw201.connect([(template_DG, inputsSpec, [
            ## Already connected ('template_t1','template_t1'),
            ('hncma_atlas', 'hncma_atlas'),
            ('template_leftHemisphere', 'template_leftHemisphere'),
            ('template_rightHemisphere', 'template_rightHemisphere'),
            ('template_WMPM2_labels', 'template_WMPM2_labels'),
            ('template_nac_labels', 'template_nac_labels'),
            ('template_ventricles', 'template_ventricles')]
        )]
        )
        ## These landmarks are only relevant for the atlas-based-reference case
        baw201.connect([(template_DG, inputsSpec,
                         [('template_t1', 'template_t1'),
                          ('template_landmarks_50Lmks_fcsv', 'atlasLandmarkFilename'),
                          ]),
                        ])
    else:
        assert 0 == 1, "Invalid workflow type specified for singleSession"

    # Static (un-warped) atlas support files used by BCD in both phases.
    atlasBCDNode_S = MakeAtlasNode(atlas_static_directory, 'BBCDAtlas_S{0}'.format(sessionid),
                                   ['S_BCDSupport'])
    baw201.connect([(atlasBCDNode_S, inputsSpec,
                     [('template_weights_50Lmks_wts', 'atlasWeightFilename'),
                      ('LLSModel_50Lmks_h5', 'LLSModel'),
                      ('T1_50Lmks_mdl', 'inputTemplateModel')
                      ]),
                    ])

    # --- Optional UnbiasedNonLocalMeans denoising of all input images. ---
    if doDenoise:
        print("\ndenoise image filter\n")
        makeDenoiseInImageList = pe.Node(Function(function=MakeOutFileList,
                                                  input_names=['T1List', 'T2List', 'PDList', 'FLList',
                                                               'OtherList', 'postfix', 'PrimaryT1'],
                                                  output_names=['inImageList', 'outImageList', 'imageTypeList']),
                                         run_without_submitting=True,
                                         name="99_makeDenoiseInImageList")
        baw201.connect(inputsSpec, 'T1s', makeDenoiseInImageList, 'T1List')
        baw201.connect(inputsSpec, 'T2s', makeDenoiseInImageList, 'T2List')
        baw201.connect(inputsSpec, 'PDs', makeDenoiseInImageList, 'PDList')
        makeDenoiseInImageList.inputs.FLList = []  # an emptyList HACK
        makeDenoiseInImageList.inputs.PrimaryT1 = None  # an emptyList HACK
        makeDenoiseInImageList.inputs.postfix = "_UNM_denoised.nii.gz"
        # HACK baw201.connect( inputsSpec, 'FLList', makeDenoiseInImageList, 'FLList' )
        baw201.connect(inputsSpec, 'OTHERs', makeDenoiseInImageList, 'OtherList')

        print("\nDenoise:\n")
        DenoiseInputImgs = pe.MapNode(interface=UnbiasedNonLocalMeans(),
                                      name='denoiseInputImgs',
                                      iterfield=['inputVolume',
                                                 'outputVolume'])
        DenoiseInputImgs.inputs.rc = [1, 1, 1]
        DenoiseInputImgs.inputs.rs = [4, 4, 4]
        DenoiseInputImgs.plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], .2, 1, 1),
                                        'overwrite': True}
        baw201.connect([(makeDenoiseInImageList, DenoiseInputImgs, [('inImageList', 'inputVolume')]),
                        (makeDenoiseInImageList, DenoiseInputImgs, [('outImageList', 'outputVolume')])
                        ])
        print("\nMerge all T1 and T2 List\n")
        # Re-split the denoised file list back into per-modality lists.
        makePreprocessingOutList = pe.Node(Function(function=GenerateSeparateImageTypeList,
                                                    input_names=['inFileList', 'inTypeList'],
                                                    output_names=['T1s', 'T2s', 'PDs', 'FLs', 'OtherList']),
                                           run_without_submitting=True,
                                           name="99_makePreprocessingOutList")
        baw201.connect(DenoiseInputImgs, 'outputVolume', makePreprocessingOutList, 'inFileList')
        baw201.connect(makeDenoiseInImageList, 'imageTypeList', makePreprocessingOutList, 'inTypeList')
    else:
        # No denoising: downstream consumers read T1s/T2s/... straight off inputsSpec.
        makePreprocessingOutList = inputsSpec

    # --- Landmark initialization (BCD / ACPC alignment). ---
    if 'landmark' in master_config['components']:
        DoReverseMapping = False  # Set to true for debugging outputs
        if 'auxlmk' in master_config['components']:
            DoReverseMapping = True
        myLocalLMIWF = CreateLandmarkInitializeWorkflow("LandmarkInitialize", interpMode, DoReverseMapping)

        baw201.connect([(makePreprocessingOutList, myLocalLMIWF,
                         [(('T1s', get_list_element, 0), 'inputspec.inputVolume')]),
                        (inputsSpec, myLocalLMIWF,
                         [('atlasLandmarkFilename', 'inputspec.atlasLandmarkFilename'),
                          ('atlasWeightFilename', 'inputspec.atlasWeightFilename'),
                          ('LLSModel', 'inputspec.LLSModel'),
                          ('inputTemplateModel', 'inputspec.inputTemplateModel'),
                          ('template_t1', 'inputspec.atlasVolume')]),
                        (myLocalLMIWF, outputsSpec,
                         [('outputspec.outputResampledCroppedVolume', 'BCD_ACPC_T1_CROPPED'),
                          ('outputspec.outputLandmarksInACPCAlignedSpace',
                           'outputLandmarksInACPCAlignedSpace'),
                          ('outputspec.outputLandmarksInInputSpace',
                           'outputLandmarksInInputSpace'),
                          ('outputspec.outputTransform', 'output_tx'),
                          ('outputspec.atlasToSubjectTransform', 'LMIatlasToSubject_tx'),
                          ('outputspec.writeBranded2DImage', 'writeBranded2DImage')])
                        ])
        baw201.connect([(outputsSpec, DataSink,  # TODO: change to myLocalLMIWF -> DataSink
                         [('outputLandmarksInACPCAlignedSpace', 'ACPCAlign.@outputLandmarks_ACPC'),
                          ('writeBranded2DImage', 'ACPCAlign.@writeBranded2DImage'),
                          ('BCD_ACPC_T1_CROPPED', 'ACPCAlign.@BCD_ACPC_T1_CROPPED'),
                          ('outputLandmarksInInputSpace', 'ACPCAlign.@outputLandmarks_Input'),
                          ('output_tx', 'ACPCAlign.@output_tx'),
                          ('LMIatlasToSubject_tx', 'ACPCAlign.@LMIatlasToSubject_tx'), ]
                         )
                        ]
                       )

    # --- Tissue classification (BABC) plus posterior clean-up and brainstem. ---
    if 'tissue_classify' in master_config['components']:
        useRegistrationMask = master_config['use_registration_masking']
        myLocalTCWF = CreateTissueClassifyWorkflow("TissueClassify", master_config, interpMode, useRegistrationMask)
        baw201.connect([(makePreprocessingOutList, myLocalTCWF, [('T1s', 'inputspec.T1List')]),
                        (makePreprocessingOutList, myLocalTCWF, [('T2s', 'inputspec.T2List')]),
                        (inputsSpec, myLocalTCWF, [('atlasDefinition', 'inputspec.atlasDefinition'),
                                                   ('template_t1', 'inputspec.atlasVolume'),
                                                   (('T1s', getAllT1sLength), 'inputspec.T1_count'),
                                                   ('PDs', 'inputspec.PDList'),
                                                   ('FLs', 'inputspec.FLList'),
                                                   ('OTHERs', 'inputspec.OtherList')
                                                   ]),
                        (myLocalLMIWF, myLocalTCWF, [('outputspec.outputResampledCroppedVolume', 'inputspec.PrimaryT1'),
                                                     ('outputspec.atlasToSubjectTransform',
                                                      'inputspec.atlasToSubjectInitialTransform')]),
                        (myLocalTCWF, outputsSpec, [('outputspec.t1_average', 't1_average'),
                                                    ('outputspec.t2_average', 't2_average'),
                                                    ('outputspec.pd_average', 'pd_average'),
                                                    ('outputspec.fl_average', 'fl_average'),
                                                    ('outputspec.posteriorImages', 'posteriorImages'),
                                                    ('outputspec.outputLabels', 'outputLabels'),
                                                    ('outputspec.outputHeadLabels', 'outputHeadLabels'),
                                                    ('outputspec.atlasToSubjectTransform', 'atlasToSubjectTransform'),
                                                    ('outputspec.atlasToSubjectInverseTransform',
                                                     'atlasToSubjectInverseTransform'),
                                                    ('outputspec.atlasToSubjectRegistrationState',
                                                     'atlasToSubjectRegistrationState')
                                                    ]),
                        ])

        baw201.connect([(outputsSpec, DataSink,  # TODO: change to myLocalTCWF -> DataSink
                         [(('t1_average', convertToList), 'TissueClassify.@t1'),
                          (('t2_average', convertToList), 'TissueClassify.@t2'),
                          (('pd_average', convertToList), 'TissueClassify.@pd'),
                          (('fl_average', convertToList), 'TissueClassify.@fl')])
                        ])

        # Post-process the BABC posteriors so WM partitions are consistent.
        currentFixWMPartitioningName = "_".join(['FixWMPartitioning', str(subjectid), str(sessionid)])
        FixWMNode = pe.Node(interface=Function(function=FixWMPartitioning,
                                               input_names=['brainMask', 'PosteriorsList'],
                                               output_names=['UpdatedPosteriorsList', 'MatchingFGCodeList',
                                                             'MatchingLabelList', 'nonAirRegionMask']),
                            name=currentFixWMPartitioningName)

        baw201.connect([(myLocalTCWF, FixWMNode, [('outputspec.outputLabels', 'brainMask'),
                                                  (('outputspec.posteriorImages', flattenDict), 'PosteriorsList')]),
                        (FixWMNode, outputsSpec, [('UpdatedPosteriorsList', 'UpdatedPosteriorsList')]),
                        ])

        currentBRAINSCreateLabelMapName = 'BRAINSCreateLabelMapFromProbabilityMaps_' + str(subjectid) + "_" + str(
            sessionid)
        BRAINSCreateLabelMapNode = pe.Node(interface=BRAINSCreateLabelMapFromProbabilityMaps(),
                                           name=currentBRAINSCreateLabelMapName)

        ## TODO:  Fix the file names
        BRAINSCreateLabelMapNode.inputs.dirtyLabelVolume = 'fixed_headlabels_seg.nii.gz'
        BRAINSCreateLabelMapNode.inputs.cleanLabelVolume = 'fixed_brainlabels_seg.nii.gz'

        baw201.connect([(FixWMNode, BRAINSCreateLabelMapNode, [('UpdatedPosteriorsList', 'inputProbabilityVolume'),
                                                               ('MatchingFGCodeList', 'foregroundPriors'),
                                                               ('MatchingLabelList', 'priorLabelCodes'),
                                                               ('nonAirRegionMask', 'nonAirRegionMask')]),
                        (BRAINSCreateLabelMapNode, DataSink,
                         [  # brainstem code below replaces this ('cleanLabelVolume', 'TissueClassify.@outputLabels'),
                             ('dirtyLabelVolume', 'TissueClassify.@outputHeadLabels')]),
                        (myLocalTCWF, DataSink, [('outputspec.atlasToSubjectTransform',
                                                  'TissueClassify.@atlas2session_tx'),
                                                 ('outputspec.atlasToSubjectInverseTransform',
                                                  'TissueClassify.@atlas2sessionInverse_tx')]),
                        (FixWMNode, DataSink, [('UpdatedPosteriorsList', 'TissueClassify.@posteriors')]),
                        ])

        currentAccumulateLikeTissuePosteriorsName = 'AccumulateLikeTissuePosteriors_' + str(subjectid) + "_" + str(
            sessionid)
        AccumulateLikeTissuePosteriorsNode = pe.Node(interface=Function(function=AccumulateLikeTissuePosteriors,
                                                                        input_names=['posteriorImages'],
                                                                        output_names=['AccumulatePriorsList',
                                                                                      'AccumulatePriorsNames']),
                                                     name=currentAccumulateLikeTissuePosteriorsName)
        baw201.connect([(FixWMNode, AccumulateLikeTissuePosteriorsNode, [('UpdatedPosteriorsList',
                                                                          'posteriorImages')]),
                        (AccumulateLikeTissuePosteriorsNode, DataSink,
                         [('AccumulatePriorsList',
                           'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir')])])

        """
        brain stem adds on feature
        inputs:
            - landmark (fcsv) file
            - fixed brainlabels seg.nii.gz
        output:
            - complete_brainlabels_seg.nii.gz Segmentation
        """
        myLocalBrainStemWF = CreateBrainstemWorkflow("BrainStem",
                                                     master_config['queue'],
                                                     "complete_brainlabels_seg.nii.gz")

        baw201.connect([(myLocalLMIWF, myLocalBrainStemWF,
                         [('outputspec.outputLandmarksInACPCAlignedSpace',
                           'inputspec.inputLandmarkFilename')]),
                        (BRAINSCreateLabelMapNode, myLocalBrainStemWF,
                         [('cleanLabelVolume', 'inputspec.inputTissueLabelFilename')])
                        ])

        # NOTE: 'ouputTissuelLabelFilename' spelling matches the sub-workflow's
        # output name; do not "fix" it here without changing the producer too.
        baw201.connect(myLocalBrainStemWF, 'outputspec.ouputTissuelLabelFilename', DataSink,
                       'TissueClassify.@complete_brainlabels_seg')

    ###########################
    do_BRAINSCut_Segmentation = DetermineIfSegmentationShouldBeDone(master_config)
    if do_BRAINSCut_Segmentation:
        from workflows.segmentation import segmentation
        from workflows.WorkupT1T2BRAINSCut import GenerateWFName

        sname = 'segmentation'
        segWF = segmentation(projectid, subjectid, sessionid, master_config, onlyT1, pipeline_name=sname)

        baw201.connect([(inputsSpec, segWF,
                         [('template_t1', 'inputspec.template_t1')
                          ])
                        ])
        baw201.connect([(atlasBCUTNode_W, segWF,
                         [('rho', 'inputspec.rho'),
                          ('phi', 'inputspec.phi'),
                          ('theta', 'inputspec.theta'),
                          ('l_caudate_ProbabilityMap', 'inputspec.l_caudate_ProbabilityMap'),
                          ('r_caudate_ProbabilityMap', 'inputspec.r_caudate_ProbabilityMap'),
                          ('l_hippocampus_ProbabilityMap', 'inputspec.l_hippocampus_ProbabilityMap'),
                          ('r_hippocampus_ProbabilityMap', 'inputspec.r_hippocampus_ProbabilityMap'),
                          ('l_putamen_ProbabilityMap', 'inputspec.l_putamen_ProbabilityMap'),
                          ('r_putamen_ProbabilityMap', 'inputspec.r_putamen_ProbabilityMap'),
                          ('l_thalamus_ProbabilityMap', 'inputspec.l_thalamus_ProbabilityMap'),
                          ('r_thalamus_ProbabilityMap', 'inputspec.r_thalamus_ProbabilityMap'),
                          ('l_accumben_ProbabilityMap', 'inputspec.l_accumben_ProbabilityMap'),
                          ('r_accumben_ProbabilityMap', 'inputspec.r_accumben_ProbabilityMap'),
                          ('l_globus_ProbabilityMap', 'inputspec.l_globus_ProbabilityMap'),
                          ('r_globus_ProbabilityMap', 'inputspec.r_globus_ProbabilityMap')
                          ]
                         )])

        atlasBCUTNode_S = MakeAtlasNode(atlas_static_directory,
                                        'BBCUTAtlas_S{0}'.format(sessionid), ['S_BRAINSCutSupport'])
        baw201.connect(atlasBCUTNode_S, 'trainModelFile_txtD0060NT0060_gz',
                       segWF, 'inputspec.trainModelFile_txtD0060NT0060_gz')

        ## baw201_outputspec = baw201.get_node('outputspec')
        baw201.connect([(myLocalTCWF, segWF, [('outputspec.t1_average', 'inputspec.t1_average'),
                                              ('outputspec.atlasToSubjectRegistrationState',
                                               'inputspec.atlasToSubjectRegistrationState'),
                                              ('outputspec.outputLabels', 'inputspec.inputLabels'),
                                              ('outputspec.posteriorImages', 'inputspec.posteriorImages'),
                                              ('outputspec.outputHeadLabels', 'inputspec.inputHeadLabels')
                                              ]
                         ),
                        (myLocalLMIWF, segWF, [('outputspec.atlasToSubjectTransform',
                                                'inputspec.LMIatlasToSubject_tx')
                                               ]
                         ),
                        (FixWMNode, segWF, [('UpdatedPosteriorsList', 'inputspec.UpdatedPosteriorsList')
                                            ]
                         ),
                        ])
        if not onlyT1:
            baw201.connect([(myLocalTCWF, segWF, [('outputspec.t2_average', 'inputspec.t2_average')])])

    # --- Resample atlas images into the subject's ACPC space. ---
    if 'warp_atlas_to_subject' in master_config['components']:
        ##
        ##~/src/NEP-build/bin/BRAINSResample
        # --warpTransform AtlasToSubjectPreBABC_Composite.h5
        #  --inputVolume  /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/Atlas/hncma-atlas.nii.gz
        #  --referenceVolume  /Shared/sinapse/CACHE/x20141001_KIDTEST_base_CACHE/singleSession_KID1_KT1/LandmarkInitialize/BROIAuto_cropped/Cropped_BCD_ACPC_Aligned.nii.gz
        #  !--outputVolume hncma.nii.gz
        #  !--interpolationMode NearestNeighbor
        #  !--pixelType short
        ##
        ##

        ## TODO : SHOULD USE BRAINSCut transform that was refined even further!

        BResample = dict()
        # Integer label maps: nearest-neighbour, short pixels.
        AtlasLabelMapsToResample = [
            'hncma_atlas',
            'template_WMPM2_labels',
            'template_nac_labels',
        ]
        for atlasImage in AtlasLabelMapsToResample:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                                                 'overwrite': True}
            BResample[atlasImage].inputs.pixelType = 'short'
            BResample[atlasImage].inputs.interpolationMode = 'NearestNeighbor'
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume')
            baw201.connect(inputsSpec, atlasImage, BResample[atlasImage], 'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage)

        # Binary masks: linearly resampled.
        AtlasBinaryMapsToResample = [
            'template_rightHemisphere',
            'template_leftHemisphere',
            'template_ventricles']
        for atlasImage in AtlasBinaryMapsToResample:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                                                 'overwrite': True}
            BResample[atlasImage].inputs.pixelType = 'binary'
            BResample[
                atlasImage].inputs.interpolationMode = 'Linear'  ## Conversion to distance map, so use linear to resample distance map
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume')
            baw201.connect(inputsSpec, atlasImage, BResample[atlasImage], 'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage)

        # BRAINSCut probability maps / spherical coordinates: float + linear.
        BRAINSCutAtlasImages = [
            'rho',
            'phi',
            'theta',
            'l_caudate_ProbabilityMap',
            'r_caudate_ProbabilityMap',
            'l_hippocampus_ProbabilityMap',
            'r_hippocampus_ProbabilityMap',
            'l_putamen_ProbabilityMap',
            'r_putamen_ProbabilityMap',
            'l_thalamus_ProbabilityMap',
            'r_thalamus_ProbabilityMap',
            'l_accumben_ProbabilityMap',
            'r_accumben_ProbabilityMap',
            'l_globus_ProbabilityMap',
            'r_globus_ProbabilityMap'
        ]
        for atlasImage in BRAINSCutAtlasImages:
            BResample[atlasImage] = pe.Node(interface=BRAINSResample(), name="BCUTBRAINSResample_" + atlasImage)
            BResample[atlasImage].plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                                                 'overwrite': True}
            BResample[atlasImage].inputs.pixelType = 'float'
            BResample[
                atlasImage].inputs.interpolationMode = 'Linear'  ## Conversion to distance map, so use linear to resample distance map
            BResample[atlasImage].inputs.outputVolume = atlasImage + ".nii.gz"

            baw201.connect(myLocalTCWF, 'outputspec.t1_average', BResample[atlasImage], 'referenceVolume')
            baw201.connect(atlasBCUTNode_W, atlasImage, BResample[atlasImage], 'inputVolume')
            baw201.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform',
                           BResample[atlasImage], 'warpTransform')
            baw201.connect(BResample[atlasImage], 'outputVolume', DataSink, 'WarpedAtlas2Subject.@' + atlasImage)

        # Split WM into left/right hemisphere masks using warped atlas masks.
        WhiteMatterHemisphereNode = pe.Node(interface=Function(function=CreateLeftRightWMHemispheres,
                                                               input_names=['BRAINLABELSFile',
                                                                            'HDCMARegisteredVentricleMaskFN',
                                                                            'LeftHemisphereMaskName',
                                                                            'RightHemisphereMaskName',
                                                                            'WM_LeftHemisphereFileName',
                                                                            'WM_RightHemisphereFileName'],
                                                               output_names=['WM_LeftHemisphereFileName',
                                                                             'WM_RightHemisphereFileName']),
                                            name="WhiteMatterHemisphere")
        WhiteMatterHemisphereNode.inputs.WM_LeftHemisphereFileName = "left_hemisphere_wm.nii.gz"
        WhiteMatterHemisphereNode.inputs.WM_RightHemisphereFileName = "right_hemisphere_wm.nii.gz"

        baw201.connect(myLocalBrainStemWF, 'outputspec.ouputTissuelLabelFilename',
                       WhiteMatterHemisphereNode, 'BRAINLABELSFile')
        baw201.connect(BResample['hncma_atlas'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'HDCMARegisteredVentricleMaskFN')
        baw201.connect(BResample['template_leftHemisphere'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'LeftHemisphereMaskName')
        baw201.connect(BResample['template_rightHemisphere'], 'outputVolume',
                       WhiteMatterHemisphereNode, 'RightHemisphereMaskName')

        baw201.connect(WhiteMatterHemisphereNode, 'WM_LeftHemisphereFileName',
                       DataSink, 'WarpedAtlas2Subject.@LeftHemisphereWM')
        baw201.connect(WhiteMatterHemisphereNode, 'WM_RightHemisphereFileName',
                       DataSink, 'WarpedAtlas2Subject.@RightHemisphereWM')

    # --- MALF whole-brain labeling (requires warp_atlas_to_subject; asserted above,
    #     since this block reads BResample and atlasBCDNode_S). ---
    if 'malf_2015_wholebrain' in master_config['components']:  ## HACK Do MALF labeling
        ## HACK FOR NOW SHOULD BE MORE ELEGANT FROM THE .config file
        BASE_DATA_GRABBER_DIR = '/Shared/johnsonhj/HDNI/ReferenceData/Neuromorphometrics/2012Subscription'

        if onlyT1:
            print("T1 only processing in baseline")
        else:
            print("Multimodal processing in baseline")
        myLocalMALF = CreateMALFWorkflow("MALF", onlyT1, master_config, BASE_DATA_GRABBER_DIR)
        baw201.connect(myLocalTCWF, 'outputspec.t1_average', myLocalMALF, 'inputspec.subj_t1_image')
        baw201.connect(myLocalTCWF, 'outputspec.t2_average', myLocalMALF, 'inputspec.subj_t2_image')
        baw201.connect(myLocalBrainStemWF, 'outputspec.ouputTissuelLabelFilename',
                       myLocalMALF, 'inputspec.subj_fixed_head_labels')
        baw201.connect(BResample['template_leftHemisphere'], 'outputVolume',
                       myLocalMALF, 'inputspec.subj_left_hemisphere')
        baw201.connect(myLocalLMIWF, 'outputspec.outputLandmarksInACPCAlignedSpace',
                       myLocalMALF, 'inputspec.subj_lmks')
        baw201.connect(atlasBCDNode_S, 'template_weights_50Lmks_wts',
                       myLocalMALF, 'inputspec.atlasWeightFilename')

        inputLabelFileMALFnameSpec = pe.Node(interface=IdentityInterface(fields=['labelBaseFilename']),
                                             run_without_submitting=True,
                                             name="inputLabelFileMALFnameSpec")
        baw201.connect(inputLabelFileMALFnameSpec, 'labelBaseFilename',
                       myLocalMALF, 'inputspec.labelBaseFilename')

        baw201.connect(myLocalMALF, 'outputspec.MALF_HDAtlas20_2015_label',
                       DataSink, 'TissueClassify.@MALF_HDAtlas20_2015_label')
        baw201.connect(myLocalMALF, 'outputspec.MALF_HDAtlas20_2015_CSFVBInjected_label',
                       DataSink, 'TissueClassify.@MALF_HDAtlas20_2015_CSFVBInjected_label')
        baw201.connect(myLocalMALF, 'outputspec.MALF_HDAtlas20_2015_fs_standard_label',
                       DataSink, 'TissueClassify.@MALF_HDAtlas20_2015_fs_standard_label')
        baw201.connect(myLocalMALF, 'outputspec.MALF_HDAtlas20_2015_lobar_label',
                       DataSink, 'TissueClassify.@MALF_HDAtlas20_2015_lobar_label')
        baw201.connect(myLocalMALF, 'outputspec.MALF_extended_snapshot',
                       DataSink, 'TissueClassify.@MALF_extended_snapshot')

    return baw201
def CreateLandmarkInitializeWorkflow(WFname, master_config, InterpolationMode, PostACPCAlignToAtlas, DoReverseInit, useEMSP=False, Debug=False):
    """
    Build a nipype workflow that ACPC-aligns a T1 volume and initializes the
    atlas-to-subject transform from constellation landmarks.

    Pipeline: BRAINSConstellationDetector (BCD) detects landmarks and resamples
    the input into ACPC space; BRAINSLandmarkInitializer (BLI) fits the
    atlas-to-subject initializer transform from the detected landmarks;
    BRAINSROIAuto crops the ACPC-aligned volume.

    :param WFname: name for the returned pe.Workflow
    :param master_config: dict; 'long_q' supplies the cluster queue for qsub args
    :param InterpolationMode: interpolation mode passed through to BCD
    :param PostACPCAlignToAtlas: when True, BCD also receives the atlas volume so
        the ACPC alignment is constrained by it (intended for the longitudinal
        phase where the "atlas" is this subject's own template); otherwise the
        alignment is defined purely by the landmark points
    :param DoReverseInit: when True, additionally build the subject-to-atlas
        initializer (BLI2Atlas) and resample the subject into atlas space
    :param useEMSP: when True, feed a pre-selected landmark file (EMSP) into BCD
    :param Debug: with DoReverseInit, also resample the atlas back into subject
        space (debugging aid only)
    :return: the constructed pe.Workflow

    Workflow inputs (inputspec): inputVolume, atlasLandmarkFilename,
    atlasWeightFilename, LLSModel, inputTemplateModel, atlasVolume, EMSP.
    Workflow outputs (outputspec): outputLandmarksInACPCAlignedSpace,
    outputResampledVolume, outputResampledCroppedVolume,
    outputLandmarksInInputSpace, writeBranded2DImage, outputTransform,
    outputMRML, atlasToSubjectTransform.
    """
    # NOTE: only the long queue is used by this workflow's cluster jobs.
    # (The original also read master_config['queue'] into an unused local.)
    CLUSTER_QUEUE_LONG = master_config['long_q']

    landmarkInitializeWF = pe.Workflow(name=WFname)

    #############
    inputsSpec = pe.Node(interface=IdentityInterface(fields=['inputVolume', 'atlasLandmarkFilename',
                                                             'atlasWeightFilename', 'LLSModel',
                                                             'inputTemplateModel', 'atlasVolume', 'EMSP']),
                         run_without_submitting=True, name='inputspec')
    #############
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['outputLandmarksInACPCAlignedSpace',
                                                              'outputResampledVolume',
                                                              'outputResampledCroppedVolume',
                                                              'outputLandmarksInInputSpace',
                                                              'writeBranded2DImage',
                                                              'outputTransform', 'outputMRML',
                                                              'atlasToSubjectTransform']),
                          run_without_submitting=True, name='outputspec')

    ########################################################
    # Run ACPC Detect on first T1 Image - Base Image
    ########################################################
    BCD = pe.Node(interface=BRAINSConstellationDetector(), name="BCD")
    many_cpu_BCD_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 4),
                                       'overwrite': True}
    BCD.plugin_args = many_cpu_BCD_options_dictionary
    ## Use program default
    BCD.inputs.inputTemplateModel = T1ACPCModelFile
    # BCD.inputs.outputVolume = "BCD_OUT" + "_ACPC_InPlace.nii.gz"  #$# T1AcpcImageList
    BCD.inputs.outputTransform = "BCD" + "_Original2ACPC_transform.h5"
    BCD.inputs.outputResampledVolume = "BCD" + "_ACPC.nii.gz"
    BCD.inputs.outputLandmarksInInputSpace = "BCD" + "_Original.fcsv"
    BCD.inputs.outputLandmarksInACPCAlignedSpace = "BCD" + "_ACPC_Landmarks.fcsv"
    BCD.inputs.writeBranded2DImage = "BCD" + "_Branded2DQCimage.png"
    # BCD.inputs.outputMRML = "BCD" + "_Scene.mrml"
    BCD.inputs.interpolationMode = InterpolationMode
    BCD.inputs.houghEyeDetectorMode = 1  # Look for dark eyes like on a T1 image, 0=Look for bright eyes like in a T2 image
    BCD.inputs.acLowerBound = 80.0  # Chop the data set 80mm below the AC PC point.

    # Entries below are of the form:
    landmarkInitializeWF.connect(inputsSpec, 'inputVolume', BCD, 'inputVolume')
    landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BCD, 'atlasLandmarkWeights')
    landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BCD, 'atlasLandmarks')
    landmarkInitializeWF.connect(inputsSpec, 'LLSModel', BCD, 'LLSModel')
    landmarkInitializeWF.connect(inputsSpec, 'inputTemplateModel', BCD, 'inputTemplateModel')

    # If EMSP, pre-selected landmarks are given, force to use.
    if useEMSP:
        print("*** Use pre-selected landmark file for Landmark Detection")
        landmarkInitializeWF.connect(inputsSpec, 'EMSP', BCD, 'inputLandmarksEMSP')

    # If the atlas volume is from this subject (i.e. after template building for
    # the longitudinal phase) then set this to True.  Otherwise, it is probably
    # best to let the ACPC alignment be fully defined by the landmark points.
    if PostACPCAlignToAtlas:
        landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', BCD, 'atlasVolume')

    ########################################################
    # Run BLI atlas_to_subject
    ########################################################
    BLI = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI")
    BLI.inputs.outputTransformFilename = "landmarkInitializer_atlas_to_subject_transform.h5"
    landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BLI, 'inputWeightFilename')
    landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI, 'inputMovingLandmarkFilename')
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInACPCAlignedSpace', BLI, 'inputFixedLandmarkFilename')

    ## This is for debugging purposes, and it is not intended for general use.
    if DoReverseInit:  # idiomatic truth test (was `== True`)
        ########################################################
        # Run BLI subject_to_atlas
        ########################################################
        BLI2Atlas = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI2Atlas")
        BLI2Atlas.inputs.outputTransformFilename = "landmarkInitializer_subject_to_atlas_transform.h5"
        landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BLI2Atlas, 'inputWeightFilename')
        landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI2Atlas, 'inputFixedLandmarkFilename')
        landmarkInitializeWF.connect(BCD, 'outputLandmarksInInputSpace', BLI2Atlas, 'inputMovingLandmarkFilename')

        # Resample the subject into atlas space using the reverse initializer.
        Resample2Atlas = pe.Node(interface=BRAINSResample(), name="Resample2Atlas")
        Resample2Atlas.inputs.interpolationMode = "Linear"
        Resample2Atlas.inputs.outputVolume = "subject2atlas.nii.gz"
        landmarkInitializeWF.connect(inputsSpec, 'inputVolume', Resample2Atlas, 'inputVolume')
        landmarkInitializeWF.connect(BLI2Atlas, 'outputTransformFilename', Resample2Atlas, 'warpTransform')

    if DoReverseInit and Debug:  # idiomatic boolean test (was `(x == True) and (y == True)`)
        # Debug-only round trip: resample the atlas back into subject space.
        ResampleFromAtlas = pe.Node(interface=BRAINSResample(), name="ResampleFromAtlas")
        ResampleFromAtlas.inputs.interpolationMode = "Linear"
        ResampleFromAtlas.inputs.outputVolume = "atlas2subject.nii.gz"
        landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', ResampleFromAtlas, 'inputVolume')
        landmarkInitializeWF.connect(BLI, 'outputTransformFilename', ResampleFromAtlas, 'warpTransform')
        landmarkInitializeWF.connect(BCD, 'outputResampledVolume', ResampleFromAtlas, 'referenceVolume')

    # Auto-crop the ACPC-aligned volume around the detected head region.
    BROIAUTO = pe.Node(interface=BRAINSROIAuto(), name="BROIAuto_cropped")
    many_cpu_BROIAUTO_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 4),
                                            'overwrite': True}
    BROIAUTO.plugin_args = many_cpu_BROIAUTO_options_dictionary
    BROIAUTO.inputs.outputVolume = "Cropped_BCD_ACPC_Aligned.nii.gz"
    BROIAUTO.inputs.ROIAutoDilateSize = 10
    BROIAUTO.inputs.cropOutput = True
    landmarkInitializeWF.connect(BCD, 'outputResampledVolume', BROIAUTO, 'inputVolume')
    landmarkInitializeWF.connect(BROIAUTO, 'outputVolume', outputsSpec, 'outputResampledCroppedVolume')

    # Fan BCD/BLI results out to the workflow's public outputs.
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInACPCAlignedSpace', outputsSpec, 'outputLandmarksInACPCAlignedSpace')
    landmarkInitializeWF.connect(BCD, 'outputResampledVolume', outputsSpec, 'outputResampledVolume')
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInInputSpace', outputsSpec, 'outputLandmarksInInputSpace')
    landmarkInitializeWF.connect(BCD, 'outputTransform', outputsSpec, 'outputTransform')
    landmarkInitializeWF.connect(BCD, 'outputMRML', outputsSpec, 'outputMRML')
    landmarkInitializeWF.connect(BCD, 'writeBranded2DImage', outputsSpec, 'writeBranded2DImage')
    landmarkInitializeWF.connect(BLI, 'outputTransformFilename', outputsSpec, 'atlasToSubjectTransform')

    return landmarkInitializeWF
def CreateJointFusionWorkflow(WFname, onlyT1, master_config, runFixFusionLabelMap=True):
    """
    Build a nipype workflow that produces a multi-atlas JointFusion label map
    for one session.

    For every atlas subject in the joint-fusion atlas DB: initialize an
    atlas-to-subject transform from landmarks (BRAINSLandmarkInitializer),
    refine it with a SyN ANTs registration, warp the atlas images and label
    maps into subject space, then fuse all warped atlases with
    ants.AntsJointFusion.  The fused label map is optionally post-processed
    (CSF/venous-blood injection), dust-cleaned, recoded to FreeSurfer-style
    labels, and measured (label/lobe volumes).

    :param WFname: name for the returned pe.Workflow
    :param onlyT1: when True, registration and fusion use only T1 (n_modality=1);
        otherwise T2 is resampled into T1 space and used for registration
    :param master_config: dict; keys used here: 'queue', 'long_q',
        'jointfusion_atlas_db_base', 'relabel2lobes_filename'
    :param runFixFusionLabelMap: when True, inject surface CSF / venous blood
        into the fused label map before dust cleanup
    :return: JointFusionWF (the constructed pe.Workflow)
    """
    from nipype.interfaces import ants

    # Number of imaging modalities fed to registration (T1 only, or T1+T2).
    if onlyT1:
        n_modality = 1
    else:
        n_modality = 2
    CLUSTER_QUEUE = master_config['queue']
    CLUSTER_QUEUE_LONG = master_config['long_q']

    JointFusionWF = pe.Workflow(name=WFname)

    inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_t1_image',  # Desired image to create label map for
                                                             'subj_t2_image',  # Desired image to create label map for
                                                             'subj_lmks',  # The landmarks corresponding to t1_image
                                                             'subj_fixed_head_labels',  # The fixed head labels from BABC
                                                             'subj_posteriors',  # The BABC posteriors
                                                             'subj_left_hemisphere',  # The warped left hemisphere mask
                                                             'atlasWeightFilename',  # The static weights file name
                                                             'labelBaseFilename'
                                                             # Atlas label base name ex) neuro_lbls.nii.gz
                                                             ]),
                         run_without_submitting=True,
                         name='inputspec')
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['JointFusion_HDAtlas20_2015_label',
                                                              'JointFusion_HDAtlas20_2015_CSFVBInjected_label',
                                                              'JointFusion_HDAtlas20_2015_fs_standard_label',
                                                              'JointFusion_HDAtlas20_2015_lobe_label',
                                                              'JointFusion_extended_snapshot',
                                                              'JointFusion_HDAtlas20_2015_dustCleaned_label',
                                                              'JointFusion_volumes_csv',
                                                              'JointFusion_volumes_json',
                                                              'JointFusion_lobe_volumes_csv',
                                                              'JointFusion_lobe_volumes_json']),
                          run_without_submitting=True,
                          name='outputspec')

    from collections import OrderedDict  # Need OrderedDict internally to ensure consistent ordering

    # Per-atlas-subject node registries, keyed by atlas subject id.
    BLICreator = OrderedDict()
    A2SantsRegistrationPreJointFusion_SyN = OrderedDict()
    movingROIAuto = OrderedDict()  # NOTE(review): populated only in commented-out code below
    labelMapResample = OrderedDict()
    NewlabelMapResample = OrderedDict()

    jointFusion_atlas_mergeindex = 0
    merge_input_offset = 1  # Merge nodes are indexed from 1, not zero!

    """ multimodal ants registration if t2 exists """
    sessionMakeMultimodalInput = pe.Node(Function(function=MakeVector,
                                                  input_names=['inFN1', 'inFN2', 'jointFusion'],
                                                  output_names=['outFNs']),
                                         run_without_submitting=True, name="sessionMakeMultimodalInput")
    sessionMakeMultimodalInput.inputs.jointFusion = False
    JointFusionWF.connect(inputsSpec, 'subj_t1_image', sessionMakeMultimodalInput, 'inFN1')

    """
    T2 resample to T1 average image
    :: BRAINSABC changed its behavior to retain image's original spacing & origin
    :: Since antsJointFusion only works for the identical origin images for targets,
    :: Resampling is placed at this stage
    """
    subjectT2Resample = pe.Node(interface=BRAINSResample(), name="BRAINSResample_T2_forAntsJointFusion")
    if not onlyT1:
        subjectT2Resample.plugin_args = {'qsub_args': modify_qsub_args(master_config['queue'], 1, 1, 1),
                                         'overwrite': True}
        subjectT2Resample.inputs.pixelType = 'short'
        subjectT2Resample.inputs.interpolationMode = 'Linear'
        subjectT2Resample.inputs.outputVolume = "t2_resampled_in_t1.nii.gz"
        # subjectT2Resample.inputs.warpTransform= "Identity" # Default is "Identity"
        JointFusionWF.connect(inputsSpec, 'subj_t1_image', subjectT2Resample, 'referenceVolume')
        JointFusionWF.connect(inputsSpec, 'subj_t2_image', subjectT2Resample, 'inputVolume')
        JointFusionWF.connect(subjectT2Resample, 'outputVolume', sessionMakeMultimodalInput, 'inFN2')
    else:
        pass

    # print('jointFusion_atlas_db_base')
    # NOTE(review): debug prints of the whole master_config left in place; consider logging instead.
    print("master_config")
    print(master_config)
    print("master_config['jointfusion_atlas_db_base']")
    print((master_config['jointfusion_atlas_db_base']))
    # Read the atlas DB: maps atlas subject id -> dict of t1/t2/label/lmks/registration_mask paths
    # (presumably; schema comes from readMalfAtlasDbBase — confirm against its definition).
    jointFusionAtlasDict = readMalfAtlasDbBase(master_config['jointfusion_atlas_db_base'])
    number_of_atlas_sources = len(jointFusionAtlasDict)
    jointFusionAtlases = OrderedDict()
    atlasMakeMultimodalInput = OrderedDict()
    t2Resample = OrderedDict()
    # Merge nodes collecting the warped atlas label maps (original and FSWM variants).
    warpedAtlasLblMergeNode = pe.Node(interface=Merge(number_of_atlas_sources), name="LblMergeAtlas")
    NewwarpedAtlasLblMergeNode = pe.Node(interface=Merge(number_of_atlas_sources), name="fswmLblMergeAtlas")
    # "HACK NOT to use T2 for JointFusion only"
    # warpedAtlasesMergeNode = pe.Node(interface=Merge(number_of_atlas_sources*n_modality),name="MergeAtlases")
    warpedAtlasesMergeNode = pe.Node(interface=Merge(number_of_atlas_sources * 1), name="MergeAtlases")

    ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
    UseRegistrationMasking = True
    if UseRegistrationMasking == True:
        from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto

        fixedROIAuto = pe.Node(interface=BRAINSROIAuto(), name="fixedROIAUTOMask")
        fixedROIAuto.inputs.ROIAutoDilateSize = 10
        fixedROIAuto.inputs.outputROIMaskVolume = "fixedImageROIAutoMask.nii.gz"
        JointFusionWF.connect(inputsSpec, 'subj_t1_image', fixedROIAuto, 'inputVolume')

    # Build the per-atlas registration + resampling sub-graph for every atlas subject.
    for jointFusion_atlas_subject in list(jointFusionAtlasDict.keys()):
        ## Need DataGrabber Here For the Atlas
        jointFusionAtlases[jointFusion_atlas_subject] = pe.Node(interface=IdentityInterface(
            fields=['t1', 't2', 'label', 'lmks', 'registration_mask']),
            name='jointFusionAtlasInput' + jointFusion_atlas_subject)
        jointFusionAtlases[jointFusion_atlas_subject].inputs.t1 = jointFusionAtlasDict[jointFusion_atlas_subject]['t1']
        jointFusionAtlases[jointFusion_atlas_subject].inputs.t2 = jointFusionAtlasDict[jointFusion_atlas_subject]['t2']
        jointFusionAtlases[jointFusion_atlas_subject].inputs.label = jointFusionAtlasDict[jointFusion_atlas_subject][
            'label']
        jointFusionAtlases[jointFusion_atlas_subject].inputs.lmks = jointFusionAtlasDict[jointFusion_atlas_subject][
            'lmks']
        jointFusionAtlases[jointFusion_atlas_subject].inputs.registration_mask = \
            jointFusionAtlasDict[jointFusion_atlas_subject]['registration_mask']

        ## Create BLI first
        ########################################################
        # Run BLI atlas_to_subject
        ########################################################
        BLICreator[jointFusion_atlas_subject] = pe.Node(interface=BRAINSLandmarkInitializer(),
                                                        name="BLI_" + jointFusion_atlas_subject)
        BLICreator[
            jointFusion_atlas_subject].inputs.outputTransformFilename = "landmarkInitializer_{0}_to_subject_transform.h5".format(
            jointFusion_atlas_subject)
        JointFusionWF.connect(inputsSpec, 'atlasWeightFilename',
                              BLICreator[jointFusion_atlas_subject], 'inputWeightFilename')
        JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'lmks',
                              BLICreator[jointFusion_atlas_subject], 'inputMovingLandmarkFilename')
        JointFusionWF.connect(inputsSpec, 'subj_lmks',
                              BLICreator[jointFusion_atlas_subject], 'inputFixedLandmarkFilename')

        ##### Initialize with ANTS Transform For SyN
        currentAtlasToSubjectantsRegistration = 'SyN_AtlasToSubjectANTsPreJointFusion_' + jointFusion_atlas_subject
        A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject] = pe.Node(interface=ants.Registration(),
                                                                                   name=currentAtlasToSubjectantsRegistration)
        many_cpu_ANTsSyN_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE_LONG, 4, 2, 16),
                                               'overwrite': True}
        A2SantsRegistrationPreJointFusion_SyN[
            jointFusion_atlas_subject].plugin_args = many_cpu_ANTsSyN_options_dictionary
        if onlyT1:
            JFregistrationTypeDescription = "FiveStageAntsRegistrationT1Only"
        else:
            JFregistrationTypeDescription = "FiveStageAntsRegistrationMultiModal"
        # Shared project helper configures the full five-stage ANTs parameter set.
        CommonANTsRegistrationSettings(
            antsRegistrationNode=A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],
            registrationTypeDescription=JFregistrationTypeDescription,
            output_transform_prefix=jointFusion_atlas_subject + '_ToSubjectPreJointFusion_SyN',
            output_warped_image=jointFusion_atlas_subject + '_2subject.nii.gz',
            output_inverse_warped_image=None,  # NO NEED FOR THIS
            save_state=None,  # NO NEED FOR THIS
            invert_initial_moving_transform=False,
            initial_moving_transform=None)

        ## if using Registration masking, then do ROIAuto on fixed and moving images and connect to registraitons
        if UseRegistrationMasking == True:
            from nipype.interfaces.semtools.segmentation.specialized import BRAINSROIAuto

            JointFusionWF.connect(fixedROIAuto, 'outputROIMaskVolume',
                                  A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],
                                  'fixed_image_masks')
            # JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels',
            #                       A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'fixed_image_masks')

            # NOTE: Moving image mask can be taken from Atlas directly so that it does not need to be read in
            # movingROIAuto[jointFusion_atlas_subject] = pe.Node(interface=BRAINSROIAuto(), name="movingROIAUTOMask_"+jointFusion_atlas_subject)
            # movingROIAuto.inputs.ROIAutoDilateSize=10
            # movingROIAuto[jointFusion_atlas_subject].inputs.outputROIMaskVolume = "movingImageROIAutoMask.nii.gz"
            # JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't1', movingROIAuto[jointFusion_atlas_subject],'inputVolume')
            # JointFusionWF.connect(movingROIAuto[jointFusion_atlas_subject], 'outputROIMaskVolume',A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'moving_image_masks')
            JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'registration_mask',
                                  A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],
                                  'moving_image_masks')

        JointFusionWF.connect(BLICreator[jointFusion_atlas_subject], 'outputTransformFilename',
                              A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],
                              'initial_moving_transform')

        """ make multimodal input for atlases """
        atlasMakeMultimodalInput[jointFusion_atlas_subject] = pe.Node(Function(function=MakeVector,
                                                                               input_names=['inFN1', 'inFN2',
                                                                                            'jointFusion'],
                                                                               output_names=['outFNs']),
                                                                      run_without_submitting=True,
                                                                      name="atlasMakeMultimodalInput" + jointFusion_atlas_subject)
        atlasMakeMultimodalInput[jointFusion_atlas_subject].inputs.jointFusion = False
        JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't1',
                              atlasMakeMultimodalInput[jointFusion_atlas_subject], 'inFN1')
        if not onlyT1:
            JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't2',
                                  atlasMakeMultimodalInput[jointFusion_atlas_subject], 'inFN2')
        else:
            pass

        JointFusionWF.connect(sessionMakeMultimodalInput, 'outFNs',
                              A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'fixed_image')
        JointFusionWF.connect(atlasMakeMultimodalInput[jointFusion_atlas_subject], 'outFNs',
                              A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'moving_image')
        "HACK NOT to use T2 for JointFusion"
        # JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],'warped_image',
        #                       warpedAtlasesMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex*n_modality) )
        JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject], 'warped_image',
                              warpedAtlasesMergeNode,
                              'in' + str(merge_input_offset + jointFusion_atlas_mergeindex * 1))

        """ Original t2 resampling """
        # Runs only when n_modality == 2 (range(1, 1) is empty for T1-only).
        for modality_index in range(1, n_modality):
            t2Resample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),
                                                            name="resampledT2" + jointFusion_atlas_subject)
            many_cpu_t2Resample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1),
                                                      'overwrite': True}
            t2Resample[jointFusion_atlas_subject].plugin_args = many_cpu_t2Resample_options_dictionary
            t2Resample[jointFusion_atlas_subject].inputs.num_threads = -1
            t2Resample[jointFusion_atlas_subject].inputs.dimension = 3
            t2Resample[jointFusion_atlas_subject].inputs.output_image = jointFusion_atlas_subject + '_t2.nii.gz'
            t2Resample[jointFusion_atlas_subject].inputs.interpolation = 'BSpline'
            t2Resample[jointFusion_atlas_subject].inputs.default_value = 0
            t2Resample[jointFusion_atlas_subject].inputs.invert_transform_flags = [False]
            JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],
                                  'composite_transform',
                                  t2Resample[jointFusion_atlas_subject], 'transforms')
            JointFusionWF.connect(inputsSpec, 'subj_t1_image',
                                  t2Resample[jointFusion_atlas_subject], 'reference_image')
            JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 't2',
                                  t2Resample[jointFusion_atlas_subject], 'input_image')
            "HACK NOT to use T2 for JointFusion only"
            # JointFusionWF.connect(t2Resample[jointFusion_atlas_subject],'output_image',
            #                       warpedAtlasesMergeNode,'in'+str(merge_input_offset + jointFusion_atlas_mergeindex*n_modality+modality_index) )

        """ Original labelmap resampling """
        # MultiLabel interpolation preserves integer label values during warping.
        labelMapResample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),
                                                              name="resampledLabel" + jointFusion_atlas_subject)
        many_cpu_labelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1),
                                                        'overwrite': True}
        labelMapResample[jointFusion_atlas_subject].plugin_args = many_cpu_labelMapResample_options_dictionary
        labelMapResample[jointFusion_atlas_subject].inputs.num_threads = -1
        labelMapResample[jointFusion_atlas_subject].inputs.dimension = 3
        labelMapResample[
            jointFusion_atlas_subject].inputs.output_image = jointFusion_atlas_subject + '_2_subj_lbl.nii.gz'
        labelMapResample[jointFusion_atlas_subject].inputs.interpolation = 'MultiLabel'
        labelMapResample[jointFusion_atlas_subject].inputs.default_value = 0
        labelMapResample[jointFusion_atlas_subject].inputs.invert_transform_flags = [False]
        JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],
                              'composite_transform',
                              labelMapResample[jointFusion_atlas_subject], 'transforms')
        JointFusionWF.connect(inputsSpec, 'subj_t1_image',
                              labelMapResample[jointFusion_atlas_subject], 'reference_image')
        JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'label',
                              labelMapResample[jointFusion_atlas_subject], 'input_image')
        JointFusionWF.connect(labelMapResample[jointFusion_atlas_subject], 'output_image',
                              warpedAtlasLblMergeNode,
                              'in' + str(merge_input_offset + jointFusion_atlas_mergeindex))

        ### New labelmap resampling
        NewlabelMapResample[jointFusion_atlas_subject] = pe.Node(interface=ants.ApplyTransforms(),
                                                                 name="FSWM_WLABEL_" + jointFusion_atlas_subject)
        many_cpu_NewlabelMapResample_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 1, 1, 1),
                                                           'overwrite': True}
        NewlabelMapResample[jointFusion_atlas_subject].plugin_args = many_cpu_NewlabelMapResample_options_dictionary
        NewlabelMapResample[jointFusion_atlas_subject].inputs.num_threads = -1
        NewlabelMapResample[jointFusion_atlas_subject].inputs.dimension = 3
        NewlabelMapResample[
            jointFusion_atlas_subject].inputs.output_image = jointFusion_atlas_subject + 'fswm_2_subj_lbl.nii.gz'
        NewlabelMapResample[jointFusion_atlas_subject].inputs.interpolation = 'MultiLabel'
        NewlabelMapResample[jointFusion_atlas_subject].inputs.default_value = 0
        NewlabelMapResample[jointFusion_atlas_subject].inputs.invert_transform_flags = [False]
        JointFusionWF.connect(A2SantsRegistrationPreJointFusion_SyN[jointFusion_atlas_subject],
                              'composite_transform',
                              NewlabelMapResample[jointFusion_atlas_subject], 'transforms')
        JointFusionWF.connect(inputsSpec, 'subj_t1_image',
                              NewlabelMapResample[jointFusion_atlas_subject], 'reference_image')
        JointFusionWF.connect(jointFusionAtlases[jointFusion_atlas_subject], 'label',
                              NewlabelMapResample[jointFusion_atlas_subject], 'input_image')
        JointFusionWF.connect(NewlabelMapResample[jointFusion_atlas_subject], 'output_image',
                              NewwarpedAtlasLblMergeNode,
                              'in' + str(merge_input_offset + jointFusion_atlas_mergeindex))

        jointFusion_atlas_mergeindex += 1

    ## Now work on cleaning up the label maps
    from .FixLabelMapsTools import FixLabelMapFromNeuromorphemetrics2012
    from .FixLabelMapsTools import RecodeLabelMap

    ### Original NeuroMorphometrica merged fusion
    jointFusion = pe.Node(interface=ants.AntsJointFusion(), name="AntsJointFusion")
    many_cpu_JointFusion_options_dictionary = {'qsub_args': modify_qsub_args(CLUSTER_QUEUE, 10, 8, 16),
                                               'overwrite': True}
    jointFusion.plugin_args = many_cpu_JointFusion_options_dictionary
    jointFusion.inputs.num_threads = -1
    jointFusion.inputs.dimension = 3
    jointFusion.inputs.search_radius = [3]
    # jointFusion.inputs.method='Joint[0.1,2]'
    jointFusion.inputs.out_label_fusion = 'JointFusion_HDAtlas20_2015_label.nii.gz'
    # NOTE(review): only one source may feed 'mask_image' (nipype raises on a
    # second connection to the same input); the ROIAuto mask is the live one —
    # confirm against upstream BRAINSTools.
    # JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels', jointFusion, 'mask_image')
    JointFusionWF.connect(fixedROIAuto, 'outputROIMaskVolume', jointFusion, 'mask_image')
    JointFusionWF.connect(warpedAtlasLblMergeNode, 'out', jointFusion, 'atlas_segmentation_image')

    AdjustMergeListNode = pe.Node(Function(function=adjustMergeList,
                                           input_names=['allList', 'n_modality'],
                                           output_names=['out']),
                                  name="AdjustMergeListNode")
    "*** HACK JointFusion only uses T1"
    # AdjustMergeListNode.inputs.n_modality = n_modality
    AdjustMergeListNode.inputs.n_modality = 1
    JointFusionWF.connect(warpedAtlasesMergeNode, 'out', AdjustMergeListNode, 'allList')
    JointFusionWF.connect(AdjustMergeListNode, 'out', jointFusion, 'atlas_image')

    # NOTE(review): AdjustTargetImageListNode is created/configured but never
    # connected to anything in this function — apparently dead; confirm.
    AdjustTargetImageListNode = pe.Node(Function(function=adjustMergeList,
                                                 input_names=['allList', 'n_modality'],
                                                 output_names=['out']),
                                        name="AdjustTargetImageListNode")
    AdjustTargetImageListNode.inputs.n_modality = n_modality
    "*** HACK JointFusion only uses T1"
    """
    Once JointFusion works with T2 properly,
    delete sessionMakeListSingleModalInput and use sessionMakeMultimodalInput instead
    """
    sessionMakeListSingleModalInput = pe.Node(Function(function=MakeVector,
                                                       input_names=['inFN1', 'inFN2', 'jointFusion'],
                                                       output_names=['outFNs']),
                                              run_without_submitting=True,
                                              name="sessionMakeListSingleModalInput")
    sessionMakeListSingleModalInput.inputs.jointFusion = False
    JointFusionWF.connect(inputsSpec, 'subj_t1_image', sessionMakeListSingleModalInput, 'inFN1')
    JointFusionWF.connect(sessionMakeListSingleModalInput, 'outFNs', jointFusion, 'target_image')
    JointFusionWF.connect(jointFusion, 'out_label_fusion', outputsSpec, 'JointFusion_HDAtlas20_2015_label')

    ## We need to recode values to ensure that the labels match FreeSurer as close as possible by merging
    ## some labels together to standard FreeSurfer confenventions (i.e. for WMQL)
    # (source label id, FreeSurfer-style target label id) pairs.
    RECODE_LABELS_2_Standard_FSWM = [
        (15071, 47), (15072, 47), (15073, 47), (15145, 1011), (15157, 1011), (15161, 1011), (15179, 1012),
        (15141, 1014), (15151, 1017), (15163, 1018), (15165, 1019), (15143, 1027), (15191, 1028), (15193, 1028),
        (15185, 1030), (15201, 1030), (15175, 1031), (15195, 1031), (15173, 1035), (15144, 2011), (15156, 2011),
        (15160, 2011), (15178, 2012), (15140, 2014), (15150, 2017), (15162, 2018), (15164, 2019), (15142, 2027),
        (15190, 2028), (15192, 2028), (15184, 2030), (15174, 2031), (15194, 2031), (15172, 2035), (15200, 2030)]
    ## def RecodeLabelMap(InputFileName,OutputFileName,RECODE_TABLE):
    RecodeToStandardFSWM = pe.Node(Function(function=RecodeLabelMap,
                                            input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'],
                                            output_names=['OutputFileName']),
                                   name="RecodeToStandardFSWM")
    RecodeToStandardFSWM.inputs.RECODE_TABLE = RECODE_LABELS_2_Standard_FSWM
    RecodeToStandardFSWM.inputs.OutputFileName = 'JointFusion_HDAtlas20_2015_fs_standard_label.nii.gz'
    JointFusionWF.connect(RecodeToStandardFSWM, 'OutputFileName',
                          outputsSpec, 'JointFusion_HDAtlas20_2015_fs_standard_label')

    ## JointFusion_SNAPSHOT_WRITER for Segmented result checking:
    # JointFusion_SNAPSHOT_WRITERNodeName = "JointFusion_ExtendedJointFusion_SNAPSHOT_WRITER"
    # JointFusion_SNAPSHOT_WRITER = pe.Node(interface=BRAINSSnapShotWriter(), name=JointFusion_SNAPSHOT_WRITERNodeName)
    # JointFusion_SNAPSHOT_WRITER.inputs.outputFilename = 'JointFusion_HDAtlas20_2015_CSFVBInjected_label.png'  # output specification
    # JointFusion_SNAPSHOT_WRITER.inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0]
    # JointFusion_SNAPSHOT_WRITER.inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22]
    # JointFusionWF.connect(JointFusion_SNAPSHOT_WRITER,'outputFilename',outputsSpec,'JointFusion_extended_snapshot')

    myLocalDustCleanup = CreateDustCleanupWorkflow("DUST_CLEANUP", onlyT1, master_config)
    JointFusionWF.connect(inputsSpec, 'subj_t1_image', myLocalDustCleanup, 'inputspec.subj_t1_image')
    if not onlyT1:
        JointFusionWF.connect(subjectT2Resample, 'outputVolume', myLocalDustCleanup, 'inputspec.subj_t2_image')

    if runFixFusionLabelMap:
        ## post processing of jointfusion
        injectSurfaceCSFandVBIntoLabelMap = pe.Node(Function(function=FixLabelMapFromNeuromorphemetrics2012,
                                                             input_names=['fusionFN', 'FixedHeadFN',
                                                                          'posteriorListOfTuples',
                                                                          'LeftHemisphereFN', 'outFN',
                                                                          'OUT_DICT'],
                                                             output_names=['fixedFusionLabelFN']),
                                                    name="injectSurfaceCSFandVBIntoLabelMap")
        injectSurfaceCSFandVBIntoLabelMap.inputs.outFN = 'JointFusion_HDAtlas20_2015_CSFVBInjected_label.nii.gz'
        from collections import OrderedDict  # Need OrderedDict internally to ensure consistent ordering
        # FreeSurfer-style label ids used when injecting CSF/venous blood.
        FREESURFER_DICT = OrderedDict({'BRAINSTEM': 16, 'RH_CSF': 24, 'LH_CSF': 24, 'BLOOD': 15000, 'UNKNOWN': 999,
                                       'CONNECTED': [11, 12, 13, 9, 17, 26, 50, 51, 52, 48, 53, 58]
                                       })
        injectSurfaceCSFandVBIntoLabelMap.inputs.OUT_DICT = FREESURFER_DICT
        JointFusionWF.connect(jointFusion, 'out_label_fusion', injectSurfaceCSFandVBIntoLabelMap, 'fusionFN')
        JointFusionWF.connect(inputsSpec, 'subj_fixed_head_labels',
                              injectSurfaceCSFandVBIntoLabelMap, 'FixedHeadFN')
        JointFusionWF.connect(inputsSpec, 'subj_posteriors',
                              injectSurfaceCSFandVBIntoLabelMap, 'posteriorListOfTuples')
        JointFusionWF.connect(inputsSpec, 'subj_left_hemisphere',
                              injectSurfaceCSFandVBIntoLabelMap, 'LeftHemisphereFN')
        JointFusionWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN',
                              myLocalDustCleanup, 'inputspec.subj_label_atlas')
        JointFusionWF.connect(injectSurfaceCSFandVBIntoLabelMap, 'fixedFusionLabelFN',
                              outputsSpec, 'JointFusion_HDAtlas20_2015_CSFVBInjected_label')
        JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label',
                              RecodeToStandardFSWM, 'InputFileName')
        JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label',
                              outputsSpec, 'JointFusion_HDAtlas20_2015_dustCleaned_label')
        # JointFusionWF.connect([(inputsSpec, JointFusion_SNAPSHOT_WRITER, [('subj_t1_image','inputVolumes')]),
        #                        (injectSurfaceCSFandVBIntoLabelMap, JointFusion_SNAPSHOT_WRITER,
        #                         [('fixedFusionLabelFN', 'inputBinaryVolumes')])
        #                        ])
    else:
        # No CSF/VB injection: feed the raw fused label map straight into dust cleanup.
        JointFusionWF.connect(jointFusion, 'output_label_image',
                              myLocalDustCleanup, 'inputspec.subj_label_atlas')
        JointFusionWF.connect(jointFusion, 'output_label_image',
                              outputsSpec, 'JointFusion_HDAtlas20_2015_CSFVBInjected_label')
        JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label',
                              RecodeToStandardFSWM, 'InputFileName')
        JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label',
                              outputsSpec, 'JointFusion_HDAtlas20_2015_dustCleaned_label')
        # JointFusionWF.connect([(inputsSpec, JointFusion_SNAPSHOT_WRITER, [('subj_t1_image','inputVolumes')]),
        #                        (jointFusion, JointFusion_SNAPSHOT_WRITER,
        #                         [('output_label_image', 'inputBinaryVolumes')])
        #                        ])

    """
    Compute label volumes
    """
    computeLabelVolumes = CreateVolumeMeasureWorkflow("LabelVolume", master_config)
    JointFusionWF.connect(inputsSpec, 'subj_t1_image',
                          computeLabelVolumes, 'inputspec.subj_t1_image')
    JointFusionWF.connect(myLocalDustCleanup, 'outputspec.JointFusion_HDAtlas20_2015_dustCleaned_label',
                          computeLabelVolumes, 'inputspec.subj_label_image')
    JointFusionWF.connect(computeLabelVolumes, 'outputspec.csvFilename',
                          outputsSpec, 'JointFusion_volumes_csv')
    JointFusionWF.connect(computeLabelVolumes, 'outputspec.jsonFilename',
                          outputsSpec, 'JointFusion_volumes_json')

    ## Lobe Pacellation by recoding
    if master_config['relabel2lobes_filename'] != None:
        # print("Generate relabeled version based on {0}".format(master_config['relabel2lobes_filename']))
        RECODE_LABELS_2_LobePacellation = readRecodingList(master_config['relabel2lobes_filename'])
        RecordToFSLobes = pe.Node(Function(function=RecodeLabelMap,
                                           input_names=['InputFileName', 'OutputFileName', 'RECODE_TABLE'],
                                           output_names=['OutputFileName']),
                                  name="RecordToFSLobes")
        RecordToFSLobes.inputs.RECODE_TABLE = RECODE_LABELS_2_LobePacellation
        RecordToFSLobes.inputs.OutputFileName = 'JointFusion_HDAtlas20_2015_lobe_label.nii.gz'
        JointFusionWF.connect(RecodeToStandardFSWM, 'OutputFileName', RecordToFSLobes, 'InputFileName')
        JointFusionWF.connect(RecordToFSLobes, 'OutputFileName',
                              outputsSpec, 'JointFusion_HDAtlas20_2015_lobe_label')

        """
        Compute lobe volumes
        """
        computeLobeVolumes = CreateVolumeMeasureWorkflow("LobeVolume", master_config)
        JointFusionWF.connect(inputsSpec, 'subj_t1_image',
                              computeLobeVolumes, 'inputspec.subj_t1_image')
        JointFusionWF.connect(RecordToFSLobes, 'OutputFileName',
                              computeLobeVolumes, 'inputspec.subj_label_image')
        JointFusionWF.connect(computeLobeVolumes, 'outputspec.csvFilename',
                              outputsSpec, 'JointFusion_lobe_volumes_csv')
        JointFusionWF.connect(computeLobeVolumes, 'outputspec.jsonFilename',
                              outputsSpec, 'JointFusion_lobe_volumes_json')

    return JointFusionWF
def CreateLandmarkInitializeWorkflow(WFname, InterpolationMode, DoReverseInit=False, debug=False):
    """Build a nipype sub-workflow that ACPC-aligns a T1 volume and
    initializes the atlas-to-subject transform from detected landmarks.

    Pipeline (as wired below):
      * BRAINSConstellationDetector (BCD) detects AC/PC landmarks on the
        input volume and writes an ACPC-resampled volume plus landmark files.
      * BRAINSLandmarkInitializer (BLI) estimates the atlas-to-subject
        transform from the atlas landmarks and BCD's ACPC-space landmarks.
      * BRAINSROIAuto crops the ACPC-aligned volume.

    :param WFname: name for the returned ``pe.Workflow``.
    :param InterpolationMode: interpolation mode handed to BCD's resampling.
    :param DoReverseInit: when truthy, also build the reverse
        (subject-to-atlas) initialization branch.  This branch is for
        debugging purposes and is not intended for general use.
    :param debug: when truthy together with ``DoReverseInit``, additionally
        resample the atlas into subject space for visual inspection.
    :return: the assembled ``pe.Workflow``.
    """
    landmarkInitializeWF = pe.Workflow(name=WFname)

    #############
    inputsSpec = pe.Node(interface=IdentityInterface(fields=['inputVolume',
                                                             'atlasLandmarkFilename',
                                                             'atlasWeightFilename',
                                                             'LLSModel',
                                                             'inputTemplateModel',
                                                             'atlasVolume']),
                         run_without_submitting=True,
                         name='inputspec')
    #############
    outputsSpec = pe.Node(interface=IdentityInterface(fields=['outputLandmarksInACPCAlignedSpace',
                                                              'outputResampledVolume',
                                                              'outputResampledCroppedVolume',
                                                              'outputLandmarksInInputSpace',
                                                              'writeBranded2DImage',
                                                              'outputTransform',
                                                              'outputMRML',
                                                              'atlasToSubjectTransform']),
                          run_without_submitting=True,
                          name='outputspec')

    ########################################################
    # Run ACPC Detect on first T1 Image - Base Image
    ########################################################
    BCD = pe.Node(interface=BRAINSConstellationDetector(), name="BCD")
    ## Use program default BCD.inputs.inputTemplateModel = T1ACPCModelFile
    # BCD.inputs.outputVolume = "BCD_OUT" + "_ACPC_InPlace.nii.gz" #$# T1AcpcImageList
    BCD.inputs.outputTransform = "BCD" + "_Original2ACPC_transform.h5"
    BCD.inputs.outputResampledVolume = "BCD" + "_ACPC.nii.gz"
    BCD.inputs.outputLandmarksInInputSpace = "BCD" + "_Original.fcsv"
    BCD.inputs.outputLandmarksInACPCAlignedSpace = "BCD" + "_ACPC_Landmarks.fcsv"
    BCD.inputs.writeBranded2DImage = "BCD" + "_Branded2DQCimage.png"
    # BCD.inputs.outputMRML = "BCD" + "_Scene.mrml"
    BCD.inputs.interpolationMode = InterpolationMode
    BCD.inputs.houghEyeDetectorMode = 1  # Look for dark eyes like on a T1 image, 0=Look for bright eyes like in a T2 image
    BCD.inputs.acLowerBound = 80.0  # Chop the data set 80mm below the AC PC point.

    # Entries below are of the form:
    landmarkInitializeWF.connect(inputsSpec, 'inputVolume', BCD, 'inputVolume')
    landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BCD, 'atlasLandmarkWeights')
    landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BCD, 'atlasLandmarks')
    landmarkInitializeWF.connect(inputsSpec, 'LLSModel', BCD, 'LLSModel')
    landmarkInitializeWF.connect(inputsSpec, 'inputTemplateModel', BCD, 'inputTemplateModel')
    landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', BCD, 'atlasVolume')

    ########################################################
    # Run BLI atlas_to_subject
    ########################################################
    BLI = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI")
    BLI.inputs.outputTransformFilename = "landmarkInitializer_atlas_to_subject_transform.h5"

    landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BLI, 'inputWeightFilename')
    landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI, 'inputMovingLandmarkFilename')
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInACPCAlignedSpace', BLI, 'inputFixedLandmarkFilename')

    ## This is for debugging purposes, and it is not intended for general use.
    # PEP 8: test the flag's truthiness instead of comparing `== True`.
    if DoReverseInit:
        ########################################################
        # Run BLI subject_to_atlas
        ########################################################
        BLI2Atlas = pe.Node(interface=BRAINSLandmarkInitializer(), name="BLI2Atlas")
        BLI2Atlas.inputs.outputTransformFilename = "landmarkInitializer_subject_to_atlas_transform.h5"

        landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BLI2Atlas, 'inputWeightFilename')
        landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI2Atlas, 'inputFixedLandmarkFilename')
        landmarkInitializeWF.connect(BCD, 'outputLandmarksInInputSpace', BLI2Atlas, 'inputMovingLandmarkFilename')

        # Resample the subject volume into atlas space using the reverse transform.
        Resample2Atlas = pe.Node(interface=BRAINSResample(), name="Resample2Atlas")
        Resample2Atlas.inputs.interpolationMode = "Linear"
        Resample2Atlas.inputs.outputVolume = "subject2atlas.nii.gz"

        landmarkInitializeWF.connect(inputsSpec, 'inputVolume', Resample2Atlas, 'inputVolume')
        landmarkInitializeWF.connect(BLI2Atlas, 'outputTransformFilename', Resample2Atlas, 'warpTransform')
        landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', Resample2Atlas, 'referenceVolume')

    if DoReverseInit and debug:
        # Debug-only check: resample the atlas into subject (ACPC) space with
        # the forward initializer transform for visual comparison.
        ResampleFromAtlas = pe.Node(interface=BRAINSResample(), name="ResampleFromAtlas")
        ResampleFromAtlas.inputs.interpolationMode = "Linear"
        ResampleFromAtlas.inputs.outputVolume = "atlas2subject.nii.gz"

        landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', ResampleFromAtlas, 'inputVolume')
        landmarkInitializeWF.connect(BLI, 'outputTransformFilename', ResampleFromAtlas, 'warpTransform')
        landmarkInitializeWF.connect(BCD, 'outputResampledVolume', ResampleFromAtlas, 'referenceVolume')

    # Crop the ACPC-aligned volume to an automatically detected ROI.
    BROIAUTO = pe.Node(interface=BRAINSROIAuto(), name="BROIAuto_cropped")
    BROIAUTO.inputs.outputVolume = "Cropped_BCD_ACPC_Aligned.nii.gz"
    BROIAUTO.inputs.ROIAutoDilateSize = 10
    BROIAUTO.inputs.cropOutput = True

    landmarkInitializeWF.connect(BCD, 'outputResampledVolume', BROIAUTO, 'inputVolume')

    # Expose results on the workflow's output spec.
    landmarkInitializeWF.connect(BROIAUTO, 'outputVolume', outputsSpec, 'outputResampledCroppedVolume')
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInACPCAlignedSpace', outputsSpec, 'outputLandmarksInACPCAlignedSpace')
    landmarkInitializeWF.connect(BCD, 'outputResampledVolume', outputsSpec, 'outputResampledVolume')
    landmarkInitializeWF.connect(BCD, 'outputLandmarksInInputSpace', outputsSpec, 'outputLandmarksInInputSpace')
    landmarkInitializeWF.connect(BCD, 'outputTransform', outputsSpec, 'outputTransform')
    landmarkInitializeWF.connect(BCD, 'outputMRML', outputsSpec, 'outputMRML')
    landmarkInitializeWF.connect(BCD, 'writeBranded2DImage', outputsSpec, 'writeBranded2DImage')
    landmarkInitializeWF.connect(BLI, 'outputTransformFilename', outputsSpec, 'atlasToSubjectTransform')

    return landmarkInitializeWF