def _fugue_pipeline(self, **name_maps):
    """Build the FUGUE-based distortion-correction pipeline.

    Reorients the EPI series and both field-map images to standard
    orientation, brain-extracts the field-map magnitude, prepares a
    fieldmap from the magnitude/phase pair, and finally unwarps the
    series with FUGUE.  The unwarped series is exposed as
    'series_preproc'.
    """
    pipeline = self.new_pipeline(
        name='preprocess_pipeline',
        desc=("Fugue distortion correction pipeline"),
        citations=[fsl_cite],
        name_maps=name_maps)

    # All three inputs get the same Reorient2Std treatment; build the
    # nodes in a loop and keep them addressable by node name.
    reorients = {}
    for node_name, spec_name in (('reorient_epi_in', 'series'),
                                 ('reorient_fm_mag', 'field_map_mag'),
                                 ('reorient_fm_phase', 'field_map_phase')):
        reorients[node_name] = pipeline.add(
            node_name,
            fsl.utils.Reorient2Std(output_type='NIFTI_GZ'),
            inputs={'in_file': (spec_name, nifti_gz_format)},
            requirements=[fsl_req.v('5.0.9')])

    # Brain-extract the field-map magnitude image.
    brain_extract = pipeline.add(
        "bet",
        BET(robust=True, output_type='NIFTI_GZ'),
        inputs={'in_file': (reorients['reorient_fm_mag'], 'out_file')},
        wall_time=5,
        requirements=[fsl_req.v('5.0.9')])

    # Combine magnitude + phase into a fieldmap; delta-TE is taken from
    # the study's 'field_map_delta_te' field rather than hard-coded.
    fieldmap_prep = pipeline.add(
        "prepfmap",
        PrepareFieldmap(),
        inputs={
            'delta_TE': ('field_map_delta_te', float),
            "in_magnitude": (brain_extract, "out_file"),
            'in_phase': (reorients['reorient_fm_phase'], 'out_file')},
        wall_time=5,
        requirements=[fsl_req.v('5.0.9')])

    # Unwarp the reoriented EPI series with the prepared fieldmap.
    pipeline.add(
        'fugue',
        FUGUE(unwarp_direction='x',
              dwell_time=self.parameter('fugue_echo_spacing'),
              unwarped_file='example_func.nii.gz',
              output_type='NIFTI_GZ'),
        inputs={
            'fmap_in_file': (fieldmap_prep, 'out_fieldmap'),
            'in_file': (reorients['reorient_epi_in'], 'out_file')},
        outputs={'series_preproc': ('unwarped_file', nifti_gz_format)},
        wall_time=5,
        requirements=[fsl_req.v('5.0.9')])

    return pipeline
def _fugue_pipeline(self, **kwargs):
    """Build the FUGUE-based distortion-correction pipeline.

    Reorients the primary series and the field-map magnitude/phase
    images, brain-extracts the magnitude, prepares a fieldmap and
    unwarps the primary image with FUGUE, producing 'preproc'.
    """
    pipeline = self.create_pipeline(
        name='preproc_pipeline',
        inputs=[DatasetSpec('primary', nifti_gz_format),
                DatasetSpec('field_map_mag', nifti_gz_format),
                DatasetSpec('field_map_phase', nifti_gz_format),
                FieldSpec('field_map_delta_te', float)],
        outputs=[DatasetSpec('preproc', nifti_gz_format)],
        desc=("Fugue distortion correction pipeline"),
        version=1,
        citations=[fsl_cite],
        **kwargs)

    # The three inputs share the same Reorient2Std step; create and wire
    # the nodes in a loop, keyed by the dataset-spec name.
    std_nodes = {}
    for spec_name, node_name in (('primary', 'reorient_epi_in'),
                                 ('field_map_mag', 'reorient_fm_mag'),
                                 ('field_map_phase', 'reorient_fm_phase')):
        node = pipeline.create_node(fsl.utils.Reorient2Std(),
                                    name=node_name,
                                    requirements=[fsl509_req])
        pipeline.connect_input(spec_name, node, 'in_file')
        std_nodes[spec_name] = node

    # Brain-extract the field-map magnitude image.
    brain_mask = pipeline.create_node(BET(robust=True), name="bet",
                                      wall_time=5,
                                      requirements=[fsl509_req])
    pipeline.connect(std_nodes['field_map_mag'], 'out_file',
                     brain_mask, 'in_file')

    # Prepare the fieldmap; delta-TE comes from the study field rather
    # than being hard-coded (e.g. 2.46).
    fmap_node = pipeline.create_node(PrepareFieldmap(), name="prepfmap",
                                     wall_time=5,
                                     requirements=[fsl509_req])
    pipeline.connect_input('field_map_delta_te', fmap_node, 'delta_TE')
    pipeline.connect(brain_mask, "out_file", fmap_node, "in_magnitude")
    pipeline.connect(std_nodes['field_map_phase'], 'out_file',
                     fmap_node, 'in_phase')

    # Unwarp the reoriented primary image with the prepared fieldmap.
    unwarp = pipeline.create_node(
        FUGUE(unwarp_direction='x',
              dwell_time=self.parameter('fugue_echo_spacing'),
              unwarped_file='example_func.nii.gz'),
        name='fugue', wall_time=5, requirements=[fsl509_req])
    pipeline.connect(fmap_node, 'out_fieldmap', unwarp, 'fmap_in_file')
    pipeline.connect(std_nodes['primary'], 'out_file', unwarp, 'in_file')
    pipeline.connect_output('preproc', unwarp, 'unwarped_file')

    return pipeline
def quality_check_image_similarity(caps_directory, tsv, ref_template,
                                   working_directory=None):
    """Raw quality check of DL-preprocessed images against a template.

    The deep-learning preprocessing pipeline being checked consists of:
    1) N4 bias correction (ANTs);
    2) linear registration to MNI (MNI ICBM152 nonlinear symmetric
       template, ANTs);
    3) cropping of the background to save computational power.

    Each preprocessed image is compared with the reference template using
    nipype's ``Similarity`` metric restricted to the brain-extracted
    template mask.

    NOTE(review): the metric is set to ``'cc'`` (cross-correlation) below,
    although the original description mentioned mutual information —
    confirm which metric is intended.

    Args:
        caps_directory: CAPS directory containing the preprocessed images.
        tsv: participants TSV file listing the subjects to check.
        ref_template: reference template image to compare against.
        working_directory: base directory for the workflow; a temporary
            directory is created when None.

    Returns:
        The nipype workflow; its ``rank_mean`` node produces a TSV ordering
        subjects by their similarity to the template.
    """
    import tempfile

    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    from nipype.algorithms.metrics import Similarity
    from nipype.interfaces.fsl.preprocess import BET

    from T1_preprocessing_utils import get_caps_list, rank_mean

    if working_directory is None:
        working_directory = tempfile.mkdtemp()

    inputnode = npe.Node(
        nutil.IdentityInterface(
            fields=['caps_directory', 'tsv', 'ref_template']),
        name='inputnode')
    inputnode.inputs.caps_directory = caps_directory
    inputnode.inputs.tsv = tsv
    inputnode.inputs.ref_template = ref_template

    # Node variable renamed (was ``get_caps_list``) so it no longer shadows
    # the imported helper function of the same name.
    get_caps_list_node = npe.Node(
        name='get_caps_list',
        interface=nutil.Function(
            function=get_caps_list,
            input_names=['caps_directory', 'tsv'],
            output_names=['caps_intensity_nor_list']))

    # Brain-extract the template to obtain the mask used for comparison.
    bet_ref_img = npe.Node(name='bet_ref_img', interface=BET())
    bet_ref_img.inputs.frac = 0.5
    bet_ref_img.inputs.mask = True
    bet_ref_img.inputs.robust = True

    # One similarity computation per subject image (MapNode over volume1).
    img_similarity = npe.MapNode(name='img_similarity',
                                 iterfield=['volume1'],
                                 interface=Similarity())
    img_similarity.inputs.metric = 'cc'

    # Rank subjects by mean similarity (renamed, was ``rank_mean``, to
    # avoid shadowing the imported helper function).
    rank_mean_node = npe.Node(
        name='rank_mean',
        interface=nutil.Function(
            function=rank_mean,
            input_names=['similarity', 'tsv', 'caps_directory'],
            output_names=['result_tsv']))

    wf = npe.Workflow(name='quality_check_dl')
    wf.base_dir = working_directory
    wf.connect([
        (inputnode, get_caps_list_node,
         [('caps_directory', 'caps_directory')]),
        (inputnode, get_caps_list_node, [('tsv', 'tsv')]),
        (inputnode, bet_ref_img, [('ref_template', 'in_file')]),
        (get_caps_list_node, img_similarity,
         [('caps_intensity_nor_list', 'volume1')]),
        (inputnode, img_similarity, [('ref_template', 'volume2')]),
        (bet_ref_img, img_similarity, [('mask_file', 'mask1')]),
        (bet_ref_img, img_similarity, [('mask_file', 'mask2')]),
        (inputnode, rank_mean_node, [('tsv', 'tsv')]),
        (inputnode, rank_mean_node, [('caps_directory', 'caps_directory')]),
        (img_similarity, rank_mean_node, [('similarity', 'similarity')]),
    ])
    return wf
def Lesion_extractor( name='Lesion_Extractor', wf_name='Test', base_dir='/homes_unix/alaurent/', input_dir=None, subjects=None, main=None, acc=None, atlas='/homes_unix/alaurent/cbstools-public-master/atlases/brain-segmentation-prior3.0/brain-atlas-quant-3.0.8.txt' ): wf = Workflow(wf_name) wf.base_dir = base_dir #file = open(subjects,"r") #subjects = file.read().split("\n") #file.close() # Subject List subjectList = Node(IdentityInterface(fields=['subject_id'], mandatory_inputs=True), name="subList") subjectList.iterables = ('subject_id', [ sub for sub in subjects if sub != '' and sub != '\n' ]) # T1w and FLAIR scanList = Node(DataGrabber(infields=['subject_id'], outfields=['T1', 'FLAIR']), name="scanList") scanList.inputs.base_directory = input_dir scanList.inputs.ignore_exception = False scanList.inputs.raise_on_empty = True scanList.inputs.sort_filelist = True #scanList.inputs.template = '%s/%s.nii' #scanList.inputs.template_args = {'T1': [['subject_id','T1*']], # 'FLAIR': [['subject_id','FLAIR*']]} scanList.inputs.template = '%s/anat/%s' scanList.inputs.template_args = { 'T1': [['subject_id', '*_T1w.nii.gz']], 'FLAIR': [['subject_id', '*_FLAIR.nii.gz']] } wf.connect(subjectList, "subject_id", scanList, "subject_id") # # T1w and FLAIR # dg = Node(DataGrabber(outfields=['T1', 'FLAIR']), name="T1wFLAIR") # dg.inputs.base_directory = "/homes_unix/alaurent/LesionPipeline" # dg.inputs.template = "%s/NIFTI/*.nii.gz" # dg.inputs.template_args['T1']=[['7']] # dg.inputs.template_args['FLAIR']=[['9']] # dg.inputs.sort_filelist=True # Reorient Volume T1Conv = Node(Reorient2Std(), name="ReorientVolume") T1Conv.inputs.ignore_exception = False T1Conv.inputs.terminal_output = 'none' T1Conv.inputs.out_file = "T1_reoriented.nii.gz" wf.connect(scanList, "T1", T1Conv, "in_file") # Reorient Volume (2) T2flairConv = Node(Reorient2Std(), name="ReorientVolume2") T2flairConv.inputs.ignore_exception = False T2flairConv.inputs.terminal_output = 'none' T2flairConv.inputs.out_file = 
"FLAIR_reoriented.nii.gz" wf.connect(scanList, "FLAIR", T2flairConv, "in_file") # N3 Correction T1NUC = Node(N4BiasFieldCorrection(), name="N3Correction") T1NUC.inputs.dimension = 3 T1NUC.inputs.environ = {'NSLOTS': '1'} T1NUC.inputs.ignore_exception = False T1NUC.inputs.num_threads = 1 T1NUC.inputs.save_bias = False T1NUC.inputs.terminal_output = 'none' wf.connect(T1Conv, "out_file", T1NUC, "input_image") # N3 Correction (2) T2flairNUC = Node(N4BiasFieldCorrection(), name="N3Correction2") T2flairNUC.inputs.dimension = 3 T2flairNUC.inputs.environ = {'NSLOTS': '1'} T2flairNUC.inputs.ignore_exception = False T2flairNUC.inputs.num_threads = 1 T2flairNUC.inputs.save_bias = False T2flairNUC.inputs.terminal_output = 'none' wf.connect(T2flairConv, "out_file", T2flairNUC, "input_image") ''' ##################### ### PRE-NORMALIZE ### ##################### To make sure there's no outlier values (negative, or really high) to offset the initialization steps ''' # Intensity Range Normalization getMaxT1NUC = Node(ImageStats(op_string='-r'), name="getMaxT1NUC") wf.connect(T1NUC, 'output_image', getMaxT1NUC, 'in_file') T1NUCirn = Node(AbcImageMaths(), name="IntensityNormalization") T1NUCirn.inputs.op_string = "-div" T1NUCirn.inputs.out_file = "normT1.nii.gz" wf.connect(T1NUC, 'output_image', T1NUCirn, 'in_file') wf.connect(getMaxT1NUC, ('out_stat', getElementFromList, 1), T1NUCirn, "op_value") # Intensity Range Normalization (2) getMaxT2NUC = Node(ImageStats(op_string='-r'), name="getMaxT2") wf.connect(T2flairNUC, 'output_image', getMaxT2NUC, 'in_file') T2NUCirn = Node(AbcImageMaths(), name="IntensityNormalization2") T2NUCirn.inputs.op_string = "-div" T2NUCirn.inputs.out_file = "normT2.nii.gz" wf.connect(T2flairNUC, 'output_image', T2NUCirn, 'in_file') wf.connect(getMaxT2NUC, ('out_stat', getElementFromList, 1), T2NUCirn, "op_value") ''' ######################## #### COREGISTRATION #### ######################## ''' # Optimized Automated Registration T2flairCoreg = Node(FLIRT(), 
name="OptimizedAutomatedRegistration") T2flairCoreg.inputs.output_type = 'NIFTI_GZ' wf.connect(T2NUCirn, "out_file", T2flairCoreg, "in_file") wf.connect(T1NUCirn, "out_file", T2flairCoreg, "reference") ''' ######################### #### SKULL-STRIPPING #### ######################### ''' # SPECTRE T1ss = Node(BET(), name="SPECTRE") T1ss.inputs.frac = 0.45 #0.4 T1ss.inputs.mask = True T1ss.inputs.outline = True T1ss.inputs.robust = True wf.connect(T1NUCirn, "out_file", T1ss, "in_file") # Image Calculator T2ss = Node(ApplyMask(), name="ImageCalculator") wf.connect(T1ss, "mask_file", T2ss, "mask_file") wf.connect(T2flairCoreg, "out_file", T2ss, "in_file") ''' #################################### #### 2nd LAYER OF N3 CORRECTION #### #################################### This time without the skull: there were some significant amounts of inhomogeneities leftover. ''' # N3 Correction (3) T1ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction3") T1ssNUC.inputs.dimension = 3 T1ssNUC.inputs.environ = {'NSLOTS': '1'} T1ssNUC.inputs.ignore_exception = False T1ssNUC.inputs.num_threads = 1 T1ssNUC.inputs.save_bias = False T1ssNUC.inputs.terminal_output = 'none' wf.connect(T1ss, "out_file", T1ssNUC, "input_image") # N3 Correction (4) T2ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction4") T2ssNUC.inputs.dimension = 3 T2ssNUC.inputs.environ = {'NSLOTS': '1'} T2ssNUC.inputs.ignore_exception = False T2ssNUC.inputs.num_threads = 1 T2ssNUC.inputs.save_bias = False T2ssNUC.inputs.terminal_output = 'none' wf.connect(T2ss, "out_file", T2ssNUC, "input_image") ''' #################################### #### NORMALIZE FOR MGDM #### #################################### This normalization is a bit aggressive: only useful to have a cropped dynamic range into MGDM, but possibly harmful to further processing, so the unprocessed images are passed to the subsequent steps. 
''' # Intensity Range Normalization getMaxT1ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT1ssNUC") wf.connect(T1ssNUC, 'output_image', getMaxT1ssNUC, 'in_file') T1ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization3") T1ssNUCirn.inputs.op_string = "-div" T1ssNUCirn.inputs.out_file = "normT1ss.nii.gz" wf.connect(T1ssNUC, 'output_image', T1ssNUCirn, 'in_file') wf.connect(getMaxT1ssNUC, ('out_stat', getElementFromList, 1), T1ssNUCirn, "op_value") # Intensity Range Normalization (2) getMaxT2ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT2ssNUC") wf.connect(T2ssNUC, 'output_image', getMaxT2ssNUC, 'in_file') T2ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization4") T2ssNUCirn.inputs.op_string = "-div" T2ssNUCirn.inputs.out_file = "normT2ss.nii.gz" wf.connect(T2ssNUC, 'output_image', T2ssNUCirn, 'in_file') wf.connect(getMaxT2ssNUC, ('out_stat', getElementFromList, 1), T2ssNUCirn, "op_value") ''' #################################### #### ESTIMATE CSF PV #### #################################### Here we try to get a better handle on CSF voxels to help the segmentation step ''' # Recursive Ridge Diffusion CSF_pv = Node(RecursiveRidgeDiffusion(), name='estimate_CSF_pv') CSF_pv.plugin_args = {'sbatch_args': '--mem 6000'} CSF_pv.inputs.ridge_intensities = "dark" CSF_pv.inputs.ridge_filter = "2D" CSF_pv.inputs.orientation = "undefined" CSF_pv.inputs.ang_factor = 1.0 CSF_pv.inputs.min_scale = 0 CSF_pv.inputs.max_scale = 3 CSF_pv.inputs.propagation_model = "diffusion" CSF_pv.inputs.diffusion_factor = 0.5 CSF_pv.inputs.similarity_scale = 0.1 CSF_pv.inputs.neighborhood_size = 4 CSF_pv.inputs.max_iter = 100 CSF_pv.inputs.max_diff = 0.001 CSF_pv.inputs.save_data = True wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, CSF_pv.name), CSF_pv, 'output_dir') wf.connect(T1ssNUCirn, 'out_file', CSF_pv, 'input_image') ''' #################################### #### MGDM #### #################################### ''' # Multi-contrast 
Brain Segmentation MGDM = Node(MGDMSegmentation(), name='MGDM') MGDM.plugin_args = {'sbatch_args': '--mem 7000'} MGDM.inputs.contrast_type1 = "Mprage3T" MGDM.inputs.contrast_type2 = "FLAIR3T" MGDM.inputs.contrast_type3 = "PVDURA" MGDM.inputs.save_data = True MGDM.inputs.atlas_file = atlas wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, MGDM.name), MGDM, 'output_dir') wf.connect(T1ssNUCirn, 'out_file', MGDM, 'contrast_image1') wf.connect(T2ssNUCirn, 'out_file', MGDM, 'contrast_image2') wf.connect(CSF_pv, 'ridge_pv', MGDM, 'contrast_image3') # Enhance Region Contrast ERC = Node(EnhanceRegionContrast(), name='ERC') ERC.plugin_args = {'sbatch_args': '--mem 7000'} ERC.inputs.enhanced_region = "crwm" ERC.inputs.contrast_background = "crgm" ERC.inputs.partial_voluming_distance = 2.0 ERC.inputs.save_data = True ERC.inputs.atlas_file = atlas wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC.name), ERC, 'output_dir') wf.connect(T1ssNUC, 'output_image', ERC, 'intensity_image') wf.connect(MGDM, 'segmentation', ERC, 'segmentation_image') wf.connect(MGDM, 'distance', ERC, 'levelset_boundary_image') # Enhance Region Contrast (2) ERC2 = Node(EnhanceRegionContrast(), name='ERC2') ERC2.plugin_args = {'sbatch_args': '--mem 7000'} ERC2.inputs.enhanced_region = "crwm" ERC2.inputs.contrast_background = "crgm" ERC2.inputs.partial_voluming_distance = 2.0 ERC2.inputs.save_data = True ERC2.inputs.atlas_file = atlas wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC2.name), ERC2, 'output_dir') wf.connect(T2ssNUC, 'output_image', ERC2, 'intensity_image') wf.connect(MGDM, 'segmentation', ERC2, 'segmentation_image') wf.connect(MGDM, 'distance', ERC2, 'levelset_boundary_image') # Define Multi-Region Priors DMRP = Node(DefineMultiRegionPriors(), name='DefineMultRegPriors') DMRP.plugin_args = {'sbatch_args': '--mem 6000'} #DMRP.inputs.defined_region = "ventricle-horns" #DMRP.inputs.definition_method = 
"closest-distance" DMRP.inputs.distance_offset = 3.0 DMRP.inputs.save_data = True DMRP.inputs.atlas_file = atlas wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, DMRP.name), DMRP, 'output_dir') wf.connect(MGDM, 'segmentation', DMRP, 'segmentation_image') wf.connect(MGDM, 'distance', DMRP, 'levelset_boundary_image') ''' ############################################### #### REMOVE VENTRICLE POSTERIOR #### ############################################### Due to topology constraints, the ventricles are often not fully segmented: here add back all ventricle voxels from the posterior probability (without the topology constraints) ''' # Posterior label PostLabel = Node(Split(), name='PosteriorLabel') PostLabel.inputs.dimension = "t" wf.connect(MGDM, 'labels', PostLabel, 'in_file') # Posterior proba PostProba = Node(Split(), name='PosteriorProba') PostProba.inputs.dimension = "t" wf.connect(MGDM, 'memberships', PostProba, 'in_file') # Threshold binary mask : ventricle label part 1 VentLabel1 = Node(Threshold(), name="VentricleLabel1") VentLabel1.inputs.thresh = 10.5 VentLabel1.inputs.direction = "below" wf.connect(PostLabel, ("out_files", getFirstElement), VentLabel1, "in_file") # Threshold binary mask : ventricle label part 2 VentLabel2 = Node(Threshold(), name="VentricleLabel2") VentLabel2.inputs.thresh = 13.5 VentLabel2.inputs.direction = "above" wf.connect(VentLabel1, "out_file", VentLabel2, "in_file") # Image calculator : ventricle proba VentProba = Node(ImageMaths(), name="VentricleProba") VentProba.inputs.op_string = "-mul" VentProba.inputs.out_file = "ventproba.nii.gz" wf.connect(PostProba, ("out_files", getFirstElement), VentProba, "in_file") wf.connect(VentLabel2, "out_file", VentProba, "in_file2") # Image calculator : remove inter ventricles RmInterVent = Node(ImageMaths(), name="RemoveInterVent") RmInterVent.inputs.op_string = "-sub" RmInterVent.inputs.out_file = "rmintervent.nii.gz" wf.connect(ERC, "region_pv", RmInterVent, 
"in_file") wf.connect(DMRP, "inter_ventricular_pv", RmInterVent, "in_file2") # Image calculator : add horns AddHorns = Node(ImageMaths(), name="AddHorns") AddHorns.inputs.op_string = "-add" AddHorns.inputs.out_file = "rmvent.nii.gz" wf.connect(RmInterVent, "out_file", AddHorns, "in_file") wf.connect(DMRP, "ventricular_horns_pv", AddHorns, "in_file2") # Image calculator : remove ventricles RmVent = Node(ImageMaths(), name="RemoveVentricles") RmVent.inputs.op_string = "-sub" RmVent.inputs.out_file = "rmvent.nii.gz" wf.connect(AddHorns, "out_file", RmVent, "in_file") wf.connect(VentProba, "out_file", RmVent, "in_file2") # Image calculator : remove internal capsule RmIC = Node(ImageMaths(), name="RemoveInternalCap") RmIC.inputs.op_string = "-sub" RmIC.inputs.out_file = "rmic.nii.gz" wf.connect(RmVent, "out_file", RmIC, "in_file") wf.connect(DMRP, "internal_capsule_pv", RmIC, "in_file2") # Intensity Range Normalization (3) getMaxRmIC = Node(ImageStats(op_string='-r'), name="getMaxRmIC") wf.connect(RmIC, 'out_file', getMaxRmIC, 'in_file') RmICirn = Node(AbcImageMaths(), name="IntensityNormalization5") RmICirn.inputs.op_string = "-div" RmICirn.inputs.out_file = "normRmIC.nii.gz" wf.connect(RmIC, 'out_file', RmICirn, 'in_file') wf.connect(getMaxRmIC, ('out_stat', getElementFromList, 1), RmICirn, "op_value") # Probability To Levelset : WM orientation WM_Orient = Node(ProbabilityToLevelset(), name='WM_Orientation') WM_Orient.plugin_args = {'sbatch_args': '--mem 6000'} WM_Orient.inputs.save_data = True wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_Orient.name), WM_Orient, 'output_dir') wf.connect(RmICirn, 'out_file', WM_Orient, 'probability_image') # Recursive Ridge Diffusion : PVS in WM only WM_pvs = Node(RecursiveRidgeDiffusion(), name='PVS_in_WM') WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'} WM_pvs.inputs.ridge_intensities = "bright" WM_pvs.inputs.ridge_filter = "1D" WM_pvs.inputs.orientation = "orthogonal" WM_pvs.inputs.ang_factor 
= 1.0 WM_pvs.inputs.min_scale = 0 WM_pvs.inputs.max_scale = 3 WM_pvs.inputs.propagation_model = "diffusion" WM_pvs.inputs.diffusion_factor = 1.0 WM_pvs.inputs.similarity_scale = 1.0 WM_pvs.inputs.neighborhood_size = 2 WM_pvs.inputs.max_iter = 100 WM_pvs.inputs.max_diff = 0.001 WM_pvs.inputs.save_data = True wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_pvs.name), WM_pvs, 'output_dir') wf.connect(ERC, 'background_proba', WM_pvs, 'input_image') wf.connect(WM_Orient, 'levelset', WM_pvs, 'surface_levelset') wf.connect(RmICirn, 'out_file', WM_pvs, 'loc_prior') # Extract Lesions : extract WM PVS extract_WM_pvs = Node(LesionExtraction(), name='ExtractPVSfromWM') extract_WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'} extract_WM_pvs.inputs.gm_boundary_partial_vol_dist = 1.0 extract_WM_pvs.inputs.csf_boundary_partial_vol_dist = 3.0 extract_WM_pvs.inputs.lesion_clust_dist = 1.0 extract_WM_pvs.inputs.prob_min_thresh = 0.1 extract_WM_pvs.inputs.prob_max_thresh = 0.33 extract_WM_pvs.inputs.small_lesion_size = 4.0 extract_WM_pvs.inputs.save_data = True extract_WM_pvs.inputs.atlas_file = atlas wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, extract_WM_pvs.name), extract_WM_pvs, 'output_dir') wf.connect(WM_pvs, 'propagation', extract_WM_pvs, 'probability_image') wf.connect(MGDM, 'segmentation', extract_WM_pvs, 'segmentation_image') wf.connect(MGDM, 'distance', extract_WM_pvs, 'levelset_boundary_image') wf.connect(RmICirn, 'out_file', extract_WM_pvs, 'location_prior_image') ''' 2nd branch ''' # Image calculator : internal capsule witout ventricules ICwoVent = Node(ImageMaths(), name="ICWithoutVentricules") ICwoVent.inputs.op_string = "-sub" ICwoVent.inputs.out_file = "icwovent.nii.gz" wf.connect(DMRP, "internal_capsule_pv", ICwoVent, "in_file") wf.connect(DMRP, "inter_ventricular_pv", ICwoVent, "in_file2") # Image calculator : remove ventricles IC RmVentIC = Node(ImageMaths(), name="RmVentIC") 
RmVentIC.inputs.op_string = "-sub" RmVentIC.inputs.out_file = "RmVentIC.nii.gz" wf.connect(ICwoVent, "out_file", RmVentIC, "in_file") wf.connect(VentProba, "out_file", RmVentIC, "in_file2") # Intensity Range Normalization (4) getMaxRmVentIC = Node(ImageStats(op_string='-r'), name="getMaxRmVentIC") wf.connect(RmVentIC, 'out_file', getMaxRmVentIC, 'in_file') RmVentICirn = Node(AbcImageMaths(), name="IntensityNormalization6") RmVentICirn.inputs.op_string = "-div" RmVentICirn.inputs.out_file = "normRmVentIC.nii.gz" wf.connect(RmVentIC, 'out_file', RmVentICirn, 'in_file') wf.connect(getMaxRmVentIC, ('out_stat', getElementFromList, 1), RmVentICirn, "op_value") # Probability To Levelset : IC orientation IC_Orient = Node(ProbabilityToLevelset(), name='IC_Orientation') IC_Orient.plugin_args = {'sbatch_args': '--mem 6000'} IC_Orient.inputs.save_data = True wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_Orient.name), IC_Orient, 'output_dir') wf.connect(RmVentICirn, 'out_file', IC_Orient, 'probability_image') # Recursive Ridge Diffusion : PVS in IC only IC_pvs = Node(RecursiveRidgeDiffusion(), name='RecursiveRidgeDiffusion2') IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'} IC_pvs.inputs.ridge_intensities = "bright" IC_pvs.inputs.ridge_filter = "1D" IC_pvs.inputs.orientation = "undefined" IC_pvs.inputs.ang_factor = 1.0 IC_pvs.inputs.min_scale = 0 IC_pvs.inputs.max_scale = 3 IC_pvs.inputs.propagation_model = "diffusion" IC_pvs.inputs.diffusion_factor = 1.0 IC_pvs.inputs.similarity_scale = 1.0 IC_pvs.inputs.neighborhood_size = 2 IC_pvs.inputs.max_iter = 100 IC_pvs.inputs.max_diff = 0.001 IC_pvs.inputs.save_data = True wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_pvs.name), IC_pvs, 'output_dir') wf.connect(ERC, 'background_proba', IC_pvs, 'input_image') wf.connect(IC_Orient, 'levelset', IC_pvs, 'surface_levelset') wf.connect(RmVentICirn, 'out_file', IC_pvs, 'loc_prior') # Extract Lesions : extract IC PVS 
extract_IC_pvs = Node(LesionExtraction(), name='ExtractPVSfromIC') extract_IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'} extract_IC_pvs.inputs.gm_boundary_partial_vol_dist = 1.0 extract_IC_pvs.inputs.csf_boundary_partial_vol_dist = 4.0 extract_IC_pvs.inputs.lesion_clust_dist = 1.0 extract_IC_pvs.inputs.prob_min_thresh = 0.25 extract_IC_pvs.inputs.prob_max_thresh = 0.5 extract_IC_pvs.inputs.small_lesion_size = 4.0 extract_IC_pvs.inputs.save_data = True extract_IC_pvs.inputs.atlas_file = atlas wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, extract_IC_pvs.name), extract_IC_pvs, 'output_dir') wf.connect(IC_pvs, 'propagation', extract_IC_pvs, 'probability_image') wf.connect(MGDM, 'segmentation', extract_IC_pvs, 'segmentation_image') wf.connect(MGDM, 'distance', extract_IC_pvs, 'levelset_boundary_image') wf.connect(RmVentICirn, 'out_file', extract_IC_pvs, 'location_prior_image') ''' 3rd branch ''' # Image calculator : RmInter = Node(ImageMaths(), name="RemoveInterVentricules") RmInter.inputs.op_string = "-sub" RmInter.inputs.out_file = "rminter.nii.gz" wf.connect(ERC2, 'region_pv', RmInter, "in_file") wf.connect(DMRP, "inter_ventricular_pv", RmInter, "in_file2") # Image calculator : AddVentHorns = Node(ImageMaths(), name="AddVentHorns") AddVentHorns.inputs.op_string = "-add" AddVentHorns.inputs.out_file = "rminter.nii.gz" wf.connect(RmInter, 'out_file', AddVentHorns, "in_file") wf.connect(DMRP, "ventricular_horns_pv", AddVentHorns, "in_file2") # Intensity Range Normalization (5) getMaxAddVentHorns = Node(ImageStats(op_string='-r'), name="getMaxAddVentHorns") wf.connect(AddVentHorns, 'out_file', getMaxAddVentHorns, 'in_file') AddVentHornsirn = Node(AbcImageMaths(), name="IntensityNormalization7") AddVentHornsirn.inputs.op_string = "-div" AddVentHornsirn.inputs.out_file = "normAddVentHorns.nii.gz" wf.connect(AddVentHorns, 'out_file', AddVentHornsirn, 'in_file') wf.connect(getMaxAddVentHorns, ('out_stat', getElementFromList, 1), 
AddVentHornsirn, "op_value") # Extract Lesions : extract White Matter Hyperintensities extract_WMH = Node(LesionExtraction(), name='Extract_WMH') extract_WMH.plugin_args = {'sbatch_args': '--mem 6000'} extract_WMH.inputs.gm_boundary_partial_vol_dist = 1.0 extract_WMH.inputs.csf_boundary_partial_vol_dist = 2.0 extract_WMH.inputs.lesion_clust_dist = 1.0 extract_WMH.inputs.prob_min_thresh = 0.84 extract_WMH.inputs.prob_max_thresh = 0.84 extract_WMH.inputs.small_lesion_size = 4.0 extract_WMH.inputs.save_data = True extract_WMH.inputs.atlas_file = atlas wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, extract_WMH.name), extract_WMH, 'output_dir') wf.connect(ERC2, 'background_proba', extract_WMH, 'probability_image') wf.connect(MGDM, 'segmentation', extract_WMH, 'segmentation_image') wf.connect(MGDM, 'distance', extract_WMH, 'levelset_boundary_image') wf.connect(AddVentHornsirn, 'out_file', extract_WMH, 'location_prior_image') #=========================================================================== # extract_WMH2 = extract_WMH.clone(name='Extract_WMH2') # extract_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0 # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH2.name),extract_WMH2,'output_dir') # wf.connect(ERC2,'background_proba',extract_WMH2,'probability_image') # wf.connect(MGDM,'segmentation',extract_WMH2,'segmentation_image') # wf.connect(MGDM,'distance',extract_WMH2,'levelset_boundary_image') # wf.connect(AddVentHornsirn,'out_file',extract_WMH2,'location_prior_image') # # extract_WMH3 = extract_WMH.clone(name='Extract_WMH3') # extract_WMH3.inputs.gm_boundary_partial_vol_dist = 3.0 # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH3.name),extract_WMH3,'output_dir') # wf.connect(ERC2,'background_proba',extract_WMH3,'probability_image') # wf.connect(MGDM,'segmentation',extract_WMH3,'segmentation_image') # 
wf.connect(MGDM,'distance',extract_WMH3,'levelset_boundary_image') # wf.connect(AddVentHornsirn,'out_file',extract_WMH3,'location_prior_image') #=========================================================================== ''' #################################### #### FINDING SMALL WMHs #### #################################### Small round WMHs near the cortex are often missed by the main algorithm, so we're adding this one that takes care of them. ''' # Recursive Ridge Diffusion : round WMH detection round_WMH = Node(RecursiveRidgeDiffusion(), name='round_WMH') round_WMH.plugin_args = {'sbatch_args': '--mem 6000'} round_WMH.inputs.ridge_intensities = "bright" round_WMH.inputs.ridge_filter = "0D" round_WMH.inputs.orientation = "undefined" round_WMH.inputs.ang_factor = 1.0 round_WMH.inputs.min_scale = 1 round_WMH.inputs.max_scale = 4 round_WMH.inputs.propagation_model = "none" round_WMH.inputs.diffusion_factor = 1.0 round_WMH.inputs.similarity_scale = 0.1 round_WMH.inputs.neighborhood_size = 4 round_WMH.inputs.max_iter = 100 round_WMH.inputs.max_diff = 0.001 round_WMH.inputs.save_data = True wf.connect( subjectList, ('subject_id', createOutputDir, wf.base_dir, wf.name, round_WMH.name), round_WMH, 'output_dir') wf.connect(ERC2, 'background_proba', round_WMH, 'input_image') wf.connect(AddVentHornsirn, 'out_file', round_WMH, 'loc_prior') # Extract Lesions : extract round WMH extract_round_WMH = Node(LesionExtraction(), name='Extract_round_WMH') extract_round_WMH.plugin_args = {'sbatch_args': '--mem 6000'} extract_round_WMH.inputs.gm_boundary_partial_vol_dist = 1.0 extract_round_WMH.inputs.csf_boundary_partial_vol_dist = 2.0 extract_round_WMH.inputs.lesion_clust_dist = 1.0 extract_round_WMH.inputs.prob_min_thresh = 0.33 extract_round_WMH.inputs.prob_max_thresh = 0.33 extract_round_WMH.inputs.small_lesion_size = 6.0 extract_round_WMH.inputs.save_data = True extract_round_WMH.inputs.atlas_file = atlas wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir, 
wf.name, extract_round_WMH.name), extract_round_WMH, 'output_dir') wf.connect(round_WMH, 'ridge_pv', extract_round_WMH, 'probability_image') wf.connect(MGDM, 'segmentation', extract_round_WMH, 'segmentation_image') wf.connect(MGDM, 'distance', extract_round_WMH, 'levelset_boundary_image') wf.connect(AddVentHornsirn, 'out_file', extract_round_WMH, 'location_prior_image') #=========================================================================== # extract_round_WMH2 = extract_round_WMH.clone(name='Extract_round_WMH2') # extract_round_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0 # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH2.name),extract_round_WMH2,'output_dir') # wf.connect(round_WMH,'ridge_pv',extract_round_WMH2,'probability_image') # wf.connect(MGDM,'segmentation',extract_round_WMH2,'segmentation_image') # wf.connect(MGDM,'distance',extract_round_WMH2,'levelset_boundary_image') # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH2,'location_prior_image') # # extract_round_WMH3 = extract_round_WMH.clone(name='Extract_round_WMH3') # extract_round_WMH3.inputs.gm_boundary_partial_vol_dist = 2.0 # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH3.name),extract_round_WMH3,'output_dir') # wf.connect(round_WMH,'ridge_pv',extract_round_WMH3,'probability_image') # wf.connect(MGDM,'segmentation',extract_round_WMH3,'segmentation_image') # wf.connect(MGDM,'distance',extract_round_WMH3,'levelset_boundary_image') # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH3,'location_prior_image') #=========================================================================== ''' #################################### #### COMBINE BOTH TYPES #### #################################### Small round WMHs and regular WMH together before thresholding + PVS from white matter and internal capsule ''' # Image calculator : WM + IC DVRS DVRS = Node(ImageMaths(), name="DVRS") DVRS.inputs.op_string = 
"-max" DVRS.inputs.out_file = "DVRS_map.nii.gz" wf.connect(extract_WM_pvs, 'lesion_score', DVRS, "in_file") wf.connect(extract_IC_pvs, "lesion_score", DVRS, "in_file2") # Image calculator : WMH + round WMH = Node(ImageMaths(), name="WMH") WMH.inputs.op_string = "-max" WMH.inputs.out_file = "WMH_map.nii.gz" wf.connect(extract_WMH, 'lesion_score', WMH, "in_file") wf.connect(extract_round_WMH, "lesion_score", WMH, "in_file2") #=========================================================================== # WMH2 = Node(ImageMaths(), name="WMH2") # WMH2.inputs.op_string = "-max" # WMH2.inputs.out_file = "WMH2_map.nii.gz" # wf.connect(extract_WMH2,'lesion_score',WMH2,"in_file") # wf.connect(extract_round_WMH2,"lesion_score", WMH2, "in_file2") # # WMH3 = Node(ImageMaths(), name="WMH3") # WMH3.inputs.op_string = "-max" # WMH3.inputs.out_file = "WMH3_map.nii.gz" # wf.connect(extract_WMH3,'lesion_score',WMH3,"in_file") # wf.connect(extract_round_WMH3,"lesion_score", WMH3, "in_file2") #=========================================================================== # Image calculator : multiply by boundnary partial volume WMH_mul = Node(ImageMaths(), name="WMH_mul") WMH_mul.inputs.op_string = "-mul" WMH_mul.inputs.out_file = "final_mask.nii.gz" wf.connect(WMH, "out_file", WMH_mul, "in_file") wf.connect(MGDM, "distance", WMH_mul, "in_file2") #=========================================================================== # WMH2_mul = Node(ImageMaths(), name="WMH2_mul") # WMH2_mul.inputs.op_string = "-mul" # WMH2_mul.inputs.out_file = "final_mask.nii.gz" # wf.connect(WMH2,"out_file", WMH2_mul,"in_file") # wf.connect(MGDM,"distance", WMH2_mul, "in_file2") # # WMH3_mul = Node(ImageMaths(), name="WMH3_mul") # WMH3_mul.inputs.op_string = "-mul" # WMH3_mul.inputs.out_file = "final_mask.nii.gz" # wf.connect(WMH3,"out_file", WMH3_mul,"in_file") # wf.connect(MGDM,"distance", WMH3_mul, "in_file2") #=========================================================================== ''' 
########################################## #### SEGMENTATION THRESHOLD #### ########################################## A threshold of 0.5 is very conservative, because the final lesion score is the product of two probabilities. This needs to be optimized to a value between 0.25 and 0.5 to balance false negatives (dominant at 0.5) and false positives (dominant at low values). ''' # Threshold binary mask : DVRS_mask = Node(Threshold(), name="DVRS_mask") DVRS_mask.inputs.thresh = 0.25 DVRS_mask.inputs.direction = "below" wf.connect(DVRS, "out_file", DVRS_mask, "in_file") # Threshold binary mask : 025 WMH1_025 = Node(Threshold(), name="WMH1_025") WMH1_025.inputs.thresh = 0.25 WMH1_025.inputs.direction = "below" wf.connect(WMH_mul, "out_file", WMH1_025, "in_file") #=========================================================================== # WMH2_025 = Node(Threshold(), name="WMH2_025") # WMH2_025.inputs.thresh = 0.25 # WMH2_025.inputs.direction = "below" # wf.connect(WMH2_mul,"out_file", WMH2_025, "in_file") # # WMH3_025 = Node(Threshold(), name="WMH3_025") # WMH3_025.inputs.thresh = 0.25 # WMH3_025.inputs.direction = "below" # wf.connect(WMH3_mul,"out_file", WMH3_025, "in_file") #=========================================================================== # Threshold binary mask : 050 WMH1_050 = Node(Threshold(), name="WMH1_050") WMH1_050.inputs.thresh = 0.50 WMH1_050.inputs.direction = "below" wf.connect(WMH_mul, "out_file", WMH1_050, "in_file") #=========================================================================== # WMH2_050 = Node(Threshold(), name="WMH2_050") # WMH2_050.inputs.thresh = 0.50 # WMH2_050.inputs.direction = "below" # wf.connect(WMH2_mul,"out_file", WMH2_050, "in_file") # # WMH3_050 = Node(Threshold(), name="WMH3_050") # WMH3_050.inputs.thresh = 0.50 # WMH3_050.inputs.direction = "below" # wf.connect(WMH3_mul,"out_file", WMH3_050, "in_file") #=========================================================================== # Threshold binary mask : 075 
WMH1_075 = Node(Threshold(), name="WMH1_075") WMH1_075.inputs.thresh = 0.75 WMH1_075.inputs.direction = "below" wf.connect(WMH_mul, "out_file", WMH1_075, "in_file") #=========================================================================== # WMH2_075 = Node(Threshold(), name="WMH2_075") # WMH2_075.inputs.thresh = 0.75 # WMH2_075.inputs.direction = "below" # wf.connect(WMH2_mul,"out_file", WMH2_075, "in_file") # # WMH3_075 = Node(Threshold(), name="WMH3_075") # WMH3_075.inputs.thresh = 0.75 # WMH3_075.inputs.direction = "below" # wf.connect(WMH3_mul,"out_file", WMH3_075, "in_file") #=========================================================================== ## Outputs DVRS_Output = Node(IdentityInterface(fields=[ 'mask', 'region', 'lesion_size', 'lesion_proba', 'boundary', 'label', 'score' ]), name='DVRS_Output') wf.connect(DVRS_mask, 'out_file', DVRS_Output, 'mask') WMH_output = Node(IdentityInterface(fields=[ 'mask1025', 'mask1050', 'mask1075', 'mask2025', 'mask2050', 'mask2075', 'mask3025', 'mask3050', 'mask3075' ]), name='WMH_output') wf.connect(WMH1_025, 'out_file', WMH_output, 'mask1025') #wf.connect(WMH2_025,'out_file',WMH_output,'mask2025') #wf.connect(WMH3_025,'out_file',WMH_output,'mask3025') wf.connect(WMH1_050, 'out_file', WMH_output, 'mask1050') #wf.connect(WMH2_050,'out_file',WMH_output,'mask2050') #wf.connect(WMH3_050,'out_file',WMH_output,'mask3050') wf.connect(WMH1_075, 'out_file', WMH_output, 'mask1075') #wf.connect(WMH2_075,'out_file',WMH_output,'mask2070') #wf.connect(WMH3_075,'out_file',WMH_output,'mask3075') return wf
def custom_brain_extraction(output_base_name,
                            in_file,
                            output_dir,
                            output_type,
                            median_radius=3,
                            numpass=1,
                            autocrop=False,
                            dilate=2,
                            selem=10):
    """Create a brain mask for 4D diffusion data and apply it.

    The 4D diffusion image is averaged over the 4th (diffusion) axis and
    the average is used to compute a rough brain mask with FSL BET.  That
    mask is dilated, applied to the average image, and the result is fed
    to dipy's ``median_otsu``.  Holes possibly left in the mask by the
    Otsu method are then filled with a morphological operation
    (``fill_mask``) before the mask is applied to the full 4D series.

    Parameters
    ----------
    output_base_name : str
        BIDS-compatible ``sub-name_ses-name`` prefix for the outputs.
    in_file : str or path-like
        Path to the original 4D diffusion data.
    output_dir : str
        Existing directory of the form
        ``/base_bids/derivatives/fsl-dipy_dti-preproc/sub-name_ses-name``.
    output_type : {"NIFTI", "NIFTI_GZ"}
        Type of NIfTI output desired.
    median_radius, numpass, autocrop, dilate : optional
        Options forwarded to ``median_otsu``.
    selem : int, optional
        Structuring-element size forwarded to ``dilate_mask``.

    Returns
    -------
    tuple of str
        ``(mask_filename, masked_filename)``: paths of the saved brain
        mask and of the masked 4D diffusion data.  Returned whether the
        files were just computed or already existed (in which case the
        computation is skipped).

    Raises
    ------
    ValueError
        If ``output_type`` is not one of the recognized values.
    """
    if output_type == "NIFTI":
        ext = ".nii"
    elif output_type == "NIFTI_GZ":
        ext = ".nii.gz"
    else:
        raise ValueError(
            "Output file type {} not recognized".format(output_type))

    mask_output_filename = "{}/{}_mask{}".format(output_dir,
                                                 output_base_name, ext)
    masked_output_filename = "{}/{}_masked{}".format(output_dir,
                                                     output_base_name, ext)

    if isfile(masked_output_filename):
        # Output already present: skip the expensive masking step, but
        # still return the two filenames so callers always receive the
        # same tuple shape.  (Previously this branch returned None,
        # which was inconsistent with the normal return value.)
        print("skipping masking")
        return (mask_output_filename, masked_output_filename)

    diff_image = nb.load(in_file)
    diff_image_array = diff_image.get_fdata()

    # Average over the diffusion (4th) axis to obtain one 3D volume on
    # which to compute the rough BET mask.
    diff_mean = diff_image_array.mean(axis=3)
    diff_mean_filename = "{}/{}_mean_temp{}".format(output_dir,
                                                    output_base_name, ext)
    nb.save(nb.Nifti1Image(diff_mean, diff_image.affine, diff_image.header),
            diff_mean_filename)

    bet_out_file = "{}/{}_ec_bet{}".format(output_dir, output_base_name, ext)
    bet_out_file_mask = "{}/{}_ec_bet_mask{}".format(output_dir,
                                                     output_base_name, ext)
    bet = BET(in_file=diff_mean_filename,
              out_file=bet_out_file,
              mask=True,
              output_type=output_type)
    bet.run()

    try:
        b0_mask = nb.load(bet_out_file_mask)
        b0_mask_array = b0_mask.get_fdata()

        # Dilate the rough BET mask so median_otsu works on a generous
        # brain region rather than a tight one.
        b0_mask_dilate = dilate_mask(b0_mask_array, selem=selem)
        diff_mean_dilate_mask = diff_mean * b0_mask_dilate
        _, mask = median_otsu(diff_mean_dilate_mask,
                              vol_idx=None,
                              median_radius=median_radius,
                              numpass=numpass,
                              autocrop=autocrop,
                              dilate=dilate)

        # The Otsu method can leave holes inside the brain; close them
        # before applying the mask to the full 4D series.
        mask_filled = fill_mask(mask)
        masked_diff = diff_image_array * np.expand_dims(mask_filled, 3)

        nb.save(nb.Nifti1Image(mask_filled, diff_image.affine,
                               diff_image.header),
                mask_output_filename)
        nb.save(nb.Nifti1Image(masked_diff, diff_image.affine,
                               diff_image.header),
                masked_output_filename)
    finally:
        # Always clean up the temporary intermediates, even if masking
        # fails part-way (previously a failure leaked these files).
        # Guard with isfile() because BET may not have produced its
        # outputs on an error path.
        for temp_file in (diff_mean_filename, bet_out_file,
                          bet_out_file_mask):
            if isfile(temp_file):
                remove(temp_file)

    return (mask_output_filename, masked_output_filename)