def test_create_eddy_correct_pipeline():
    fsl_course_dir = os.path.abspath('fsl_course_data')
    dwi_file = os.path.join(fsl_course_dir, "fdt/subj1/data.nii.gz")

    nipype_eddycorrect = create_eddy_correct_pipeline("nipype_eddycorrect")
    nipype_eddycorrect.inputs.inputnode.in_file = dwi_file
    nipype_eddycorrect.inputs.inputnode.ref_num = 0

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_eddycorrect = pe.Node(interface=fsl.EddyCorrect(), name="original_eddycorrect")
    original_eddycorrect.inputs.in_file = dwi_file
    original_eddycorrect.inputs.ref_num = 0

    test = pe.Node(util.AssertEqual(), name="eddy_corrected_dwi_test")

    pipeline = pe.Workflow(name="test_eddycorrect")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_eddycorrect_")

    pipeline.connect([
        (nipype_eddycorrect, test, [("outputnode.eddy_corrected", "volume1")]),
        (original_eddycorrect, test, [("eddy_corrected", "volume2")]),
    ])

    pipeline.run(plugin='Linear')
    shutil.rmtree(pipeline.base_dir)
def test_create_eddy_correct_pipeline():
    fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA'])
    dwi_file = os.path.join(fsl_course_dir, "fdt1/subj1/data.nii.gz")

    trim_dwi = pe.Node(fsl.ExtractROI(t_min=0, t_size=2), name="trim_dwi")
    trim_dwi.inputs.in_file = dwi_file

    nipype_eddycorrect = create_eddy_correct_pipeline("nipype_eddycorrect")
    nipype_eddycorrect.inputs.inputnode.ref_num = 0

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        original_eddycorrect = pe.Node(interface=fsl.EddyCorrect(), name="original_eddycorrect")
    original_eddycorrect.inputs.ref_num = 0

    test = pe.Node(util.AssertEqual(), name="eddy_corrected_dwi_test")

    pipeline = pe.Workflow(name="test_eddycorrect")
    pipeline.base_dir = tempfile.mkdtemp(prefix="nipype_test_eddycorrect_")

    pipeline.connect([
        (trim_dwi, original_eddycorrect, [("roi_file", "in_file")]),
        (trim_dwi, nipype_eddycorrect, [("roi_file", "inputnode.in_file")]),
        (nipype_eddycorrect, test, [("outputnode.eddy_corrected", "volume1")]),
        (original_eddycorrect, test, [("eddy_corrected", "volume2")]),
    ])

    pipeline.run(plugin='Linear')
    shutil.rmtree(pipeline.base_dir)
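# Both tests above assume FSL and the FSL course dataset are installed locally.
# A minimal skip guard (sketch, assuming the suite runs under pytest; the helper
# below is not part of the original tests) keeps the suite green when either
# dependency is missing:
import os

import pytest

fsl_available = os.environ.get("FSLDIR") is not None
course_data_available = "FSL_COURSE_DATA" in os.environ


@pytest.mark.skipif(not (fsl_available and course_data_available),
                    reason="requires FSL and the FSL course dataset (FSL_COURSE_DATA)")
def test_create_eddy_correct_pipeline_guarded():
    test_create_eddy_correct_pipeline()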
def eddyCorrect(inDwi, output):
    from nipype.interfaces import fsl

    eddyc = fsl.EddyCorrect()
    eddyc.inputs.in_file = inDwi
    eddyc.inputs.out_file = output
    eddyc.inputs.ref_num = 0
    eddyc.run()
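# Example call for the helper above (sketch): the file names are placeholders,
# and FSL must be on the PATH for fsl.EddyCorrect to run.
eddyCorrect("sub-01_dwi.nii.gz", "sub-01_dwi_eddy.nii.gz")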
def create_workflow(self, flow, inputnode, outputnode): # print inputnode processing_input = pe.Node(interface=util.IdentityInterface( fields=['diffusion', 'aparc_aseg', 'aseg', 'bvecs', 'bvals', 'grad', 'acqp', 'index', 'T1', 'brain', 'brain_mask', 'wm_mask_file', 'roi_volumes']), name='processing_input') # For DSI acquisition: extract the hemisphere that contains the data # if self.config.start_vol > 0 or self.config.end_vol < self.config.max_vol: # # split_vol = pe.Node(interface=splitDiffusion(),name='split_vol') # split_vol.inputs.start = self.config.start_vol # split_vol.inputs.end = self.config.end_vol # # split_bvecbval = pe.Node(interface=splitBvecBval(),name='split_bvecsbvals') # split_bvecbval.inputs.start = self.config.start_vol # split_bvecbval.inputs.end = self.config.end_vol # split_bvecbval.inputs.orientation = 'h' # split_bvecbval.inputs.delimiter = ' ' # # flow.connect([ # (inputnode,split_vol,[('diffusion','in_file')]), # (split_vol,processing_input,[('data','diffusion')]), # (inputnode,split_bvecbval,[('bvecs','bvecs'),('bvals','bvals')]), # (split_bvecbval,processing_input,[('bvecs_split','bvecs'),('bvals_split','bvals')]) # ]) # # else: flow.connect([ (inputnode, processing_input, [ ('diffusion', 'diffusion'), ('bvecs', 'bvecs'), ('bvals', 'bvals')]), ]) flow.connect([ (inputnode, processing_input, [('T1', 'T1'), ('aparc_aseg', 'aparc_aseg'), ('aseg', 'aseg'), ('brain', 'brain'), ('brain_mask', 'brain_mask'), ('wm_mask_file', 'wm_mask_file'), ('roi_volumes', 'roi_volumes')]), (processing_input, outputnode, [('bvals', 'bvals')]) ]) # Conversion to MRTrix image format ".mif", grad_fsl=(inputnode.inputs.bvecs,inputnode.inputs.bvals) mr_convert = pe.Node(interface=MRConvert( stride=[1, 2, +3, +4]), name='mr_convert') mr_convert.inputs.quiet = True mr_convert.inputs.force_writing = True concatnode = pe.Node(interface=util.Merge(2), name='concatnode') def convertList2Tuple(lists): # print "******************************************",tuple(lists) return tuple(lists) flow.connect([ # (processing_input,concatnode,[('bvecs','in1'),('bvals','in2')]), (processing_input, concatnode, [('bvecs', 'in1')]), (processing_input, concatnode, [('bvals', 'in2')]), (concatnode, mr_convert, [ (('out', convertList2Tuple), 'grad_fsl')]) ]) # Convert Freesurfer data mr_convert_brainmask = pe.Node( interface=MRConvert(out_filename='brainmaskfull.nii.gz', stride=[ 1, 2, 3], output_datatype='float32'), name='mr_convert_brain_mask') mr_convert_brain = pe.Node( interface=MRConvert(out_filename='anat_masked.nii.gz', stride=[ 1, 2, 3], output_datatype='float32'), name='mr_convert_brain') mr_convert_T1 = pe.Node( interface=MRConvert(out_filename='anat.nii.gz', stride=[ 1, 2, 3], output_datatype='float32'), name='mr_convert_T1') mr_convert_roi_volumes = pe.Node( interface=ApplymultipleMRConvert( stride=[1, 2, 3], output_datatype='float32', extension='nii'), name='mr_convert_roi_volumes') mr_convert_wm_mask_file = pe.Node( interface=MRConvert(out_filename='wm_mask_file.nii.gz', stride=[ 1, 2, 3], output_datatype='float32'), name='mr_convert_wm_mask_file') flow.connect([ (processing_input, mr_convert_brainmask, [('brain_mask', 'in_file')]), (processing_input, mr_convert_brain, [('brain', 'in_file')]), (processing_input, mr_convert_T1, [('T1', 'in_file')]), (processing_input, mr_convert_roi_volumes, [('roi_volumes', 'in_files')]), (processing_input, mr_convert_wm_mask_file, [('wm_mask_file', 'in_file')]) ]) # if self.config.partial_volume_estimation: # pve_extractor_from_5tt = 
pe.Node(interface=ExtractPVEsFrom5TT(),name='pve_extractor_from_5tt') # pve_extractor.inputs.pve_csf_file = 'pve_0.nii.gz' # pve_extractor.inputs.pve_csf_file = 'pve_1.nii.gz' # pve_extractor.inputs.pve_csf_file = 'pve_2.nii.gz' # # flow.connect([ # (mrtrix_5tt,pve_extractor_from_5tt,[('out_file','in_5tt')]), # (processing_input,pve_extractor_from_5tt,[('T1','ref_image')]), # ]) # from nipype.interfaces import fsl # # Run FAST for partial volume estimation (WM;GM;CSF) # fastr = pe.Node(interface=fsl.FAST(),name='fastr') # fastr.inputs.out_basename = 'fast_' # fastr.inputs.number_classes = 3 # # if self.config.fast_use_priors: # fsl_flirt = pe.Node(interface=fsl.FLIRT(out_file='Template2Input.nii.gz',out_matrix_file='template2input.mat'),name="linear_registration") # #fsl_flirt.inputs.in_file = os.environ['FSLDIR']+'/data/standard/MNI152_T1_1mm.nii.gz' # template_path = os.path.join('data', 'segmentation', 'ants_template_IXI') # fsl_flirt.inputs.in_file = pkg_resources.resource_filename('cmtklib', os.path.join(template_path, 'T_template2.nii.gz')) # #fsl_flirt.inputs.dof = self.config.dof # #fsl_flirt.inputs.cost = self.config.fsl_cost # #fsl_flirt.inputs.no_search = self.config.no_search # fsl_flirt.inputs.verbose = True # # flow.connect([ # (mr_convert_T1, fsl_flirt, [('converted','reference')]), # ]) # # fastr.inputs.use_priors = True # fastr.inputs.other_priors = [pkg_resources.resource_filename('cmtklib', os.path.join(template_path,'3Class-Priors','priors1.nii.gz')), # pkg_resources.resource_filename('cmtklib', os.path.join(template_path,'3Class-Priors','priors2.nii.gz')), # pkg_resources.resource_filename('cmtklib', os.path.join(template_path,'3Class-Priors','priors3.nii.gz')) # ] # flow.connect([ # (fsl_flirt, fastr, [('out_matrix_file','init_transform')]), # ]) # # flow.connect([ # (mr_convert_brain,fastr,[('converted','in_files')]), # # (fastr,outputnode,[('partial_volume_files','partial_volume_files')]) # ]) # Threshold converted Freesurfer brainmask into a binary mask mr_threshold_brainmask = pe.Node(interface=MRThreshold(abs_value=1, out_file='brain_mask.nii.gz'), name='mr_threshold_brainmask') flow.connect([ (mr_convert_brainmask, mr_threshold_brainmask, [('converted', 'in_file')]) ]) # Extract b0 and create DWI mask flirt_dwimask_pre = pe.Node(interface=fsl.FLIRT(out_file='brain2b0.nii.gz', out_matrix_file='brain2b0aff'), name='flirt_dwimask_pre') costs = ['mutualinfo', 'corratio', 'normcorr', 'normmi', 'leastsq', 'labeldiff', 'bbr'] flirt_dwimask_pre.inputs.cost = costs[3] flirt_dwimask_pre.inputs.cost_func = costs[3] flirt_dwimask_pre.inputs.dof = 6 flirt_dwimask_pre.inputs.no_search = False flirt_dwimask = pe.Node( interface=fsl.FLIRT(out_file='dwi_brain_mask.nii.gz', apply_xfm=True, interp='nearestneighbour'), name='flirt_dwimask') mr_convert_b0 = pe.Node(interface=MRConvert(out_filename='b0.nii.gz', stride=[+1, +2, +3]), name='mr_convert_b0') mr_convert_b0.inputs.extract_at_axis = 3 mr_convert_b0.inputs.extract_at_coordinate = [0] flow.connect([ (processing_input, mr_convert_b0, [('diffusion', 'in_file')]) ]) flow.connect([ (mr_convert_T1, flirt_dwimask_pre, [('converted', 'in_file')]), (mr_convert_b0, flirt_dwimask_pre, [('converted', 'reference')]), (mr_convert_b0, flirt_dwimask, [('converted', 'reference')]), (flirt_dwimask_pre, flirt_dwimask, [ ('out_matrix_file', 'in_matrix_file')]), (mr_threshold_brainmask, flirt_dwimask, [('thresholded', 'in_file')]) ]) # Diffusion data denoising if self.config.denoising: mr_convert_noise = 
pe.Node(interface=MRConvert(out_filename='diffusion_noisemap.nii.gz', stride=[+1, +2, +3, +4]), name='mr_convert_noise') if self.config.denoising_algo == "MRtrix (MP-PCA)": mr_convert.inputs.out_filename = 'diffusion.mif' dwi_denoise = pe.Node( interface=DWIDenoise( out_file='diffusion_denoised.mif', out_noisemap='diffusion_noisemap.mif'), name='dwi_denoise') dwi_denoise.inputs.force_writing = True dwi_denoise.inputs.debug = True dwi_denoise.ignore_exception = True flow.connect([ # (processing_input,mr_convert,[('diffusion','in_file')]), (processing_input, mr_convert, [('diffusion', 'in_file')]), (mr_convert, dwi_denoise, [('converted', 'in_file')]), (flirt_dwimask, dwi_denoise, [('out_file', 'mask')]), ]) elif self.config.denoising_algo == "Dipy (NLM)": mr_convert.inputs.out_filename = 'diffusion_denoised.mif' dwi_denoise = pe.Node( interface=dipy.Denoise(), name='dwi_denoise') if self.config.dipy_noise_model == "Gaussian": dwi_denoise.inputs.noise_model = "gaussian" elif self.config.dipy_noise_model == "Rician": dwi_denoise.inputs.noise_model = "rician" flow.connect([ (processing_input, dwi_denoise, [('diffusion', 'in_file')]), (flirt_dwimask, dwi_denoise, [('out_file', 'in_mask')]), (dwi_denoise, mr_convert, [('out_file', 'in_file')]) ]) flow.connect([ (dwi_denoise, mr_convert_noise, [('out_file', 'in_file')]), (mr_convert_noise, outputnode, [('converted', 'diffusion_noisemap')]) ]) else: mr_convert.inputs.out_filename = 'diffusion.mif' flow.connect([ (processing_input, mr_convert, [('diffusion', 'in_file')]) ]) mr_convert_b = pe.Node(interface=MRConvert(out_filename='diffusion_corrected.nii.gz', stride=[+1, +2, +3, +4]), name='mr_convert_b') if self.config.bias_field_correction: mr_convert_bias = pe.Node(interface=MRConvert(out_filename='diffusion_biasfield.nii.gz', stride=[+1, +2, +3, +4]), name='mr_convert_bias') if self.config.bias_field_algo == "ANTS N4": dwi_biascorrect = pe.Node( interface=DWIBiasCorrect( use_ants=True, out_bias='diffusion_denoised_biasfield.mif'), name='dwi_biascorrect') elif self.config.bias_field_algo == "FSL FAST": dwi_biascorrect = pe.Node( interface=DWIBiasCorrect( use_fsl=True, out_bias='diffusion_denoised_biasfield.mif'), name='dwi_biascorrect') dwi_biascorrect.inputs.debug = True if self.config.denoising: if self.config.denoising_algo == "MRtrix (MP-PCA)": flow.connect([ (dwi_denoise, dwi_biascorrect, [('out_file', 'in_file')]), (flirt_dwimask, dwi_biascorrect, [('out_file', 'mask')]), (dwi_biascorrect, mr_convert_b, [('out_file', 'in_file')]) ]) elif self.config.denoising_algo == "Dipy (NLM)": flow.connect([ (mr_convert, dwi_biascorrect, [('converted', 'in_file')]), (flirt_dwimask, dwi_biascorrect, [('out_file', 'mask')]), (dwi_biascorrect, mr_convert_b, [('out_file', 'in_file')]) ]) else: flow.connect([ (mr_convert, dwi_biascorrect, [('converted', 'in_file')]), (flirt_dwimask, dwi_biascorrect, [('out_file', 'mask')]) ]) flow.connect([ (dwi_biascorrect, mr_convert_bias, [('out_file', 'in_file')]), (mr_convert_bias, outputnode, [('converted', 'diffusion_biasfield')]) ]) else: if self.config.denoising: if self.config.denoising_algo == "MRtrix (MP-PCA)": flow.connect([ (dwi_denoise, mr_convert_b, [('out_file', 'in_file')]) ]) elif self.config.denoising_algo == "Dipy (NLM)": flow.connect([ (mr_convert, mr_convert_b, [('converted', 'in_file')]) ]) else: flow.connect([ (mr_convert, mr_convert_b, [('converted', 'in_file')]) ]) extract_grad_mrtrix = pe.Node(interface=ExtractMRTrixGrad(out_grad_mrtrix='grad.txt'), name='extract_grad_mrtrix') flow.connect([ 
(mr_convert, extract_grad_mrtrix, [("converted", "in_file")]) ]) # extract_grad_fsl = pe.Node(interface=mrt.MRTrixInfo(out_grad_mrtrix=('diffusion_denoised.bvec','diffusion_denoised.bval')),name='extract_grad_fsl') # TODO extract the total readout directly from the BIDS json file acqpnode = pe.Node(interface=CreateAcqpFile( total_readout=self.config.total_readout), name='acqpnode') indexnode = pe.Node(interface=CreateIndexFile(), name='indexnode') flow.connect([ (extract_grad_mrtrix, indexnode, [ ("out_grad_mrtrix", "in_grad_mrtrix")]) ]) fs_mriconvert = pe.Node( interface=fs.MRIConvert( out_type='niigz', out_file='diffusion_preproc_resampled.nii.gz'), name="diffusion_resample") fs_mriconvert.inputs.vox_size = self.config.resampling fs_mriconvert.inputs.resample_type = self.config.interpolation mr_convert_b0_resample = pe.Node(interface=MRConvert(out_filename='b0_resampled.nii.gz', stride=[+1, +2, +3]), name='mr_convert_b0_resample') mr_convert_b0_resample.inputs.extract_at_axis = 3 mr_convert_b0_resample.inputs.extract_at_coordinate = [0] # fs_mriconvert_b0 = pe.Node(interface=fs.MRIConvert(out_type='niigz',out_file='b0_resampled.nii.gz'),name="b0_resample") # fs_mriconvert_b0.inputs.vox_size = self.config.resampling # fs_mriconvert_b0.inputs.resample_type = self.config.interpolation flow.connect([ (fs_mriconvert, mr_convert_b0_resample, [('out_file', 'in_file')]), ]) # resampling Freesurfer data and setting output type to short fs_mriconvert_T1 = pe.Node(interface=fs.MRIConvert(out_type='niigz', out_file='anat_resampled.nii.gz'), name="anat_resample") fs_mriconvert_T1.inputs.vox_size = self.config.resampling fs_mriconvert_T1.inputs.resample_type = self.config.interpolation flow.connect([ (mr_convert_T1, fs_mriconvert_T1, [('converted', 'in_file')]), # (mr_convert_b0_resample,fs_mriconvert_T1,[('converted','reslice_like')]), (fs_mriconvert_T1, outputnode, [('out_file', 'T1')]) ]) fs_mriconvert_brain = pe.Node( interface=fs.MRIConvert( out_type='niigz', out_file='anat_masked_resampled.nii.gz'), name="anat_masked_resample") fs_mriconvert_brain.inputs.vox_size = self.config.resampling fs_mriconvert_brain.inputs.resample_type = self.config.interpolation flow.connect([ (mr_convert_brain, fs_mriconvert_brain, [('converted', 'in_file')]), # (mr_convert_b0_resample,fs_mriconvert_brain,[('converted','reslice_like')]), (fs_mriconvert_brain, outputnode, [('out_file', 'brain')]) ]) fs_mriconvert_brainmask = pe.Node( interface=fs.MRIConvert( out_type='niigz', resample_type='nearest', out_file='brain_mask_resampled.nii.gz'), name="brain_mask_resample") fs_mriconvert_brainmask.inputs.vox_size = self.config.resampling flow.connect([ (mr_threshold_brainmask, fs_mriconvert_brainmask, [('thresholded', 'in_file')]), # (mr_convert_b0_resample,fs_mriconvert_brainmask,[('converted','reslice_like')]), (fs_mriconvert_brainmask, outputnode, [('out_file', 'brain_mask')]) ]) fs_mriconvert_brainmaskfull = pe.Node( interface=fs.MRIConvert( out_type='niigz', out_file='brain_mask_full_resampled.nii.gz'), name="brain_mask_full_resample") fs_mriconvert_brainmaskfull.inputs.vox_size = self.config.resampling fs_mriconvert_brainmaskfull.inputs.resample_type = self.config.interpolation flow.connect([ (mr_convert_brainmask, fs_mriconvert_brainmaskfull, [('converted', 'in_file')]), # (mr_convert_b0_resample,fs_mriconvert_brainmaskfull,[('converted','reslice_like')]), (fs_mriconvert_brainmaskfull, outputnode, [('out_file', 'brain_mask_full')]) ]) fs_mriconvert_wm_mask = pe.Node( interface=fs.MRIConvert( out_type='niigz', 
resample_type='nearest', out_file='wm_mask_resampled.nii.gz'), name="wm_mask_resample") fs_mriconvert_wm_mask.inputs.vox_size = self.config.resampling flow.connect([ (mr_convert_wm_mask_file, fs_mriconvert_wm_mask, [('converted', 'in_file')]), # (mr_convert_b0_resample,fs_mriconvert_wm_mask,[('converted','reslice_like')]), (fs_mriconvert_wm_mask, outputnode, [('out_file', 'wm_mask_file')]) ]) fs_mriconvert_ROIs = pe.MapNode(interface=fs.MRIConvert(out_type='niigz', resample_type='nearest'), iterfield=['in_file'], name="ROIs_resample") fs_mriconvert_ROIs.inputs.vox_size = self.config.resampling flow.connect([ (mr_convert_roi_volumes, fs_mriconvert_ROIs, [('converted_files', 'in_file')]), # (mr_convert_b0_resample,fs_mriconvert_ROIs,[('converted','reslice_like')]), (fs_mriconvert_ROIs, outputnode, [("out_file", "roi_volumes")]) ]) # fs_mriconvert_PVEs = pe.MapNode(interface=fs.MRIConvert(out_type='niigz'),name="PVEs_resample",iterfield=['in_file']) # fs_mriconvert_PVEs.inputs.vox_size = self.config.resampling # fs_mriconvert_PVEs.inputs.resample_type = self.config.interpolation # flow.connect([ # (fastr,fs_mriconvert_PVEs,[('partial_volume_files','in_file')]), # #(mr_convert_b0_resample,fs_mriconvert_ROIs,[('converted','reslice_like')]), # (fs_mriconvert_PVEs,outputnode,[("out_file","partial_volume_files")]) # ]) fs_mriconvert_dwimask = pe.Node(interface=fs.MRIConvert(out_type='niigz', resample_type='nearest', out_file='dwi_brain_mask_resampled.nii.gz'), name="dwi_brainmask_resample") # fs_mriconvert_dwimask.inputs.vox_size = self.config.resampling flow.connect([ (flirt_dwimask, fs_mriconvert_dwimask, [('out_file', 'in_file')]), (mr_convert_b0_resample, fs_mriconvert_dwimask, [('converted', 'reslice_like')]), (fs_mriconvert_dwimask, outputnode, [('out_file', 'dwi_brain_mask')]) ]) # TODO Implementation of FSL Topup if self.config.eddy_current_and_motion_correction: if self.config.eddy_correction_algo == 'FSL eddy_correct': eddy_correct = pe.Node(interface=fsl.EddyCorrect(ref_num=0, out_file='eddy_corrected.nii.gz'), name='eddy_correct') flow.connect([ (processing_input, outputnode, [("bvecs", "bvecs_rot")]) ]) if self.config.eddy_correct_motion_correction: mc_flirt = pe.Node( interface=fsl.MCFLIRT( out_file='motion_corrected.nii.gz', ref_vol=0, save_mats=True), name='motion_correction') flow.connect([ (mr_convert_b, mc_flirt, [("converted", "in_file")]) ]) # FIXME rotate b vectors after motion correction (mcflirt) flow.connect([ (mc_flirt, eddy_correct, [("out_file", "in_file")]) ]) else: flow.connect([ (mr_convert_b, eddy_correct, [("converted", "in_file")]) ]) # # DTK needs fixed number of directions (512) # if self.config.start_vol > 0 and self.config.end_vol == self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (eddy_correct,merge_filenames,[("eddy_corrected","in2")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # ]) # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # elif self.config.start_vol > 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(3),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (eddy_correct,merge_filenames,[("eddy_corrected","in2")]), # (split_vol,merge_filenames,[("padding2","in3")]), # ]) 
# merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]) # ]) # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # elif self.config.start_vol == 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (eddy_correct,merge_filenames,[("eddy_corrected","in1")]), # (split_vol,merge_filenames,[("padding2","in2")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]) # ]) # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # else: flow.connect([ (eddy_correct, fs_mriconvert, [ ('eddy_corrected', 'in_file')]), (fs_mriconvert, outputnode, [ ("out_file", "diffusion_preproc")]) ]) else: eddy_correct = pe.Node(interface=cmp_fsl.EddyOpenMP(out_file="eddy_corrected.nii.gz", verbose=True), name='eddy') flow.connect([ (mr_convert_b, eddy_correct, [("converted", "in_file")]), (processing_input, eddy_correct, [("bvecs", "bvecs")]), (processing_input, eddy_correct, [("bvals", "bvals")]), (flirt_dwimask, eddy_correct, [("out_file", "mask")]), (indexnode, eddy_correct, [("index", "index")]), (acqpnode, eddy_correct, [("acqp", "acqp")]) ]) flow.connect([ (eddy_correct, outputnode, [ ("bvecs_rotated", "bvecs_rot")]) ]) # # DTK needs fixed number of directions (512) # if self.config.start_vol > 0 and self.config.end_vol == self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (eddy_correct,merge_filenames,[("eddy_corrected","in1")]) # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # ]) # # resampling diffusion image and setting output type to short # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # # elif self.config.start_vol > 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(3),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (eddy_correct,merge_filenames,[("eddy_corrected","in1")]), # (split_vol,merge_filenames,[("padding2","in3")]) # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # ]) # # resampling diffusion image and setting output type to short # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # elif self.config.start_vol == 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (eddy_correct,merge_filenames,[("eddy_corrected","in1")]), # (split_vol,merge_filenames,[("padding2","in2")]) # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # ]) # # resampling diffusion image and setting output type to short # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # else: # resampling diffusion image and 
setting output type to short flow.connect([ (eddy_correct, fs_mriconvert, [ ('eddy_corrected', 'in_file')]), (fs_mriconvert, outputnode, [ ("out_file", "diffusion_preproc")]) ]) else: # resampling diffusion image and setting output type to short flow.connect([ (mr_convert_b, fs_mriconvert, [("converted", "in_file")]), (fs_mriconvert, outputnode, [ ("out_file", "diffusion_preproc")]), (inputnode, outputnode, [("bvecs", "bvecs_rot")]) ]) # #mr_convertB.inputs.grad_fsl = ('bvecs', 'bvals') # flow.connect([ # (mr_convertF,mr_convertB,[("converted","in_file")]) # ]) # else: # if self.config.start_vol > 0 and self.config.end_vol == self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (mc_flirt,merge_filenames,[("out_file","in2")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # (merge,outputnode,[("merged_file","diffusion_preproc")]) # ]) # elif self.config.start_vol > 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(3),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (mc_flirt,merge_filenames,[("out_file","in2")]), # (split_vol,merge_filenames,[("padding2","in3")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # (merge,outputnode,[("merged_file","diffusion_preproc")]) # ]) # elif self.config.start_vol == 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (mc_flirt,merge_filenames,[("out_file","in1")]), # (split_vol,merge_filenames,[("padding2","in2")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # (merge,outputnode,[("merged_file","diffusion_preproc")]) # ]) # else: # flow.connect([ # (mc_flirt,outputnode,[("out_file","diffusion_preproc")]) # ]) fs_mriconvert_5tt = pe.Node(interface=fs.MRIConvert(out_type='niigz', out_file='act_5tt_resampled.nii.gz'), name="5tt_resample") fs_mriconvert_5tt.inputs.vox_size = self.config.resampling fs_mriconvert_5tt.inputs.resample_type = self.config.interpolation mrtrix_5tt = pe.Node(interface=Generate5tt( out_file='mrtrix_5tt.nii.gz'), name='mrtrix_5tt') mrtrix_5tt.inputs.algorithm = 'freesurfer' # mrtrix_5tt.inputs.algorithm = 'hsvs' flow.connect([ (processing_input, mrtrix_5tt, [('aparc_aseg', 'in_file')]), (mrtrix_5tt, fs_mriconvert_5tt, [('out_file', 'in_file')]), (fs_mriconvert_5tt, outputnode, [('out_file', 'act_5TT')]), ]) # if self.config.partial_volume_estimation: pve_extractor_from_5tt = pe.Node( interface=ExtractPVEsFrom5TT(), name='pve_extractor_from_5tt') pve_extractor_from_5tt.inputs.pve_csf_file = 'pve_0.nii.gz' pve_extractor_from_5tt.inputs.pve_gm_file = 'pve_1.nii.gz' pve_extractor_from_5tt.inputs.pve_wm_file = 'pve_2.nii.gz' flow.connect([ (mrtrix_5tt, pve_extractor_from_5tt, [('out_file', 'in_5tt')]), (processing_input, pve_extractor_from_5tt, [('T1', 'ref_image')]), ]) fs_mriconvert_PVEs = pe.MapNode(interface=fs.MRIConvert(out_type='niigz'), iterfield=['in_file'], name="PVEs_resample") fs_mriconvert_PVEs.inputs.vox_size = self.config.resampling fs_mriconvert_PVEs.inputs.resample_type = self.config.interpolation flow.connect([ (pve_extractor_from_5tt, fs_mriconvert_PVEs, 
[('partial_volume_files', 'in_file')]), # (mr_convert_b0_resample,fs_mriconvert_ROIs,[('converted','reslice_like')]), (fs_mriconvert_PVEs, outputnode, [ ("out_file", "partial_volume_files")]) ]) fs_mriconvert_gmwmi = pe.Node(interface=fs.MRIConvert(out_type='niigz', out_file='gmwmi_resampled.nii.gz'), name="gmwmi_resample") fs_mriconvert_gmwmi.inputs.vox_size = self.config.resampling fs_mriconvert_gmwmi.inputs.resample_type = self.config.interpolation mrtrix_gmwmi = pe.Node(interface=GenerateGMWMInterface( out_file='gmwmi.nii.gz'), name='mrtrix_gmwmi') update_gmwmi = pe.Node( interface=UpdateGMWMInterfaceSeeding(), name='update_gmwmi') update_gmwmi.inputs.out_gmwmi_file = 'gmwmi_proc.nii.gz' flow.connect([ (mrtrix_5tt, mrtrix_gmwmi, [('out_file', 'in_file')]), (mrtrix_gmwmi, update_gmwmi, [('out_file', 'in_gmwmi_file')]), (processing_input, update_gmwmi, [ ('roi_volumes', 'in_roi_volumes')]), (update_gmwmi, fs_mriconvert_gmwmi, [('out_gmwmi_file', 'in_file')]), (fs_mriconvert_gmwmi, outputnode, [('out_file', 'gmwmi')]), ])
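# The grad_fsl connection above uses nipype's connect-with-function form:
# (source, dest, [((output_name, fn), input_name)]) applies fn to the source
# output before it reaches the destination input. A self-contained sketch of
# the same idiom (node names and file names below are placeholders):
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util


def convert_list_to_tuple(lists):
    # MRConvert expects grad_fsl as a (bvecs, bvals) tuple rather than a list
    return tuple(lists)


merge_grad = pe.Node(util.Merge(2), name="merge_bvecs_bvals")
merge_grad.inputs.in1 = "dwi.bvec"
merge_grad.inputs.in2 = "dwi.bval"

sink = pe.Node(util.IdentityInterface(fields=["grad_fsl"]), name="sink")

demo = pe.Workflow(name="grad_fsl_demo")
demo.connect([(merge_grad, sink, [(("out", convert_list_to_tuple), "grad_fsl")])])
# demo.run()  # sink receives ('dwi.bvec', 'dwi.bval')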
# You need the output to be NIfTI, otherwise NODDI cannot read it
# eddy = Node(fsl.Eddy(), name='eddy')
# eddy.inputs.in_acqp = acqparams
# eddy.inputs.in_bval = bval
# eddy.inputs.in_bvec = bvec
# eddy.inputs.in_index = index
# eddy.inputs.use_cuda = True
# eddy.inputs.is_shelled = True
# eddy.inputs.num_threads = 8
# eddy.inputs.niter = 2
# eddy.inputs.output_type = 'NIFTI'  # This will be passed to NODDI and CHARMED

# I tried the new Eddy interface and it did not work very well, so I am falling back to
# the good old eddy_correct, which performs only affine registration.
# I compared the new eddy vs. MCFLIRT vs. eddy_correct, and the latter really did perform much better.
eddy = Node(fsl.EddyCorrect(), name='eddy')
eddy.inputs.ref_num = 0

# -----------------------------------------------------------------------------------------------------
# In[8]
# I decided to use RESTORE (a non-linear algorithm) to fit the kurtosis tensor here
# instead of the default (WLS, linear).
# You will find: /media/amr/Amr_4TB/Work/October_Acquistion/Diffusion_TBSS_Stat/Study_Based_Template/Kurtosis_WLS
# as well as:    /media/amr/Amr_4TB/Work/October_Acquistion/Diffusion_TBSS_Stat/DTI_TBSS_workingdir_Study_Based_Template/DTI_TBSS_Study/_map_id_Kurtosis_FA_WLS
# for all map_list.
# In the processing workingdir there is only one copy, the RESTORE one;
# the folders without the _WLS suffix were produced using RESTORE.


def Kurtosis(dwi, mask):
    import numpy as np
    import dipy.reconst.dki as dki
    import dipy.reconst.dti as dti
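# The original Kurtosis() body is truncated above. What follows is only a sketch of how
# a DKI fit with dipy typically continues; it is not the author's implementation, the
# bval/bvec arguments are added for illustration, and the RESTORE fit mentioned in the
# comments is not reproduced here (the default fit method is used instead).
def kurtosis_sketch(dwi, mask, bval, bvec):
    import os

    import nibabel as nib
    import numpy as np
    import dipy.reconst.dki as dki
    from dipy.core.gradients import gradient_table
    from dipy.io import read_bvals_bvecs

    # Load the diffusion data and brain mask
    dwi_img = nib.load(dwi)
    data = dwi_img.get_fdata()
    mask_data = nib.load(mask).get_fdata().astype(bool)

    # Build the gradient table from FSL-style bval/bvec files
    bvals, bvecs = read_bvals_bvecs(bval, bvec)
    gtab = gradient_table(bvals, bvecs)

    # Fit the diffusion kurtosis model inside the mask and export a few scalar maps
    dkifit = dki.DiffusionKurtosisModel(gtab).fit(data, mask=mask_data)
    out_files = []
    for name, vol in (("FA", dkifit.fa), ("MD", dkifit.md), ("MK", dkifit.mk(0, 3))):
        out_file = os.path.abspath("kurtosis_%s.nii.gz" % name)
        nib.save(nib.Nifti1Image(vol.astype(np.float32), dwi_img.affine), out_file)
        out_files.append(out_file)
    return out_files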
def create_workflow(self, flow, inputnode, outputnode): """Create the stage worflow. Parameters ---------- flow : nipype.pipeline.engine.Workflow The nipype.pipeline.engine.Workflow instance of the Diffusion pipeline inputnode : nipype.interfaces.utility.IdentityInterface Identity interface describing the inputs of the stage outputnode : nipype.interfaces.utility.IdentityInterface Identity interface describing the outputs of the stage """ # print inputnode processing_input = pe.Node( interface=util.IdentityInterface( fields=[ "diffusion", "aparc_aseg", "aseg", "bvecs", "bvals", "grad", "acqp", "index", "T1", "brain", "brain_mask", "wm_mask_file", "roi_volumes", ] ), name="processing_input", ) # fmt: off flow.connect( [ (inputnode, processing_input, [("diffusion", "diffusion"), ("bvecs", "bvecs"), ("bvals", "bvals"), ("T1", "T1"), ("aparc_aseg", "aparc_aseg"), ("aseg", "aseg"), ("brain", "brain"), ("brain_mask", "brain_mask"), ("wm_mask_file", "wm_mask_file"), ("roi_volumes", "roi_volumes")]), (processing_input, outputnode, [("bvals", "bvals")]), ] ) # fmt: on # Conversion to MRTrix image format ".mif", grad_fsl=(inputnode.inputs.bvecs,inputnode.inputs.bvals) mr_convert = pe.Node( interface=MRConvert(stride=[1, 2, +3, +4]), name="mr_convert" ) mr_convert.inputs.quiet = True mr_convert.inputs.force_writing = True concatnode = pe.Node(interface=util.Merge(2), name="concatnode") # fmt: off flow.connect( [ (processing_input, concatnode, [("bvecs", "in1")]), (processing_input, concatnode, [("bvals", "in2")]), (concatnode, mr_convert, [(("out", convert_list_to_tuple), "grad_fsl")]), ] ) # fmt: on # Convert Freesurfer data mr_convert_brainmask = pe.Node( interface=MRConvert( out_filename="brainmaskfull.nii.gz", stride=[1, 2, 3], output_datatype="float32", ), name="mr_convert_brain_mask", ) mr_convert_brain = pe.Node( interface=MRConvert( out_filename="anat_masked.nii.gz", stride=[1, 2, 3], output_datatype="float32", ), name="mr_convert_brain", ) mr_convert_T1 = pe.Node( interface=MRConvert( out_filename="anat.nii.gz", stride=[1, 2, 3], output_datatype="float32" ), name="mr_convert_T1", ) mr_convert_roi_volumes = pe.Node( interface=ApplymultipleMRConvert( stride=[1, 2, 3], output_datatype="float32", extension="nii" ), name="mr_convert_roi_volumes", ) mr_convert_wm_mask_file = pe.Node( interface=MRConvert( out_filename="wm_mask_file.nii.gz", stride=[1, 2, 3], output_datatype="float32", ), name="mr_convert_wm_mask_file", ) # fmt: off flow.connect( [ (processing_input, mr_convert_brainmask, [("brain_mask", "in_file")]), (processing_input, mr_convert_brain, [("brain", "in_file")]), (processing_input, mr_convert_T1, [("T1", "in_file")]), (processing_input, mr_convert_roi_volumes, [("roi_volumes", "in_files")]), (processing_input, mr_convert_wm_mask_file, [("wm_mask_file", "in_file")]), ] ) # fmt: on # if self.config.partial_volume_estimation: # pve_extractor_from_5tt = pe.Node(interface=ExtractPVEsFrom5TT(),name='pve_extractor_from_5tt') # pve_extractor.inputs.pve_csf_file = 'pve_0.nii.gz' # pve_extractor.inputs.pve_csf_file = 'pve_1.nii.gz' # pve_extractor.inputs.pve_csf_file = 'pve_2.nii.gz' # # flow.connect([ # (mrtrix_5tt,pve_extractor_from_5tt,[('out_file','in_5tt')]), # (processing_input,pve_extractor_from_5tt,[('T1','ref_image')]), # ]) # from nipype.interfaces import fsl # # Run FAST for partial volume estimation (WM;GM;CSF) # fastr = pe.Node(interface=fsl.FAST(),name='fastr') # fastr.inputs.out_basename = 'fast_' # fastr.inputs.number_classes = 3 # # if self.config.fast_use_priors: # fsl_flirt 
= pe.Node(interface=fsl.FLIRT(out_file='Template2Input.nii.gz',out_matrix_file='template2input.mat'),name="linear_registration") # #fsl_flirt.inputs.in_file = os.environ['FSLDIR']+'/data/standard/MNI152_T1_1mm.nii.gz' # template_path = os.path.join('data', 'segmentation', 'ants_template_IXI') # fsl_flirt.inputs.in_file = pkg_resources.resource_filename('cmtklib', os.path.join(template_path, 'T_template2.nii.gz')) # #fsl_flirt.inputs.dof = self.config.dof # #fsl_flirt.inputs.cost = self.config.fsl_cost # #fsl_flirt.inputs.no_search = self.config.no_search # fsl_flirt.inputs.verbose = True # # flow.connect([ # (mr_convert_T1, fsl_flirt, [('converted','reference')]), # ]) # # fastr.inputs.use_priors = True # fastr.inputs.other_priors = [pkg_resources.resource_filename('cmtklib', os.path.join(template_path,'3Class-Priors','priors1.nii.gz')), # pkg_resources.resource_filename('cmtklib', os.path.join(template_path,'3Class-Priors','priors2.nii.gz')), # pkg_resources.resource_filename('cmtklib', os.path.join(template_path,'3Class-Priors','priors3.nii.gz')) # ] # flow.connect([ # (fsl_flirt, fastr, [('out_matrix_file','init_transform')]), # ]) # # flow.connect([ # (mr_convert_brain,fastr,[('converted','in_files')]), # # (fastr,outputnode,[('partial_volume_files','partial_volume_files')]) # ]) # Threshold converted Freesurfer brainmask into a binary mask mr_threshold_brainmask = pe.Node( interface=MRThreshold(abs_value=1, out_file="brain_mask.nii.gz"), name="mr_threshold_brainmask", ) # fmt: off flow.connect( [(mr_convert_brainmask, mr_threshold_brainmask, [("converted", "in_file")])] ) # fmt: on # Extract b0 and create DWI mask flirt_dwimask_pre = pe.Node( interface=fsl.FLIRT( out_file="brain2b0.nii.gz", out_matrix_file="brain2b0aff" ), name="flirt_dwimask_pre", ) costs = [ "mutualinfo", "corratio", "normcorr", "normmi", "leastsq", "labeldiff", "bbr", ] flirt_dwimask_pre.inputs.cost = costs[3] flirt_dwimask_pre.inputs.cost_func = costs[3] flirt_dwimask_pre.inputs.dof = 6 flirt_dwimask_pre.inputs.no_search = False flirt_dwimask = pe.Node( interface=fsl.FLIRT( out_file="dwi_brain_mask.nii.gz", apply_xfm=True, interp="nearestneighbour", ), name="flirt_dwimask", ) mr_convert_b0 = pe.Node( interface=MRConvert(out_filename="b0.nii.gz", stride=[+1, +2, +3]), name="mr_convert_b0", ) mr_convert_b0.inputs.extract_at_axis = 3 mr_convert_b0.inputs.extract_at_coordinate = [0] # fmt: off flow.connect( [ (processing_input, mr_convert_b0, [("diffusion", "in_file")]), (mr_convert_T1, flirt_dwimask_pre, [("converted", "in_file")]), (mr_convert_b0, flirt_dwimask_pre, [("converted", "reference")]), (mr_convert_b0, flirt_dwimask, [("converted", "reference")]), (flirt_dwimask_pre, flirt_dwimask, [("out_matrix_file", "in_matrix_file")]), (mr_threshold_brainmask, flirt_dwimask, [("thresholded", "in_file")]), ] ) # fmt: on # Diffusion data denoising if self.config.denoising: mr_convert_noise = pe.Node( interface=MRConvert( out_filename="diffusion_noisemap.nii.gz", stride=[+1, +2, +3, +4] ), name="mr_convert_noise", ) if self.config.denoising_algo == "MRtrix (MP-PCA)": mr_convert.inputs.out_filename = "diffusion.mif" dwi_denoise = pe.Node( interface=DWIDenoise( out_file="diffusion_denoised.mif", out_noisemap="diffusion_noisemap.mif", ), name="dwi_denoise", ) dwi_denoise.inputs.force_writing = True dwi_denoise.inputs.debug = True dwi_denoise.ignore_exception = True # fmt: off flow.connect( [ # (processing_input,mr_convert,[('diffusion','in_file')]), (processing_input, mr_convert, [("diffusion", "in_file")]), (mr_convert, 
dwi_denoise, [("converted", "in_file")]), (flirt_dwimask, dwi_denoise, [("out_file", "mask")]), ] ) # fmt: on elif self.config.denoising_algo == "Dipy (NLM)": mr_convert.inputs.out_filename = "diffusion_denoised.mif" dwi_denoise = pe.Node(interface=dipy.Denoise(), name="dwi_denoise") if self.config.dipy_noise_model == "Gaussian": dwi_denoise.inputs.noise_model = "gaussian" elif self.config.dipy_noise_model == "Rician": dwi_denoise.inputs.noise_model = "rician" # fmt: off flow.connect( [ (processing_input, dwi_denoise, [("diffusion", "in_file")]), (flirt_dwimask, dwi_denoise, [("out_file", "in_mask")]), (dwi_denoise, mr_convert, [("out_file", "in_file")]), ] ) # fmt: on # fmt: off flow.connect( [ (dwi_denoise, mr_convert_noise, [("out_file", "in_file")]), (mr_convert_noise, outputnode, [("converted", "diffusion_noisemap")]), ] ) # fmt: on else: mr_convert.inputs.out_filename = "diffusion.mif" flow.connect([(processing_input, mr_convert, [("diffusion", "in_file")])]) mr_convert_b = pe.Node( interface=MRConvert( out_filename="diffusion_corrected.nii.gz", stride=[+1, +2, +3, +4] ), name="mr_convert_b", ) if self.config.bias_field_correction: mr_convert_bias = pe.Node( interface=MRConvert( out_filename="diffusion_biasfield.nii.gz", stride=[+1, +2, +3, +4] ), name="mr_convert_bias", ) if self.config.bias_field_algo == "ANTS N4": dwi_biascorrect = pe.Node( interface=DWIBiasCorrect( use_ants=True, out_bias="diffusion_denoised_biasfield.mif" ), name="dwi_biascorrect", ) elif self.config.bias_field_algo == "FSL FAST": dwi_biascorrect = pe.Node( interface=DWIBiasCorrect( use_fsl=True, out_bias="diffusion_denoised_biasfield.mif" ), name="dwi_biascorrect", ) dwi_biascorrect.inputs.debug = False if self.config.denoising: if self.config.denoising_algo == "MRtrix (MP-PCA)": # fmt: off flow.connect( [ (dwi_denoise, dwi_biascorrect, [("out_file", "in_file")]), (flirt_dwimask, dwi_biascorrect, [("out_file", "mask")]), # (dwi_biascorrect, mr_convert_b, # [('out_file', 'in_file')]) ] ) # fmt: on elif self.config.denoising_algo == "Dipy (NLM)": # fmt: off flow.connect( [ (mr_convert, dwi_biascorrect, [("converted", "in_file")]), (flirt_dwimask, dwi_biascorrect, [("out_file", "mask")]), # (dwi_biascorrect, mr_convert_b, # [('out_file', 'in_file')]) ] ) # fmt: on else: # fmt: off flow.connect( [ (mr_convert, dwi_biascorrect, [("converted", "in_file")]), (flirt_dwimask, dwi_biascorrect, [("out_file", "mask")]), ] ) # fmt: on # fmt: off flow.connect( [ (dwi_biascorrect, mr_convert_b, [("out_file", "in_file")]), (dwi_biascorrect, mr_convert_bias, [("out_file", "in_file")]), (mr_convert_bias, outputnode, [("converted", "diffusion_biasfield")]), ] ) # fmt: on else: if self.config.denoising: if self.config.denoising_algo == "MRtrix (MP-PCA)": # fmt: off flow.connect( [(dwi_denoise, mr_convert_b, [("out_file", "in_file")])] ) # fmt: on elif self.config.denoising_algo == "Dipy (NLM)": # fmt: off flow.connect( [(mr_convert, mr_convert_b, [("converted", "in_file")])] ) # fmt: on else: # fmt: off flow.connect( [(mr_convert, mr_convert_b, [("converted", "in_file")])] ) # fmt: on extract_grad_mrtrix = pe.Node( interface=ExtractMRTrixGrad(out_grad_mrtrix="grad.txt"), name="extract_grad_mrtrix", ) # fmt: off flow.connect( [(mr_convert, extract_grad_mrtrix, [("converted", "in_file")])] ) # fmt: on # extract_grad_fsl = pe.Node(interface=mrt.MRTrixInfo(out_grad_mrtrix=('diffusion_denoised.bvec','diffusion_denoised.bval')),name='extract_grad_fsl') # TODO extract the total readout directly from the BIDS json file acqpnode = pe.Node( 
interface=CreateAcqpFile(total_readout=self.config.total_readout), name="acqpnode", ) indexnode = pe.Node(interface=CreateIndexFile(), name="indexnode") # fmt: off flow.connect( [(extract_grad_mrtrix, indexnode, [("out_grad_mrtrix", "in_grad_mrtrix")])] ) # fmt: on fs_mriconvert = pe.Node( interface=fs.MRIConvert( out_type="niigz", out_file="diffusion_preproc_resampled.nii.gz" ), name="diffusion_resample", ) fs_mriconvert.inputs.vox_size = self.config.resampling fs_mriconvert.inputs.resample_type = self.config.interpolation mr_convert_b0_resample = pe.Node( interface=MRConvert( out_filename="b0_resampled.nii.gz", stride=[+1, +2, +3] ), name="mr_convert_b0_resample", ) mr_convert_b0_resample.inputs.extract_at_axis = 3 mr_convert_b0_resample.inputs.extract_at_coordinate = [0] # fs_mriconvert_b0 = pe.Node(interface=fs.MRIConvert(out_type='niigz',out_file='b0_resampled.nii.gz'),name="b0_resample") # fs_mriconvert_b0.inputs.vox_size = self.config.resampling # fs_mriconvert_b0.inputs.resample_type = self.config.interpolation # fmt: off flow.connect( [ (fs_mriconvert, mr_convert_b0_resample, [("out_file", "in_file")]), ] ) # fmt: on # resampling Freesurfer data and setting output type to short fs_mriconvert_T1 = pe.Node( interface=fs.MRIConvert(out_type="niigz", out_file="anat_resampled.nii.gz"), name="anat_resample", ) fs_mriconvert_T1.inputs.vox_size = self.config.resampling fs_mriconvert_T1.inputs.resample_type = self.config.interpolation # fmt: off flow.connect( [ (mr_convert_T1, fs_mriconvert_T1, [("converted", "in_file")]), # (mr_convert_b0_resample,fs_mriconvert_T1,[('converted','reslice_like')]), (fs_mriconvert_T1, outputnode, [("out_file", "T1")]), ] ) # fmt: on fs_mriconvert_brain = pe.Node( interface=fs.MRIConvert( out_type="niigz", out_file="anat_masked_resampled.nii.gz" ), name="anat_masked_resample", ) fs_mriconvert_brain.inputs.vox_size = self.config.resampling fs_mriconvert_brain.inputs.resample_type = self.config.interpolation # fmt: off flow.connect( [ (mr_convert_brain, fs_mriconvert_brain, [("converted", "in_file")]), # (mr_convert_b0_resample,fs_mriconvert_brain,[('converted','reslice_like')]), (fs_mriconvert_brain, outputnode, [("out_file", "brain")]), ] ) # fmt: on fs_mriconvert_brainmask = pe.Node( interface=fs.MRIConvert( out_type="niigz", resample_type="nearest", out_file="brain_mask_resampled.nii.gz", ), name="brain_mask_resample", ) fs_mriconvert_brainmask.inputs.vox_size = self.config.resampling # fmt: off flow.connect( [ (mr_threshold_brainmask, fs_mriconvert_brainmask, [("thresholded", "in_file")],), # (mr_convert_b0_resample,fs_mriconvert_brainmask,[('converted','reslice_like')]), (fs_mriconvert_brainmask, outputnode, [("out_file", "brain_mask")]), ] ) # fmt: on fs_mriconvert_brainmaskfull = pe.Node( interface=fs.MRIConvert( out_type="niigz", out_file="brain_mask_full_resampled.nii.gz" ), name="brain_mask_full_resample", ) fs_mriconvert_brainmaskfull.inputs.vox_size = self.config.resampling fs_mriconvert_brainmaskfull.inputs.resample_type = self.config.interpolation # fmt: off flow.connect( [ (mr_convert_brainmask, fs_mriconvert_brainmaskfull, [("converted", "in_file")]), # (mr_convert_b0_resample,fs_mriconvert_brainmaskfull,[('converted','reslice_like')]), (fs_mriconvert_brainmaskfull, outputnode, [("out_file", "brain_mask_full")]), ] ) # fmt: on fs_mriconvert_wm_mask = pe.Node( interface=fs.MRIConvert( out_type="niigz", resample_type="nearest", out_file="wm_mask_resampled.nii.gz", ), name="wm_mask_resample", ) fs_mriconvert_wm_mask.inputs.vox_size = 
self.config.resampling # fmt: off flow.connect( [ (mr_convert_wm_mask_file, fs_mriconvert_wm_mask, [("converted", "in_file")],), # (mr_convert_b0_resample,fs_mriconvert_wm_mask,[('converted','reslice_like')]), (fs_mriconvert_wm_mask, outputnode, [("out_file", "wm_mask_file")]), ] ) # fmt: on fs_mriconvert_ROIs = pe.MapNode( interface=fs.MRIConvert(out_type="niigz", resample_type="nearest"), iterfield=["in_file"], synchronize=True, name="ROIs_resample" ) fs_mriconvert_ROIs.inputs.vox_size = self.config.resampling # fmt: off flow.connect( [ (mr_convert_roi_volumes, fs_mriconvert_ROIs, [("converted_files", "in_file")],), # (mr_convert_b0_resample,fs_mriconvert_ROIs,[('converted','reslice_like')]), (fs_mriconvert_ROIs, outputnode, [("out_file", "roi_volumes")]), ] ) # fmt: on # fs_mriconvert_PVEs = pe.MapNode(interface=fs.MRIConvert(out_type='niigz'),name="PVEs_resample",iterfield=['in_file']) # fs_mriconvert_PVEs.inputs.vox_size = self.config.resampling # fs_mriconvert_PVEs.inputs.resample_type = self.config.interpolation # flow.connect([ # (fastr,fs_mriconvert_PVEs,[('partial_volume_files','in_file')]), # #(mr_convert_b0_resample,fs_mriconvert_ROIs,[('converted','reslice_like')]), # (fs_mriconvert_PVEs,outputnode,[("out_file","partial_volume_files")]) # ]) fs_mriconvert_dwimask = pe.Node( interface=fs.MRIConvert( out_type="niigz", resample_type="nearest", out_file="dwi_brain_mask_resampled.nii.gz", ), name="dwi_brainmask_resample", ) # fs_mriconvert_dwimask.inputs.vox_size = self.config.resampling # fmt: off flow.connect( [ (flirt_dwimask, fs_mriconvert_dwimask, [("out_file", "in_file")]), (mr_convert_b0_resample, fs_mriconvert_dwimask, [("converted", "reslice_like")],), (fs_mriconvert_dwimask, outputnode, [("out_file", "dwi_brain_mask")]), ] ) # fmt: on # TODO Implementation of FSL Topup if self.config.eddy_current_and_motion_correction: if self.config.eddy_correction_algo == "FSL eddy_correct": eddy_correct = pe.Node( interface=fsl.EddyCorrect( ref_num=0, out_file="eddy_corrected.nii.gz" ), name="eddy_correct", ) # fmt: off flow.connect([(processing_input, outputnode, [("bvecs", "bvecs_rot")])]) # fmt: on if self.config.eddy_correct_motion_correction: mc_flirt = pe.Node( interface=fsl.MCFLIRT( out_file="motion_corrected.nii.gz", ref_vol=0, save_mats=True, ), name="motion_correction", ) # fmt: off flow.connect( [ (mr_convert_b, mc_flirt, [("converted", "in_file")]), (mc_flirt, eddy_correct, [("out_file", "in_file")]) ] ) # fmt: on else: # fmt: off flow.connect( [(mr_convert_b, eddy_correct, [("converted", "in_file")])] ) # fmt: on # # DTK needs fixed number of directions (512) # if self.config.start_vol > 0 and self.config.end_vol == self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (eddy_correct,merge_filenames,[("eddy_corrected","in2")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # ]) # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # elif self.config.start_vol > 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(3),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (eddy_correct,merge_filenames,[("eddy_corrected","in2")]), # (split_vol,merge_filenames,[("padding2","in3")]), # ]) # merge = 
pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]) # ]) # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # elif self.config.start_vol == 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (eddy_correct,merge_filenames,[("eddy_corrected","in1")]), # (split_vol,merge_filenames,[("padding2","in2")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]) # ]) # flow.connect([ # (merge,fs_mriconvert,[('merged_file','in_file')]), # (fs_mriconvert,outputnode,[("out_file","diffusion_preproc")]) # ]) # else: # fmt: off flow.connect( [ (eddy_correct, fs_mriconvert, [("eddy_corrected", "in_file")]), (fs_mriconvert, outputnode, [("out_file", "diffusion_preproc")],), ] ) # fmt: on else: eddy_correct = pe.Node( interface=cmp_fsl.EddyOpenMP( out_file="eddy_corrected.nii.gz", verbose=True ), name="eddy", ) # fmt: off flow.connect( [ (mr_convert_b, eddy_correct, [("converted", "in_file")]), (processing_input, eddy_correct, [("bvecs", "bvecs")]), (processing_input, eddy_correct, [("bvals", "bvals")]), (flirt_dwimask, eddy_correct, [("out_file", "mask")]), (indexnode, eddy_correct, [("index", "index")]), (acqpnode, eddy_correct, [("acqp", "acqp")]), ] ) # fmt: on # resampling diffusion image and setting output type to short # fmt: off flow.connect( [ (eddy_correct, outputnode, [("bvecs_rotated", "bvecs_rot")]), (eddy_correct, fs_mriconvert, [("eddy_corrected", "in_file")]), (fs_mriconvert,outputnode, [("out_file", "diffusion_preproc")]) ] ) # fmt: on else: # resampling diffusion image and setting output type to short # fmt: off flow.connect( [ (mr_convert_b, fs_mriconvert, [("converted", "in_file")]), (fs_mriconvert, outputnode, [("out_file", "diffusion_preproc")]), (inputnode, outputnode, [("bvecs", "bvecs_rot")]), ] ) # fmt: on # #mr_convertB.inputs.grad_fsl = ('bvecs', 'bvals') # flow.connect([ # (mr_convertF,mr_convertB,[("converted","in_file")]) # ]) # else: # if self.config.start_vol > 0 and self.config.end_vol == self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (mc_flirt,merge_filenames,[("out_file","in2")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # (merge,outputnode,[("merged_file","diffusion_preproc")]) # ]) # elif self.config.start_vol > 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(3),name='merge_files') # flow.connect([ # (split_vol,merge_filenames,[("padding1","in1")]), # (mc_flirt,merge_filenames,[("out_file","in2")]), # (split_vol,merge_filenames,[("padding2","in3")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # (merge,outputnode,[("merged_file","diffusion_preproc")]) # ]) # elif self.config.start_vol == 0 and self.config.end_vol < self.config.max_vol: # merge_filenames = pe.Node(interface=util.Merge(2),name='merge_files') # flow.connect([ # (mc_flirt,merge_filenames,[("out_file","in1")]), # (split_vol,merge_filenames,[("padding2","in2")]), # ]) # merge = pe.Node(interface=fsl.Merge(dimension='t'),name="merge") # 
flow.connect([ # (merge_filenames,merge,[("out","in_files")]), # (merge,outputnode,[("merged_file","diffusion_preproc")]) # ]) # else: # flow.connect([ # (mc_flirt,outputnode,[("out_file","diffusion_preproc")]) # ]) if self.config.act_tracking: fs_mriconvert_5tt = pe.Node( interface=fs.MRIConvert( out_type="niigz", out_file="act_5tt_resampled.nii.gz" ), name="5tt_resample", ) fs_mriconvert_5tt.inputs.vox_size = self.config.resampling fs_mriconvert_5tt.inputs.resample_type = self.config.interpolation mrtrix_5tt = pe.Node( interface=Generate5tt(out_file="mrtrix_5tt.nii.gz"), name="mrtrix_5tt" ) mrtrix_5tt.inputs.algorithm = "freesurfer" # mrtrix_5tt.inputs.algorithm = 'hsvs' # fmt: off flow.connect( [ (processing_input, mrtrix_5tt, [("aparc_aseg", "in_file")]), (mrtrix_5tt, fs_mriconvert_5tt, [("out_file", "in_file")]), (fs_mriconvert_5tt, outputnode, [("out_file", "act_5TT")]), ] ) # fmt: on if self.config.tracking_tool == 'Dipy': pve_extractor_from_5tt = pe.Node( interface=ExtractPVEsFrom5TT(), name="pve_extractor_from_5tt" ) pve_extractor_from_5tt.inputs.pve_csf_file = "pve_0.nii.gz" pve_extractor_from_5tt.inputs.pve_gm_file = "pve_1.nii.gz" pve_extractor_from_5tt.inputs.pve_wm_file = "pve_2.nii.gz" # fmt: off flow.connect( [ (mrtrix_5tt, pve_extractor_from_5tt, [("out_file", "in_5tt")]), (processing_input, pve_extractor_from_5tt, [("T1", "ref_image")]), ] ) # fmt: on fs_mriconvert_PVEs = pe.MapNode( interface=fs.MRIConvert(out_type="niigz"), iterfield=["in_file"], synchronize=True, name="PVEs_resample" ) fs_mriconvert_PVEs.inputs.vox_size = self.config.resampling fs_mriconvert_PVEs.inputs.resample_type = self.config.interpolation # fmt: off flow.connect( [ (pve_extractor_from_5tt, fs_mriconvert_PVEs, [("partial_volume_files", "in_file")],), # (mr_convert_b0_resample,fs_mriconvert_ROIs,[('converted','reslice_like')]), (fs_mriconvert_PVEs, outputnode, [("out_file", "partial_volume_files")],), ] ) # fmt: on if self.config.gmwmi_seeding: fs_mriconvert_gmwmi = pe.Node( interface=fs.MRIConvert( out_type="niigz", out_file="gmwmi_resampled.nii.gz" ), name="gmwmi_resample", ) fs_mriconvert_gmwmi.inputs.vox_size = self.config.resampling fs_mriconvert_gmwmi.inputs.resample_type = self.config.interpolation mrtrix_gmwmi = pe.Node( interface=GenerateGMWMInterface(out_file="gmwmi.nii.gz"), name="mrtrix_gmwmi", ) update_gmwmi = pe.Node( interface=UpdateGMWMInterfaceSeeding(), name="update_gmwmi" ) update_gmwmi.inputs.out_gmwmi_file = "gmwmi_proc.nii.gz" # fmt: off flow.connect( [ (mrtrix_5tt, mrtrix_gmwmi, [("out_file", "in_file")]), (mrtrix_gmwmi, update_gmwmi, [("out_file", "in_gmwmi_file")]), (processing_input, update_gmwmi, [("roi_volumes", "in_roi_volumes")]), (update_gmwmi, fs_mriconvert_gmwmi, [("out_gmwmi_file", "in_file")]), (fs_mriconvert_gmwmi, outputnode, [("out_file", "gmwmi")]), ] )
def create_workflow(self, flow, inputnode, outputnode):
    processing_input = pe.Node(
        interface=util.IdentityInterface(fields=['diffusion']),
        name='processing_input')

    # For DSI acquisition: extract the hemisphere that contains the data
    if self.config.start_vol > 0 or self.config.end_vol < self.config.max_vol:
        split_vol = pe.Node(interface=splitDiffusion(), name='split_vol')
        split_vol.inputs.start = self.config.start_vol
        split_vol.inputs.end = self.config.end_vol

        flow.connect([
            (inputnode, split_vol, [("diffusion", "in_file")]),
            (split_vol, processing_input, [("data", "diffusion")])
        ])
    else:
        flow.connect([
            (inputnode, processing_input, [("diffusion", "diffusion")])
        ])

    if self.config.motion_correction:
        mc_flirt = pe.Node(
            interface=fsl.MCFLIRT(out_file='motion_corrected.nii.gz', ref_vol=0),
            name='motion_correction')
        flow.connect([
            (processing_input, mc_flirt, [("diffusion", "in_file")])
        ])

        if self.config.eddy_current_correction:
            eddy_correct = pe.Node(
                interface=fsl.EddyCorrect(ref_num=0, out_file='eddy_corrected.nii.gz'),
                name='eddy_correct')
            flow.connect([
                (mc_flirt, eddy_correct, [("out_file", "in_file")])
            ])

            if self.config.start_vol > 0 and self.config.end_vol == self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(2), name='merge_files')
                flow.connect([
                    (split_vol, merge_filenames, [("padding1", "in1")]),
                    (eddy_correct, merge_filenames, [("eddy_corrected", "in2")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            elif self.config.start_vol > 0 and self.config.end_vol < self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(3), name='merge_files')
                flow.connect([
                    (split_vol, merge_filenames, [("padding1", "in1")]),
                    (eddy_correct, merge_filenames, [("eddy_corrected", "in2")]),
                    (split_vol, merge_filenames, [("padding2", "in3")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            elif self.config.start_vol == 0 and self.config.end_vol < self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(2), name='merge_files')
                flow.connect([
                    (eddy_correct, merge_filenames, [("eddy_corrected", "in1")]),
                    (split_vol, merge_filenames, [("padding2", "in2")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            else:
                flow.connect([
                    (eddy_correct, outputnode, [("eddy_corrected", "diffusion_preproc")])
                ])
        else:
            if self.config.start_vol > 0 and self.config.end_vol == self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(2), name='merge_files')
                flow.connect([
                    (split_vol, merge_filenames, [("padding1", "in1")]),
                    (mc_flirt, merge_filenames, [("out_file", "in2")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            elif self.config.start_vol > 0 and self.config.end_vol < self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(3), name='merge_files')
                flow.connect([
                    (split_vol, merge_filenames, [("padding1", "in1")]),
                    (mc_flirt, merge_filenames, [("out_file", "in2")]),
                    (split_vol, merge_filenames, [("padding2", "in3")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            elif self.config.start_vol == 0 and self.config.end_vol < self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(2), name='merge_files')
                flow.connect([
                    (mc_flirt, merge_filenames, [("out_file", "in1")]),
                    (split_vol, merge_filenames, [("padding2", "in2")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            else:
                flow.connect([
                    (mc_flirt, outputnode, [("out_file", "diffusion_preproc")])
                ])
    else:
        if self.config.eddy_current_correction:
            eddy_correct = pe.Node(
                interface=fsl.EddyCorrect(ref_num=0, out_file="eddy_corrected.nii.gz"),
                name='eddy_correct')
            flow.connect([
                (processing_input, eddy_correct, [("diffusion", "in_file")])
            ])

            if self.config.start_vol > 0 and self.config.end_vol == self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(2), name='merge_files')
                flow.connect([
                    (split_vol, merge_filenames, [("padding1", "in1")]),
                    (eddy_correct, merge_filenames, [("eddy_corrected", "in2")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            elif self.config.start_vol > 0 and self.config.end_vol < self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(3), name='merge_files')
                flow.connect([
                    (split_vol, merge_filenames, [("padding1", "in1")]),
                    (eddy_correct, merge_filenames, [("eddy_corrected", "in2")]),
                    (split_vol, merge_filenames, [("padding2", "in3")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            elif self.config.start_vol == 0 and self.config.end_vol < self.config.max_vol:
                merge_filenames = pe.Node(interface=util.Merge(2), name='merge_files')
                flow.connect([
                    (eddy_correct, merge_filenames, [("eddy_corrected", "in1")]),
                    (split_vol, merge_filenames, [("padding2", "in2")]),
                ])
                merge = pe.Node(interface=fsl.Merge(dimension='t'), name="merge")
                flow.connect([
                    (merge_filenames, merge, [("out", "in_files")]),
                    (merge, outputnode, [("merged_file", "diffusion_preproc")])
                ])
            else:
                flow.connect([
                    (eddy_correct, outputnode, [("eddy_corrected", "diffusion_preproc")])
                ])
        else:
            flow.connect([
                (inputnode, outputnode, [("diffusion", "diffusion_preproc")]),
            ])
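# All three create_workflow variants above share the same calling convention: the stage
# receives a parent Workflow plus identity-interface input/output nodes and adds its own
# nodes and connections to it. A minimal wiring sketch for the variant directly above
# (the stub classes and file name are placeholders, not part of the original code):
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util


class _StubConfig:
    # Field names mirror the self.config attributes referenced above
    motion_correction = True
    eddy_current_correction = True
    start_vol, end_vol, max_vol = 0, 514, 514


class _StubStage:
    config = _StubConfig()
    create_workflow = create_workflow  # reuse the function defined above


flow = pe.Workflow(name="diffusion_preproc_stage")
inputnode = pe.Node(util.IdentityInterface(fields=["diffusion"]), name="inputnode")
outputnode = pe.Node(util.IdentityInterface(fields=["diffusion_preproc"]), name="outputnode")

_StubStage().create_workflow(flow, inputnode, outputnode)
inputnode.inputs.diffusion = "dwi.nii.gz"
# flow.run()  # would execute MCFLIRT followed by eddy_correct on the input volume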