def mod_smooth(in_file, brightness_threshold, usans, fwhm, smooth_type,
               reg_file, surface_fwhm, subjects_dir=None):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs

    if smooth_type == 'susan':
        # FSL SUSAN: edge-preserving smoothing
        smooth = fsl.SUSAN()
        smooth.inputs.fwhm = fwhm
        smooth.inputs.brightness_threshold = brightness_threshold
        smooth.inputs.usans = usans
        smooth.inputs.in_file = in_file
        res = smooth.run()
        smoothed_file = res.outputs.smoothed_file
    elif smooth_type == 'isotropic':
        # FSL fslmaths -s: simple isotropic Gaussian smoothing
        smooth = fsl.IsotropicSmooth()
        smooth.inputs.in_file = in_file
        smooth.inputs.fwhm = fwhm
        res = smooth.run()
        smoothed_file = res.outputs.out_file
    elif smooth_type == 'freesurfer':
        # FreeSurfer surface-based smoothing (with volume smoothing for
        # non-cortical voxels)
        smooth = fs.Smooth()
        smooth.inputs.reg_file = reg_file
        smooth.inputs.in_file = in_file
        smooth.inputs.surface_fwhm = surface_fwhm
        smooth.inputs.vol_fwhm = fwhm
        smooth.inputs.proj_frac_avg = (0.0, 1.0, 0.1)
        smooth.inputs.subjects_dir = subjects_dir
        res = smooth.run()
        smoothed_file = res.outputs.smoothed_file
    return smoothed_file
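# --- Usage sketch (not from the original source) ---
# A minimal example of calling mod_smooth above for the 'isotropic' branch.
# The input path and FWHM are hypothetical placeholders; the SUSAN- and
# FreeSurfer-specific arguments are unused in this branch, so None is fine.
smoothed = mod_smooth(
    in_file='/data/sub-01_task-rest_bold.nii.gz',  # hypothetical path
    brightness_threshold=None,                     # only used by 'susan'
    usans=None,                                    # only used by 'susan'
    fwhm=6.0,                                      # smoothing kernel in mm
    smooth_type='isotropic',
    reg_file=None,                                 # only used by 'freesurfer'
    surface_fwhm=None)                             # only used by 'freesurfer'
print(smoothed)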
def smooth_mask_pipeline(self, **name_maps):
    pipeline = self.new_pipeline(
        'smooth_mask',
        desc="Smooths and masks a brain image",
        name_maps=name_maps,
        citations=[fsl_cite])

    # Smoothing process
    smooth = pipeline.add(
        'smooth',
        fsl.IsotropicSmooth(
            fwhm=self.parameter('smoothing_fwhm')),
        inputs={
            'in_file': ('magnitude', nifti_gz_format)},
        outputs={
            'smooth': ('out_file', nifti_gz_format)},
        requirements=[fsl_req.v('5.0.10')])

    pipeline.add(
        'mask',
        fsl.ApplyMask(),
        inputs={
            'in_file': (smooth, 'out_file'),
            'mask_file': ('brain_mask', nifti_gz_format)},
        outputs={
            'smooth_masked': ('out_file', nifti_gz_format)},
        requirements=[fsl_req.v('5.0.10')])

    return pipeline
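# --- Illustration (not from the original source) ---
# The smooth_mask_pipeline above depends on the Arcana/Banana study framework
# (self.new_pipeline, fsl_cite, fsl_req, nifti_gz_format).  The same
# smooth-then-mask chain expressed with plain nipype interface objects looks
# roughly like this; the file names below are hypothetical placeholders.
import nipype.interfaces.fsl as fsl

smoothed = fsl.IsotropicSmooth(in_file='magnitude.nii.gz', fwhm=3.0).run()
masked = fsl.ApplyMask(in_file=smoothed.outputs.out_file,
                       mask_file='brain_mask.nii.gz').run()
print(masked.outputs.out_file)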
def mod_smooth(in_file, mask_file, fwhm, smooth_type, reg_file,
               surface_fwhm, subjects_dir=None):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs
    import os

    if smooth_type == 'susan':
        if fwhm == 0:
            return in_file
        smooth = create_susan_smooth()
        smooth.base_dir = os.getcwd()
        smooth.inputs.inputnode.fwhm = fwhm
        smooth.inputs.inputnode.mask_file = mask_file
        smooth.inputs.inputnode.in_file = in_file
        res = smooth.run()
        smoothed_file = res.outputs.outputnode.smoothed_files
    elif smooth_type == 'isotropic':
        if fwhm == 0:
            return in_file
        smooth = fsl.IsotropicSmooth()
        smooth.inputs.in_file = in_file
        smooth.inputs.fwhm = fwhm
        res = smooth.run()
        smoothed_file = res.outputs.out_file
    elif smooth_type == 'freesurfer':
        if fwhm == 0 and surface_fwhm == 0:
            return in_file
        smooth = fs.Smooth()
        smooth.inputs.reg_file = reg_file
        smooth.inputs.in_file = in_file
        smooth.inputs.surface_fwhm = surface_fwhm
        smooth.inputs.vol_fwhm = fwhm
        smooth.inputs.proj_frac_avg = (0.0, 1.0, 0.1)
        smooth.inputs.subjects_dir = subjects_dir
        res = smooth.run()
        smoothed_file = res.outputs.smoothed_file
    return smoothed_file
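# --- Usage sketch (not from the original source) ---
# Because this variant of mod_smooth does its imports inside the function
# body, it lends itself to being wrapped in a nipype Function node.  Node and
# input values here are hypothetical; note that the 'susan' branch would
# additionally need create_susan_smooth to be importable inside the node.
from nipype import Node
from nipype.interfaces.utility import Function

smooth_node = Node(
    Function(input_names=['in_file', 'mask_file', 'fwhm', 'smooth_type',
                          'reg_file', 'surface_fwhm', 'subjects_dir'],
             output_names=['smoothed_file'],
             function=mod_smooth),
    name='mod_smooth')
smooth_node.inputs.fwhm = 6.0
smooth_node.inputs.smooth_type = 'isotropic'
smooth_node.inputs.reg_file = None
smooth_node.inputs.surface_fwhm = 0
# in_file and mask_file would normally come from upstream nodes, e.g.
# wf.connect(masker, 'mask_file', smooth_node, 'mask_file')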
def init_fitlins_wf(bids_dir, derivatives, out_dir, space, desc=None, ignore=None, force_index=None, model=None, participants=None, smoothing=None, base_dir=None, name='fitlins_wf'): wf = pe.Workflow(name=name, base_dir=base_dir) # Find the appropriate model file(s) specs = ModelSpecLoader(bids_dir=bids_dir) if model is not None: specs.inputs.model = model model_dict = specs.run().outputs.model_spec if not model_dict: raise RuntimeError("Unable to find or construct models") if isinstance(model_dict, list): raise RuntimeError( "Currently unable to run multiple models in parallel - " "please specify model") # # Load and run the model # loader = pe.Node(LoadBIDSModel(bids_dir=bids_dir, derivatives=derivatives, model=model_dict), name='loader') if ignore is not None: loader.inputs.ignore = ignore if force_index is not None: loader.inputs.force_index = force_index if participants is not None: loader.inputs.selectors = {'subject': participants} # Select preprocessed BOLD series to analyze getter = pe.Node(BIDSSelect(bids_dir=bids_dir, derivatives=derivatives, selectors={ 'suffix': 'bold', 'desc': desc, 'space': space }), name='getter') if smoothing: smoothing_params = smoothing.split(':', 1) if smoothing_params[0] != 'iso': raise ValueError(f"Unknown smoothing type {smoothing_params[0]}") smoother = pe.MapNode( fsl.IsotropicSmooth(fwhm=int(smoothing_params[1])), iterfield=['in_file'], name='smoother') l1_model = pe.MapNode( FirstLevelModel(), iterfield=['session_info', 'contrast_info', 'bold_file', 'mask_file'], name='l1_model') # Set up common patterns image_pattern = '[sub-{subject}/][ses-{session}/]' \ '[sub-{subject}_][ses-{session}_]task-{task}[_acq-{acquisition}]' \ '[_rec-{reconstruction}][_run-{run}][_echo-{echo}]_bold_' \ '{suffix<design|corr|contrasts>}.svg' contrast_plot_pattern = '[sub-{subject}/][ses-{session}/]' \ '[sub-{subject}_][ses-{session}_]task-{task}[_acq-{acquisition}]' \ '[_rec-{reconstruction}][_run-{run}][_echo-{echo}]_bold' \ '[_space-{space}]_contrast-{contrast}_ortho.png' contrast_pattern = '[sub-{subject}/][ses-{session}/]' \ '[sub-{subject}_][ses-{session}_]task-{task}[_acq-{acquisition}]' \ '[_rec-{reconstruction}][_run-{run}][_echo-{echo}]_bold' \ '[_space-{space}]_contrast-{contrast}_{suffix<effect|stat>}.nii.gz' # Set up general interfaces # # HTML snippets to be included directly in report, not # saved as individual derivative files # reportlet_dir = Path(base_dir) / 'reportlets' / 'fitlins' reportlet_dir.mkdir(parents=True, exist_ok=True) snippet_pattern = '[sub-{subject}/][ses-{session}/][sub-{subject}_]' \ '[ses-{session}_]task-{task}_[run-{run}_]snippet.html' ds_model_warnings = pe.MapNode(BIDSDataSink( base_directory=str(reportlet_dir), path_patterns=snippet_pattern), iterfield=['entities', 'in_file'], run_without_submitting=True, name='ds_model_warning') plot_design = pe.MapNode(DesignPlot(image_type='svg'), iterfield='data', name='plot_design') plot_corr = pe.MapNode(DesignCorrelationPlot(image_type='svg'), iterfield=['data', 'contrast_info'], name='plot_corr') plot_l1_contrast_matrix = pe.MapNode(ContrastMatrixPlot(image_type='svg'), iterfield=['data', 'contrast_info'], name='plot_l1_contrast_matrix') ds_design = pe.MapNode(BIDSDataSink(base_directory=out_dir, fixed_entities={'suffix': 'design'}, path_patterns=image_pattern), iterfield=['entities', 'in_file'], run_without_submitting=True, name='ds_design') ds_corr = pe.MapNode(BIDSDataSink(base_directory=out_dir, fixed_entities={'suffix': 'corr'}, path_patterns=image_pattern), iterfield=['entities', 
'in_file'], run_without_submitting=True, name='ds_corr') ds_l1_contrasts = pe.MapNode(BIDSDataSink( base_directory=out_dir, fixed_entities={'suffix': 'contrasts'}, path_patterns=image_pattern), iterfield=['entities', 'in_file'], run_without_submitting=True, name='ds_l1_contrasts') # # General Connections # wf.connect([ (loader, ds_model_warnings, [('warnings', 'in_file')]), (loader, l1_model, [('session_info', 'session_info')]), (getter, l1_model, [('mask_files', 'mask_file')]), (l1_model, plot_design, [('design_matrix', 'data')]), ]) if smoothing: wf.connect([ (getter, smoother, [('bold_files', 'in_file')]), (smoother, l1_model, [('out_file', 'in_file')]), ]) else: wf.connect([ (getter, l1_model, [('bold_files', 'bold_file')]), ]) stage = None model = l1_model for ix, step in enumerate(step['Level'] for step in model_dict['Steps']): # Set up elements common across levels # # Because pybids generates the entire model in one go, we will need # various helper nodes to select the correct portions of the model # level = 'l{:d}'.format(ix + 1) # TODO: No longer used at higher level, suggesting we can simply return # entities from loader as a single list select_entities = pe.Node(niu.Select(index=ix), name='select_{}_entities'.format(level), run_without_submitting=True) select_contrasts = pe.Node(niu.Select(index=ix), name='select_{}_contrasts'.format(level), run_without_submitting=True) # Squash the results of MapNodes that may have generated multiple maps # into single lists. # Do the same with corresponding metadata - interface will complain if shapes mismatch collate = pe.Node(MergeAll(['contrast_maps', 'contrast_metadata']), name='collate_{}'.format(level), run_without_submitting=True) # # Plotting # plot_contrasts = pe.MapNode(GlassBrainPlot(image_type='png'), iterfield='data', name='plot_{}_contrasts'.format(level)) # # Derivatives # ds_contrast_maps = pe.Node(BIDSDataSink( base_directory=out_dir, path_patterns=contrast_pattern), run_without_submitting=True, name='ds_{}_contrast_maps'.format(level)) ds_contrast_plots = pe.Node(BIDSDataSink( base_directory=out_dir, path_patterns=contrast_plot_pattern), run_without_submitting=True, name='ds_{}_contrast_plots'.format(level)) if ix == 0: plot_contrasts.inputs.vmax = 20 wf.connect([ (loader, select_entities, [('entities', 'inlist')]), (select_entities, getter, [('out', 'entities')]), (select_entities, ds_model_warnings, [('out', 'entities')]), (select_entities, ds_design, [('out', 'entities')]), (plot_design, ds_design, [('figure', 'in_file')]), (select_contrasts, plot_l1_contrast_matrix, [('out', 'contrast_info')]), (select_contrasts, plot_corr, [('out', 'contrast_info')]), (model, plot_l1_contrast_matrix, [('design_matrix', 'data')]), (model, plot_corr, [('design_matrix', 'data')]), (select_entities, ds_l1_contrasts, [('out', 'entities')]), (select_entities, ds_corr, [('out', 'entities')]), (plot_l1_contrast_matrix, ds_l1_contrasts, [('figure', 'in_file')]), (plot_corr, ds_corr, [('figure', 'in_file')]), ]) # Set up higher levels else: model = pe.MapNode(SecondLevelModel(), iterfield=['contrast_info'], name='{}_model'.format(level)) wf.connect([ (stage, model, [('contrast_maps', 'stat_files'), ('contrast_metadata', 'stat_metadata')]), ]) wf.connect([ (loader, select_contrasts, [('contrast_info', 'inlist')]), (select_contrasts, model, [('out', 'contrast_info')]), (model, collate, [('contrast_maps', 'contrast_maps'), ('contrast_metadata', 'contrast_metadata')]), (collate, plot_contrasts, [('contrast_maps', 'data')]), (collate, 
ds_contrast_maps, [('contrast_maps', 'in_file'), ('contrast_metadata', 'entities')]), (collate, ds_contrast_plots, [('contrast_metadata', 'entities')]), (plot_contrasts, ds_contrast_plots, [('figure', 'in_file')]), ]) stage = model return wf
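# --- Usage sketch (not from the original source) ---
# Constructing the FitLins workflow defined above.  All paths and the model
# file are hypothetical placeholders.  Per the parsing logic in the function,
# `smoothing` is a "<type>:<fwhm>" string and only the 'iso' type is accepted;
# `derivatives` is passed straight through to the BIDS loaders, so a list of
# derivative directories is assumed here.
wf = init_fitlins_wf(
    bids_dir='/data/bids',
    derivatives=['/data/bids/derivatives/fmriprep'],
    out_dir='/data/bids/derivatives/fitlins',
    space='MNI152NLin2009cAsym',
    desc='preproc',
    model='/data/bids/models/model-001_smdl.json',
    smoothing='iso:5',
    base_dir='/tmp/fitlins_work',
    name='fitlins_wf')
wf.run()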
def create_workflow(files, target_file, subject_id, TR, slice_times, norm_threshold=1, num_components=5, vol_fwhm=None, surf_fwhm=None, lowpass_freq=-1, highpass_freq=-1, subjects_dir=None, sink_directory=os.getcwd(), target_subject=['fsaverage3', 'fsaverage4'], name='resting'): wf = Workflow(name=name) # Rename files in case they are named identically name_unique = MapNode(Rename(format_string='rest_%(run)02d'), iterfield=['in_file', 'run'], name='rename') name_unique.inputs.keep_ext = True name_unique.inputs.run = range(1, len(files) + 1) name_unique.inputs.in_file = files realign = Node(nipy.SpaceTimeRealigner(), name="spacetime_realign") realign.inputs.slice_times = slice_times realign.inputs.tr = TR realign.inputs.slice_info = 2 # Comute TSNR on realigned data regressing polynomials upto order 2 tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr') wf.connect(realign, "out_file", tsnr, "in_file") # Compute the median image across runs calc_median = Node(Function(input_names=['in_files'], output_names=['median_file'], function=median, imports=imports), name='median') wf.connect(tsnr, 'detrended_file', calc_median, 'in_files') """Segment and Register """ registration = create_reg_workflow(name='registration') wf.connect(calc_median, 'median_file', registration, 'inputspec.mean_image') registration.inputs.inputspec.subject_id = subject_id registration.inputs.inputspec.subjects_dir = subjects_dir registration.inputs.inputspec.target_image = target_file """Use :class:`nipype.algorithms.rapidart` to determine which of the images in the functional series are outliers based on deviations in intensity or movement. """ art = Node(interface=ArtifactDetect(), name="art") art.inputs.use_differences = [True, True] art.inputs.use_norm = True art.inputs.norm_threshold = norm_threshold art.inputs.zintensity_threshold = 9 art.inputs.mask_type = 'spm_global' art.inputs.parameter_source = 'NiPy' """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal voxel sizes. 
""" wf.connect([ (name_unique, realign, [('out_file', 'in_file')]), (realign, art, [('out_file', 'realigned_files')]), (realign, art, [('par_file', 'realignment_parameters')]), ]) def selectindex(files, idx): import numpy as np from nipype.utils.filemanip import filename_to_list, list_to_filename return list_to_filename( np.array(filename_to_list(files))[idx].tolist()) mask = Node(fsl.BET(), name='getmask') mask.inputs.mask = True wf.connect(calc_median, 'median_file', mask, 'in_file') # get segmentation in normalized functional space def merge_files(in1, in2): out_files = filename_to_list(in1) out_files.extend(filename_to_list(in2)) return out_files # filter some noise # Compute motion regressors motreg = Node(Function( input_names=['motion_params', 'order', 'derivatives'], output_names=['out_files'], function=motion_regressors, imports=imports), name='getmotionregress') wf.connect(realign, 'par_file', motreg, 'motion_params') # Create a filter to remove motion and art confounds createfilter1 = Node(Function( input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'], output_names=['out_files'], function=build_filter1, imports=imports), name='makemotionbasedfilter') createfilter1.inputs.detrend_poly = 2 wf.connect(motreg, 'out_files', createfilter1, 'motion_params') wf.connect(art, 'norm_files', createfilter1, 'comp_norm') wf.connect(art, 'outlier_files', createfilter1, 'outliers') filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii.gz', out_pf_name='pF_mcart.nii.gz', demean=True), iterfield=['in_file', 'design', 'out_res_name'], name='filtermotion') wf.connect(realign, 'out_file', filter1, 'in_file') wf.connect(realign, ('out_file', rename, '_filtermotart'), filter1, 'out_res_name') wf.connect(createfilter1, 'out_files', filter1, 'design') createfilter2 = MapNode(Function(input_names=[ 'realigned_file', 'mask_file', 'num_components', 'extra_regressors' ], output_names=['out_files'], function=extract_noise_components, imports=imports), iterfield=['realigned_file', 'extra_regressors'], name='makecompcorrfilter') createfilter2.inputs.num_components = num_components wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors') wf.connect(filter1, 'out_res', createfilter2, 'realigned_file') wf.connect(registration, ('outputspec.segmentation_files', selectindex, [0, 2]), createfilter2, 'mask_file') filter2 = MapNode(fsl.GLM(out_f_name='F.nii.gz', out_pf_name='pF.nii.gz', demean=True), iterfield=['in_file', 'design', 'out_res_name'], name='filter_noise_nosmooth') wf.connect(filter1, 'out_res', filter2, 'in_file') wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2, 'out_res_name') wf.connect(createfilter2, 'out_files', filter2, 'design') wf.connect(mask, 'mask_file', filter2, 'mask') bandpass = Node(Function( input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'], output_names=['out_files'], function=bandpass_filter, imports=imports), name='bandpass_unsmooth') bandpass.inputs.fs = 1. / TR bandpass.inputs.highpass_freq = highpass_freq bandpass.inputs.lowpass_freq = lowpass_freq wf.connect(filter2, 'out_res', bandpass, 'files') """Smooth the functional data using :class:`nipype.interfaces.fsl.IsotropicSmooth`. 
""" smooth = MapNode(interface=fsl.IsotropicSmooth(), name="smooth", iterfield=["in_file"]) smooth.inputs.fwhm = vol_fwhm wf.connect(bandpass, 'out_files', smooth, 'in_file') collector = Node(Merge(2), name='collect_streams') wf.connect(smooth, 'out_file', collector, 'in1') wf.connect(bandpass, 'out_files', collector, 'in2') """ Transform the remaining images. First to anatomical and then to target """ warpall = MapNode(ants.ApplyTransforms(), iterfield=['input_image'], name='warpall') warpall.inputs.input_image_type = 3 warpall.inputs.interpolation = 'Linear' warpall.inputs.invert_transform_flags = [False, False] warpall.inputs.terminal_output = 'file' warpall.inputs.reference_image = target_file warpall.inputs.args = '--float' warpall.inputs.num_threads = 1 # transform to target wf.connect(collector, 'out', warpall, 'input_image') wf.connect(registration, 'outputspec.transforms', warpall, 'transforms') mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask') wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file') maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker') wf.connect(warpall, 'output_image', maskts, 'in_file') wf.connect(mask_target, 'out_file', maskts, 'mask_file') # map to surface # extract aparc+aseg ROIs # extract subcortical ROIs # extract target space ROIs # combine subcortical and cortical rois into a single cifti file ####### # Convert aparc to subject functional space # Sample the average time series in aparc ROIs sampleaparc = MapNode( freesurfer.SegStats(default_color_table=True), iterfield=['in_file', 'summary_file', 'avgwf_txt_file'], name='aparc_ts') sampleaparc.inputs.segment_id = ([8] + range(10, 14) + [17, 18, 26, 47] + range(49, 55) + [58] + range(1001, 1036) + range(2001, 2036)) wf.connect(registration, 'outputspec.aparc', sampleaparc, 'segmentation_file') wf.connect(collector, 'out', sampleaparc, 'in_file') def get_names(files, suffix): """Generate appropriate names for output files """ from nipype.utils.filemanip import (split_filename, filename_to_list, list_to_filename) import os out_names = [] for filename in files: path, name, _ = split_filename(filename) out_names.append(os.path.join(path, name + suffix)) return list_to_filename(out_names) wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc, 'avgwf_txt_file') wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc, 'summary_file') # Sample the time series onto the surface of the target surface. 
Performs # sampling into left and right hemisphere target = Node(IdentityInterface(fields=['target_subject']), name='target') target.iterables = ('target_subject', filename_to_list(target_subject)) samplerlh = MapNode(freesurfer.SampleToSurface(), iterfield=['source_file'], name='sampler_lh') samplerlh.inputs.sampling_method = "average" samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1) samplerlh.inputs.sampling_units = "frac" samplerlh.inputs.interp_method = "trilinear" samplerlh.inputs.smooth_surf = surf_fwhm #samplerlh.inputs.cortex_mask = True samplerlh.inputs.out_type = 'niigz' samplerlh.inputs.subjects_dir = subjects_dir samplerrh = samplerlh.clone('sampler_rh') samplerlh.inputs.hemi = 'lh' wf.connect(collector, 'out', samplerlh, 'source_file') wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file') wf.connect(target, 'target_subject', samplerlh, 'target_subject') samplerrh.set_input('hemi', 'rh') wf.connect(collector, 'out', samplerrh, 'source_file') wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file') wf.connect(target, 'target_subject', samplerrh, 'target_subject') # Combine left and right hemisphere to text file combiner = MapNode(Function(input_names=['left', 'right'], output_names=['out_file'], function=combine_hemi, imports=imports), iterfield=['left', 'right'], name="combiner") wf.connect(samplerlh, 'out_file', combiner, 'left') wf.connect(samplerrh, 'out_file', combiner, 'right') # Sample the time series file for each subcortical roi ts2txt = MapNode(Function( input_names=['timeseries_file', 'label_file', 'indices'], output_names=['out_file'], function=extract_subrois, imports=imports), iterfield=['timeseries_file'], name='getsubcortts') ts2txt.inputs.indices = [8] + range(10, 14) + [17, 18, 26, 47] +\ range(49, 55) + [58] ts2txt.inputs.label_file = \ os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_' '2mm_v2.nii.gz')) wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file') ###### substitutions = [ ('_target_subject_', ''), ('_filtermotart_cleaned_bp_trans_masked', ''), ('_filtermotart_cleaned_bp', ''), ] substitutions += [("_smooth%d" % i, "") for i in range(11)[::-1]] substitutions += [("_ts_masker%d" % i, "") for i in range(11)[::-1]] substitutions += [("_getsubcortts%d" % i, "") for i in range(11)[::-1]] substitutions += [("_combiner%d" % i, "") for i in range(11)[::-1]] substitutions += [("_filtermotion%d" % i, "") for i in range(11)[::-1]] substitutions += [("_filter_noise_nosmooth%d" % i, "") for i in range(11)[::-1]] substitutions += [("_makecompcorfilter%d" % i, "") for i in range(11)[::-1]] substitutions += [("T1_out_brain_pve_0_maths_warped", "compcor_csf"), ("T1_out_brain_pve_1_maths_warped", "compcor_gm"), ("T1_out_brain_pve_2_maths_warped", "compcor_wm"), ("output_warped_image_maths", "target_brain_mask"), ("median_brain_mask", "native_brain_mask"), ("corr_", "")] regex_subs = [ ('_combiner.*/sar', '/smooth/'), ('_combiner.*/ar', '/unsmooth/'), ('_aparc_ts.*/sar', '/smooth/'), ('_aparc_ts.*/ar', '/unsmooth/'), ('_getsubcortts.*/sar', '/smooth/'), ('_getsubcortts.*/ar', '/unsmooth/'), ('series/sar', 'series/smooth/'), ('series/ar', 'series/unsmooth/'), ('_inverse_transform./', ''), ] # Save the relevant data into an output directory datasink = Node(interface=DataSink(), name="datasink") datasink.inputs.base_directory = sink_directory datasink.inputs.container = subject_id datasink.inputs.substitutions = substitutions datasink.inputs.regexp_substitutions = regex_subs #(r'(/_.*(\d+/))', r'/run\2') 
wf.connect(realign, 'par_file', datasink, 'resting.qa.motion') wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm') wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity') wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files') wf.connect(registration, 'outputspec.segmentation_files', datasink, 'resting.mask_files') wf.connect(registration, 'outputspec.anat2target', datasink, 'resting.qa.ants') wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask') wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target') wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F') wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF') wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps') wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p') wf.connect(bandpass, 'out_files', datasink, 'resting.timeseries.@bandpassed') wf.connect(smooth, 'out_file', datasink, 'resting.timeseries.@smoothed') wf.connect(createfilter1, 'out_files', datasink, 'resting.regress.@regressors') wf.connect(createfilter2, 'out_files', datasink, 'resting.regress.@compcorr') wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target') wf.connect(sampleaparc, 'summary_file', datasink, 'resting.parcellations.aparc') wf.connect(sampleaparc, 'avgwf_txt_file', datasink, 'resting.parcellations.aparc.@avgwf') wf.connect(ts2txt, 'out_file', datasink, 'resting.parcellations.grayo.@subcortical') datasink2 = Node(interface=DataSink(), name="datasink2") datasink2.inputs.base_directory = sink_directory datasink2.inputs.container = subject_id datasink2.inputs.substitutions = substitutions datasink2.inputs.regexp_substitutions = regex_subs #(r'(/_.*(\d+/))', r'/run\2') wf.connect(combiner, 'out_file', datasink2, 'resting.parcellations.grayo.@surface') return wf
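# --- Usage sketch (not from the original source) ---
# Invoking the resting-state workflow defined above.  File paths, TR, slice
# times, and the FreeSurfer/target settings are hypothetical placeholders.
import os

rest_wf = create_workflow(
    files=['/data/sub-01/func/rest_run01.nii.gz',
           '/data/sub-01/func/rest_run02.nii.gz'],
    target_file='/data/templates/OASIS_target.nii.gz',
    subject_id='sub-01',
    TR=2.0,
    slice_times=[0.0, 1.0, 0.05, 1.05],          # placeholder acquisition order
    vol_fwhm=6.0,
    surf_fwhm=10.0,
    lowpass_freq=0.1,
    highpass_freq=0.01,
    subjects_dir=os.environ.get('SUBJECTS_DIR'),
    sink_directory='/output/resting',
    name='resting_sub01')
rest_wf.base_dir = '/tmp/resting_work'
rest_wf.run(plugin='MultiProc')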
def ct_brain_extraction(data, working_directory=None, fractional_intensity_threshold=0.01, save_output=False, fsl_path=None): """ Automatic brain extraction of non contrast head CT images using bet2 by fsl. Ref.: Muschelli J, Ullman NL, Mould WA, Vespa P, Hanley DF, Crainiceanu CM. Validated automatic brain extraction of head CT images. NeuroImage. 2015 Jul 1;114:379–85. :param data: [str; np.ndarray] path to input data or input data in form of np.ndarray (x, y, z) :param working_directory: [str] path to directory to use to save temporary files and final output files :param fractional_intensity_threshold: fractional intensity threshold (0->1); default=0.01; smaller values give larger brain outline estimates :param save_output: [boolean] save or discard output :param fsl_path: [str], Optional path to fsl executable to help nipype find it :return: brain_mask, masked_image: np.ndarray """ if fsl_path is not None: os.environ["PATH"] += os.pathsep + fsl_path os.environ["FSLOUTPUTTYPE"] = 'NIFTI' temp_files = [] if working_directory is None: working_directory = tempfile.mkdtemp() if isinstance(data, np.ndarray): data_path = os.path.join(working_directory, 'temp_bet_input.nii') data_img = nib.Nifti1Image(data.astype('float64'), affine=None) nib.save(data_img, data_path) temp_files.append(data_path) elif os.path.exists(data): data_path = data else: raise NotImplementedError('Data has to be a path or an np.ndarray') output_file = os.path.join(working_directory, 'bet_output.nii') output_mask_file = os.path.join(working_directory, 'bet_output_mask.nii') if not save_output: temp_files.append(output_file) temp_files.append(output_mask_file) temp_intermediate_file = os.path.join(working_directory, 'temp_intermediate_file.nii') temp_files.append(temp_intermediate_file) # Thresholding Image to 0-100 # cli: fslmaths "${img}" -thr 0.000000 -uthr 100.000000 "${outfile}" thresholder1 = fsl.Threshold() thresholder1.inputs.in_file = data_path thresholder1.inputs.out_file = output_file thresholder1.inputs.thresh = 0 thresholder1.inputs.direction = 'below' thresholder1.inputs.output_type = 'NIFTI' thresholder1.run() thresholder2 = fsl.Threshold() thresholder2.inputs.in_file = output_file thresholder2.inputs.out_file = output_file thresholder2.inputs.thresh = 100 thresholder2.inputs.direction = 'above' thresholder2.inputs.output_type = 'NIFTI' thresholder2.run() # Creating 0 - 100 mask to remask after filling # cli: fslmaths "${outfile}" -bin "${tmpfile}"; # cli: fslmaths "${tmpfile}.nii.gz" -bin -fillh "${tmpfile}" binarizer1 = fsl.UnaryMaths() binarizer1.inputs.in_file = output_file binarizer1.inputs.out_file = temp_intermediate_file binarizer1.inputs.operation = 'bin' binarizer1.inputs.output_type = 'NIFTI' binarizer1.run() binarizer2 = fsl.UnaryMaths() binarizer2.inputs.in_file = temp_intermediate_file binarizer2.inputs.out_file = temp_intermediate_file binarizer2.inputs.operation = 'bin' binarizer2.inputs.output_type = 'NIFTI' binarizer2.run() fill_holes1 = fsl.UnaryMaths() fill_holes1.inputs.in_file = temp_intermediate_file fill_holes1.inputs.out_file = temp_intermediate_file fill_holes1.inputs.operation = 'fillh' fill_holes1.inputs.output_type = 'NIFTI' fill_holes1.run() # Presmoothing image # cli: fslmaths "${outfile}" - s 1 "${outfile}" smoothing = fsl.IsotropicSmooth() smoothing.inputs.in_file = output_file smoothing.inputs.out_file = output_file smoothing.inputs.sigma = 1 smoothing.inputs.output_type = 'NIFTI' smoothing.run() # Remasking Smoothed Image # cli: fslmaths "${outfile}" - mas "${tmpfile}" 
"${outfile}" masking1 = fsl.ApplyMask() masking1.inputs.in_file = output_file masking1.inputs.out_file = output_file masking1.inputs.mask_file = temp_intermediate_file masking1.inputs.output_type = 'NIFTI' masking1.run() # Running bet2 # cli: bet2 "${outfile}" "${outfile}" - f ${intensity} - v try: btr = fsl.BET() btr.inputs.in_file = output_file btr.inputs.out_file = output_file btr.inputs.frac = fractional_intensity_threshold btr.inputs.output_type = 'NIFTI' btr.run() except Exception as e: # sometimes nipype fails to find bet if fsl_path is not None: bet_path = os.path.join(fsl_path, 'bet2') else: bet_path = 'bet2' subprocess.run([ bet_path, output_file, output_file, '-f', str(fractional_intensity_threshold) ]) # Using fslfill to fill in any holes in mask # cli: fslmaths "${outfile}" - bin - fillh "${outfile}_Mask" binarizer3 = fsl.UnaryMaths() binarizer3.inputs.in_file = output_file binarizer3.inputs.out_file = output_mask_file binarizer3.inputs.operation = 'bin' binarizer3.inputs.output_type = 'NIFTI' binarizer3.run() fill_holes2 = fsl.UnaryMaths() fill_holes2.inputs.in_file = output_mask_file fill_holes2.inputs.out_file = output_mask_file fill_holes2.inputs.operation = 'fillh' fill_holes2.inputs.output_type = 'NIFTI' fill_holes2.run() # Using the filled mask to mask original image # cli: fslmaths "${img}" -mas "${outfile}_Mask" "${outfile}" masking2 = fsl.ApplyMask() masking2.inputs.in_file = data_path masking2.inputs.out_file = output_file masking2.inputs.mask_file = output_mask_file masking2.inputs.output_type = 'NIFTI' masking2.run() brain_mask = nib.load(output_mask_file).get_fdata() masked_image = nib.load(output_file).get_fdata() # delete temporary files for file in temp_files: os.remove(file) if not save_output: shutil.rmtree(working_directory) return brain_mask, masked_image
def spm_create_group_template_wf(wf_name="spm_create_group_template"): """ Pick all subject files in `grptemplate_input.in_files`, calculate an average image and smooth it with `"{}_smooth".format(wf_name)` node (you can configure the smooth `fwhm` from a config file.). It does: - calculate a mean image (across subjects) and - smooth it with a 8x8x8mm^3 gaussian kernel -> the result of this is the template. The size of the isometric smoothing gaussian kernel is given by one integer for the "{}_smooth.fwhm".format(wf_name) setting. You can also avoid calculating the mean image across subjects and setting a specific group template file by setting the configuration "{}.template_file".format(wf_name) to the path of the file you want. This image will be smoothed and used as a common template for the further pipeline steps. Parameters ---------- wf_name: str Name of the workflow. Nipype Inputs ------------- grptemplate_input.in_files: list of traits.File The raw NIFTI_GZ PET image files Nipype outputs -------------- grptemplate_output.template: existing file The common custom PET template file. Returns ------- wf: nipype Workflow """ # input input = setup_node( IdentityInterface(fields=["in_files"]), name="grptemplate_input", ) # checking if a template file has been set already template_file = get_config_setting("{}.template_file".format(wf_name)) use_common_template = path.exists(template_file) if not use_common_template: # merge concat = setup_node(Function( function=concat_imgs, input_names=["in_files"], output_names=["out_file"], imports=['from pypes.interfaces.nilearn import ni2file']), name='merge_time') # average average = setup_node(Function( function=mean_img, input_names=["in_file", "out_file"], output_names=["out_file"], imports=['from pypes.interfaces.nilearn import ni2file']), name='group_average') average.inputs.out_file = 'group_average.nii.gz' #TODO: check what is the difference between nilearn.image.smooth_img and FSL IsotropicSmooth # smooth #smooth = setup_node(Function(function=smooth_img, # input_names=["in_file", "fwhm"], # output_names=["out_file"], # imports=['from pypes.interfaces.nilearn import ni2file']), # name="{}_smooth".format(wf_name)) smooth = setup_node(fsl.IsotropicSmooth(fwhm=8), name="{}_smooth".format(wf_name)) # output output = setup_node( IdentityInterface(fields=["template"]), name="grptemplate_output", ) # Create the workflow object wf = pe.Workflow(name=wf_name) # if I have to create the group template if not use_common_template: wf.connect([ # input (input, concat, [("in_files", "in_files")]), # merge, average and smooth (concat, average, [("out_file", "in_file")]), (average, smooth, [("out_file", "in_file")]), # output (smooth, output, [("out_file", "template")]), ]) else: # if the template has been specified in the configuration file wf.add_nodes([input]) smooth.inputs.in_file = template_file wf.connect([ # output (smooth, output, [("out_file", "template")]), ]) return wf
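# --- Usage sketch (not from the original source) ---
# Wiring up the group-template workflow above.  The PET file paths are
# hypothetical; subject images are fed in through the 'grptemplate_input'
# identity node defined in the workflow.  If "<wf_name>.template_file" is set
# in the configuration, those inputs are ignored and the given template is
# smoothed instead.
grp_wf = spm_create_group_template_wf("spm_create_group_template")
grp_wf.base_dir = '/tmp/grptemplate_work'
grp_wf.inputs.grptemplate_input.in_files = [
    '/data/pet/sub-01_pet.nii.gz',
    '/data/pet/sub-02_pet.nii.gz',
]
grp_wf.run()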
def create_correct_bias_pipe(params={}, name="correct_bias_pipe"): """ Description: Correct bias using T1 and T2 images Same as bash_regis.T1xT2BiasFieldCorrection Params: - smooth (see `MathsCommand <https://nipype.readthedocs.io/en/0.12.1/\ interfaces/generated/nipype.interfaces.fsl.maths.html#mathscommand>`_) - norm_smooth (see `MultiMathsCommand <https://nipype.readthedocs.io/\ en/0.12.1/interfaces/generated/nipype.interfaces.fsl.maths.html\ #multiimagemaths>`_) - smooth_bias (see `IsotropicSmooth <https://nipype.readthedocs.io/en/\ 0.12.1/interfaces/generated/nipype.interfaces.fsl.maths.html#\ isotropicsmooth>`_) Inputs: inputnode: preproc_T1: preprocessed T1 file name preproc_T2: preprocessed T2 file name arguments: params: dictionary of node sub-parameters (from a json file) name: pipeline name (default = "correct_bias_pipe") Outputs: outputnode.debiased_T1: T1 after bias correction outputnode.debiased_T2: T2 after bias correction """ # creating pipeline correct_bias_pipe = pe.Workflow(name=name) # creating inputnode inputnode = pe.Node( niu.IdentityInterface(fields=['preproc_T1', 'preproc_T2']), name='inputnode') # BinaryMaths mult_T1_T2 = pe.Node(fsl.BinaryMaths(), name='mult_T1_T2') mult_T1_T2.inputs.operation = "mul" mult_T1_T2.inputs.args = "-abs -sqrt" mult_T1_T2.inputs.output_datatype = "float" correct_bias_pipe.connect(inputnode, 'preproc_T1', mult_T1_T2, 'in_file') correct_bias_pipe.connect(inputnode, 'preproc_T2', mult_T1_T2, 'operand_file') # Mean Brain Val meanbrainval = pe.Node(fsl.ImageStats(), name='meanbrainval') meanbrainval.inputs.op_string = "-M" correct_bias_pipe.connect(mult_T1_T2, 'out_file', meanbrainval, 'in_file') # norm_mult norm_mult = pe.Node(fsl.BinaryMaths(), name='norm_mult') norm_mult.inputs.operation = "div" correct_bias_pipe.connect(mult_T1_T2, 'out_file', norm_mult, 'in_file') correct_bias_pipe.connect(meanbrainval, 'out_stat', norm_mult, 'operand_value') # smooth smooth = NodeParams(fsl.maths.MathsCommand(), params=parse_key(params, "smooth"), name='smooth') correct_bias_pipe.connect(norm_mult, 'out_file', smooth, 'in_file') # norm_smooth norm_smooth = NodeParams(fsl.MultiImageMaths(), params=parse_key(params, "norm_smooth"), name='norm_smooth') correct_bias_pipe.connect(norm_mult, 'out_file', norm_smooth, 'in_file') correct_bias_pipe.connect(smooth, 'out_file', norm_smooth, 'operand_files') # modulate modulate = pe.Node(fsl.BinaryMaths(), name='modulate') modulate.inputs.operation = "div" correct_bias_pipe.connect(norm_mult, 'out_file', modulate, 'in_file') correct_bias_pipe.connect(norm_smooth, 'out_file', modulate, 'operand_file') # std_modulate std_modulate = pe.Node(fsl.ImageStats(), name='std_modulate') std_modulate.inputs.op_string = "-S" correct_bias_pipe.connect(modulate, 'out_file', std_modulate, 'in_file') # mean_modulate mean_modulate = pe.Node(fsl.ImageStats(), name='mean_modulate') mean_modulate.inputs.op_string = "-M" correct_bias_pipe.connect(modulate, 'out_file', mean_modulate, 'in_file') # compute_lower_val def compute_lower_val(mean_val, std_val): return mean_val - (std_val * 0.5) # compute_lower lower = pe.Node(niu.Function(input_names=['mean_val', 'std_val'], output_names=['lower_val'], function=compute_lower_val), name='lower') correct_bias_pipe.connect(mean_modulate, 'out_stat', lower, 'mean_val') correct_bias_pipe.connect(std_modulate, 'out_stat', lower, 'std_val') # thresh_lower thresh_lower = pe.Node(fsl.Threshold(), name='thresh_lower') correct_bias_pipe.connect(lower, 'lower_val', thresh_lower, 'thresh') 
correct_bias_pipe.connect(modulate, 'out_file', thresh_lower, 'in_file') # mod_mask mod_mask = pe.Node(fsl.UnaryMaths(), name='mod_mask') mod_mask.inputs.operation = "bin" mod_mask.inputs.args = "-ero -mul 255" correct_bias_pipe.connect(thresh_lower, 'out_file', mod_mask, 'in_file') # bias bias = pe.Node(fsl.MultiImageMaths(), name='bias') bias.inputs.op_string = "-mas %s -dilall" bias.inputs.output_datatype = "float" correct_bias_pipe.connect(norm_mult, 'out_file', bias, 'in_file') correct_bias_pipe.connect(mod_mask, 'out_file', bias, 'operand_files') # smooth_bias smooth_bias = NodeParams(fsl.IsotropicSmooth(), params=parse_key(params, "smooth_bias"), name='smooth_bias') correct_bias_pipe.connect(bias, 'out_file', smooth_bias, 'in_file') # debiased_T1 debiased_T1 = pe.Node(fsl.BinaryMaths(), name='debiased_T1') debiased_T1.inputs.operation = "div" debiased_T1.inputs.output_datatype = "float" correct_bias_pipe.connect(inputnode, 'preproc_T1', debiased_T1, 'in_file') correct_bias_pipe.connect(smooth_bias, 'out_file', debiased_T1, 'operand_file') # debiased_T2 debiased_T2 = pe.Node(fsl.BinaryMaths(), name='debiased_T2') debiased_T2.inputs.operation = "div" debiased_T2.inputs.output_datatype = "float" correct_bias_pipe.connect(inputnode, 'preproc_T2', debiased_T2, 'in_file') correct_bias_pipe.connect(smooth_bias, 'out_file', debiased_T2, 'operand_file') # outputnode outputnode = pe.Node( niu.IdentityInterface(fields=["debiased_T1", "debiased_T2"]), name='outputnode') correct_bias_pipe.connect(debiased_T1, 'out_file', outputnode, 'debiased_T1') correct_bias_pipe.connect(debiased_T2, 'out_file', outputnode, 'debiased_T2') return correct_bias_pipe
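# --- Usage sketch (not from the original source) ---
# Running the bias-correction pipeline above on a single subject.  The T1/T2
# paths are hypothetical, and the node parameters below are assumed example
# values standing in for the params JSON described in the docstring.
params = {
    "smooth": {"args": "-bin -s 2"},            # assumed example values
    "norm_smooth": {"op_string": "-s 2 -div %s"},
    "smooth_bias": {"sigma": 2},
}
bias_pipe = create_correct_bias_pipe(params=params, name="correct_bias_pipe")
bias_pipe.base_dir = '/tmp/bias_work'
bias_pipe.inputs.inputnode.preproc_T1 = '/data/sub-01_T1w.nii.gz'
bias_pipe.inputs.inputnode.preproc_T2 = '/data/sub-01_T2w.nii.gz'
bias_pipe.run()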
# plt.suptitle("Original NIfTI Images")
# plt.show()

# Skull stripping
output_strip_file = "skull_stripping_" + input_file
# skullstrip = Node(fsl.BET(in_file=file_in, mask=True), name="skullstrip")
skullstrip = fsl.BET(in_file=file_path + input_file,
                     out_file=file_path + output_strip_file,
                     mask=True)
skullstrip.run()

# Inhomogeneity correction (smoothing)
output_smooth_file = "smooth_" + input_file
# smooth = Node(fsl.IsotropicSmooth(in_file=file_in, fwhm=4), name="smooth")
smooth = fsl.IsotropicSmooth(in_file=file_path + output_strip_file,
                             out_file=file_path + "/../output/benign/" + output_smooth_file,
                             fwhm=4)
smooth.run()

# Mask process
# mask = Node(fsl.ApplyMask(), name="mask")

# Initiation of a workflow
# wf = Workflow(name="smoothflow", base_dir=file_path + "output/")

# First the "simple", but more restricted method
# wf.connect(skullstrip, "mask_file", mask, "mask_file")

# Now the more complicated method
# wf.connect([(smooth, mask, [("out_file", "in_file")])])
# wf.run()
vmin, vmax = (30, 150) if data.dtype == np.int16 else (30, 150)
plt.imshow(np.rot90(data[:, :, ctr[2] + z_idx]),
           cmap="gray", vmin=vmin, vmax=vmax)
plt.gca().set_axis_off()

from nipype.interfaces import fsl
from nipype import Node, Workflow

# For reasons that will later become clear, it's important to
# pass filenames to Nodes as absolute paths
from os.path import abspath
in_file = abspath("data/T1.nii.gz")

skullstrip = Node(fsl.BET(in_file=in_file, mask=True), name="skullstrip")
smooth = Node(fsl.IsotropicSmooth(in_file=in_file, fwhm=4), name="smooth")
mask = Node(fsl.ApplyMask(), name="mask")

# Workflows need names too
wf = Workflow(name="smoothflow")

# First the "simple", but more restricted method
wf.connect(skullstrip, "mask_file", mask, "mask_file")

# Now the more complicated method. Note this way you can define several
# connections at once, and you can even define several connections between
# two nodes in one smaller step
wf.connect([(smooth, mask, [("out_file", "in_file")])])
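# A natural next step for the tutorial snippet above (not part of the original
# text): give the workflow a working directory and execute it.  The base_dir
# location is a hypothetical choice.
wf.base_dir = abspath("working_dir")
wf.run()

# Optionally, render the connected graph to inspect the two connection styles.
wf.write_graph(graph2use="colored", format="png")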
def spm_warp_fmri_wf(wf_name="spm_warp_fmri", register_to_grptemplate=False): """ Run SPM to warp resting-state fMRI pre-processed data to MNI or a given template. Tasks: - Warping the inputs to MNI or a template, if `do_group_template` is True Parameters ---------- wf_name: str register_to_grptemplate: bool If True will expect the wfmri_input.epi_template input and use it as a group template for inter-subject registratio. Nipype Inputs ------------- wfmri_input.in_file: traits.File The slice time and motion corrected fMRI file. wfmri_input.reference_file: traits.File The anatomical image in its native space for registration reference. wfmri_input.anat_fmri: traits.File The anatomical image in fMRI space. wfmri_input.anat_to_mni_warp: traits.File The warp field from the transformation of the anatomical image to the standard MNI space. wfmri_input.time_filtered: traits.File The bandpass time filtered fMRI file. wfmri_input.avg_epi: traits.File The average EPI from the fMRI file. wfmri_input.epi_template: traits.File Reference EPI template file for inter subject registration. If `do_group_template` is True you must specify this input. wfmri_input.brain_mask: traits.File Brain mask in fMRI space. wfmri_input.atlas_anat: traits.File Atlas in subject anatomical space. Nipype Outputs -------------- wfmri_output.warped_fmri: traits.File The slice time, motion, and nuisance corrected fMRI file registered to the template. wfmri_output.wtime_filtered: traits.File The bandpass time filtered fMRI file registered to the template. wfmri_output.smooth: traits.File The smooth bandpass time filtered fMRI file registered to the template. wfmri_output.wavg_epi: traits.File The average EPI from the fMRI file registered to the template. wfmri_output.warp_field: traits.File The fMRI to template warp field. wfmri_output.coreg_avg_epi: traits.File The average EPI image in anatomical space. Only if registration.anat2fmri is false. wfmri_output.coreg_others: traits.File Other mid-preprocessing fmri images registered to anatomical space: - wfmri_input.in_file, - wfmri_input.brain_mask, - wfmri_input.time_filtered. Only if registration.anat2fmri is false wfmri_output.wbrain_mask: traits.File Brain mask in fMRI space warped to MNI. 
Returns ------- wf: nipype Workflow """ # Create the workflow object wf = pe.Workflow(name=wf_name) # specify input and output fields in_fields = ["in_file", "anat_fmri", "anat_to_mni_warp", "brain_mask", "reference_file", "time_filtered", "avg_epi",] out_fields = ["warped_fmri", "wtime_filtered", "smooth", "wavg_epi", "wbrain_mask", "warp_field", "coreg_avg_epi", "coreg_others" ] if register_to_grptemplate: in_fields += ['epi_template'] do_atlas, _ = check_atlas_file() if do_atlas: in_fields += ["atlas_anat"] out_fields += ["atlas_fmri"] # input identities wfmri_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True), name="wfmri_input") # in file unzipper in_gunzip = pe.Node(Gunzip(), name="in_gunzip") # merge list for normalization input merge_list = pe.Node(Merge(2), name='merge_for_warp') gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file']) # the template bounding box tpm_bbox = setup_node(Function(function=get_bounding_box, input_names=["in_file"], output_names=["bbox"]), name="tpm_bbox") # smooth the final result smooth = setup_node(fsl.IsotropicSmooth(fwhm=8, output_type='NIFTI'), name="smooth_fmri") # output identities rest_output = setup_node(IdentityInterface(fields=out_fields), name="wfmri_output") # check how to perform the registration, to decide how to build the pipeline anat2fmri = get_config_setting('registration.anat2fmri', False) # register to group template if register_to_grptemplate: gunzip_template = pe.Node(Gunzip(), name="gunzip_template",) warp = setup_node(spm.Normalize(jobtype="estwrite", out_prefix="wgrptmpl_"), name="fmri_grptemplate_warp",) warp_source_arg = "source" warp_outsource_arg = "normalized_source" warp_field_arg = "normalization_parameters" elif anat2fmri: # register to standard template warp = setup_node(spm_normalize(), name="fmri_warp") tpm_bbox.inputs.in_file = spm_tpm_priors_path() warp_source_arg = "image_to_align" warp_outsource_arg = "normalized_image" warp_field_arg = "deformation_field" else: # anat2fmri is False coreg = setup_node(spm_coregister(cost_function="mi"), name="coreg_fmri") warp = setup_node(spm_apply_deformations(), name="fmri_warp") coreg_files = pe.Node(Merge(3), name='merge_for_coreg') warp_files = pe.Node(Merge(2), name='merge_for_warp') tpm_bbox.inputs.in_file = spm_tpm_priors_path() # make the connections if register_to_grptemplate: wf.connect([ # get template bounding box to apply to results (wfmri_input, tpm_bbox, [("epi_template", "in_file")]), # unzip and forward the template file (wfmri_input, gunzip_template, [("epi_template", "in_file")]), (gunzip_template, warp, [("out_file", "template")]), # get template bounding box to apply to results (wfmri_input, tpm_bbox, [("epi_template", "in_file")]), ]) if anat2fmri or register_to_grptemplate: # prepare the inputs wf.connect([ # unzip the in_file input file (wfmri_input, in_gunzip, [("avg_epi", "in_file")]), # warp source file (in_gunzip, warp, [("out_file", warp_source_arg)]), # bounding box (tpm_bbox, warp, [("bbox", "write_bounding_box")]), # merge the other input files into a list (wfmri_input, merge_list, [("in_file", "in1"), ("time_filtered", "in2"), ]), # gunzip them for SPM (merge_list, gunzipper, [("out", "in_file")]), # apply to files (gunzipper, warp, [("out_file", "apply_to_files")]), # outputs (warp, rest_output, [(warp_field_arg, "warp_field"), (warp_outsource_arg, "wavg_epi"), ]), ]) else: # FMRI to ANAT wf.connect([ (wfmri_input, coreg, [("reference_file", "target")]), # unzip the in_file input file (wfmri_input, 
in_gunzip, [("avg_epi", "in_file")]), (in_gunzip, coreg, [("out_file", "source")]), # merge the other input files into a list (wfmri_input, coreg_files, [("in_file", "in1"), ("time_filtered", "in2"), ("brain_mask", "in3"), ]), # gunzip them for SPM (coreg_files, gunzipper, [("out", "in_file")]), # coregister fmri to anat (gunzipper, coreg, [("out_file", "apply_to_files")]), # anat to mni warp field (wfmri_input, warp, [("anat_to_mni_warp", "deformation_file")]), # bounding box (tpm_bbox, warp, [("bbox", "write_bounding_box")]), # apply to files (coreg, warp_files, [("coregistered_source", "in1")]), (coreg, warp_files, [("coregistered_files", "in2")]), (warp_files, warp, [("out", "apply_to_files")]), # outputs (warp, rest_output, [("normalized_files", "warped_files"),]), (warp, rest_output, [(("normalized_files", selectindex, 0), "wavg_epi"),]), (coreg, rest_output, [("coregistered_source", "coreg_avg_epi")]), (coreg, rest_output, [("coregistered_files", "coreg_others")]), ]) # atlas file in fMRI space if anat2fmri: coreg_atlas = setup_node(spm_coregister(cost_function="mi"), name="coreg_atlas2fmri") # set the registration interpolation to nearest neighbour. coreg_atlas.inputs.write_interp = 0 wf.connect([ (wfmri_input, coreg_atlas, [("reference_file", "source"), ("atlas_anat", "apply_to_files"), ]), (in_gunzip, coreg_atlas, [("out_file", "target")]), (coreg_atlas, rest_output, [("coregistered_files", "atlas_fmri")]), ]) # smooth and sink wf.connect([ # smooth the final bandpassed image (warp, smooth, [(("normalized_files", selectindex, 1), "in_file")]), # output (smooth, rest_output, [("out_file", "smooth")]), (warp, rest_output, [(("normalized_files", selectindex, 0), "warped_fmri"), (("normalized_files", selectindex, 1), "wtime_filtered"), ]), ]) return wf
def create_masked_correct_bias_pipe(params={}, name="masked_correct_bias_pipe"): """ Description: Correct bias using T1 and T2 images in a mask Same as bash_regis.T1xT2BiasFieldCorrection Inputs: inputnode: preproc_T1: preprocessed T1 file name preproc_T2: preprocessed T2 file name brain_mask: brain mask where operation will be applied arguments: params: dictionary of node sub-parameters (from a json file) name: pipeline name (default = "masked_correct_bias_pipe") Outputs: restore_T1.out_file: T1 after bias correction restore_T2.out_file T2 after bias correction restore_mask_T1.out_file: Masked T1 after bias correction restore_mask_T2.out_file: Masked T2 after bias correction """ # creating pipeline masked_correct_bias_pipe = pe.Workflow(name=name) # creating inputnode inputnode = pe.Node(niu.IdentityInterface( fields=['preproc_T1', 'preproc_T2', 'brain_mask']), name='inputnode') # BinaryMaths mult_T1_T2 = pe.Node(fsl.BinaryMaths(), name='mult_T1_T2') mult_T1_T2.inputs.operation = "mul" mult_T1_T2.inputs.args = "-abs -sqrt" mult_T1_T2.inputs.output_datatype = "float" masked_correct_bias_pipe.connect(inputnode, 'preproc_T1', mult_T1_T2, 'in_file') masked_correct_bias_pipe.connect(inputnode, 'preproc_T2', mult_T1_T2, 'operand_file') # mask mult mask_mult = pe.Node(fsl.ApplyMask(), name='mask_mult') masked_correct_bias_pipe.connect(mult_T1_T2, 'out_file', mask_mult, 'in_file') masked_correct_bias_pipe.connect(inputnode, 'brain_mask', mask_mult, 'mask_file') # Mean Brain Val meanbrainval = pe.Node(fsl.ImageStats(), name='meanbrainval') meanbrainval.inputs.op_string = "-M" masked_correct_bias_pipe.connect(mult_T1_T2, 'out_file', meanbrainval, 'in_file') # norm_mult norm_mult = pe.Node(fsl.BinaryMaths(), name='norm_mult') norm_mult.inputs.operation = "div" masked_correct_bias_pipe.connect(mask_mult, 'out_file', norm_mult, 'in_file') masked_correct_bias_pipe.connect(meanbrainval, ('out_stat', print_val), norm_mult, 'operand_value') # smooth smooth = NodeParams(fsl.maths.MathsCommand(), params=parse_key(params, "smooth"), name='smooth') masked_correct_bias_pipe.connect(norm_mult, 'out_file', smooth, 'in_file') # norm_smooth norm_smooth = NodeParams(fsl.MultiImageMaths(), params=parse_key(params, "norm_smooth"), name='norm_smooth') masked_correct_bias_pipe.connect(norm_mult, 'out_file', norm_smooth, 'in_file') masked_correct_bias_pipe.connect(smooth, 'out_file', norm_smooth, 'operand_files') # modulate modulate = pe.Node(fsl.BinaryMaths(), name='modulate') modulate.inputs.operation = "div" masked_correct_bias_pipe.connect(norm_mult, 'out_file', modulate, 'in_file') masked_correct_bias_pipe.connect(norm_smooth, 'out_file', modulate, 'operand_file') # std_modulate std_modulate = pe.Node(fsl.ImageStats(), name='std_modulate') std_modulate.inputs.op_string = "-S" masked_correct_bias_pipe.connect(modulate, 'out_file', std_modulate, 'in_file') # mean_modulate mean_modulate = pe.Node(fsl.ImageStats(), name='mean_modulate') mean_modulate.inputs.op_string = "-M" masked_correct_bias_pipe.connect(modulate, 'out_file', mean_modulate, 'in_file') # function lower val def compute_lower_val(mean_val, std_val): return mean_val - (std_val * 0.5) # compute_lower lower = pe.Node(niu.Function(input_names=['mean_val', 'std_val'], output_names=['lower_val'], function=compute_lower_val), name='lower') masked_correct_bias_pipe.connect(mean_modulate, 'out_stat', lower, 'mean_val') masked_correct_bias_pipe.connect(std_modulate, 'out_stat', lower, 'std_val') # thresh_lower thresh_lower = pe.Node(fsl.Threshold(), 
name='thresh_lower') masked_correct_bias_pipe.connect(lower, 'lower_val', thresh_lower, 'thresh') masked_correct_bias_pipe.connect(modulate, 'out_file', thresh_lower, 'in_file') # mod_mask mod_mask = pe.Node(fsl.UnaryMaths(), name='mod_mask') mod_mask.inputs.operation = "bin" mod_mask.inputs.args = "-ero -mul 255" masked_correct_bias_pipe.connect(thresh_lower, 'out_file', mod_mask, 'in_file') """ ##tmp_bias tmp_bias = pe.Node(fsl.MultiImageMaths(),name='tmp_bias') tmp_bias.inputs.op_string = "-mas %s" tmp_bias.inputs.output_datatype = "float" masked_correct_bias_pipe.connect(norm_mult, 'out_file', tmp_bias, 'in_file') masked_correct_bias_pipe.connect(mod_mask, 'out_file', tmp_bias, 'operand_files') """ # bias bias = pe.Node(fsl.MultiImageMaths(), name='bias') bias.inputs.op_string = "-mas %s -dilall" bias.inputs.output_datatype = "float" masked_correct_bias_pipe.connect(norm_mult, 'out_file', bias, 'in_file') masked_correct_bias_pipe.connect(mod_mask, 'out_file', bias, 'operand_files') # smooth_bias smooth_bias = NodeParams(fsl.IsotropicSmooth(), params=parse_key(params, "smooth_bias"), name='smooth_bias') masked_correct_bias_pipe.connect(bias, 'out_file', smooth_bias, 'in_file') # restore_T1 restore_T1 = pe.Node(fsl.BinaryMaths(), name='restore_T1') restore_T1.inputs.operation = "div" restore_T1.inputs.output_datatype = "float" masked_correct_bias_pipe.connect(inputnode, 'preproc_T1', restore_T1, 'in_file') masked_correct_bias_pipe.connect(smooth_bias, 'out_file', restore_T1, 'operand_file') # restore_T2 restore_T2 = pe.Node(fsl.BinaryMaths(), name='restore_T2') restore_T2.inputs.operation = "div" restore_T2.inputs.output_datatype = "float" masked_correct_bias_pipe.connect(inputnode, 'preproc_T2', restore_T2, 'in_file') masked_correct_bias_pipe.connect(smooth_bias, 'out_file', restore_T2, 'operand_file') # restore_mask_T1 restore_mask_T1 = pe.Node(fsl.ApplyMask(), name='restore_mask_T1') masked_correct_bias_pipe.connect(restore_T1, 'out_file', restore_mask_T1, 'in_file') masked_correct_bias_pipe.connect(inputnode, 'brain_mask', restore_mask_T1, 'mask_file') # restore_mask_T2 restore_mask_T2 = pe.Node(fsl.ApplyMask(), name='restore_mask_T2') masked_correct_bias_pipe.connect(restore_T2, 'out_file', restore_mask_T2, 'in_file') masked_correct_bias_pipe.connect(inputnode, 'brain_mask', restore_mask_T2, 'mask_file') return masked_correct_bias_pipe
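# --- Usage sketch (not from the original source) ---
# Embedding the masked bias-correction pipeline above as a sub-workflow of a
# larger preprocessing workflow.  Node names, fields, and paths are
# hypothetical; the point is that the sub-workflow's IdentityInterface inputs
# are reached as 'inputnode.<field>'.  A real run would also supply the node
# parameters (smooth, norm_smooth, smooth_bias) via `params`, as in the
# earlier bias-correction sketch.
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

main_wf = pe.Workflow(name='preproc_main')
main_wf.base_dir = '/tmp/preproc_work'

subject_files = pe.Node(
    niu.IdentityInterface(fields=['T1', 'T2', 'mask']),
    name='subject_files')
subject_files.inputs.T1 = '/data/sub-01_T1w.nii.gz'
subject_files.inputs.T2 = '/data/sub-01_T2w.nii.gz'
subject_files.inputs.mask = '/data/sub-01_brainmask.nii.gz'

masked_bias = create_masked_correct_bias_pipe(params={},
                                              name='masked_correct_bias_pipe')

main_wf.connect([
    (subject_files, masked_bias, [('T1', 'inputnode.preproc_T1'),
                                  ('T2', 'inputnode.preproc_T2'),
                                  ('mask', 'inputnode.brain_mask')]),
])
main_wf.run()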