def afni_unifize(in_file, write_dir=None, out_file=None, caching=False,
                 terminal_output='allatonce', verbose=True, environ=None,
                 copy_geometry=False, **unifize_kwargs):
    """Bias-correct an image with AFNI's 3dUnifize, optionally restoring
    the exact geometry of the input afterwards.

    Parameters
    ----------
    in_file : str
        Path to the image to unifize.
    write_dir : str, optional
        Directory for outputs; defaults to the directory of `in_file`.
    out_file : str, optional
        Output filename; defaults to `in_file` with an ``_unifized`` suffix.
    caching : bool, optional
        If True, interfaces are wrapped in a nipype ``Memory`` cache.
    terminal_output : str, optional
        Terminal-output mode passed to the nipype interfaces.
    verbose : bool, optional
        If False, 3dUnifize runs with its quiet flag set.
    environ : dict, optional
        Environment for the AFNI interfaces; defaults to
        ``{'AFNI_DECONFLICT': 'OVERWRITE'}``.
    copy_geometry : bool, optional
        If True, the input geometry is copied onto the unifized output
        (3dCopy followed by fslcpgeom).
    unifize_kwargs : extra keyword arguments
        Forwarded to ``afni.Unifize``.

    Returns
    -------
    str
        Path to the unifized image.
    """
    if write_dir is None:
        write_dir = os.path.dirname(in_file)
    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        copy_geom = memory.cache(fsl.CopyGeom)
        unifize = memory.cache(afni.Unifize)
        copy = memory.cache(afni.Copy)
        unifize.interface().set_default_terminal_output(terminal_output)
        copy.interface().set_default_terminal_output(terminal_output)
    else:
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        unifize = afni.Unifize(terminal_output=terminal_output).run
        copy = afni.Copy(terminal_output=terminal_output).run

    if out_file is None:
        out_file = fname_presuffix(in_file, suffix='_unifized',
                                   newpath=write_dir)

    # When the geometry must be restored, unifize into an intermediate
    # file first; otherwise write the final output directly.
    if copy_geometry:
        unifized_file = fname_presuffix(in_file,
                                        suffix='_unifized_rough_geom',
                                        newpath=write_dir)
    else:
        unifized_file = out_file

    out_unifize = unifize(in_file=in_file,
                          out_file=unifized_file,
                          environ=environ,
                          quiet=not verbose,
                          **unifize_kwargs)

    if copy_geometry:
        out_copy = copy(in_file=out_unifize.outputs.out_file,
                        out_file=out_file,
                        environ=environ)
        copy_geom(dest_file=out_copy.outputs.out_file, in_file=in_file)

    return out_file
def init_enhance_and_skullstrip_epi_wf(name='enhance_and_skullstrip_epi_wf'):
    """Build a workflow that enhances an EPI reference and extracts its brain.

    Pipeline: N4 bias-field correction, a generous first-pass BET mask,
    AFNI 3dUnifize contrast enhancement, a second-pass 3dAutomask, the
    intersection of both masks, and application of the final mask.
    A mask reportlet is produced for quality control.

    Outputs: ``mask_file``, ``skull_stripped_file``, ``bias_corrected_file``
    and ``out_report``.
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['mask_file',
                                      'skull_stripped_file',
                                      'bias_corrected_file',
                                      'out_report']),
        name='outputnode')

    n4_correct = pe.Node(
        ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
        name='n4_correct')
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    unifize = pe.Node(
        afni.Unifize(t2=True, outputtype='NIFTI_GZ', args='-clfrac 0.4',
                     out_file="uni.nii.gz"),
        name='unifize')
    skullstrip_second_pass = pe.Node(
        afni.Automask(dilate=1, outputtype='NIFTI_GZ'),
        name='skullstrip_second_pass')
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
    mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')

    workflow.connect([
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, unifize, [('out_file', 'in_file')]),
        (unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, combine_masks,
         [('out_file', 'operand_file')]),
        (unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (n4_correct, mask_reportlet, [('output_image', 'background_file')]),
        (combine_masks, mask_reportlet, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (mask_reportlet, outputnode, [('out_report', 'out_report')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])
    return workflow
def init_unifize_and_skullstrip_wf(name='unifize_and_skullstrip_wf'):
    """Build a minimal unifize-then-skullstrip workflow.

    The input image is first contrast-enhanced with AFNI 3dUnifize and
    then skull-stripped with FSL BET. The inputnode also forwards the
    desired output filename to BET.
    """
    wf = pe.Workflow(name=name)

    # inputnode/outputnode play the role of the parameters and return
    # values of a function.
    inputnode = pe.Node(niu.IdentityInterface(['in_file', 'out_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['out_file']),
                         name='outputnode')

    #
    # The rest of the workflow should be defined here.
    #
    bet = pe.Node(fsl.BET(), name='bet')
    unifize = pe.Node(afni.Unifize(outputtype='NIFTI'), name='unifize')

    wf.connect([
        (inputnode, unifize, [('in_file', 'in_file')]),
        (unifize, bet, [('out_file', 'in_file')]),
        (inputnode, bet, [('out_file', 'out_file')]),
        (bet, outputnode, [('out_file', 'out_file')]),
    ])
    return wf
def init_enhance_and_skullstrip_bold_wf(name='enhance_and_skullstrip_bold_wf',
                                        pre_mask=False,
                                        omp_nthreads=1):
    """
    Enhance a BOLD reference image and compute its brain mask.

    This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependant)`
    :abbr:`fMRI (functional MRI)` average/summary (e.g., a reference image
    averaging non-steady-state timepoints), sharpens the histogram with the
    N4 algorithm to remove the :abbr:`INU (intensity non-uniformity)` bias
    field, and calculates a signal mask.

    Steps:

    1. Calculate a tentative mask by registering (9-parameters) to
       *fMRIPrep*'s :abbr:`EPI (echo-planar imaging)` -*boldref* template
       (MNI space), resampling the template brainmask into *boldref*-space.
    2. Binary dilation of the tentative mask with a 3mm-diameter sphere.
    3. Run ANTs' ``N4BiasFieldCorrection`` using that mask instead of the
       internal Otsu thresholding.
    4. Calculate a loose mask with FSL's ``bet``, then dilate it with a
       6mm sphere as structuring element.
    5. Mask the INU-corrected image with the latest mask, then *standardize*
       the T2* contrast distribution with AFNI's ``3dUnifize``.
    6. Calculate a mask with AFNI's ``3dAutomask`` on the enhanced image.
    7. Compute the final mask as the intersection of 4) and 6).
    8. Apply the final mask to the enhanced reference.

    Step 1 is skipped when ``pre_mask`` is ``True`` and a tentative mask
    is provided through the ``pre_mask`` input.

    **Parameters**

        name : str
            Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
        pre_mask : bool
            Indicates whether the ``pre_mask`` input will be set (and thus,
            step 1 should be skipped).
        omp_nthreads : int
            number of threads available to parallel nodes

    **Inputs**

        in_file
            BOLD image (single volume)
        pre_mask
            A tentative brain mask (requires ``pre_mask`` parameter ``True``).

    **Outputs**

        bias_corrected_file
            the ``in_file`` after `N4BiasFieldCorrection`_
        skull_stripped_file
            the ``bias_corrected_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=['mask_file', 'skull_stripped_file',
                    'bias_corrected_file']),
        name='outputnode')

    # Dilate pre_mask
    pre_dilate = pe.Node(
        fsl.DilateImage(operation='max', kernel_shape='sphere',
                        kernel_size=3.0, internal_datatype='char'),
        name='pre_mask_dilate')

    # Ensure mask's header matches reference's
    check_hdr = pe.Node(MatchHeader(), name='check_hdr',
                        run_without_submitting=True)

    # Run N4 normally; force num_threads=1 for stability (images are
    # small, no need for >1)
    n4_correct = pe.Node(
        ants.N4BiasFieldCorrection(dimension=3, copy_header=True,
                                   bspline_fitting_distance=200),
        name='n4_correct', n_procs=1)

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    bet_dilate = pe.Node(
        fsl.DilateImage(operation='max', kernel_shape='sphere',
                        kernel_size=6.0, internal_datatype='char'),
        name='skullstrip_first_dilate')
    bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')

    # Use AFNI's unifize for T2 contrast & fix header afterwards
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype='NIFTI_GZ',
            # Default -clfrac is 0.1, 0.4 was too conservative
            # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
            args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
            out_file="uni.nii.gz"),
        name='unifize')
    fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(
        afni.Automask(dilate=1, outputtype='NIFTI_GZ'),
        name='skullstrip_second_pass')
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2',
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    if not pre_mask:
        bold_template = get_template('MNI152NLin2009cAsym', resolution=2,
                                     desc='fMRIPrep', suffix='boldref')
        brain_mask = get_template('MNI152NLin2009cAsym', resolution=2,
                                  desc='brain', suffix='mask')

        # Initialize transforms with antsAI
        init_aff = pe.Node(
            AI(fixed_image=str(bold_template),
               fixed_image_mask=str(brain_mask),
               metric=('Mattes', 32, 'Regular', 0.2),
               transform=('Affine', 0.1),
               search_factor=(20, 0.12),
               principal_axes=False,
               convergence=(10, 1e-6, 10),
               verbose=True),
            name='init_aff', n_procs=omp_nthreads)

        # Registration().version may be None
        if parseversion(Registration().version or '0.0.0') > Version('2.2.0'):
            init_aff.inputs.search_grid = (40, (0, 40, 40))

        # Set up spatial normalization
        norm = pe.Node(
            Registration(from_file=pkgr_fn(
                'fmriprep.data', 'epi_atlasbased_brainmask.json')),
            name='norm', n_procs=omp_nthreads)
        norm.inputs.fixed_image = str(bold_template)
        map_brainmask = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True,
                            input_image=str(brain_mask)),
            name='map_brainmask')

        workflow.connect([
            (inputnode, init_aff, [('in_file', 'moving_image')]),
            (inputnode, map_brainmask, [('in_file', 'reference_image')]),
            (inputnode, norm, [('in_file', 'moving_image')]),
            (init_aff, norm,
             [('output_transform', 'initial_moving_transform')]),
            (norm, map_brainmask,
             [('reverse_invert_flags', 'invert_transform_flags'),
              ('reverse_transforms', 'transforms')]),
            (map_brainmask, pre_dilate, [('output_image', 'in_file')]),
        ])
    else:
        workflow.connect([
            (inputnode, pre_dilate, [('pre_mask', 'in_file')]),
        ])

    workflow.connect([
        (inputnode, check_hdr, [('in_file', 'reference')]),
        (pre_dilate, check_hdr, [('out_file', 'in_file')]),
        (check_hdr, n4_correct, [('out_file', 'mask_image')]),
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
        (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
        (bet_dilate, bet_mask, [('out_file', 'mask_file')]),
        (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
        (bet_mask, unifize, [('out_file', 'in_file')]),
        (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
        (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, fixhdr_skullstrip2,
         [('out_file', 'in_file')]),
        (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
        (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])
    return workflow
def init_enhance_and_skullstrip_bold_wf(
    brainmask_thresh=0.5,
    name="enhance_and_skullstrip_bold_wf",
    omp_nthreads=1,
    pre_mask=False,
):
    """
    Enhance and run brain extraction on a BOLD EPI image.

    This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependant)`
    :abbr:`fMRI (functional MRI)` average/summary (e.g., a reference image
    averaging non-steady-state timepoints), sharpens the histogram with the
    N4 algorithm to remove the :abbr:`INU (intensity non-uniformity)` bias
    field, and calculates a signal mask.

    Steps of this workflow are:

    1. Calculate a tentative mask by registering (9-parameters) to
       *fMRIPrep*'s :abbr:`EPI (echo-planar imaging)` -*boldref* template
       (MNI space), resampling the MNI template's brainmask into
       *boldref*-space.
    2. Binary dilation of the tentative mask with a sphere of 3mm diameter.
    3. Run ANTs' ``N4BiasFieldCorrection`` on the input BOLD average, using
       the mask generated in 1) instead of the internal Otsu thresholding.
    4. Calculate a loose mask using FSL's ``bet``, with one mathematical
       morphology dilation of one iteration and a sphere of 6mm as
       structuring element.
    5. Mask the INU-corrected image with the latest mask calculated in 3),
       then use AFNI's ``3dUnifize`` to *standardize* the T2* contrast
       distribution.
    6. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
       enhancement of 4).
    7. Calculate a final mask as the intersection of 4) and 6).
    8. Apply final mask on the enhanced reference.

    Step 1 can be skipped if the ``pre_mask`` argument is set to ``True``
    and a tentative mask is passed in through the ``pre_mask`` Nipype input.

    Parameters
    ----------
    brainmask_thresh: :obj:`float`
        Lower threshold for the probabilistic brainmask to obtain
        the final binary mask (default: 0.5).
    name : str
        Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
    omp_nthreads : int
        number of threads available to parallel nodes
    pre_mask : bool
        Indicates whether the ``pre_mask`` input will be set (and thus,
        step 1 should be skipped).

    Inputs
    ------
    in_file : str
        BOLD image (single volume)
    pre_mask : bool
        A tentative brain mask to initialize the workflow (requires
        ``pre_mask`` parameter set ``True``).

    Outputs
    -------
    bias_corrected_file : str
        the ``in_file`` after `N4BiasFieldCorrection`_
    skull_stripped_file : str
        the ``bias_corrected_file`` after skull-stripping
    mask_file : str
        mask of the skull-stripped input file

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file", "pre_mask"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["mask_file", "skull_stripped_file",
                    "bias_corrected_file"]),
        name="outputnode",
    )

    # Dilate pre_mask
    pre_dilate = pe.Node(
        fsl.DilateImage(
            operation="max",
            kernel_shape="sphere",
            kernel_size=3.0,
            internal_datatype="char",
        ),
        name="pre_mask_dilate",
    )

    # Ensure mask's header matches reference's
    check_hdr = pe.Node(MatchHeader(), name="check_hdr",
                        run_without_submitting=True)

    # Run N4 normally, force num_threads=1 for stability (images are small,
    # no need for >1).
    # BUGFIX: ``shrink_factor`` is an input of the N4BiasFieldCorrection
    # interface, not a keyword of ``pe.Node`` — passing it to pe.Node raised
    # a TypeError at graph-construction time.
    n4_correct = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            copy_header=True,
            bspline_fitting_distance=200,
            shrink_factor=2,
        ),
        name="n4_correct",
        n_procs=1,
    )
    n4_correct.inputs.rescale_intensities = True

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name="skullstrip_first_pass")
    bet_dilate = pe.Node(
        fsl.DilateImage(
            operation="max",
            kernel_shape="sphere",
            kernel_size=6.0,
            internal_datatype="char",
        ),
        name="skullstrip_first_dilate",
    )
    bet_mask = pe.Node(fsl.ApplyMask(), name="skullstrip_first_mask")

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype="NIFTI_GZ",
            # Default -clfrac is 0.1, 0.4 was too conservative
            # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
            args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
            out_file="uni.nii.gz",
        ),
        name="unifize",
    )
    fixhdr_unifize = pe.Node(CopyXForm(), name="fixhdr_unifize", mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(
        afni.Automask(dilate=1, outputtype="NIFTI_GZ"),
        name="skullstrip_second_pass")
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name="fixhdr_skullstrip2",
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation="mul"),
                            name="combine_masks")

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name="apply_mask")

    if not pre_mask:
        from ..interfaces.nibabel import Binarize

        bold_template = get_template("MNI152NLin2009cAsym", resolution=2,
                                     desc="fMRIPrep", suffix="boldref")
        brain_mask = get_template("MNI152NLin2009cAsym", resolution=2,
                                  desc="brain", suffix="mask")

        # Initialize transforms with antsAI
        init_aff = pe.Node(
            AI(
                fixed_image=str(bold_template),
                fixed_image_mask=str(brain_mask),
                metric=("Mattes", 32, "Regular", 0.2),
                transform=("Affine", 0.1),
                search_factor=(20, 0.12),
                principal_axes=False,
                convergence=(10, 1e-6, 10),
                verbose=True,
            ),
            name="init_aff",
            n_procs=omp_nthreads,
        )

        # Registration().version may be None
        if parseversion(Registration().version or "0.0.0") > Version("2.2.0"):
            init_aff.inputs.search_grid = (40, (0, 40, 40))

        # Set up spatial normalization
        norm = pe.Node(
            Registration(from_file=pkgr_fn("niworkflows.data",
                                           "epi_atlasbased_brainmask.json")),
            name="norm",
            n_procs=omp_nthreads,
        )
        norm.inputs.fixed_image = str(bold_template)
        map_brainmask = pe.Node(
            ApplyTransforms(
                interpolation="BSpline",
                float=True,
                # Use the higher resolution and probseg for numerical
                # stability in rounding
                input_image=str(
                    get_template(
                        "MNI152NLin2009cAsym",
                        resolution=1,
                        label="brain",
                        suffix="probseg",
                    )),
            ),
            name="map_brainmask",
        )
        binarize_mask = pe.Node(Binarize(thresh_low=brainmask_thresh),
                                name="binarize_mask")

        # fmt: off
        workflow.connect([
            (inputnode, init_aff, [("in_file", "moving_image")]),
            (inputnode, map_brainmask, [("in_file", "reference_image")]),
            (inputnode, norm, [("in_file", "moving_image")]),
            (init_aff, norm, [("output_transform", "initial_moving_transform")]),
            (norm, map_brainmask, [
                ("reverse_invert_flags", "invert_transform_flags"),
                ("reverse_transforms", "transforms"),
            ]),
            (map_brainmask, binarize_mask, [("output_image", "in_file")]),
            (binarize_mask, pre_dilate, [("out_mask", "in_file")]),
        ])
        # fmt: on
    else:
        # fmt: off
        workflow.connect([
            (inputnode, pre_dilate, [("pre_mask", "in_file")]),
        ])
        # fmt: on

    # fmt: off
    workflow.connect([
        (inputnode, check_hdr, [("in_file", "reference")]),
        (pre_dilate, check_hdr, [("out_file", "in_file")]),
        (check_hdr, n4_correct, [("out_file", "mask_image")]),
        (inputnode, n4_correct, [("in_file", "input_image")]),
        (inputnode, fixhdr_unifize, [("in_file", "hdr_file")]),
        (inputnode, fixhdr_skullstrip2, [("in_file", "hdr_file")]),
        (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
        (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]),
        (bet_dilate, bet_mask, [("out_file", "mask_file")]),
        (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]),
        (bet_mask, unifize, [("out_file", "in_file")]),
        (unifize, fixhdr_unifize, [("out_file", "in_file")]),
        (fixhdr_unifize, skullstrip_second_pass, [("out_file", "in_file")]),
        (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [("out_file", "in_file")]),
        (fixhdr_skullstrip2, combine_masks, [("out_file", "operand_file")]),
        (fixhdr_unifize, apply_mask, [("out_file", "in_file")]),
        (combine_masks, apply_mask, [("out_file", "mask_file")]),
        (combine_masks, outputnode, [("out_file", "mask_file")]),
        (apply_mask, outputnode, [("out_file", "skull_stripped_file")]),
        (n4_correct, outputnode, [("output_image", "bias_corrected_file")]),
    ])
    # fmt: on
    return workflow
def run(options):
    """Build and partially wire a multi-echo preprocessing workflow.

    NOTE(review): this function is clearly unfinished. Several names are
    referenced but never defined in this scope (``runs``, ``upstream``,
    ``get_subs``, ``find_CM``, ``check_obliquity``, ``skull_stripped``,
    ``despike`` flag), the output directories are placeholders, and the
    obliquity check is evaluated at graph-construction time on a Node
    object, which is not valid nipype usage. Only the mechanical defects
    are fixed here:

    * missing commas before ``name=`` in the ``pe.Node(...)`` calls;
    * the invalid identifier ``skull-stripped`` (now ``skull_stripped``);
    * the inconsistent workflow variable (``merica_wf`` vs ``meica_wf`` —
      now ``meica_wf`` everywhere);
    * the duplicate node name ``'get_cm'`` on the obliquity node (nipype
      requires unique node names within a workflow);
    * the ``despike`` flag being shadowed by the despike node;
    * the missing ``name=`` on the ``warpspeed`` node;
    * a duplicated ``connect`` call at the end.
    """
    # fix! output locations are placeholders
    out_dir = os.path.join('option', '1')
    err_dir = os.path.join('option', '2')
    data_dir = os.path.join('option', '3')
    work_dir = os.path.join('something', 'else')

    # Workflow
    meica_wf = pe.Workflow('merica_wf')
    meica_wf.base_dir = work_dir

    inputspec = pe.Node(util.IdentityInterface(fields=options.keys()),
                        name='inputspec')

    # Node: subject_iterable
    run_iterable = pe.Node(
        util.IdentityInterface(fields=['run'], mandatory_inputs=True),
        name='run_iterable')
    run_iterable.iterables = ('run', runs)  # TODO: `runs` is undefined here

    info = dict(mri_files=[['run']])

    # Create a datasource node to get the mri files
    datasource = pe.Node(
        nio.DataGrabber(infields=['run'], outfields=info.keys()),
        name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.base_directory = abspath(data_dir)
    datasource.inputs.field_template = dict(mri_files='%s/func/*.nii.gz')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True
    meica_wf.connect(run_iterable, 'run', datasource, 'run')

    # Create a Function node to rename output files
    getsubs = pe.Node(
        util.Function(input_names=['run', 'mri_files'],
                      output_names=['subs'],
                      function=get_subs),
        name='getsubs')
    getsubs.inputs.ignore_exception = False
    meica_wf.connect(run_iterable, 'run', getsubs, 'run')
    meica_wf.connect(datasource, 'mri_files', getsubs, 'mri_files')

    get_cm = pe.Node(
        util.Function(input_names=['fname'],
                      output_names=['x', 'y', 'z'],
                      function=find_CM),
        name='get_cm')
    get_obliquity = pe.Node(
        util.Function(input_names=['fname'],
                      output_names=['angmerit'],
                      function=check_obliquity),
        name='get_obliquity')  # was duplicated 'get_cm' — names must be unique

    # NOTE(review): inspecting ``is_oblique`` on a Node object at build time
    # cannot work; this decision has to be made at run time — TODO rework.
    if get_obliquity.is_oblique:
        deoblique = pe.Node(afni.Warp(deoblique=True), name='deoblique')
        meica_wf.connect(upstream, 't1', deoblique, 'in_file')
    warpspeed = pe.Node(afni.Warp(args='-card2oblique -newgrid 1.0'),
                        name='warpspeed')

    # TODO: `skull_stripped` flag is undefined here — presumably it should
    # come from `options`; confirm against the caller.
    if not skull_stripped:
        unifeyes = pe.Node(afni.Unifize(), name='unifeyes')
        if get_obliquity.is_oblique:
            meica_wf.connect(deoblique, 'out_file', unifeyes, 'in_file')
        else:
            meica_wf.connect(upstream, 't1', unifeyes, 'in_file')
        skullstrip = pe.Node(
            afni.SkullStrip(args='-shrink_fac_bot_lim 0.3 -orig_vol'),
            name='skullstrip')
        autobots = pe.Node(afni.Autobox(), name='autobots')
        meica_wf.connect(skullstrip, 'out_file', autobots, 'in_file')

    # Moving on to functional preprocessing, be back later!
    if despike:
        # Use a distinct variable so the node does not shadow the flag.
        despike_node = pe.Node(afni.Despike(), name='despike')
        if not skull_stripped:
            meica_wf.connect(autobots, 'out_file', despike_node, 'in_file')
        else:
            meica_wf.connect(upstream, 't1', despike_node, 'in_file')

    meica_wf.connect(run_iterable, 'run', get_cm, 'fname')
def init_enhance_and_skullstrip_asl_wf(
    brainmask_thresh=0.5,
    name="enhance_and_skullstrip_asl_wf",
    omp_nthreads=1,
    pre_mask=False,
):
    """
    Enhance and run brain extraction on an ASL image.

    This workflow takes in an :abbr:`ASL (arterial spin labeling)`
    average/summary (e.g., a reference image averaging non-steady-state
    timepoints), sharpens the histogram with the N4 algorithm to remove
    the :abbr:`INU (intensity non-uniformity)` bias field, and calculates
    a signal mask.

    Steps of this workflow are:

    1. Run ANTs' ``N4BiasFieldCorrection`` on the input ASL average.
    2. Calculate a loose mask using FSL's ``bet``, with one mathematical
       morphology dilation of one iteration and a sphere of 6mm as
       structuring element.
    3. Mask the INU-corrected image with the latest mask, then use AFNI's
       ``3dUnifize`` to *standardize* the T2* contrast distribution.
    4. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
       enhancement.
    5. Calculate a final mask as the intersection of 2) and 4).
    6. Apply the final mask on the enhanced reference.

    Parameters
    ----------
    brainmask_thresh: :obj:`float`
        Lower threshold for the probabilistic brainmask (default: 0.5).
        NOTE(review): currently unused — the template-registration /
        binarization step present in the BOLD variant is disabled here.
    name : str
        Name of workflow (default: ``enhance_and_skullstrip_asl_wf``)
    omp_nthreads : int
        number of threads available to parallel nodes
    pre_mask : bool
        NOTE(review): currently unused in this variant.

    Inputs
    ------
    in_file : str
        ASL image (single volume)
    pre_mask : bool
        A tentative brain mask to initialize the workflow (requires
        ``pre_mask`` parameter set ``True``).

    Outputs
    -------
    bias_corrected_file : str
        the ``in_file`` after `N4BiasFieldCorrection`_
    skull_stripped_file : str
        the ``bias_corrected_file`` after skull-stripping
    mask_file : str
        mask of the skull-stripped input file

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file", "pre_mask"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["mask_file", "skull_stripped_file",
                    "bias_corrected_file"]),
        name="outputnode",
    )

    # Run N4 normally, force num_threads=1 for stability (images are small,
    # no need for >1).
    # BUGFIX: ``shrink_factor`` is an input of the N4BiasFieldCorrection
    # interface, not a keyword of ``pe.Node`` — passing it to pe.Node raised
    # a TypeError at graph-construction time.
    n4_correct = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            copy_header=True,
            bspline_fitting_distance=200,
            shrink_factor=2,
        ),
        name="n4_correct",
        n_procs=1,
    )
    n4_correct.inputs.rescale_intensities = True

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name="skullstrip_first_pass")
    bet_dilate = pe.Node(
        fsl.DilateImage(
            operation="max",
            kernel_shape="sphere",
            kernel_size=6.0,
            internal_datatype="char",
        ),
        name="skullstrip_first_dilate",
    )
    bet_mask = pe.Node(fsl.ApplyMask(), name="skullstrip_first_mask")

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype="NIFTI_GZ",
            # Default -clfrac is 0.1, 0.4 was too conservative
            # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
            args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
            out_file="uni.nii.gz",
        ),
        name="unifize",
    )
    fixhdr_unifize = pe.Node(CopyXForm(), name="fixhdr_unifize", mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(
        afni.Automask(dilate=1, outputtype="NIFTI_GZ"),
        name="skullstrip_second_pass")
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name="fixhdr_skullstrip2",
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation="mul"),
                            name="combine_masks")

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name="apply_mask")

    # fmt: off
    workflow.connect([
        # NOTE(review): feeding the image itself as its own N4 mask_image
        # looks suspicious — the BOLD variant feeds a dilated brain mask
        # here. Confirm this is intentional.
        (inputnode, n4_correct, [("in_file", "mask_image")]),
        (inputnode, n4_correct, [("in_file", "input_image")]),
        (inputnode, fixhdr_unifize, [("in_file", "hdr_file")]),
        (inputnode, fixhdr_skullstrip2, [("in_file", "hdr_file")]),
        (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
        (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]),
        (bet_dilate, bet_mask, [("out_file", "mask_file")]),
        (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]),
        (bet_mask, unifize, [("out_file", "in_file")]),
        (unifize, fixhdr_unifize, [("out_file", "in_file")]),
        (fixhdr_unifize, skullstrip_second_pass, [("out_file", "in_file")]),
        (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [("out_file", "in_file")]),
        (fixhdr_skullstrip2, combine_masks, [("out_file", "operand_file")]),
        (fixhdr_unifize, apply_mask, [("out_file", "in_file")]),
        (combine_masks, apply_mask, [("out_file", "mask_file")]),
        (combine_masks, outputnode, [("out_file", "mask_file")]),
        (apply_mask, outputnode, [("out_file", "skull_stripped_file")]),
        (n4_correct, outputnode, [("output_image", "bias_corrected_file")]),
    ])
    # fmt: on
    return workflow
def coregister_fmri_session(session_data, t_r, write_dir, brain_volume,
                            use_rats_tool=True, slice_timing=True,
                            prior_rigid_body_registration=False,
                            caching=False, voxel_size_x=.1, voxel_size_y=.1,
                            verbose=True, **environ_kwargs):
    """
    Coregistration of the subject's functional and anatomical images.

    The functional volume is aligned to the anatomical, first with a rigid
    body registration and then on a per-slice basis (only a fine correction,
    this is mostly for correction of EPI distortion).

    Parameters
    ----------
    session_data : sammba.registration.SessionData
        Single animal data, giving paths to its functional and anatomical
        image, as well as its identifier.

    t_r : float
        Repetition time for the EPI, in seconds.

    write_dir : str
        Directory to save the output and temporary images.

    brain_volume : int
        Volume of the brain in mm3 used for brain extraction.
        Typically 400 for mouse and 1800 for rat.

    use_rats_tool : bool, optional
        If True, brain mask is computed using RATS Mathematical Morphology.
        Otherwise, a histogram-based brain segmentation is used.

    slice_timing : bool, optional
        If True, the functional volumes are first corrected for slice timing
        (AFNI 3dTshift, 'altplus' pattern).

    prior_rigid_body_registration : bool, optional
        If True, a rigid-body registration of the anat to the func is
        performed prior to the warp. Useful if the images headers have
        missing/wrong information.

    voxel_size_x : float, optional
        Resampling resolution for the x-axis, in mm.

    voxel_size_y : float, optional
        Resampling resolution for the y-axis, in mm.

    caching : bool, optional
        Whether or not to use caching. When caching is off, all intermediate
        files are deleted at the end of the run.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.

    environ_kwargs : extra arguments keywords
        Extra arguments keywords, passed to interfaces environ variable.

    Returns
    -------
    None
        `session_data` is modified in place; the following attributes are
        added

        - `output_dir_` : str
                          Path to the output directory.
        - `coreg_func_` : str
                          Path to the coregistered functional image.
        - `coreg_anat_` : str
                          Path to the coregistered anatomical image.
        - `coreg_transform_` : str
                               Path to the transform from anat to func.

    Notes
    -----
    If `use_rats_tool` is turned on, RATS tool is used for brain extraction
    and has to be cited. For more information, see
    `RATS <http://www.iibi.uiowa.edu/content/rats-overview/>`_

    NOTE(review): the working directory is changed with ``os.chdir`` for the
    duration of the run and restored at the end — not safe if an exception
    is raised midway, and not thread-safe.
    """
    func_filename = session_data.func
    anat_filename = session_data.anat

    environ = {'AFNI_DECONFLICT': 'OVERWRITE'}
    for (key, value) in environ_kwargs.items():
        environ[key] = value

    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    # Select the brain-extraction interface; both require external tools.
    if use_rats_tool:
        if segmentation.interfaces.Info().version() is None:
            raise ValueError('Can not locate RATS')
        else:
            ComputeMask = segmentation.MathMorphoMask
    else:
        ComputeMask = segmentation.HistogramMask

    if ants.base.Info().version is None:
        raise ValueError('Can not locate ANTS')

    if caching:
        memory = Memory(write_dir)
        tshift = memory.cache(afni.TShift)
        clip_level = memory.cache(afni.ClipLevel)
        volreg = memory.cache(afni.Volreg)
        allineate = memory.cache(afni.Allineate)
        tstat = memory.cache(afni.TStat)
        compute_mask = memory.cache(ComputeMask)
        calc = memory.cache(afni.Calc)
        # NOTE(review): `allineate` is cached twice (also a few lines above);
        # the second assignment is redundant — confirm before removing.
        allineate = memory.cache(afni.Allineate)
        allineate2 = memory.cache(afni.Allineate)
        unifize = memory.cache(afni.Unifize)
        bias_correct = memory.cache(ants.N4BiasFieldCorrection)
        catmatvec = memory.cache(afni.CatMatvec)
        warp = memory.cache(afni.Warp)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(afni.ZCutUp)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(afni.Zcat)
        copy_geom = memory.cache(fsl.CopyGeom)
        overwrite = False
        for step in [tshift, volreg, allineate, allineate2, tstat,
                     compute_mask, calc, unifize, resample, slicer,
                     warp_apply, qwarp, merge]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        tshift = afni.TShift(terminal_output=terminal_output).run
        clip_level = afni.ClipLevel().run
        volreg = afni.Volreg(terminal_output=terminal_output).run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        allineate2 = afni.Allineate(terminal_output=terminal_output
                                    ).run  # TODO: remove after fixed bug
        tstat = afni.TStat(terminal_output=terminal_output).run
        compute_mask = ComputeMask().run
        calc = afni.Calc(terminal_output=terminal_output).run
        unifize = afni.Unifize(terminal_output=terminal_output).run
        bias_correct = ants.N4BiasFieldCorrection(
            terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        warp = afni.Warp().run
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = afni.ZCutUp(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = afni.Zcat(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        # NOTE(review): `overwrite` is assigned in both branches but never
        # read afterwards — confirm before removing.
        overwrite = True

    session_data._check_inputs()
    output_dir = os.path.join(os.path.abspath(write_dir),
                              session_data.animal_id)
    session_data._set_output_dir_(output_dir)
    current_dir = os.getcwd()
    os.chdir(output_dir)
    output_files = []

    #######################################
    # Correct functional for slice timing #
    #######################################
    if slice_timing:
        out_tshift = tshift(in_file=func_filename,
                            outputtype='NIFTI_GZ',
                            tpattern='altplus',
                            tr=str(t_r),
                            environ=environ)
        func_filename = out_tshift.outputs.out_file
        output_files.append(func_filename)

    ################################################
    # Register functional volumes to the first one #
    ################################################
    # XXX why do you need a thresholded image ?
    out_clip_level = clip_level(in_file=func_filename)
    out_calc_threshold = calc(in_file_a=func_filename,
                              expr='ispositive(a-{0}) * a'.format(
                                  out_clip_level.outputs.clip_val),
                              outputtype='NIFTI_GZ')
    thresholded_filename = out_calc_threshold.outputs.out_file

    out_volreg = volreg(  # XXX dfile not saved
        in_file=thresholded_filename,
        outputtype='NIFTI_GZ',
        environ=environ,
        oned_file=fname_presuffix(thresholded_filename,
                                  suffix='Vr.1Dfile.1D',
                                  use_ext=False),
        oned_matrix_save=fname_presuffix(thresholded_filename,
                                         suffix='Vr.aff12.1D',
                                         use_ext=False))

    # Apply the registration to the whole head
    out_allineate = allineate(in_file=func_filename,
                              master=func_filename,
                              in_matrix=out_volreg.outputs.oned_matrix_save,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='Av'),
                              environ=environ)

    # 3dAllineate removes the obliquity. This is not a good way to readd it as
    # removes motion correction info in the header if it were an AFNI file...as
    # it happens it's NIfTI which does not store that so irrelevant!
    out_copy_geom = copy_geom(dest_file=out_allineate.outputs.out_file,
                              in_file=out_volreg.outputs.out_file)
    allineated_filename = out_copy_geom.outputs.out_file

    # Create a (hopefully) nice mean image for use in the registration
    out_tstat = tstat(in_file=allineated_filename, args='-mean',
                      outputtype='NIFTI_GZ', environ=environ)

    # Update outputs
    output_files.extend([thresholded_filename,
                         out_volreg.outputs.oned_matrix_save,
                         out_volreg.outputs.out_file,
                         out_volreg.outputs.md1d_file,
                         allineated_filename,
                         out_tstat.outputs.out_file])

    ############################################
    # Correct anat and func for intensity bias #
    ############################################
    # Correct the functional average for intensities bias
    out_bias_correct = bias_correct(input_image=out_tstat.outputs.out_file)
    unbiased_func_filename = out_bias_correct.outputs.output_image

    # Bias correct the anatomical image
    out_unifize = unifize(in_file=anat_filename,
                          outputtype='NIFTI_GZ',
                          environ=environ)
    unbiased_anat_filename = out_unifize.outputs.out_file

    # Update outputs
    output_files.extend([unbiased_func_filename, unbiased_anat_filename])

    #############################################
    # Rigid-body registration anat -> mean func #
    #############################################
    if prior_rigid_body_registration:
        # Mask the mean functional volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_func_filename)
        out_compute_mask_func = compute_mask(
            in_file=unbiased_func_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_cacl_func = calc(in_file_a=unbiased_func_filename,
                             in_file_b=out_compute_mask_func.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Mask the anatomical volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_anat_filename)
        out_compute_mask_anat = compute_mask(
            in_file=unbiased_anat_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_cacl_anat = calc(in_file_a=unbiased_anat_filename,
                             in_file_b=out_compute_mask_anat.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Compute the transformation from functional to anatomical brain
        # XXX: why in this sense
        out_allineate = allineate2(
            in_file=out_cacl_func.outputs.out_file,
            reference=out_cacl_anat.outputs.out_file,
            out_matrix=fname_presuffix(out_cacl_func.outputs.out_file,
                                       suffix='_shr.aff12.1D',
                                       use_ext=False),
            center_of_mass='',
            warp_type='shift_rotate',
            out_file=fname_presuffix(out_cacl_func.outputs.out_file,
                                     suffix='_shr'),
            environ=environ)
        rigid_transform_file = out_allineate.outputs.out_matrix
        output_files.extend([out_compute_mask_func.outputs.out_file,
                             out_cacl_func.outputs.out_file,
                             out_compute_mask_anat.outputs.out_file,
                             out_cacl_anat.outputs.out_file,
                             rigid_transform_file,
                             out_allineate.outputs.out_file])

        # apply the inverse transform to register the anatomical to the func
        catmatvec_out_file = fname_presuffix(rigid_transform_file,
                                             suffix='INV')
        out_catmatvec = catmatvec(in_file=[(rigid_transform_file, 'I')],
                                  oneline=True,
                                  out_file=catmatvec_out_file)
        output_files.append(out_catmatvec.outputs.out_file)
        out_allineate = allineate(
            in_file=unbiased_anat_filename,
            master=unbiased_func_filename,
            in_matrix=out_catmatvec.outputs.out_file,
            out_file=fname_presuffix(unbiased_anat_filename,
                                     suffix='_shr_in_func_space'),
            environ=environ)
        allineated_anat_filename = out_allineate.outputs.out_file
        output_files.append(allineated_anat_filename)
    else:
        allineated_anat_filename = unbiased_anat_filename

    ############################################
    # Nonlinear registration anat -> mean func #
    ############################################
    # 3dWarp doesn't put the obliquity in the header, so do it manually
    # This step generates one file per slice and per time point, so we are
    # making sure they are removed at the end
    out_warp = warp(in_file=allineated_anat_filename,
                    oblique_parent=unbiased_func_filename,
                    interp='quintic',
                    gridset=unbiased_func_filename,
                    outputtype='NIFTI_GZ',
                    verbose=True,
                    environ=environ)
    registered_anat_filename = out_warp.outputs.out_file
    registered_anat_oblique_filename = fix_obliquity(
        registered_anat_filename, unbiased_func_filename, verbose=verbose)

    # Concatenate all the anat to func transforms
    mat_filename = fname_presuffix(registered_anat_filename,
                                   suffix='_warp.mat', use_ext=False)
    # XXX Handle this correctly according to caching
    if not os.path.isfile(mat_filename):
        # The affine is recovered from 3dWarp's textual output.
        np.savetxt(mat_filename, [out_warp.runtime.stdout], fmt='%s')
        output_files.append(mat_filename)

    transform_filename = fname_presuffix(registered_anat_filename,
                                         suffix='_anat_to_func.aff12.1D',
                                         use_ext=False)
    if prior_rigid_body_registration:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE'),
                               (rigid_transform_file, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)
    else:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)

    ##################################################
    # Per-slice non-linear registration func -> anat #
    ##################################################
    # Slice anatomical image
    anat_img = nibabel.load(registered_anat_oblique_filename)
    anat_n_slices = anat_img.header.get_data_shape()[2]
    sliced_registered_anat_filenames = []
    for slice_n in range(anat_n_slices):
        out_slicer = slicer(in_file=registered_anat_oblique_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(
                                registered_anat_oblique_filename,
                                suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      registered_anat_oblique_filename,
                                      verbose=verbose)
        sliced_registered_anat_filenames.append(oblique_slice)

    # Slice mean functional
    sliced_bias_corrected_filenames = []
    img = nibabel.load(func_filename)
    n_slices = img.header.get_data_shape()[2]
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=unbiased_func_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(unbiased_func_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      unbiased_func_filename,
                                      verbose=verbose)
        sliced_bias_corrected_filenames.append(oblique_slice)

    # Below line is to deal with slices where there is no signal (for example
    # rostral end of some anatomicals)

    # The inverse warp frequently fails, Resampling can help it work better
    # XXX why specifically .1 in voxel_size ?
    voxel_size_z = anat_img.header.get_zooms()[2]
    resampled_registered_anat_filenames = []
    for sliced_registered_anat_filename in sliced_registered_anat_filenames:
        out_resample = resample(in_file=sliced_registered_anat_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_registered_anat_filenames.append(
            out_resample.outputs.out_file)

    resampled_bias_corrected_filenames = []
    for sliced_bias_corrected_filename in sliced_bias_corrected_filenames:
        out_resample = resample(in_file=sliced_bias_corrected_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_bias_corrected_filenames.append(
            out_resample.outputs.out_file)

    # single slice non-linear functional to anatomical registration
    warped_slices = []
    warp_filenames = []
    for (resampled_bias_corrected_filename,
         resampled_registered_anat_filename) in zip(
            resampled_bias_corrected_filenames,
            resampled_registered_anat_filenames):
        warped_slice = fname_presuffix(resampled_bias_corrected_filename,
                                       suffix='_qw')
        out_qwarp = qwarp(
            in_file=resampled_bias_corrected_filename,
            base_file=resampled_registered_anat_filename,
            iwarp=True,  # XXX: is this necessary
            noneg=True,
            blur=[0],
            nmi=True,
            noXdis=True,
            allineate=True,
            # In-plane (per-slice) registration only: the -parfix options
            # freeze every affine parameter involving the z-axis.
            allineate_opts='-parfix 1 0 -parfix 2 0 -parfix 3 0 '
                           '-parfix 4 0 -parfix 5 0 -parfix 6 0 '
                           '-parfix 7 0 -parfix 9 0 '
                           '-parfix 10 0 -parfix 12 0',
            out_file=warped_slice,
            environ=environ)
        warped_slices.append(out_qwarp.outputs.warped_source)
        warp_filenames.append(out_qwarp.outputs.source_warp)
        output_files.append(out_qwarp.outputs.base_warp)
        # There are files generated by the allineate option
        output_files.extend([
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin'),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.nii', use_ext=False),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.aff12.1D', use_ext=False)])

    # Resample the mean volume back to the initial resolution,
    voxel_size = nibabel.load(unbiased_func_filename).header.get_zooms()
    resampled_warped_slices = []
    for warped_slice in warped_slices:
        out_resample = resample(in_file=warped_slice,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_warped_slices.append(out_resample.outputs.out_file)

    # fix the obliquity
    resampled_warped_slices_oblique = []
    for (sliced_registered_anat_filename, resampled_warped_slice) in zip(
            sliced_registered_anat_filenames, resampled_warped_slices):
        oblique_slice = fix_obliquity(resampled_warped_slice,
                                      sliced_registered_anat_filename,
                                      verbose=verbose)
        resampled_warped_slices_oblique.append(oblique_slice)

    # slice functional
    sliced_func_filenames = []
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=allineated_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(allineated_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      allineated_filename,
                                      verbose=verbose)
        sliced_func_filenames.append(oblique_slice)

    # Apply the precomputed warp slice by slice
    warped_func_slices = []
    for (sliced_func_filename, warp_filename) in zip(
            sliced_func_filenames, warp_filenames):
        out_warp_apply = warp_apply(in_file=sliced_func_filename,
                                    master=sliced_func_filename,
                                    warp=warp_filename,
                                    out_file=fname_presuffix(
                                        sliced_func_filename, suffix='_qw'),
                                    environ=environ)
        warped_func_slices.append(out_warp_apply.outputs.out_file)

    # Finally, merge all slices !
    out_merge_func = merge(in_files=warped_func_slices,
                           outputtype='NIFTI_GZ',
                           environ=environ)

    # Fix the obliquity
    merged_oblique = fix_obliquity(out_merge_func.outputs.out_file,
                                   allineated_filename,
                                   verbose=verbose)

    # Update the fmri data
    setattr(session_data, "coreg_func_", merged_oblique)
    setattr(session_data, "coreg_anat_", registered_anat_oblique_filename)
    setattr(session_data, "coreg_transform_", transform_filename)
    os.chdir(current_dir)

    # Collect the outputs
    output_files.extend(sliced_registered_anat_filenames +
                        sliced_bias_corrected_filenames +
                        resampled_registered_anat_filenames +
                        resampled_bias_corrected_filenames +
                        warped_slices + warp_filenames +
                        resampled_warped_slices_oblique +
                        sliced_func_filenames +
                        warped_func_slices)
    # Without caching, all intermediate files are removed at the end.
    if not caching:
        for out_file in output_files:
            if os.path.isfile(out_file):
                os.remove(out_file)
def init_enhance_and_skullstrip_bold_wf(name='enhance_and_skullstrip_bold_wf',
                                        omp_nthreads=1,
                                        enhance_t2=False):
    """
    Enhance a BOLD average/summary image and compute its brain mask.

    The workflow sharpens the histogram of a :abbr:`BOLD (blood-oxygen
    level-dependant)` reference with ANTs' N4 bias-field correction and
    derives a signal mask from it:

      1. A conservative mask is computed with Nilearn's ``compute_epi_mask``
         (via the ``MaskEPI`` interface) and fed to ``N4BiasFieldCorrection``
         instead of its internal Otsu thresholding.
      2. A loose mask is obtained with FSL's ``bet`` and dilated once with a
         6mm spherical structuring element.
      3. The N4-corrected image, masked by 2), is contrast-standardized with
         AFNI's ``3dUnifize``.
      4. A refined mask is computed on 3) with AFNI's ``3dAutomask``.
      5. The final mask is the voxel-wise product (intersection) of 2)
         and 4), and it is applied to the enhanced reference.

    Parameters
    ----------
    name : str
        Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
    omp_nthreads : int
        number of threads available to parallel nodes
    enhance_t2 : bool
        perform logarithmic transform of input BOLD image to improve contrast
        before calculating the preliminary mask

    **Inputs**

        in_file
            BOLD image (single volume)

    **Outputs**

        bias_corrected_file
            the ``in_file`` after N4 bias-field correction
        skull_stripped_file
            the ``bias_corrected_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
    """
    wf = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['mask_file', 'skull_stripped_file',
                                      'bias_corrected_file']),
        name='outputnode')

    # Loose pre-mask so N4 does not fall back on its internal Otsu mask
    n4_mask = pe.Node(
        MaskEPI(upper_cutoff=0.75, enhance_t2=enhance_t2, opening=1,
                no_sanitize=True),
        name='n4_mask')
    # Images are small: a single thread is enough and more stable
    n4_correct = pe.Node(
        ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
        name='n4_correct', n_procs=1)

    # First (generous) brain extraction on the bias-corrected image
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    bet_dilate = pe.Node(
        fsl.DilateImage(operation='max', kernel_shape='sphere',
                        kernel_size=6.0, internal_datatype='char'),
        name='skullstrip_first_dilate')
    bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')

    # T2 contrast standardization; header is restored afterwards
    unifize = pe.Node(
        afni.Unifize(t2=True, outputtype='NIFTI_GZ',
                     # Default -clfrac is 0.1, 0.4 was too conservative
                     # -rbt values taken from 3dUnifize's documentation
                     args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
                     out_file="uni.nii.gz"),
        name='unifize')
    fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)

    # Second, refined mask from the contrast-enhanced image
    skullstrip_second_pass = pe.Node(
        afni.Automask(dilate=1, outputtype='NIFTI_GZ'),
        name='skullstrip_second_pass')
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2',
                                 mem_gb=0.1)

    # Intersect both masks, then apply the result
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    edges = [
        (inputnode, n4_mask, [('in_file', 'in_files')]),
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
        (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
        (n4_mask, n4_correct, [('out_mask', 'mask_image')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
        (bet_dilate, bet_mask, [('out_file', 'mask_file')]),
        (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
        (bet_mask, unifize, [('out_file', 'in_file')]),
        (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
        (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, fixhdr_skullstrip2,
         [('out_file', 'in_file')]),
        (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
        (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ]
    wf.connect(edges)

    return wf
def create_pipeline_graph(pipeline_name, graph_file,
                          graph_kind='hierarchical'):
    """Creates a pipeline graph for a given pipeline.

    Parameters
    ----------
    pipeline_name : one of {'anats_to_common_rigid',
        'anats_to_common_affine', 'anats_to_common_nonlinear'}
        Pipeline name.

    graph_file : str.
        Path to save the graph image to. The extension, if any, selects the
        output image format.

    graph_kind : one of {'orig', 'hierarchical', 'flat', 'exec', 'colored'},
        optional.
        The kind of the graph, passed to
        nipype.pipeline.workflows.Workflow().write_graph

    Raises
    ------
    NotImplementedError
        If `pipeline_name` is not one of the supported names.
    ValueError
        If `graph_kind` is not one of the supported kinds.
    """
    pipeline_names = ['anats_to_common_rigid', 'anats_to_common_affine',
                      'anats_to_common_nonlinear']
    if pipeline_name not in pipeline_names:
        raise NotImplementedError(
            'Pipeline name must be one of {0}, you entered {1}'.format(
                pipeline_names, pipeline_name))
    graph_kinds = ['orig', 'hierarchical', 'flat', 'exec', 'colored']
    if graph_kind not in graph_kinds:
        raise ValueError(
            'Graph kind must be one of {0}, you entered {1}'.format(
                graph_kinds, graph_kind))

    workflow = pe.Workflow(name=pipeline_name)

    #######################################################################
    # Specify rigid body registration pipeline steps
    unifize = pe.Node(interface=afni.Unifize(), name='bias_correct')
    clip_level = pe.Node(interface=afni.ClipLevel(),
                         name='compute_mask_threshold')
    compute_mask = pe.Node(interface=interfaces.MathMorphoMask(),
                           name='compute_brain_mask')
    apply_mask = pe.Node(interface=afni.Calc(), name='apply_brain_mask')
    center_mass = pe.Node(interface=afni.CenterMass(),
                          name='compute_and_set_cm_in_header')
    refit_copy = pe.Node(afni.Refit(), name='copy_cm_in_header')
    tcat1 = pe.Node(afni.TCat(), name='concatenate_across_individuals1')
    tstat1 = pe.Node(afni.TStat(), name='compute_average1')
    undump = pe.Node(afni.Undump(), name='create_empty_template')
    refit_set = pe.Node(afni.Refit(), name='set_cm_in_header')
    resample1 = pe.Node(afni.Resample(), name='resample1')
    resample2 = pe.Node(afni.Resample(), name='resample2')
    shift_rotate = pe.Node(afni.Allineate(), name='shift_rotate')
    apply_allineate1 = pe.Node(afni.Allineate(), name='apply_transform1')
    tcat2 = pe.Node(afni.TCat(), name='concatenate_across_individuals2')
    tstat2 = pe.Node(afni.TStat(), name='compute_average2')
    tcat3 = pe.Node(afni.TCat(), name='concatenate_across_individuals3')
    tstat3 = pe.Node(afni.TStat(), name='compute_average3')

    workflow.add_nodes([unifize, clip_level, compute_mask, apply_mask,
                        center_mass, refit_copy, tcat1, tstat1, undump,
                        refit_set, resample1, resample2, shift_rotate,
                        apply_allineate1, tcat2, tstat2, tcat3, tstat3])

    #######################################################################
    # and connections
    workflow.connect(unifize, 'out_file', clip_level, 'in_file')
    workflow.connect(clip_level, 'clip_val',
                     compute_mask, 'intensity_threshold')
    workflow.connect(unifize, 'out_file', compute_mask, 'in_file')
    workflow.connect(compute_mask, 'out_file', apply_mask, 'in_file_a')
    workflow.connect(unifize, 'out_file', apply_mask, 'in_file_b')
    workflow.connect(apply_mask, 'out_file', center_mass, 'in_file')
    workflow.connect(unifize, 'out_file', refit_copy, 'in_file')
    workflow.connect(center_mass, 'out_file', refit_copy, 'duporigin_file')
    workflow.connect(center_mass, 'out_file', tcat1, 'in_files')
    workflow.connect(tcat1, 'out_file', tstat1, 'in_file')
    workflow.connect(tstat1, 'out_file', undump, 'in_file')
    workflow.connect(undump, 'out_file', refit_set, 'in_file')
    workflow.connect(refit_set, 'out_file', resample1, 'master')
    workflow.connect(refit_copy, 'out_file', resample1, 'in_file')
    workflow.connect(refit_set, 'out_file', resample2, 'master')
    workflow.connect(center_mass, 'out_file', resample2, 'in_file')
    workflow.connect(resample2, 'out_file', tcat2, 'in_files')
    workflow.connect(tcat2, 'out_file', tstat2, 'in_file')
    workflow.connect(tstat2, 'out_file', shift_rotate, 'reference')
    workflow.connect(resample2, 'out_file', shift_rotate, 'in_file')
    workflow.connect(tstat2, 'out_file', apply_allineate1, 'master')
    workflow.connect(resample1, 'out_file', apply_allineate1, 'in_file')
    workflow.connect(shift_rotate, 'out_matrix',
                     apply_allineate1, 'in_matrix')
    workflow.connect(apply_allineate1, 'out_file', tcat3, 'in_files')
    workflow.connect(tcat3, 'out_file', tstat3, 'in_file')

    # BUG FIX: this previously tested for 'anat_to_common_nonlinear'
    # (missing 's'), which is not a valid pipeline name, so the affine
    # steps were silently skipped for the nonlinear pipeline.
    if pipeline_name in ['anats_to_common_affine',
                         'anats_to_common_nonlinear']:
        mask = pe.Node(afni.MaskTool(), name='generate_count_mask')
        allineate = pe.Node(afni.Allineate(), name='allineate')
        catmatvec = pe.Node(afni.CatMatvec(), name='concatenate_transforms')
        apply_allineate2 = pe.Node(afni.Allineate(), name='apply_transform2')
        # Note: tcat3/tstat3 are deliberately rebound to the stage-4 nodes
        tcat3 = pe.Node(afni.TCat(), name='concatenate_across_individuals4')
        tstat3 = pe.Node(afni.TStat(), name='compute_average4')

        workflow.add_nodes([mask, allineate, catmatvec, apply_allineate2,
                            tcat3, tstat3])

        workflow.connect(tcat2, 'out_file', mask, 'in_file')
        workflow.connect(mask, 'out_file', allineate, 'weight')
        workflow.connect(apply_allineate1, 'out_file', allineate, 'in_file')
        workflow.connect(allineate, 'out_matrix', catmatvec, 'in_file')
        # XXX how can we enter multiple files ?
        workflow.connect(catmatvec, 'out_file', apply_allineate2, 'in_matrix')
        workflow.connect(resample1, 'out_file', apply_allineate2, 'in_file')
        workflow.connect(apply_allineate2, 'out_file', tcat3, 'in_files')
        workflow.connect(tcat3, 'out_file', tstat3, 'in_file')

    if pipeline_name == 'anats_to_common_nonlinear':
        # XXX nonlinear-specific steps not implemented yet
        pass

    graph_file_root, graph_file_ext = os.path.splitext(graph_file)
    if graph_file_ext:
        _ = workflow.write_graph(graph2use=graph_kind,
                                 format=graph_file_ext[1:],
                                 dotfilename=graph_file_root)
    else:
        _ = workflow.write_graph(graph2use=graph_kind,
                                 dotfilename=graph_file_root)
def init_enhance_and_skullstrip_dwi_wf(
    name="enhance_and_skullstrip_dwi_wf", omp_nthreads=1
):
    """
    Enhance a *b0* reference and perform brain extraction.

    This workflow takes in a *b0* reference image and sharpens the histogram
    with the application of the N4 algorithm for removing the
    :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
    mask.

    Steps of this workflow are:

      1. Run ANTs' ``N4BiasFieldCorrection`` on the input
         dwi reference image and mask.
      2. Calculate a loose mask using FSL's ``bet``, with one mathematical morphology
         dilation of one iteration and a sphere of 6mm as structuring element.
      3. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
         with the latest mask calculated in 2), then use AFNI's ``3dUnifize``
         to *standardize* the T2* contrast distribution.
      4. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 3).
      5. Calculate a final mask as the intersection of 2) and 4).
      6. Apply final mask on the enhanced reference.

    Workflow Graph:
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from dmriprep.workflows.dwi.util import init_enhance_and_skullstrip_dwi_wf
            wf = init_enhance_and_skullstrip_dwi_wf(omp_nthreads=1)

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    Parameters
    ----------
    name : str
        Name of workflow (default: ``enhance_and_skullstrip_dwi_wf``)
    omp_nthreads : int
        number of threads available to parallel nodes

    Inputs
    ------
    in_file
        The *b0* reference (single volume)
    pre_mask
        initial mask

    Outputs
    -------
    bias_corrected_file
        the ``in_file`` after `N4BiasFieldCorrection`_
    skull_stripped_file
        the ``bias_corrected_file`` after skull-stripping
    mask_file
        mask of the skull-stripped input file

    """
    from niworkflows.interfaces.header import CopyXForm
    from niworkflows.interfaces.fixes import (
        FixN4BiasFieldCorrection as N4BiasFieldCorrection,
    )
    from niworkflows.interfaces.nibabel import ApplyMask

    workflow = Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=["in_file", "pre_mask"]), name="inputnode"
    )
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["mask_file", "skull_stripped_file", "bias_corrected_file"]
        ),
        name="outputnode",
    )

    # Run N4 normally, force num_threads=1 for stability (images are small,
    # no need for >1).
    # BUG FIX: ``shrink_factor`` is an input of the N4 interface; it was
    # previously passed to ``pe.Node`` (which does not accept it) instead.
    n4_correct = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            copy_header=True,
            bspline_fitting_distance=200,
            shrink_factor=2,
        ),
        name="n4_correct",
        n_procs=1,
    )
    n4_correct.inputs.rescale_intensities = True

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(
        fsl.BET(frac=0.2, mask=True), name="skullstrip_first_pass"
    )
    bet_dilate = pe.Node(
        fsl.DilateImage(
            operation="max",
            kernel_shape="sphere",
            kernel_size=6.0,
            internal_datatype="char",
        ),
        name="skullstrip_first_dilate",
    )
    bet_mask = pe.Node(fsl.ApplyMask(), name="skullstrip_first_mask")

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype="NIFTI_GZ",
            args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
            out_file="uni.nii.gz",
        ),
        name="unifize",
    )
    fixhdr_unifize = pe.Node(CopyXForm(), name="fixhdr_unifize", mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(
        afni.Automask(dilate=1, outputtype="NIFTI_GZ"), name="skullstrip_second_pass"
    )
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name="fixhdr_skullstrip2", mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation="mul"), name="combine_masks")

    normalize = pe.Node(niu.Function(function=_normalize), name="normalize")

    # Compute masked brain
    apply_mask = pe.Node(ApplyMask(), name="apply_mask")

    # fmt:off
    workflow.connect([
        (inputnode, n4_correct, [("in_file", "input_image"),
                                 ("pre_mask", "mask_image")]),
        (inputnode, fixhdr_unifize, [("in_file", "hdr_file")]),
        (inputnode, fixhdr_skullstrip2, [("in_file", "hdr_file")]),
        (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
        (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]),
        (bet_dilate, bet_mask, [("out_file", "mask_file")]),
        (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]),
        (bet_mask, unifize, [("out_file", "in_file")]),
        (unifize, fixhdr_unifize, [("out_file", "in_file")]),
        (fixhdr_unifize, skullstrip_second_pass, [("out_file", "in_file")]),
        (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [("out_file", "in_file")]),
        (fixhdr_skullstrip2, combine_masks, [("out_file", "operand_file")]),
        (combine_masks, apply_mask, [("out_file", "in_mask")]),
        (combine_masks, outputnode, [("out_file", "mask_file")]),
        (n4_correct, normalize, [("output_image", "in_file")]),
        (normalize, apply_mask, [("out", "in_file")]),
        (normalize, outputnode, [("out", "bias_corrected_file")]),
        (apply_mask, outputnode, [("out_file", "skull_stripped_file")]),
    ]
    )
    # fmt:on
    return workflow
def afni_wf(name='AFNISkullStripWorkflow', unifize=False, n4_nthreads=1):
    """
    Build a skull-stripping workflow around AFNI's ``3dSkullStrip``.

    Originally derived from the `codebase of the QAP
    <https://github.com/preprocessed-connectomes-project/\
quality-assessment-protocol/blob/master/qap/anatomical_preproc.py#L105>`_.
    Includes :abbr:`INU (intensity non-uniformity)` correction with the N4
    algorithm and, optionally, intensity harmonization using AFNI's
    ``3dUnifize``.

    Parameters
    ----------
    name : str
        name of the workflow hierarchy in Nipype.
    unifize : bool
        whether ``3dUnifize`` is run before and after skull-stripping.
    n4_nthreads : int
        number of threads N4 bias field correction may use.

    """
    wf = pe.Workflow(name=name)

    # Workflow I/O boundary
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=['bias_corrected', 'out_file', 'out_mask', 'bias_image']),
        name='outputnode')

    # N4 INU correction. n_procs mirrors num_threads so the scheduler
    # reserves the same number of CPUs the interface will consume.
    n4 = pe.Node(
        ants.N4BiasFieldCorrection(
            dimension=3,
            save_bias=True,
            num_threads=n4_nthreads,
            copy_header=True,
        ),
        n_procs=n4_nthreads,
        name='inu_n4',
    )

    strip = pe.Node(afni.SkullStrip(outputtype='NIFTI_GZ'), name='skullstrip')
    # Re-apply the stripped footprint onto the pre-stripping volume to keep
    # the original intensities inside the brain.
    restore_vol = pe.Node(afni.Calc(expr='a*step(b)', outputtype='NIFTI_GZ'),
                          name='sstrip_orig_vol')
    # Threshold just above zero and binarize to produce the brain mask.
    make_mask = pe.Node(fsl.Threshold(args='-bin', thresh=1.e-3),
                        name='binarize')

    if unifize:
        # Harmonize intensities both before and after skull-stripping.
        uni_pre = pe.Node(afni.Unifize(outputtype='NIFTI_GZ'),
                          name='unifize_pre_skullstrip')
        uni_post = pe.Node(afni.Unifize(gm=True, outputtype='NIFTI_GZ'),
                           name='unifize_post_skullstrip')
        wf.connect([
            (n4, uni_pre, [('output_image', 'in_file')]),
            (uni_pre, strip, [('out_file', 'in_file')]),
            (uni_pre, restore_vol, [('out_file', 'in_file_a')]),
            (restore_vol, uni_post, [('out_file', 'in_file')]),
            (uni_post, outputnode, [('out_file', 'out_file')]),
            (uni_pre, outputnode, [('out_file', 'bias_corrected')]),
        ])
    else:
        wf.connect([
            (inputnode, restore_vol, [('in_file', 'in_file_a')]),
            (n4, strip, [('output_image', 'in_file')]),
            (restore_vol, outputnode, [('out_file', 'out_file')]),
            (n4, outputnode, [('output_image', 'bias_corrected')]),
        ])

    # Connections shared by both branches
    wf.connect([
        (strip, restore_vol, [('out_file', 'in_file_b')]),
        (inputnode, n4, [('in_file', 'input_image')]),
        (restore_vol, make_mask, [('out_file', 'in_file')]),
        (make_mask, outputnode, [('out_file', 'out_mask')]),
        (n4, outputnode, [('bias_image', 'bias_image')]),
    ])
    return wf
# Configure the S3 data grabber (node created earlier in this script) to
# fetch a single T1w image from the public OpenNeuro ds000101 dataset.
io_S3DataGrabber.inputs.sort_filelist = True
io_S3DataGrabber.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
# anon=True requests anonymous (unsigned) S3 access — no AWS credentials.
io_S3DataGrabber.inputs.anon = True
io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
io_S3DataGrabber.inputs.local_directory = '/tmp'

# Wraps command **bet** (FSL brain extraction).
# NOTE(review): ``iterfield`` is a MapNode-only argument; on a plain
# ``pe.Node`` it appears inert — presumably a code-generator leftover.
fsl_BET = pe.Node(interface=fsl.BET(), name='fsl_BET', iterfield=[''])

# Wraps command **3dTshift** (AFNI slice-timing correction).
afni_TShift = pe.Node(interface=afni.TShift(), name='afni_TShift', iterfield=[''])

# Wraps command **3dUnifize** (AFNI intensity uniformization).
afni_Unifize = pe.Node(interface=afni.Unifize(), name='afni_Unifize', iterfield=[''])

# Generic datagrabber module that wraps around glob in an
# (second S3 grabber; its inputs are presumably configured elsewhere)
io_S3DataGrabber_2 = pe.Node(io.S3DataGrabber(), name='io_S3DataGrabber_2')

# Wraps command **fslreorient2std** (reorient to standard orientation).
fsl_Reorient2Std = pe.Node(interface=fsl.Reorient2Std(), name='fsl_Reorient2Std', iterfield=[''])

# Wraps command **fslreorient2std** (second reorientation node).
fsl_Reorient2Std_1 = pe.Node(interface=fsl.Reorient2Std(), name='fsl_Reorient2Std_1', iterfield=[''])
def afni_wf(name="AFNISkullStripWorkflow", unifize=False, n4_nthreads=1):
    """
    Create a skull-stripping workflow based on AFNI's tools.

    Originally derived from the `codebase of the QAP
    <https://github.com/preprocessed-connectomes-project/quality-assessment-protocol/blob/master/qap/anatomical_preproc.py#L105>`_.
    The workflow also performs :abbr:`INU (intensity non-uniformity)`
    correction with the N4 algorithm and (optionally) intensity
    harmonization via AFNI's ``3dUnifize``.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from niworkflows.anat.skullstrip import afni_wf
            wf = afni_wf()

    Parameters
    ----------
    n4_nthreads : int
        number of cpus N4 bias field correction can utilize.
    unifize : bool
        whether AFNI's ``3dUnifize`` should be applied (default: ``False``).
    name : str
        name for the workflow hierarchy of Nipype

    Inputs
    ------
    in_file : str
        input T1w image.

    Outputs
    -------
    bias_corrected : str
        path to the bias corrected input MRI.
    out_file : str
        path to the skull-stripped image.
    out_mask : str
        path to the generated brain mask.
    bias_image : str
        path to the B1 inhomogeneity field.

    """
    wf = pe.Workflow(name=name)

    # Workflow I/O boundary
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]), name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["bias_corrected", "out_file", "out_mask", "bias_image"]
        ),
        name="outputnode",
    )

    # N4 INU correction; n_procs mirrors num_threads so the scheduler
    # reserves as many CPUs as the interface will use.
    n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=True,
            num_threads=n4_nthreads,
            rescale_intensities=True,
            copy_header=True,
        ),
        n_procs=n4_nthreads,
        name="inu_n4",
    )
    strip = pe.Node(afni.SkullStrip(outputtype="NIFTI_GZ"), name="skullstrip")
    # Re-apply the stripped footprint to the pre-stripping volume so the
    # output keeps the original intensities inside the brain.
    restore_vol = pe.Node(
        afni.Calc(expr="a*step(b)", outputtype="NIFTI_GZ"), name="sstrip_orig_vol"
    )
    make_mask = pe.Node(Binarize(thresh_low=0.0), name="binarize")

    if unifize:
        # Harmonize intensities both before and after skull-stripping.
        uni_pre = pe.Node(
            afni.Unifize(outputtype="NIFTI_GZ"), name="unifize_pre_skullstrip"
        )
        uni_post = pe.Node(
            afni.Unifize(gm=True, outputtype="NIFTI_GZ"),
            name="unifize_post_skullstrip",
        )
        # fmt: off
        wf.connect([
            (n4, uni_pre, [("output_image", "in_file")]),
            (uni_pre, strip, [("out_file", "in_file")]),
            (uni_pre, restore_vol, [("out_file", "in_file_a")]),
            (restore_vol, uni_post, [("out_file", "in_file")]),
            (uni_post, outputnode, [("out_file", "out_file")]),
            (uni_pre, outputnode, [("out_file", "bias_corrected")]),
        ])
        # fmt: on
    else:
        # fmt: off
        wf.connect([
            (inputnode, restore_vol, [("in_file", "in_file_a")]),
            (n4, strip, [("output_image", "in_file")]),
            (restore_vol, outputnode, [("out_file", "out_file")]),
            (n4, outputnode, [("output_image", "bias_corrected")]),
        ])
        # fmt: on

    # Connections shared by both branches
    # fmt: off
    wf.connect([
        (strip, restore_vol, [("out_file", "in_file_b")]),
        (inputnode, n4, [("in_file", "input_image")]),
        (restore_vol, make_mask, [("out_file", "in_file")]),
        (make_mask, outputnode, [("out_mask", "out_mask")]),
        (n4, outputnode, [("bias_image", "bias_image")]),
    ])
    # fmt: on
    return wf
# Configure the S3 grabber (created earlier) to pull a single T1w image
# from the public OpenNeuro ds000101 dataset into /tmp.
NodeHash_30bb950.inputs.bucket = 'openneuro'
NodeHash_30bb950.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_30bb950.inputs.local_directory = '/tmp'
NodeHash_30bb950.inputs.sort_filelist = True
NodeHash_30bb950.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'

# Wraps command **N4BiasFieldCorrection** (ANTs INU correction).
NodeHash_1ea4b50 = pe.Node(interface=ants.N4BiasFieldCorrection(), name='NodeName_1ea4b50')
# NOTE(review): copy_header=False means the corrected image may not carry
# the input's NIfTI header/affine; other variants of this pipeline use
# copy_header=True — confirm this is intentional.
NodeHash_1ea4b50.inputs.copy_header = False
NodeHash_1ea4b50.inputs.dimension = 3
NodeHash_1ea4b50.inputs.num_threads = 4
NodeHash_1ea4b50.inputs.save_bias = True

# Wraps command **3dUnifize** (AFNI intensity uniformization).
NodeHash_291d6d0 = pe.Node(interface=afni.Unifize(), name='NodeName_291d6d0')
NodeHash_291d6d0.inputs.outputtype = 'NIFTI_GZ'

# Wraps command **3dSkullStrip** (AFNI skull-stripping).
NodeHash_1ddfa30 = pe.Node(interface=afni.SkullStrip(), name='NodeName_1ddfa30')
NodeHash_1ddfa30.inputs.outputtype = 'NIFTI_GZ'

# Wraps command **3dcalc**: a*step(b) re-applies the stripped footprint (b)
# to the original-intensity volume (a).
NodeHash_3bd6370 = pe.Node(interface=afni.Calc(), name='NodeName_3bd6370')
NodeHash_3bd6370.inputs.expr = 'a*step(b)'
NodeHash_3bd6370.inputs.outputtype = 'NIFTI_GZ'

# Wraps command **fslmaths**: '-bin' binarizes the thresholded image into
# a mask.
NodeHash_49ddb10 = pe.Node(interface=fsl.Threshold(), name='NodeName_49ddb10')
NodeHash_49ddb10.inputs.args = '-bin'