def fmri_bmsk_workflow(name='fMRIBrainMask', use_bet=False):
    """Compute the brain mask of an fMRI dataset.

    :param str name: name of the generated workflow
    :param bool use_bet: use FSL BET followed by one erosion instead of
        AFNI 3dAutomask
    :return: a nipype Workflow exposing ``inputnode.in_file`` (fMRI volume)
        and ``outputnode.out_file`` (binary brain mask)
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                         name='outputnode')

    if not use_bet:
        # AFNI route: 3dAutomask directly on the input
        afni_msk = pe.Node(afni.Automask(
            outputtype='NIFTI_GZ'), name='afni_msk')

        # Connect brain mask extraction
        workflow.connect([
            (inputnode, afni_msk, [('in_file', 'in_file')]),
            (afni_msk, outputnode, [('out_file', 'out_file')])
        ])
    else:
        # FSL route: functional BET, then erode the binary mask once
        from nipype.interfaces.fsl import BET, ErodeImage
        bet_msk = pe.Node(BET(mask=True, functional=True), name='bet_msk')
        erode = pe.Node(ErodeImage(kernel_shape='box', kernel_size=1.0),
                        name='erode')

        # Connect brain mask extraction
        workflow.connect([
            (inputnode, bet_msk, [('in_file', 'in_file')]),
            (bet_msk, erode, [('mask_file', 'in_file')]),
            (erode, outputnode, [('out_file', 'out_file')])
        ])
    return workflow
def test_automask():
    """Verify trait metadata of the ``afni.Automask`` input specification.

    Nose-style generative test: yields one ``assert_equal`` per
    (trait, metadata-key) pair declared in ``input_map``.
    """
    input_map = dict(
        apply_mask=dict(
            argstr='-apply_prefix %s',
            hash_files=False,
            genfile=True,
        ),
        apply_suffix=dict(usedefault=True, ),
        args=dict(argstr='%s', ),
        # BUG FIX: this entry previously declared argstr='-dilate %s',
        # copy/pasted from the ``dilate`` entry below; 3dAutomask's
        # clip-level fraction flag is ``-clfrac``.
        clfrac=dict(argstr='-clfrac %s', ),
        dilate=dict(argstr='-dilate %s', ),
        environ=dict(usedefault=True, ),
        erode=dict(argstr='-erode %s', ),
        ignore_exception=dict(usedefault=True, ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
        ),
        mask_suffix=dict(usedefault=True, ),
        out_file=dict(
            hash_files=False,
            genfile=True,
            argstr='-prefix %s',
        ),
        outputtype=dict(),
    )
    instance = afni.Automask()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(instance.inputs.traits()[key],
                                        metakey), value
def __init__(self, settings):
    """Build the EPI-skullstrip and func→anat registration nodes.

    Parameters
    ----------
    settings : dict
        Pipeline settings; ``settings['num_threads']`` is read for the
        ANTs registration node.
    """
    # call base constructor
    super().__init__(settings)

    # define input/output node
    self.set_input(['refimg', 'T1_skullstrip'])
    self.set_output(['affine_func_2_anat', 'warp_func_2_anat'])

    # define datasink substitutions (strip autogenerated filename suffixes)
    self.set_subs([
        ('_calc_calc_calc_calc_calc', ''),
        ('_roi', '_reference'),
        ('_unwarped_Warped', '_unwarped'),
        ('_masked_calc', '_skullstrip'),
        ('_Warped', '_anat'),
    ])

    # Skullstrip the EPI image: the BET and 3dAutomask masks are combined
    # with 3dcalc expr 'c*or(a,b)' — keep voxels found by either tool
    self.epi_skullstrip = Node(
        fsl.BET(),
        name='epi_skullstrip')
    self.epi_automask = Node(
        afni.Automask(
            args='-overwrite',
            outputtype='NIFTI_GZ'),
        name='epi_automask')
    self.epi_3dcalc = Node(
        afni.Calc(
            expr='c*or(a,b)',
            overwrite=True,
            outputtype='NIFTI_GZ'),
        name='epi_3dcalc')

    # create the output name for the registration
    self.create_prefix = Node(
        Function(
            input_names=['filename'],
            output_names=['basename'],
            function=get_prefix),
        name='create_prefix')

    # align func to anat: two-stage (Rigid then Affine) ANTs registration
    # driven by Mutual Information, multi-resolution pyramids per stage
    self.align_func_2_anat = Node(
        ants.Registration(
            num_threads=settings['num_threads'],
            collapse_output_transforms=False,
            initial_moving_transform_com=1,  # center-of-mass initialization
            write_composite_transform=True,
            initialize_transforms_per_stage=True,
            transforms=['Rigid', 'Affine'],
            transform_parameters=[(0.1, ), (0.1, )],
            metric=['MI', 'MI'],
            metric_weight=[1, 1],
            radius_or_number_of_bins=[32, 32],
            sampling_strategy=['Regular', 'Regular'],
            sampling_percentage=[0.25, 0.25],
            convergence_threshold=[1.e-6, 1.e-8],
            convergence_window_size=[10, 10],
            smoothing_sigmas=[[3, 2, 1, 0], [2, 1, 0]],
            sigma_units=['vox', 'vox'],
            shrink_factors=[[8, 4, 2, 1], [4, 2, 1]],
            number_of_iterations=[[1000, 500, 250, 100], [500, 250, 100]],
            use_estimate_learning_rate_once=[False, True],
            use_histogram_matching=False,
            verbose=True,
            output_warped_image=True),
        name='align_func_2_anat')
    self.align_func_2_anat.n_procs = settings['num_threads']
def init_skullstrip_b0_wf(name='skullstrip_b0_wf'):
    """
    Skull-strip a b0 (DWI) image.

    Intended to run on an image previously bias-corrected with
    :py:func:`~qsiprep.workflows.bold.util.init_enhance_and_skullstrip_bold_wf`.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from qsiprep.workflows.bold.util import init_skullstrip_b0_wf
        wf = init_skullstrip_b0_wf()

    Inputs

        in_file
            b0 image (single volume)

    Outputs

        skull_stripped_file
            the ``in_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_file']), name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=['mask_file', 'skull_stripped_file', 'out_report']),
        name='outputnode')

    # AFNI 3dAutomask with three dilation passes computes the brain mask
    automask_dilate = pe.Node(
        afni.Automask(dilate=3, outputtype='NIFTI_GZ'),
        name='automask_dilate')
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
    mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')

    # Mask estimation
    workflow.connect(inputnode, 'in_file', automask_dilate, 'in_file')
    workflow.connect(automask_dilate, 'out_file', outputnode, 'mask_file')
    # Masked file
    workflow.connect(inputnode, 'in_file', apply_mask, 'in_file')
    workflow.connect(automask_dilate, 'out_file', apply_mask, 'mask_file')
    workflow.connect(apply_mask, 'out_file', outputnode, 'skull_stripped_file')
    # Reportlet
    workflow.connect(inputnode, 'in_file', mask_reportlet, 'background_file')
    workflow.connect(automask_dilate, 'out_file', mask_reportlet, 'mask_file')
    workflow.connect(mask_reportlet, 'out_report', outputnode, 'out_report')

    return workflow
def init_enhance_and_skullstrip_epi_wf(name='enhance_and_skullstrip_epi_wf'):
    """Bias-correct, contrast-enhance and skull-strip an EPI reference image.

    Pipeline: ANTs N4 bias-field correction -> first-pass FSL BET mask ->
    AFNI 3dUnifize contrast enhancement -> second-pass AFNI 3dAutomask.
    The two binary masks are multiplied to produce the final brain mask.

    Inputs: ``inputnode.in_file`` (EPI volume).
    Outputs: ``mask_file``, ``skull_stripped_file``, ``bias_corrected_file``
    and ``out_report`` (mask-overlay reportlet).
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'mask_file', 'skull_stripped_file', 'bias_corrected_file',
        'out_report'
    ]), name='outputnode')

    # Remove the INU bias field before any masking
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                                    copy_header=True),
                         name='n4_correct')
    # First pass: generous BET mask (low frac keeps more tissue)
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    # Standardize the T2* contrast so 3dAutomask behaves consistently
    unifize = pe.Node(afni.Unifize(t2=True, outputtype='NIFTI_GZ',
                                   args='-clfrac 0.4',
                                   out_file="uni.nii.gz"),
                      name='unifize')
    # Second pass: refined AFNI mask with one dilation
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    # Intersection (voxelwise product) of the two binary masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
    mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')

    workflow.connect([
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, unifize, [('out_file', 'in_file')]),
        (unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, combine_masks, [('out_file', 'operand_file')
                                                 ]),
        (unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (n4_correct, mask_reportlet, [('output_image', 'background_file')]),
        (combine_masks, mask_reportlet, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (mask_reportlet, outputnode, [('out_report', 'out_report')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])
    return workflow
def fmri_bmsk_workflow(name='fMRIBrainMask', use_bet=False):
    """
    Computes a brain mask for the input :abbr:`fMRI (functional MRI)` dataset

    .. workflow::

        from mriqc.workflows.functional import fmri_bmsk_workflow
        wf = fmri_bmsk_workflow()


    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                         name='outputnode')

    if use_bet:
        # FSL route: functional BET, then a single erosion of the mask
        bet_msk = pe.Node(fsl.BET(mask=True, functional=True),
                          name='bet_msk')
        erode = pe.Node(fsl.ErodeImage(), name='erode')
        connections = [
            (inputnode, bet_msk, [('in_file', 'in_file')]),
            (bet_msk, erode, [('mask_file', 'in_file')]),
            (erode, outputnode, [('out_file', 'out_file')]),
        ]
    else:
        # AFNI route: 3dAutomask straight on the input series
        afni_msk = pe.Node(afni.Automask(outputtype='NIFTI_GZ'),
                           name='afni_msk')
        connections = [
            (inputnode, afni_msk, [('in_file', 'in_file')]),
            (afni_msk, outputnode, [('out_file', 'out_file')]),
        ]

    # Connect brain mask extraction
    workflow.connect(connections)
    return workflow
def init_mask_finalize_wf(name="mask_finalize_wf"):
    """Creates a final mask using a combination of the t1 mask and dwi2mask
    """
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['t1_mask', 'resampled_b0s']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['mask_file']), name='outputnode')
    workflow = Workflow(name=name)

    # Nearest-neighbour resampling keeps the T1 mask binary on the b0 grid
    resample_t1_mask = pe.Node(
        afni.Resample(outputtype='NIFTI_GZ', resample_mode="NN"),
        name='resample_t1_mask')
    # AFNI automask computed directly on the resampled b0s
    b0mask = pe.Node(afni.Automask(outputtype='NIFTI_GZ'), name='b0mask')
    # Union of both masks: step(a+b) is 1 wherever either input is nonzero
    or_mask = pe.Node(
        afni.Calc(outputtype='NIFTI_GZ', expr='step(a+b)'), name='or_mask')

    workflow.connect(inputnode, 't1_mask', resample_t1_mask, 'in_file')
    workflow.connect(inputnode, 'resampled_b0s', resample_t1_mask, 'master')
    workflow.connect(inputnode, 'resampled_b0s', b0mask, 'in_file')
    workflow.connect(b0mask, 'out_file', or_mask, 'in_file_a')
    workflow.connect(resample_t1_mask, 'out_file', or_mask, 'in_file_b')
    workflow.connect(or_mask, 'out_file', outputnode, 'mask_file')

    return workflow
def make_func_mask_workflow(name='funcmask', base_dir=None):
    """Two-pass functional brain masking (FSL BET, then AFNI 3dAutomask).

    The masks from both passes are multiplied together and the resulting
    mask is applied to the mean image.
    """
    brainmask = Workflow(name=name, base_dir=base_dir)

    inputnode = Node(utility.IdentityInterface(fields=['mean_file']),
                     name='inputnode')
    outputnode = Node(utility.IdentityInterface(fields=['masked_file',
                                                        'mask']),
                      name='outputnode')

    # Pass 1: generous BET on the mean functional image
    skullstrip1 = MapNode(fsl.BET(frac=0.2, mask=True,
                                  output_type='NIFTI_GZ'),
                          name='skullstrip_first_pass',
                          iterfield=['in_file'])
    # Pass 2: AFNI automask (one dilation) on the BET-stripped image
    skullstrip2 = MapNode(afni.Automask(dilate=1, outputtype='NIFTI_GZ'),
                          name='skullstrip_second_pass',
                          iterfield=['in_file'])
    # Voxelwise product = intersection of the two masks
    combine_masks = MapNode(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks',
                            iterfield=['in_file', 'operand_file'])
    apply_mask = MapNode(fsl.ApplyMask(), name='apply_mask',
                         iterfield=['in_file', 'mask_file'])

    brainmask.connect([
        (inputnode, skullstrip1, [('mean_file', 'in_file')]),
        (skullstrip1, skullstrip2, [('out_file', 'in_file')]),
        (skullstrip1, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip2, combine_masks, [('out_file', 'operand_file')]),
        (inputnode, apply_mask, [('mean_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'masked_file')]),
        (combine_masks, outputnode, [('out_file', 'mask')]),
    ])

    return brainmask
# bias field correction biasfield = Node(ants.segmentation.N4BiasFieldCorrection( dimension=3, n_iterations=[150, 100, 50, 30], convergence_threshold=1e-11, bspline_fitting_distance=10, bspline_order=4, shrink_factor=2, output_image='func_median.nii.gz'), name='biasfield') preproc.connect([(median, biasfield, [('median_file', 'input_image')])]) # compute functional mask func_mask = Node(afni.Automask(dilate=1, args='-peels 3', outputtype='NIFTI_GZ', out_file='func_mask.nii.gz'), name='func_mask') preproc.connect([(biasfield, func_mask, [('output_image', 'in_file')])]) # artefact detection artefact = Node(ra.ArtifactDetect(save_plot=True, use_norm=True, parameter_source='NiPy', mask_type='file', norm_threshold=motion_norm, zintensity_threshold=z_thr, use_differences=[True, False]), name='artefact')
name='inputspec2')  # closes the inputnode2 pe.Node call started above
outputnode2 = pe.Node(
    interface=util.IdentityInterface(fields=['result_func']),
    name='outputspec2')
# hard-coded path to the DRIFTER-corrected functional image of this subject
inputnode2.inputs.drifter_result = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'

# Call fslcpgeom source dest, source is reorient output nii.gz file and
# dest is drifter folder nii.gz file (copies geometry/header fields only)
reoriented_file = results_path + '/' + data + '_1/reorient/corr_epi_reoriented.nii.gz'
drifted_file = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'
call(["fslcpgeom", reoriented_file, drifted_file])

# AFNI skullstrip and mean image skullstrip:
# temporal mean -> 3dAutomask on the mean -> apply mask to the 4D series
# ('a*b') -> temporal mean of the masked series
tstat1 = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"),
                 name='tstat1')
automask = pe.Node(interface=afni.Automask(dilate=1, outputtype="NIFTI_GZ"),
                   name='automask')
skullstrip = pe.Node(interface=afni.Calc(expr='a*b', outputtype="NIFTI_GZ"),
                     name='skullstrip')
tstat2 = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"),
                 name='tstat2')
workflow2.connect(inputnode2, 'drifter_result', tstat1, 'in_file')
workflow2.connect(tstat1, 'out_file', automask, 'in_file')
workflow2.connect(automask, 'out_file', skullstrip, 'in_file_b')
workflow2.connect(inputnode2, 'drifter_result', skullstrip, 'in_file_a')
workflow2.connect(skullstrip, 'out_file', tstat2, 'in_file')

# Remove n (3) first volumes (discard pre-steady-state frames)
trim = pe.Node(interface=Trim(begin_index=3), name='trim')
# temporal mean of the functional series
mean = fsl.MeanImage(dimension='T', in_file=in_file, out_file=mean_file)
mean.run()

print("Bias field correction")
# NOTE(review): dimension=2 runs N4 slice-wise; sibling pipelines in this
# file use dimension=3 — confirm this is intentional
biasfield = ants.N4BiasFieldCorrection(dimension=2,
                                       n_iterations=[150, 100, 50, 30],
                                       convergence_threshold=1e-11,
                                       bspline_fitting_distance=10,
                                       bspline_order=4,
                                       shrink_factor=2,
                                       input_image=mean_file,
                                       output_image=bias_file)
biasfield.run()

print("Compute mask")
# AFNI 3dAutomask on the bias-corrected mean image
mask = afni.Automask(outputtype='NIFTI_GZ',
                     in_file=bias_file,
                     out_file=mask_file)
mask.run()

print("Fixing headers, saving data")
# re-save all derivatives with the affine of the original input so every
# image shares one header (get_data() is deprecated in modern nibabel;
# get_fdata() would change the dtype, so it is left as-is here)
aff = nb.load(in_file).affine
orig_data = nb.load(in_file).get_data()
bias_data = nb.load(bias_file).get_data()
mask_data = nb.load(mask_file).get_data()
nb.save(nb.Nifti1Image(mask_data, aff), mask_file)
nb.save(nb.Nifti1Image(bias_data, aff), bias_file)
nb.save(nb.Nifti1Image(orig_data, aff), ts_file)
# drop the intermediate masked image produced by 3dAutomask
os.remove("func_bias_corr_masked.nii.gz")
ds = pe.Node(nio.DataSink(), name="datasink") #ds.run_without_submitting = True", ds.inputs.base_directory = resultsdir datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'sess_id'], outfields=['EPI_bandpassed']), name="datasource") #grabs data datasource.inputs.base_directory = '/afs/cbs.mpg.de/projects/mar004_lsd-lemon-preproc/probands/' datasource.inputs.template = '%s/preprocessed/lsd_resting/%s/rest_preprocessed2mni.nii.gz' datasource.inputs.template_args['EPI_bandpassed'] = [['subject_id', 'sess_id']] datasource.inputs.sort_filelist = True wf.connect(subjects_infosource, "subject_id", datasource, "subject_id") wf.connect(sess_infosource, "sess_id", datasource, "sess_id") automask = pe.Node(interface=afni.Automask(), name='automask') automask.inputs.dilate = 1 automask.inputs.outputtype = "NIFTI_GZ" wf.connect(datasource, 'EPI_bandpassed', automask, 'in_file') wf.connect(automask, 'out_file', ds, '@automask') #extract rois with spheres sphere = pe.Node(afni.Calc(), name="sphere") sphere.inputs.in_file_a = fsl.Info.standard_image( '/usr/share/fsl/data/standard/MNI152_T1_2mm_brain.nii.gz') sphere.inputs.outputtype = 'NIFTI_GZ' def roi2exp(coord): radius = 4 return "step((%d*%d)-(x+%d)*(x+%d)-(y+%d)*(y+%d)-(z+%d)*(z+%d))" % (
def init_enhance_and_skullstrip_asl_wf(
    brainmask_thresh=0.5,
    name="enhance_and_skullstrip_asl_wf",
    omp_nthreads=1,
    pre_mask=False,
):
    """
    Enhance and run brain extraction on an ASL image.

    This workflow takes in a :abbr:`ASL (Arterial Spin Labeling)`
    average/summary (e.g., a reference image averaging non-steady-state
    timepoints), and sharpens the histogram with the application of the N4
    algorithm for removing the :abbr:`INU (intensity non-uniformity)` bias
    field and calculates a signal mask.

    Steps of this workflow are:

      1. Calculate a tentative mask by registering (9-parameters) to
         *fMRIPrep*'s :abbr:`EPI (echo-planar imaging)` -*aslref* template,
         which is in MNI space. The tentative mask is obtained by resampling
         the MNI template's brainmask into *aslref*-space.
      2. Binary dilation of the tentative mask with a sphere of 3mm diameter.
      3. Run ANTs' ``N4BiasFieldCorrection`` on the input
         :abbr:`ASL (arterial spin labeling)` average, using the mask
         generated in 1) instead of the internal Otsu thresholding.
      4. Calculate a loose mask using FSL's ``bet``, with one mathematical
         morphology dilation of one iteration and a sphere of 6mm as
         structuring element.
      5. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image with
         the latest mask calculated in 3), then use AFNI's ``3dUnifize`` to
         *standardize* the T2* contrast distribution.
      6. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 4).
      7. Calculate a final mask as the intersection of 4) and 6).
      8. Apply final mask on the enhanced reference.

    Step 1 can be skipped if the ``pre_mask`` argument is set to ``True`` and
    a tentative mask is passed in to the workflow through the ``pre_mask``
    Nipype input.

    Workflow graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from niworkflows.func.util import init_enhance_and_skullstrip_asl_wf
            wf = init_enhance_and_skullstrip_asl_wf(omp_nthreads=1)

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    Parameters
    ----------
    brainmask_thresh: :obj:`float`
        Lower threshold for the probabilistic brainmask to obtain
        the final binary mask (default: 0.5). Currently unused — the
        binarization node that consumed it is commented out below.
    name : str
        Name of workflow (default: ``enhance_and_skullstrip_asl_wf``)
    omp_nthreads : int
        number of threads available to parallel nodes
    pre_mask : bool
        Indicates whether the ``pre_mask`` input will be set (and thus, step 1
        should be skipped).

    Inputs
    ------
    in_file : str
        ASL image (single volume)
    pre_mask : bool
        A tentative brain mask to initialize the workflow (requires
        ``pre_mask`` parameter set ``True``).

    Outputs
    -------
    bias_corrected_file : str
        the ``in_file`` after `N4BiasFieldCorrection`_
    skull_stripped_file : str
        the ``bias_corrected_file`` after skull-stripping
    mask_file : str
        mask of the skull-stripped input file

    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file", "pre_mask"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["mask_file", "skull_stripped_file",
                    "bias_corrected_file"]),
        name="outputnode",
    )

    # Ensure mask's header matches reference's
    #check_hdr = pe.Node(MatchHeader(), name="check_hdr", run_without_submitting=True)

    # Run N4 normally, force num_threads=1 for stability (images are small,
    # no need for >1)
    # BUG FIX: ``shrink_factor=2`` was previously passed to ``pe.Node``
    # (raising TypeError at construction) instead of to
    # N4BiasFieldCorrection, where it belongs.
    n4_correct = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            copy_header=True,
            bspline_fitting_distance=200,
            shrink_factor=2,
        ),
        name="n4_correct",
        n_procs=1,
    )
    n4_correct.inputs.rescale_intensities = True

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name="skullstrip_first_pass")
    bet_dilate = pe.Node(
        fsl.DilateImage(
            operation="max",
            kernel_shape="sphere",
            kernel_size=6.0,
            internal_datatype="char",
        ),
        name="skullstrip_first_dilate",
    )
    bet_mask = pe.Node(fsl.ApplyMask(), name="skullstrip_first_mask")

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype="NIFTI_GZ",
            # Default -clfrac is 0.1, 0.4 was too conservative
            # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
            args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
            out_file="uni.nii.gz",
        ),
        name="unifize",
    )
    fixhdr_unifize = pe.Node(CopyXForm(), name="fixhdr_unifize", mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype="NIFTI_GZ"),
                                     name="skullstrip_second_pass")
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name="fixhdr_skullstrip2",
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation="mul"),
                            name="combine_masks")

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name="apply_mask")
    #binarize_mask = pe.Node(Binarize(thresh_low=brainmask_thresh), name="binarize_mask")

    # fmt: off
    workflow.connect([
        # NOTE(review): the raw in_file doubles as N4's mask_image — confirm
        (inputnode, n4_correct, [("in_file", "mask_image")]),
        (inputnode, n4_correct, [("in_file", "input_image")]),
        (inputnode, fixhdr_unifize, [("in_file", "hdr_file")]),
        (inputnode, fixhdr_skullstrip2, [("in_file", "hdr_file")]),
        (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
        (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]),
        (bet_dilate, bet_mask, [("out_file", "mask_file")]),
        (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]),
        (bet_mask, unifize, [("out_file", "in_file")]),
        (unifize, fixhdr_unifize, [("out_file", "in_file")]),
        (fixhdr_unifize, skullstrip_second_pass, [("out_file", "in_file")]),
        (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [("out_file", "in_file")
                                                      ]),
        (fixhdr_skullstrip2, combine_masks, [("out_file", "operand_file")]),
        (fixhdr_unifize, apply_mask, [("out_file", "in_file")]),
        (combine_masks, apply_mask, [("out_file", "mask_file")]),
        (combine_masks, outputnode, [("out_file", "mask_file")]),
        (apply_mask, outputnode, [("out_file", "skull_stripped_file")]),
        (n4_correct, outputnode, [("output_image", "bias_corrected_file")]),
    ])
    # fmt: on
    return workflow
workflow2.base_dir = '.'
inputnode2 = pe.Node(
    interface=util.IdentityInterface(fields=['drifter_result']),
    name='inputspec2')
outputnode2 = pe.Node(
    interface=util.IdentityInterface(fields=['result_func']),
    name='outputspec2')
# hard-coded path to the DRIFTER-corrected functional image of this subject
inputnode2.inputs.drifter_result = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'

# Call fslcpgeom source dest, source is reorient output nii.gz file and dest
# is drifter folder nii.gz file (copies geometry/header fields only)
reoriented_file = results_path + '/' + data + '_1/reorient/corr_epi_reoriented.nii.gz'
drifted_file = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'
call(["fslcpgeom", reoriented_file, drifted_file])

# AFNI skullstrip and mean image skullstrip:
# temporal mean -> 3dAutomask on the mean -> mask the 4D series -> new mean
mean_epi = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"),
                   name='mean_epi')
skullstrip3D = pe.Node(interface=afni.Automask(dilate=1,
                                               outputtype="NIFTI_GZ"),
                       name='skullstrip3D')
# BUG FIX: the closing quote of the node name was a typographic quote (’),
# which is a syntax error.
skullstrip4D = pe.Node(interface=afni.Calc(expr='a*b',
                                           outputtype="NIFTI_GZ"),
                       name='skullstrip4D')
mean_epi_brain = pe.Node(interface=afni.TStat(args='-mean',
                                              outputtype="NIFTI_GZ"),
                         name='mean_epi_brain')
workflow2.connect(inputnode2, 'drifter_result', mean_epi, 'in_file')
workflow2.connect(mean_epi, 'out_file', skullstrip3D, 'in_file')
# BUG FIX: the following connections referenced an undefined ``skullstrip``
# node (stale name from an earlier revision); the masking node here is
# ``skullstrip4D``.
workflow2.connect(skullstrip3D, 'out_file', skullstrip4D, 'in_file_b')
workflow2.connect(inputnode2, 'drifter_result', skullstrip4D, 'in_file_a')
workflow2.connect(skullstrip4D, 'out_file', mean_epi_brain, 'in_file')

# Remove n (3) first volumes
# NOTE(review): the node variable shadows the ``remove3vol`` interface class;
# legal (the RHS is evaluated before rebinding) but the class becomes
# unreachable afterwards — consider renaming the variable.
remove3vol = pe.Node(interface=remove3vol(begin_index=3), name='remove3vol')
workflow2.connect(skullstrip4D, 'out_file', remove3vol, 'in_file')

# Spatial smoothing, kernel sigma 2.00 mm (5 mm is too much)
#smooth = pe.Node(interface=fsl.maths.SpatialFilter(operation='mean', terminal_output='stream', kernel_shape='gauss', kernel_size=1.5, nan2zeros=True), name='smooth')
def init_enhance_and_skullstrip_bold_wf(name='enhance_and_skullstrip_bold_wf',
                                        omp_nthreads=1,
                                        enhance_t2=False):
    """
    This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependant)`
    :abbr:`fMRI (functional MRI)` average/summary (e.g. a reference image
    averaging non-steady-state timepoints), and sharpens the histogram with
    the application of the N4 algorithm for removing the
    :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
    mask.

    Steps of this workflow are:

      1. Calculate a conservative mask using Nilearn's ``create_epi_mask``.
      2. Run ANTs' ``N4BiasFieldCorrection`` on the input
         :abbr:`BOLD (blood-oxygen level-dependant)` average, using the
         mask generated in 1) instead of the internal Otsu thresholding.
      3. Calculate a loose mask using FSL's ``bet``, with one mathematical
         morphology dilation of one iteration and a sphere of 6mm as
         structuring element.
      4. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
         with the latest mask calculated in 3), then use AFNI's ``3dUnifize``
         to *standardize* the T2* contrast distribution.
      5. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 4).
      6. Calculate a final mask as the intersection of 3) and 5).
      7. Apply final mask on the enhanced reference.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold.util import init_enhance_and_skullstrip_bold_wf
        wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=1)

    **Parameters**
        name : str
            Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
        omp_nthreads : int
            number of threads available to parallel nodes
        enhance_t2 : bool
            perform logarithmic transform of input BOLD image to improve contrast
            before calculating the preliminary mask

    **Inputs**

        in_file
            BOLD image (single volume)

    **Outputs**

        bias_corrected_file
            the ``in_file`` after `N4BiasFieldCorrection`_
        skull_stripped_file
            the ``bias_corrected_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['mask_file', 'skull_stripped_file', 'bias_corrected_file']),
        name='outputnode')

    # Create a loose mask to avoid N4 internal's Otsu mask
    n4_mask = pe.Node(MaskEPI(upper_cutoff=0.75, enhance_t2=enhance_t2,
                              opening=1, no_sanitize=True),
                      name='n4_mask')

    # Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                                    copy_header=True),
                         name='n4_correct', n_procs=1)

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    bet_dilate = pe.Node(fsl.DilateImage(operation='max',
                                         kernel_shape='sphere',
                                         kernel_size=6.0,
                                         internal_datatype='char'),
                         name='skullstrip_first_dilate')
    bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype='NIFTI_GZ',
            # Default -clfrac is 0.1, 0.4 was too conservative
            # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
            args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
            out_file="uni.nii.gz"),
        name='unifize')
    fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2',
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    workflow.connect([
        (inputnode, n4_mask, [('in_file', 'in_files')]),
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
        (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
        (n4_mask, n4_correct, [('out_mask', 'mask_image')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
        (bet_dilate, bet_mask, [('out_file', 'mask_file')]),
        (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
        (bet_mask, unifize, [('out_file', 'in_file')]),
        (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
        (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [('out_file', 'in_file')
                                                      ]),
        (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
        (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])
    return workflow
def init_skullstrip_b0_wf(name='skullstrip_b0_wf', use_t1_prior=False,
                          use_initial_mask=False):
    """
    This workflow applies fancy skull-stripping to a DWI image.

    It is intended to be used on an image that has previously been
    bias-corrected and enhanced with
    :py:func:`~qsiprep.workflows.dwi.util.init_enhance_and_skullstrip_dwi_wf`

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from qsiprep.workflows.bold.util import init_skullstrip_b0_wf
        wf = init_skullstrip_b0_wf()

    Inputs

        in_file
            b0 image (single volume)
        initial_dwi_mask
            A rough mask from a prior pipeline
        t1_prior_mask
            A brain mask from a co-registered T1 in the same voxel grid as
            in_file

    Outputs

        skull_stripped_file
            the ``in_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping

    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 't1_prior_mask', 'initial_dwi_mask']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['mask_file', 'skull_stripped_file', 'out_report']),
        name='outputnode')

    # Pad by 10 voxels so the morphological operations behave near the FoV
    # edge; the same operation with a negative argument unpads at the end
    pad_image = pe.Node(ImageMath(dimension=3, operation="PadImage",
                                  secondary_arg="10"),
                        name="pad_image")
    unpad_image = pe.Node(ImageMath(dimension=3, operation="PadImage",
                                    secondary_arg="-10"),
                          name="unpad_image")

    if use_initial_mask:
        # Seed the pipeline with a mask from an earlier pipeline
        workflow.connect([(inputnode, pad_image,
                           [('initial_dwi_mask', 'in_file')])])
    else:
        # Otherwise bootstrap with AFNI 3dAutomask on the b0 itself
        initial_mask = pe.Node(afni.Automask(outputtype="NIFTI_GZ"),
                               name="initial_mask")
        workflow.connect([(inputnode, initial_mask, [('in_file', 'in_file')]),
                          (initial_mask, pad_image, [('out_file', 'in_file')])
                          ])

    # Morphology chain: erode(2) -> keep largest component -> dilate(4)
    # -> fill holes; the order is load-bearing
    erode1 = pe.Node(ImageMath(dimension=3, operation="ME",
                               secondary_arg="2"),
                     name="erode1")
    get_largest = pe.Node(ImageMath(dimension=3,
                                    operation="GetLargestComponent"),
                          name='get_largest')
    dilate1 = pe.Node(ImageMath(dimension=3, operation="MD",
                                secondary_arg="4"),
                      name='dilate1')
    fill_holes = pe.Node(ImageMath(dimension=3, operation="FillHoles",
                                   secondary_arg="2"),
                         name='fill_holes')
    # Final closing-like step: dilate(5) then erode(7)
    dilate2 = pe.Node(ImageMath(dimension=3, operation="MD",
                                secondary_arg="5"),
                      name='dilate2')
    erode2 = pe.Node(ImageMath(dimension=3, operation="ME",
                               secondary_arg="7"),
                     name="erode2")
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    # Do the prior-less parts
    workflow.connect([(pad_image, erode1, [('out_file', 'in_file')]),
                      (erode1, get_largest, [('out_file', 'in_file')]),
                      (get_largest, dilate1, [('out_file', 'in_file')]),
                      (dilate1, fill_holes, [('out_file', 'in_file')])])

    # Add in a t1 prior if requested
    if use_t1_prior:
        pad_t1 = pe.Node(ImageMath(dimension=3, operation="PadImage",
                                   secondary_arg="10"),
                         name="pad_t1")
        # 'addtozero' merges the padded T1 mask into zero voxels of the mask
        add_t1_prior = pe.Node(ImageMath(dimension=3, operation="addtozero"),
                               name="add_t1_prior")
        workflow.connect([
            (inputnode, pad_t1, [('t1_prior_mask', 'in_file')]),
            (pad_t1, add_t1_prior, [('out_file', 'secondary_file')]),
            (fill_holes, add_t1_prior, [('out_file', 'in_file')]),
            (add_t1_prior, dilate2, [('out_file', 'in_file')])
        ])
    else:
        workflow.connect(fill_holes, 'out_file', dilate2, 'in_file')

    workflow.connect([
        (dilate2, erode2, [('out_file', 'in_file')]),
        (erode2, unpad_image, [('out_file', 'in_file')]),
        (unpad_image, outputnode, [('out_file', 'mask_file')]),
        (inputnode, apply_mask, [('in_file', 'in_file')]),
        (unpad_image, apply_mask, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
    ])
    return workflow
def init_enhance_and_skullstrip_dwi_wf(name='enhance_and_skullstrip_dwi_wf',
                                       do_biascorrection=True,
                                       omp_nthreads=1):
    """
    https://community.mrtrix.org/t/dwibiascorrect-with-ants-high-intensity-in-cerebellum-brainstem/1338/3

    Truncates image intensities, runs N4, creates a rough initial mask

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from qsiprep.workflows.dwi.util import init_enhance_and_skullstrip_dwi_wf
        wf = init_enhance_and_skullstrip_dwi_wf(omp_nthreads=1)

    **Parameters**
        name : str
            Name of workflow (default: ``enhance_and_skullstrip_dwi_wf``)
        do_biascorrection : Bool
            Do bias correction on ``in_file``?
        omp_nthreads : int
            number of threads available to parallel nodes

    **Inputs**

        in_file
            dwi image (single volume)

    **Outputs**

        bias_corrected_file
            the ``in_file`` after N4BiasFieldCorrection and sharpening
        skull_stripped_file
            the ``bias_corrected_file`` after soft skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping
    """
    # NOTE(review): ``do_biascorrection`` and ``omp_nthreads`` are accepted but
    # not referenced anywhere in this body — N4 always runs, single-threaded.
    # Confirm whether the parameters were meant to gate/scale those nodes.
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['mask_file', 'skull_stripped_file', 'bias_corrected_file']),
        name='outputnode')

    # Truncate intensity values so they're OK for N4
    truncate_values = pe.Node(ImageMath(dimension=3,
                                        operation="TruncateImageIntensity",
                                        secondary_arg="0.0 0.98 512"),
                              name="truncate_values")

    # Truncate intensity values for creating a mask
    # (there are many high outliers in b=0 images)
    truncate_values_for_masking = pe.Node(ImageMath(
        dimension=3,
        operation="TruncateImageIntensity",
        secondary_arg="0.0 0.9 512"),
        name="truncate_values_for_masking")

    # N4 will break if any negative values are present.
    rescale_image = pe.Node(ImageMath(dimension=3,
                                      operation="RescaleImage",
                                      secondary_arg="0 1000"),
                            name="rescale_image")

    # Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(
        dimension=3,
        n_iterations=[200, 200],
        convergence_threshold=1e-6,
        bspline_order=3,
        bspline_fitting_distance=150,
        copy_header=True,
        args='-v 1'),
        name='n4_correct', n_procs=1)

    # Sharpen the b0 ref
    sharpen_image = pe.Node(ImageMath(dimension=3, operation="Sharpen"),
                            name="sharpen_image")

    # Basic mask
    initial_mask = pe.Node(afni.Automask(outputtype="NIFTI_GZ"),
                           name="initial_mask")

    # Fill holes left by Automask
    fill_holes = pe.Node(ImageMath(dimension=3, operation='FillHoles',
                                   secondary_arg='2'), name='fill_holes')

    # Dilate before smoothing
    dilate_mask = pe.Node(ImageMath(dimension=3, operation='MD',
                                    secondary_arg='1'), name='dilate_mask')

    # Smooth the mask and use it as a weight for N4
    smooth_mask = pe.Node(ImageMath(dimension=3, operation='G',
                                    secondary_arg='4'), name='smooth_mask')

    # Make a "soft" skull-stripped image
    apply_mask = pe.Node(ants.MultiplyImages(
        dimension=3, output_product_image="SkullStrippedRef.nii.gz"),
        name="apply_mask")

    # ImageMath can drop/alter the NIfTI header; copy it back from the
    # reference images before handing results downstream.
    fix_mask_header = pe.Node(CopyHeader(), name='fix_mask_header')
    fix_smooth_mask_header = pe.Node(CopyHeader(),
                                     name='fix_smooth_mask_header')

    workflow.connect([
        (inputnode, truncate_values, [('in_file', 'in_file')]),
        (truncate_values, rescale_image, [('out_file', 'in_file')]),
        (inputnode, truncate_values_for_masking, [('in_file', 'in_file')]),
        (truncate_values_for_masking, initial_mask, [('out_file', 'in_file')]),
        (initial_mask, fill_holes, [('out_file', 'in_file')]),
        (fill_holes, dilate_mask, [('out_file', 'in_file')]),
        (dilate_mask, smooth_mask, [('out_file', 'in_file')]),
        (rescale_image, n4_correct, [('out_file', 'input_image')]),
        (rescale_image, fix_smooth_mask_header, [('out_file', 'hdr_file')]),
        (smooth_mask, fix_smooth_mask_header, [('out_file', 'in_file')]),
        # The smoothed mask weights N4's bias-field fit
        (fix_smooth_mask_header, n4_correct, [('out_file', 'weight_image')]),
        (n4_correct, sharpen_image, [('output_image', 'in_file')]),
        (sharpen_image, outputnode, [('out_file', 'bias_corrected_file')]),
        # Soft skull-strip = sharpened image x smooth mask
        (sharpen_image, apply_mask, [('out_file', 'first_input')]),
        (smooth_mask, apply_mask, [('out_file', 'second_input')]),
        (apply_mask, outputnode, [('output_product_image',
                                   'skull_stripped_file')]),
        (fill_holes, fix_mask_header, [('out_file', 'in_file')]),
        (sharpen_image, fix_mask_header, [('out_file', 'hdr_file')]),
        (fix_mask_header, outputnode, [('out_file', 'mask_file')])
    ])
    return workflow
def init_skullstrip_bold_wf(name="skullstrip_bold_wf"):
    """
    Apply skull-stripping to a BOLD image.

    It is intended to be used on an image that has previously been
    bias-corrected with
    :py:func:`~niworkflows.func.util.init_enhance_and_skullstrip_bold_wf`

    Two independent masks are computed — a liberal FSL ``BET`` mask and an
    AFNI ``3dAutomask`` mask of the BET output — and their intersection is
    taken as the final brain mask, applied to the input, and rendered as a
    reportlet.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from niworkflows.func.util import init_skullstrip_bold_wf
            wf = init_skullstrip_bold_wf()

    Inputs
    ------
    in_file : str
        BOLD image (single volume)

    Outputs
    -------
    skull_stripped_file : str
        the ``in_file`` after skull-stripping
    mask_file : str
        mask of the skull-stripped input file
    out_report : str
        reportlet for the skull-stripping

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["mask_file", "skull_stripped_file", "out_report"]),
        name="outputnode",
    )

    # Pass 1: generous BET mask; pass 2: 3dAutomask refines the BET output
    bet_node = pe.Node(fsl.BET(frac=0.2, mask=True),
                       name="skullstrip_first_pass")
    automask_node = pe.Node(afni.Automask(dilate=1, outputtype="NIFTI_GZ"),
                            name="skullstrip_second_pass")
    # Multiplying two binary masks yields their intersection
    intersect_node = pe.Node(fsl.BinaryMaths(operation="mul"),
                             name="combine_masks")
    masker_node = pe.Node(fsl.ApplyMask(), name="apply_mask")
    report_node = pe.Node(SimpleShowMaskRPT(), name="mask_reportlet")

    # Mask generation
    workflow.connect([
        (inputnode, bet_node, [("in_file", "in_file")]),
        (bet_node, automask_node, [("out_file", "in_file")]),
        (bet_node, intersect_node, [("mask_file", "in_file")]),
        (automask_node, intersect_node, [("out_file", "operand_file")]),
        (intersect_node, outputnode, [("out_file", "mask_file")]),
    ])
    # Masked file
    workflow.connect([
        (inputnode, masker_node, [("in_file", "in_file")]),
        (intersect_node, masker_node, [("out_file", "mask_file")]),
        (masker_node, outputnode, [("out_file", "skull_stripped_file")]),
    ])
    # Reportlet
    workflow.connect([
        (inputnode, report_node, [("in_file", "background_file")]),
        (intersect_node, report_node, [("out_file", "mask_file")]),
        (report_node, outputnode, [("out_report", "out_report")]),
    ])

    return workflow
def init_enhance_and_skullstrip_bold_wf(name='enhance_and_skullstrip_bold_wf',
                                        pre_mask=False,
                                        omp_nthreads=1):
    """
    This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependant)`
    :abbr:`fMRI (functional MRI)` average/summary (e.g., a reference image
    averaging non-steady-state timepoints), and sharpens the histogram
    with the application of the N4 algorithm for removing the
    :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
    mask.

    Steps of this workflow are:

      1. Calculate a tentative mask by registering (9-parameters) to *fMRIPrep*'s
         :abbr:`EPI (echo-planar imaging)` -*boldref* template, which
         is in MNI space.
         The tentative mask is obtained by resampling the MNI template's
         brainmask into *boldref*-space.
      2. Binary dilation of the tentative mask with a sphere of 3mm diameter.
      3. Run ANTs' ``N4BiasFieldCorrection`` on the input
         :abbr:`BOLD (blood-oxygen level-dependant)` average, using the
         mask generated in 1) instead of the internal Otsu thresholding.
      4. Calculate a loose mask using FSL's ``bet``, with one mathematical morphology
         dilation of one iteration and a sphere of 6mm as structuring element.
      5. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
         with the latest mask calculated in 3), then use AFNI's ``3dUnifize``
         to *standardize* the T2* contrast distribution.
      6. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 4).
      7. Calculate a final mask as the intersection of 4) and 6).
      8. Apply final mask on the enhanced reference.

    Step 1 can be skipped if the ``pre_mask`` argument is set to ``True`` and
    a tentative mask is passed in to the workflow through the ``pre_mask``
    Nipype input.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from niworkflows.func.util import init_enhance_and_skullstrip_bold_wf
        wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=1)

    **Parameters**
        name : str
            Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
        pre_mask : bool
            Indicates whether the ``pre_mask`` input will be set (and thus, step 1
            should be skipped).
        omp_nthreads : int
            number of threads available to parallel nodes

    **Inputs**

        in_file
            BOLD image (single volume)
        pre_mask : bool
            A tentative brain mask to initialize the workflow (requires ``pre_mask``
            parameter set ``True``).

    **Outputs**

        bias_corrected_file
            the ``in_file`` after `N4BiasFieldCorrection`_
        skull_stripped_file
            the ``bias_corrected_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053
    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['mask_file', 'skull_stripped_file', 'bias_corrected_file']),
        name='outputnode')

    # Dilate pre_mask
    pre_dilate = pe.Node(fsl.DilateImage(
        operation='max', kernel_shape='sphere',
        kernel_size=3.0, internal_datatype='char'),
        name='pre_mask_dilate')

    # Ensure mask's header matches reference's
    check_hdr = pe.Node(MatchHeader(), name='check_hdr',
                        run_without_submitting=True)

    # Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(
        dimension=3, copy_header=True, bspline_fitting_distance=200),
        name='n4_correct', n_procs=1)

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    bet_dilate = pe.Node(fsl.DilateImage(
        operation='max', kernel_shape='sphere',
        kernel_size=6.0, internal_datatype='char'),
        name='skullstrip_first_dilate')
    bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype='NIFTI_GZ',
            # Default -clfrac is 0.1, 0.4 was too conservative
            # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
            args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
            out_file="uni.nii.gz"),
        name='unifize')
    fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2',
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    if not pre_mask:
        # No tentative mask supplied: derive one by registering to the
        # templateflow boldref template and resampling its brain mask back.
        bold_template = get_template('MNI152NLin2009cAsym', resolution=2,
                                     desc='fMRIPrep', suffix='boldref')
        brain_mask = get_template('MNI152NLin2009cAsym', resolution=2,
                                  desc='brain', suffix='mask')

        # Initialize transforms with antsAI
        init_aff = pe.Node(AI(
            fixed_image=str(bold_template),
            fixed_image_mask=str(brain_mask),
            metric=('Mattes', 32, 'Regular', 0.2),
            transform=('Affine', 0.1),
            search_factor=(20, 0.12),
            principal_axes=False,
            convergence=(10, 1e-6, 10),
            verbose=True),
            name='init_aff', n_procs=omp_nthreads)

        # Registration().version may be None
        if parseversion(Registration().version or '0.0.0') > Version('2.2.0'):
            init_aff.inputs.search_grid = (40, (0, 40, 40))

        # Set up spatial normalization
        norm = pe.Node(Registration(from_file=pkgr_fn(
            'fmriprep.data', 'epi_atlasbased_brainmask.json')),
            name='norm', n_procs=omp_nthreads)
        norm.inputs.fixed_image = str(bold_template)
        map_brainmask = pe.Node(
            ApplyTransforms(interpolation='MultiLabel', float=True,
                            input_image=str(brain_mask)),
            name='map_brainmask')
        workflow.connect([
            (inputnode, init_aff, [('in_file', 'moving_image')]),
            (inputnode, map_brainmask, [('in_file', 'reference_image')]),
            (inputnode, norm, [('in_file', 'moving_image')]),
            (init_aff, norm, [('output_transform',
                               'initial_moving_transform')]),
            # Reverse transforms: template-space mask back to boldref space
            (norm, map_brainmask, [
                ('reverse_invert_flags', 'invert_transform_flags'),
                ('reverse_transforms', 'transforms')]),
            (map_brainmask, pre_dilate, [('output_image', 'in_file')]),
        ])
    else:
        workflow.connect([
            (inputnode, pre_dilate, [('pre_mask', 'in_file')]),
        ])

    workflow.connect([
        (inputnode, check_hdr, [('in_file', 'reference')]),
        (pre_dilate, check_hdr, [('out_file', 'in_file')]),
        (check_hdr, n4_correct, [('out_file', 'mask_image')]),
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
        (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
        (bet_dilate, bet_mask, [('out_file', 'mask_file')]),
        (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
        (bet_mask, unifize, [('out_file', 'in_file')]),
        (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
        (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [('out_file',
                                                       'in_file')]),
        (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
        (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])

    return workflow
# define working dir work_dir = os.path.join(data_dir, subject_id, 'preprocessed/func') os.chdir(work_dir) rest_mni = os.path.join(work_dir, 'rest_preprocessed2mni.nii.gz') # define working dir for smoothing work_dir_smooth = os.path.join(os.getcwd(), 'smooth') if not os.path.exists(work_dir_smooth): os.makedirs(work_dir_smooth) os.chdir(work_dir_smooth) # Step#1 get a brain mask for func2mni image from nipype.interfaces import afni as afni automask = afni.Automask() automask.inputs.in_file = rest_mni automask.inputs.outputtype = "NIFTI_GZ" automask.run() # Step#2 smooth func2mni image from nipype.interfaces.fsl import maths smooth = maths.IsotropicSmooth() smooth.inputs.in_file = rest_mni smooth.inputs.fwhm = 6 smooth.run() ## Step#3 mask the smoothed image from nipype.interfaces.fsl import maths maskApp = maths.ApplyMask() maskApp.inputs.in_file = 'rest_preprocessed2mni_smooth.nii.gz'
def init_dwi_reference_wf(mem_gb, omp_nthreads, name="dwi_reference_wf"):
    """
    Build a workflow that generates a reference :math:`b = 0` image from a DWI dataset.

    To generate the reference *b0*, this workflow takes in a DWI dataset,
    extracts the b0s, registers them to each other, rescales the signal
    intensity values, and calculates a median image.

    Then, the reference *b0* and its skull-stripped version are generated using
    a custom methodology adapted from *niworkflows*.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from dmriprep.workflows.dwi.util import init_dwi_reference_wf
            wf = init_dwi_reference_wf(mem_gb=0.01, omp_nthreads=1)
            wf.inputs.inputnode.b0_ixs=[0]

    Parameters
    ----------
    mem_gb : float
        Memory (GB) reserved for the image-validation node
    omp_nthreads : int
        Maximum number of threads an individual process may use
    name : str
        Name of workflow (default: ``dwi_reference_wf``)

    Inputs
    ------
    dwi_file
        dwi NIfTI file
    b0_ixs : list
        index of b0s in dwi NIfTI file

    Outputs
    -------
    dwi_file
        Validated dwi NIfTI file
    raw_ref_image
        Reference image
    ref_image
        Contrast-enhanced reference image
    ref_image_brain
        Skull-stripped reference image
    dwi_mask
        Skull-stripping mask of reference image
    validation_report
        HTML reportlet indicating whether ``dwi_file`` had a valid affine

    See Also
    --------
    * :py:func:`~dmriprep.workflows.dwi.util.init_enhance_and_skullstrip_wf`

    """
    from niworkflows.interfaces.header import ValidateImage

    from ...interfaces.images import ExtractB0, RescaleB0

    workflow = Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=["dwi_file", "b0_ixs"]), name="inputnode"
    )
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=[
                "dwi_file",
                "raw_ref_image",
                "ref_image",
                "ref_image_brain",
                "dwi_mask",
                "validation_report",
            ]
        ),
        name="outputnode",
    )

    # Check the affine/orientation before doing anything else
    validate = pe.Node(ValidateImage(), name="validate", mem_gb=mem_gb)

    extract_b0 = pe.Node(ExtractB0(), name="extract_b0")

    # Align the extracted b0 volumes to the first one (sinc interpolation)
    reg_b0 = pe.Node(fsl.MCFLIRT(ref_vol=0, interpolation="sinc"),
                     name="reg_b0")

    # Rough mask used both for rescaling and as the pre-mask for the
    # enhance-and-skullstrip sub-workflow
    pre_mask = pe.Node(afni.Automask(dilate=1, outputtype="NIFTI_GZ"),
                       name="pre_mask")
    rescale_b0 = pe.Node(RescaleB0(), name="rescale_b0")

    enhance_and_skullstrip_dwi_wf = init_enhance_and_skullstrip_dwi_wf(
        omp_nthreads=omp_nthreads
    )

    # fmt:off
    workflow.connect([
        (inputnode, validate, [("dwi_file", "in_file")]),
        (validate, extract_b0, [("out_file", "in_file")]),
        (inputnode, extract_b0, [("b0_ixs", "b0_ixs")]),
        (extract_b0, reg_b0, [("out_file", "in_file")]),
        (reg_b0, pre_mask, [("out_file", "in_file")]),
        (reg_b0, rescale_b0, [("out_file", "in_file")]),
        (pre_mask, rescale_b0, [("out_file", "mask_file")]),
        (rescale_b0, enhance_and_skullstrip_dwi_wf, [("out_ref", "inputnode.in_file")]),
        (pre_mask, enhance_and_skullstrip_dwi_wf, [("out_file", "inputnode.pre_mask")]),
        (validate, outputnode, [("out_file", "dwi_file"),
                                ("out_report", "validation_report")]),
        (rescale_b0, outputnode, [("out_ref", "raw_ref_image")]),
        (enhance_and_skullstrip_dwi_wf, outputnode, [
            ("outputnode.bias_corrected_file", "ref_image"),
            ("outputnode.mask_file", "dwi_mask"),
            ("outputnode.skull_stripped_file", "ref_image_brain"),
        ]),
    ])
    # fmt:on
    return workflow
def init_enhance_and_skullstrip_dwi_wf(
    name="enhance_and_skullstrip_dwi_wf", omp_nthreads=1
):
    """
    Enhance a *b0* reference and perform brain extraction.

    This workflow takes in a *b0* reference image and sharpens the histogram
    with the application of the N4 algorithm for removing the
    :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
    mask.

    Steps of this workflow are:

      1. Run ANTs' ``N4BiasFieldCorrection`` on the input
         dwi reference image and mask.
      2. Calculate a loose mask using FSL's ``bet``, with one mathematical morphology
         dilation of one iteration and a sphere of 6mm as structuring element.
      3. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
         with the latest mask calculated in 2), then use AFNI's ``3dUnifize``
         to *standardize* the T2* contrast distribution.
      4. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 3).
      5. Calculate a final mask as the intersection of 2) and 4).
      6. Apply final mask on the enhanced reference.

    Workflow Graph:
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from dmriprep.workflows.dwi.util import init_enhance_and_skullstrip_dwi_wf
            wf = init_enhance_and_skullstrip_dwi_wf(omp_nthreads=1)

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    Parameters
    ----------
    name : str
        Name of workflow (default: ``enhance_and_skullstrip_dwi_wf``)
    omp_nthreads : int
        number of threads available to parallel nodes

    Inputs
    ------
    in_file
        The *b0* reference (single volume)
    pre_mask
        initial mask

    Outputs
    -------
    bias_corrected_file
        the ``in_file`` after `N4BiasFieldCorrection`_
    skull_stripped_file
        the ``bias_corrected_file`` after skull-stripping
    mask_file
        mask of the skull-stripped input file
    out_report
        reportlet for the skull-stripping

    """
    from niworkflows.interfaces.header import CopyXForm
    from niworkflows.interfaces.fixes import (
        FixN4BiasFieldCorrection as N4BiasFieldCorrection,
    )
    from niworkflows.interfaces.nibabel import ApplyMask

    workflow = Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=["in_file", "pre_mask"]), name="inputnode"
    )
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["mask_file", "skull_stripped_file", "bias_corrected_file"]
        ),
        name="outputnode",
    )

    # Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
    # FIX: ``shrink_factor=2`` was previously passed to ``pe.Node(...)``, whose
    # ``__init__`` silently swallows unknown keyword arguments, so it never
    # reached N4 (which then ran at its default shrink factor). It belongs to
    # the N4BiasFieldCorrection interface itself.
    n4_correct = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            copy_header=True,
            bspline_fitting_distance=200,
            shrink_factor=2,
        ),
        name="n4_correct",
        n_procs=1,
    )
    n4_correct.inputs.rescale_intensities = True

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(
        fsl.BET(frac=0.2, mask=True), name="skullstrip_first_pass"
    )
    bet_dilate = pe.Node(
        fsl.DilateImage(
            operation="max",
            kernel_shape="sphere",
            kernel_size=6.0,
            internal_datatype="char",
        ),
        name="skullstrip_first_dilate",
    )
    bet_mask = pe.Node(fsl.ApplyMask(), name="skullstrip_first_mask")

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype="NIFTI_GZ",
            args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
            out_file="uni.nii.gz",
        ),
        name="unifize",
    )
    fixhdr_unifize = pe.Node(CopyXForm(), name="fixhdr_unifize", mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(
        afni.Automask(dilate=1, outputtype="NIFTI_GZ"),
        name="skullstrip_second_pass"
    )
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name="fixhdr_skullstrip2",
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation="mul"),
                            name="combine_masks")

    # Normalize the N4 output before exposing/masking it
    normalize = pe.Node(niu.Function(function=_normalize), name="normalize")

    # Compute masked brain
    apply_mask = pe.Node(ApplyMask(), name="apply_mask")

    # fmt:off
    workflow.connect([
        (inputnode, n4_correct, [("in_file", "input_image"),
                                 ("pre_mask", "mask_image")]),
        (inputnode, fixhdr_unifize, [("in_file", "hdr_file")]),
        (inputnode, fixhdr_skullstrip2, [("in_file", "hdr_file")]),
        (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
        (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]),
        (bet_dilate, bet_mask, [("out_file", "mask_file")]),
        (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]),
        (bet_mask, unifize, [("out_file", "in_file")]),
        (unifize, fixhdr_unifize, [("out_file", "in_file")]),
        (fixhdr_unifize, skullstrip_second_pass, [("out_file", "in_file")]),
        (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [("out_file", "in_file")]),
        (fixhdr_skullstrip2, combine_masks, [("out_file", "operand_file")]),
        (combine_masks, apply_mask, [("out_file", "in_mask")]),
        (combine_masks, outputnode, [("out_file", "mask_file")]),
        (n4_correct, normalize, [("output_image", "in_file")]),
        (normalize, apply_mask, [("out", "in_file")]),
        (normalize, outputnode, [("out", "bias_corrected_file")]),
        (apply_mask, outputnode, [("out_file", "skull_stripped_file")]),
    ])
    # fmt:on
    return workflow
def base_preproc(trim_realign=True,name='rsfmri_base_preproc'):
    """Build a baseline resting-state fMRI preprocessing workflow.

    Realigns (optional), masks, motion-filters and bandpass-smooths an fMRI
    series.

    Parameters
    ----------
    trim_realign : bool
        If True, run MCFLIRT realignment and voxelwise motion regression
        before filtering; if False, filter/mask the input series directly.
    name : str
        Name of the workflow (default: ``rsfmri_base_preproc``).

    Inputs (inputspec): fmri, t1, t1_mask.
    Outputs (outputspec): preprocessed, mask, mean, motion.

    NOTE(review): 't1' and 't1_mask' inputs, and the 'motion' output in the
    ``trim_realign=False`` branch, are never connected in this body —
    confirm whether callers rely on them.
    """

    inputnode = pe.Node(
        utility.IdentityInterface(
            fields=['fmri','t1','t1_mask']),
        name='inputspec')
    outputnode = pe.Node(
        utility.IdentityInterface(
            fields=['preprocessed','mask','mean','motion']),
        name='outputspec')

#    n_trim = pe.Node(
#        interface=nipypp.Trim(begin_index=3),
#        name='trim')

    # Motion correction to the first volume, keeping mean image and
    # parameter/rms/matrix outputs for the motion-filter stage
    n_realign = pe.Node(
        fsl.MCFLIRT(ref_vol=0,
                    mean_vol=True,
                    save_plots=True,
                    save_rms=True,
                    save_mats=True,
                    stats_imgs=True,),
        name='realign')

    n_mean = pe.Node(fsl.MeanImage(),name='mean')

    # AFNI 3dAutomask provides the brain mask used throughout
    n_mask = pe.Node(
        interface=afni.Automask(
            out_file='%s_mask.nii',
#            brain_file=Undefined,
            outputtype='NIFTI'),
        name='mask')

    # Multiply the mean image by the mask to get a skull-stripped mean
    n_mask_mean = pe.Node(
        interface=fsl.ImageMaths(op_string='-mul', suffix='_brain',
                                 output_type='NIFTI'),
        name='mask_mean')

    n_segment_epi = pe.Node(
        fsl.FAST(
            img_type=2,
            number_classes=3,
            probability_maps=True,
            segments=True),
        name='segment_epi')

    #linear with shear/scale in phase direction
    n_coregister_linear = pe.Node(
        afni.Allineate(epi=True, args='-float',cost='nmi',
                       out_param_file='params.1D',
                       out_matrix='coregister.mat'),
        name='coregister_linear')

    n_coregister_gray_linear = pe.Node(
        afni.Allineate(epi=True, args='-float',cost='nmi',
                       out_param_file='params.1D',
                       out_matrix='coregister.mat'),
        name='coregister_gray_linear')

    n_smooth = pe.Node(
        afni.BlurInMask(fwhm=5.0,
                        out_file='%s_smooth',
                        float_out=True),
        name = 'smooth')

    # Bandpass filter with in-node despiking and 5mm blur
    n_bandpass_smooth = pe.Node(
        afni.Bandpass(highpass=0.005,
                      lowpass=999999,
                      despike=True,
                      blur=5.0,
                      normalize=False,
                      out_file='%s_filt.nii.gz'),
        name='bandpass_smooth')

    # Regress voxelwise translation (+ backward derivatives) out of the series
    n_motion_filter = pe.Node(
        interface = nipypp.RegressOutMotion(
            motion_source='fsl',
            regressors_type='voxelwise_translation',
            global_signal = False,
            prefix = 'mfilt_',
            regressors_transform='original+bw_derivatives'),
        name = 'motion_filter')

#    spm_path = spm.Info().version()['path']
#    epi_tpl = os.path.join(spm_path, 'templates/EPI.nii')
    """
    n_normalize = pe.Node(
        spm.Normalize(template=epi_tpl,
                      source_image_smoothing=8,
                      template_image_smoothing=0,
                      DCT_period_cutoff=25,
                      affine_regularization_type='mni',
                      jobtype='est'),
        name='normalize')
    """
    n_motion_estimates = pe.Node(
        nipyutils.MotionEstimate(motion_source='fsl'),
        name='motion_estimates')

    w=pe.Workflow(name=name)

    if trim_realign:
        w.connect([
#            (inputnode, n_trim, [('fmri','in_file')]),
#            (inputnode, n_trim, [('fmri','in_file_a'),
#                                  (('fmri',n_volumes,-1),'stop_idx')]),
#            (n_trim, n_realign, [('out_file','in_file')]),
            (inputnode, n_realign, [('fmri','in_file')]),
#            (inputnode, n_realign, [('fmri','in_file')]),
            (n_realign, n_motion_filter, [('out_file','in_file'),
                                          ('par_file','motion')]),
            (n_mask, n_motion_filter,[('out_file','mask')]),
            (n_motion_filter, n_bandpass_smooth, [('out_file','in_file')]),
            (n_realign, n_mask, [('out_file','in_file')]),
            (n_realign, n_mask_mean, [('mean_img', 'in_file')]),
            (n_realign, n_motion_estimates,[('par_file','motion')]),
            (n_mask, n_motion_estimates,[('out_file','mask')]),
            (n_realign, outputnode, [('par_file','motion')]),
            ])
    else:
        # No realignment: feed the raw series straight into mask/filter
        w.connect([
            (inputnode, n_bandpass_smooth, [('fmri','in_file')]),
            (inputnode, n_mean, [('fmri','in_file')]),
            (inputnode, n_mask, [('fmri','in_file')]),
            (n_mean, n_mask_mean, [('out_file', 'in_file')]),
            ])

    w.connect([
        (n_mask, n_mask_mean, [('out_file', 'in_file2')]),
        (n_mask, n_bandpass_smooth, [('out_file','mask')]),
#        (n_mask_mean, n_segment_epi, [('out_file','in_files')]),
#        (n_mask_mean, n_normalize, [('out_file','source')]),

#        (n_detrend, n_smooth, [('out_file','in_file')]),
#        (n_mask, n_smooth, [('out_file', 'mask')]),
#        (n_smooth, outputnode, [('out_file','preprocessed')]),
        (n_bandpass_smooth, outputnode, [('out_file','preprocessed')]),
        (n_mask, outputnode, [('out_file','mask')]),
        (n_mask_mean, outputnode, [('out_file','mean')]),
        ])

    return w
def init_masking_wf(name='mask_wf', derivatives='/derivatives', num_threads=8):
    """Build an MP2RAGE masking workflow.

    Bias-corrects INV2 with N4, derives a brain mask (nighres skull-strip
    refined by BET + AFNI Automask), estimates and thresholds a dura mask,
    masks the T1map and T1w images, and sinks the results as derivatives.

    Parameters
    ----------
    name : str
        Workflow name (default ``mask_wf``).
    derivatives : str
        Base directory for the DerivativesDataSink nodes.
    num_threads : int
        Threads handed to N4BiasFieldCorrection.

    Inputs: inv2, t1w, t1map, manual_inside, manual_outside.
    """
    wf = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['inv2',
                                                      't1w',
                                                      't1map',
                                                      'manual_inside',
                                                      'manual_outside'], ),
                        name='inputnode')

    # Bias-correct INV2 before any masking is attempted
    n4 = pe.Node(ants.N4BiasFieldCorrection(copy_header=True,
                                            num_threads=num_threads),
                 name='n4')

    bet = pe.Node(fsl.BET(mask=True, skull=True), name='bet')

    nighres_brain_extract = pe.Node(
        niu.Function(function=nighres_skullstrip,
                     input_names=['inv2', 't1w', 't1map'],
                     output_names=['brainmask']),
        name='nighres_brain_extract')

    dura_masker = pe.Node(
        niu.Function(function=nighres_dura_masker,
                     input_names=['inv2', 'inv2_mask'],
                     output_names=['duramask']),
        name='dura_masker')

    # Refined brain mask from the BET output
    afni_mask = pe.Node(afni.Automask(outputtype='NIFTI_GZ', clfrac=0.5),
                        name='afni_mask')

    # Binarize the probabilistic dura mask at 0.8
    threshold_dura = pe.Node(fsl.Threshold(thresh=.8, args='-bin'),
                             name='threshold_dura')

    mask_t1map = pe.Node(fsl.ApplyMask(), name='mask_t1map')

    t1w_masker = pe.Node(
        niu.Function(function=mask_t1w,
                     input_names=['t1w', 'inv2', 't1w_mask', 'dura_mask',
                                  'manual_inside', 'manual_outside'],
                     output_names=['out_file', 'brain_mask']),
        name='t1w_masker')

    # Datasinks for the masked images and masks
    ds_t1map = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                           keep_dtype=False,
                                           out_path_base='masked_mp2rages',
                                           suffix='T1map',
                                           desc='masked',
                                           space='average'),
                       name='ds_t1map')
    ds_t1w = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                         keep_dtype=False,
                                         out_path_base='masked_mp2rages',
                                         desc='masked',
                                         suffix='T1w'),
                     name='ds_t1w')
    ds_dura = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                          keep_dtype=False,
                                          out_path_base='masked_mp2rages',
                                          desc='dura',
                                          suffix='mask'),
                      name='ds_dura')
    ds_brainmask = pe.Node(DerivativesDataSink(base_directory=derivatives,
                                               keep_dtype=False,
                                               out_path_base='masked_mp2rages',
                                               desc='brainmask',
                                               suffix='mask'),
                           name='ds_brainmask')

    # Wire everything with grouped, list-style connections
    wf.connect([
        (inputnode, n4, [('inv2', 'input_image')]),
        (n4, bet, [('output_image', 'in_file')]),
        (n4, nighres_brain_extract, [('output_image', 'inv2')]),
        (inputnode, nighres_brain_extract, [('t1w', 't1w'),
                                            ('t1map', 't1map')]),
        (n4, dura_masker, [('output_image', 'inv2')]),
        (nighres_brain_extract, dura_masker, [('brainmask', 'inv2_mask')]),
        (bet, afni_mask, [('out_file', 'in_file')]),
        (dura_masker, threshold_dura, [('duramask', 'in_file')]),
        (inputnode, mask_t1map, [('t1map', 'in_file')]),
        (afni_mask, mask_t1map, [('out_file', 'mask_file')]),
        (inputnode, t1w_masker, [('t1w', 't1w'),
                                 ('manual_inside', 'manual_inside'),
                                 ('manual_outside', 'manual_outside')]),
        (n4, t1w_masker, [('output_image', 'inv2')]),
        (afni_mask, t1w_masker, [('out_file', 't1w_mask')]),
        (threshold_dura, t1w_masker, [('out_file', 'dura_mask')]),
        (inputnode, ds_t1map, [('t1map', 'source_file')]),
        (mask_t1map, ds_t1map, [('out_file', 'in_file')]),
        (inputnode, ds_t1w, [('t1w', 'source_file')]),
        (t1w_masker, ds_t1w, [('out_file', 'in_file')]),
        (inputnode, ds_dura, [('t1w', 'source_file')]),
        (dura_masker, ds_dura, [('duramask', 'in_file')]),
        (inputnode, ds_brainmask, [('t1w', 'source_file')]),
        (t1w_masker, ds_brainmask, [('brain_mask', 'in_file')]),
    ])

    return wf
def init_enhance_and_skullstrip_bold_wf(
    brainmask_thresh=0.5,
    name="enhance_and_skullstrip_bold_wf",
    omp_nthreads=1,
    pre_mask=False,
):
    """
    Enhance and run brain extraction on a BOLD EPI image.

    This workflow takes in a :abbr:`BOLD (blood-oxygen level-dependant)`
    :abbr:`fMRI (functional MRI)` average/summary (e.g., a reference image
    averaging non-steady-state timepoints), and sharpens the histogram
    with the application of the N4 algorithm for removing the
    :abbr:`INU (intensity non-uniformity)` bias field and calculates a signal
    mask.

    Steps of this workflow are:

      1. Calculate a tentative mask by registering (9-parameters) to *fMRIPrep*'s
         :abbr:`EPI (echo-planar imaging)` -*boldref* template, which
         is in MNI space.
         The tentative mask is obtained by resampling the MNI template's
         brainmask into *boldref*-space.
      2. Binary dilation of the tentative mask with a sphere of 3mm diameter.
      3. Run ANTs' ``N4BiasFieldCorrection`` on the input
         :abbr:`BOLD (blood-oxygen level-dependant)` average, using the
         mask generated in 1) instead of the internal Otsu thresholding.
      4. Calculate a loose mask using FSL's ``bet``, with one mathematical morphology
         dilation of one iteration and a sphere of 6mm as structuring element.
      5. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
         with the latest mask calculated in 3), then use AFNI's ``3dUnifize``
         to *standardize* the T2* contrast distribution.
      6. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 4).
      7. Calculate a final mask as the intersection of 4) and 6).
      8. Apply final mask on the enhanced reference.

    Step 1 can be skipped if the ``pre_mask`` argument is set to ``True`` and
    a tentative mask is passed in to the workflow through the ``pre_mask``
    Nipype input.

    Workflow graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from niworkflows.func.util import init_enhance_and_skullstrip_bold_wf
            wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=1)

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    Parameters
    ----------
    brainmask_thresh: :obj:`float`
        Lower threshold for the probabilistic brainmask to obtain
        the final binary mask (default: 0.5).
    name : str
        Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
    omp_nthreads : int
        number of threads available to parallel nodes
    pre_mask : bool
        Indicates whether the ``pre_mask`` input will be set (and thus, step 1
        should be skipped).

    Inputs
    ------
    in_file : str
        BOLD image (single volume)
    pre_mask : bool
        A tentative brain mask to initialize the workflow (requires ``pre_mask``
        parameter set ``True``).

    Outputs
    -------
    bias_corrected_file : str
        the ``in_file`` after `N4BiasFieldCorrection`_
    skull_stripped_file : str
        the ``bias_corrected_file`` after skull-stripping
    mask_file : str
        mask of the skull-stripped input file
    out_report : str
        reportlet for the skull-stripping

    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_file", "pre_mask"]),
                        name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["mask_file", "skull_stripped_file", "bias_corrected_file"]),
        name="outputnode",
    )

    # Dilate pre_mask
    pre_dilate = pe.Node(
        fsl.DilateImage(
            operation="max",
            kernel_shape="sphere",
            kernel_size=3.0,
            internal_datatype="char",
        ),
        name="pre_mask_dilate",
    )

    # Ensure mask's header matches reference's
    check_hdr = pe.Node(MatchHeader(), name="check_hdr",
                        run_without_submitting=True)

    # Run N4 normally, force num_threads=1 for stability (images are small,
    # no need for >1).
    # FIX: ``shrink_factor`` is an input of the N4BiasFieldCorrection
    # interface; it was previously passed as a keyword to ``pe.Node``, which
    # does not accept it and would raise a TypeError at construction time.
    n4_correct = pe.Node(
        N4BiasFieldCorrection(dimension=3, copy_header=True,
                              bspline_fitting_distance=200,
                              shrink_factor=2),
        name="n4_correct",
        n_procs=1,
    )
    n4_correct.inputs.rescale_intensities = True

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name="skullstrip_first_pass")
    bet_dilate = pe.Node(
        fsl.DilateImage(
            operation="max",
            kernel_shape="sphere",
            kernel_size=6.0,
            internal_datatype="char",
        ),
        name="skullstrip_first_dilate",
    )
    bet_mask = pe.Node(fsl.ApplyMask(), name="skullstrip_first_mask")

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype="NIFTI_GZ",
            # Default -clfrac is 0.1, 0.4 was too conservative
            # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
            args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
            out_file="uni.nii.gz",
        ),
        name="unifize",
    )
    fixhdr_unifize = pe.Node(CopyXForm(), name="fixhdr_unifize", mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype="NIFTI_GZ"),
                                     name="skullstrip_second_pass")
    fixhdr_skullstrip2 = pe.Node(CopyXForm(), name="fixhdr_skullstrip2",
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation="mul"),
                            name="combine_masks")

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name="apply_mask")

    if not pre_mask:
        from ..interfaces.nibabel import Binarize

        bold_template = get_template("MNI152NLin2009cAsym", resolution=2,
                                     desc="fMRIPrep", suffix="boldref")
        brain_mask = get_template("MNI152NLin2009cAsym", resolution=2,
                                  desc="brain", suffix="mask")

        # Initialize transforms with antsAI
        init_aff = pe.Node(
            AI(
                fixed_image=str(bold_template),
                fixed_image_mask=str(brain_mask),
                metric=("Mattes", 32, "Regular", 0.2),
                transform=("Affine", 0.1),
                search_factor=(20, 0.12),
                principal_axes=False,
                convergence=(10, 1e-6, 10),
                verbose=True,
            ),
            name="init_aff",
            n_procs=omp_nthreads,
        )

        # Registration().version may be None
        if parseversion(Registration().version or "0.0.0") > Version("2.2.0"):
            init_aff.inputs.search_grid = (40, (0, 40, 40))

        # Set up spatial normalization
        norm = pe.Node(
            Registration(from_file=pkgr_fn("niworkflows.data",
                                           "epi_atlasbased_brainmask.json")),
            name="norm",
            n_procs=omp_nthreads,
        )
        norm.inputs.fixed_image = str(bold_template)
        map_brainmask = pe.Node(
            ApplyTransforms(
                interpolation="BSpline",
                float=True,
                # Use the higher resolution and probseg for numerical
                # stability in rounding
                input_image=str(
                    get_template(
                        "MNI152NLin2009cAsym",
                        resolution=1,
                        label="brain",
                        suffix="probseg",
                    )),
            ),
            name="map_brainmask",
        )
        binarize_mask = pe.Node(Binarize(thresh_low=brainmask_thresh),
                                name="binarize_mask")

        # fmt: off
        workflow.connect([
            (inputnode, init_aff, [("in_file", "moving_image")]),
            (inputnode, map_brainmask, [("in_file", "reference_image")]),
            (inputnode, norm, [("in_file", "moving_image")]),
            (init_aff, norm, [("output_transform", "initial_moving_transform")]),
            (norm, map_brainmask, [
                ("reverse_invert_flags", "invert_transform_flags"),
                ("reverse_transforms", "transforms"),
            ]),
            (map_brainmask, binarize_mask, [("output_image", "in_file")]),
            (binarize_mask, pre_dilate, [("out_mask", "in_file")]),
        ])
        # fmt: on
    else:
        # fmt: off
        workflow.connect([
            (inputnode, pre_dilate, [("pre_mask", "in_file")]),
        ])
        # fmt: on

    # fmt: off
    workflow.connect([
        (inputnode, check_hdr, [("in_file", "reference")]),
        (pre_dilate, check_hdr, [("out_file", "in_file")]),
        (check_hdr, n4_correct, [("out_file", "mask_image")]),
        (inputnode, n4_correct, [("in_file", "input_image")]),
        (inputnode, fixhdr_unifize, [("in_file", "hdr_file")]),
        (inputnode, fixhdr_skullstrip2, [("in_file", "hdr_file")]),
        (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
        (skullstrip_first_pass, bet_dilate, [("mask_file", "in_file")]),
        (bet_dilate, bet_mask, [("out_file", "mask_file")]),
        (skullstrip_first_pass, bet_mask, [("out_file", "in_file")]),
        (bet_mask, unifize, [("out_file", "in_file")]),
        (unifize, fixhdr_unifize, [("out_file", "in_file")]),
        (fixhdr_unifize, skullstrip_second_pass, [("out_file", "in_file")]),
        (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [("out_file", "in_file")]),
        (fixhdr_skullstrip2, combine_masks, [("out_file", "operand_file")]),
        (fixhdr_unifize, apply_mask, [("out_file", "in_file")]),
        (combine_masks, apply_mask, [("out_file", "mask_file")]),
        (combine_masks, outputnode, [("out_file", "mask_file")]),
        (apply_mask, outputnode, [("out_file", "skull_stripped_file")]),
        (n4_correct, outputnode, [("output_image", "bias_corrected_file")]),
    ])
    # fmt: on

    return workflow
datasource = pe.Node(nio.DataGrabber( infields=['subject_id'], outfields=['EPI_bandpassed', "EPI_full_spectrum"]), name="datasource") datasource.inputs.base_directory = "/scr/kalifornien1/mindwandering/results/" datasource.inputs.template = '%s/smri/warped_image/fwhm_6.0/*_afni_%s_wtsimt.nii.gz' datasource.inputs.template_args['EPI_bandpassed'] = [[ 'subject_id', "bandpassed" ]] datasource.inputs.template_args['EPI_full_spectrum'] = [[ 'subject_id', "fullspectrum" ]] datasource.inputs.sort_filelist = True wf.connect(subjects_infosource, "subject_id", datasource, "subject_id") epi_mask = pe.Node(interface=afni.Automask(), name="epi_mask") wf.connect(datasource, "EPI_bandpassed", epi_mask, "in_file") wf.connect(epi_mask, 'out_file', ds, "functional_mask") reho = create_reho() reho.inputs.inputspec.cluster_size = 27 reho.inputs.inputspec.rest_mask = "/scr/kalifornien1/mindwandering/workingdir/group_analysis/restrict_to_grey/group_mask_masked.nii.gz" #wf.connect(epi_mask, "out_file", reho, "inputspec.rest_mask") #reho.inputs.inputspec.rest_mask = "/SCR/MNI152_T1_2mm_ones.nii.gz" wf.connect(datasource, "EPI_bandpassed", reho, "inputspec.rest_res_filt") wf.connect(reho, 'outputspec.z_score', ds, "reho_z") alff = create_alff() alff.inputs.hp_input.hp = 0.01 alff.inputs.lp_input.lp = 0.1 alff.inputs.inputspec.rest_mask = "/scr/kalifornien1/mindwandering/workingdir/group_analysis/restrict_to_grey/group_mask_masked.nii.gz"
def nonlinear_alignment_iteration(iternum=0, gradient_step=0.2):
    """
    Run one iteration of iterative nonlinear template building.

    Takes a template image and a set of input images, does
    a nonlinear alignment of each image to the template, and updates the
    template with the average of the registered images, corrected by the
    inverse of the average affine and a scaled average warp (the standard
    ANTs template "shape update" step).

    Parameters
    ----------
    iternum : int
        Iteration number; used only to build unique workflow/node names.
    gradient_step : float
        Scaling factor applied to the average warp before updating the
        template.

    Returns
    -------
    iteration_wf : nipype Workflow
        Inputs: ``image_paths``, ``template_image``, ``iteration_num``.
        Outputs: ``registered_image_paths``, ``affine_transforms``,
        ``warp_transforms``, ``composite_transforms``, ``updated_template``.
    """
    iteration_wf = Workflow(name="nl_iterative_alignment_%03d" % iternum)
    input_node_fields = ["image_paths", "template_image", "iteration_num"]
    inputnode = pe.Node(
        niu.IdentityInterface(fields=input_node_fields), name='inputnode')
    inputnode.inputs.iteration_num = iternum
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["registered_image_paths",
                                      "affine_transforms",
                                      "warp_transforms",
                                      "composite_transforms",
                                      "updated_template"]),
        name='outputnode')

    # Nonlinear registration settings shipped with the package
    ants_settings = pkgrf("qsiprep", "data/intramodal_nonlinear.json")
    reg = ants.Registration(from_file=ants_settings)
    iter_reg = pe.MapNode(
        reg, name="nlreg_%03d" % iternum, iterfield=["moving_image"])

    # Average the images
    averaged_images = pe.Node(
        ants.AverageImages(normalize=True, dimension=3),
        name="averaged_images")

    # Make an automask
    # NOTE(review): this node is created but never connected below — it is
    # dead as written; confirm whether it was meant to mask the average.
    mask_average = pe.Node(afni.Automask(), name='mask_average')

    # Shape update to template:
    # Average the affines so that the inverse can be applied to the template
    affines_to_list = pe.Node(niu.Merge(1), name="affines_to_list")
    warps_to_list = pe.Node(niu.Merge(1), name="warps_to_list")
    avg_affines = pe.Node(
        ants.AverageAffineTransform(
            dimension=3,
            output_affine_transform="AveragedAffines.mat"),
        name="avg_affines")

    # Average the warps:
    average_warps = pe.Node(
        ants.AverageImages(dimension=3, normalize=False), name="average_warps")
    # Scale by the gradient step
    scale_warp = pe.Node(
        ants.MultiplyImages(dimension=3, second_input=gradient_step,
                            output_product_image="scaled_warp.nii.gz"),
        name="scale_warp")
    # Align the warps to the template image (inverse of the average affine)
    align_warp = pe.Node(
        ants.ApplyTransforms(
            input_image_type=1, invert_transform_flags=[True]),
        name="align_warp")

    # transform the template for the shape update: one inverted affine
    # followed by the (scaled, aligned) warp applied four times — the order
    # of flags must match the 5-entry merge below.
    shape_update_template = pe.Node(
        ants.ApplyTransforms(
            interpolation="LanczosWindowedSinc",
            invert_transform_flags=[True, False, False, False, False]),
        name="shape_update_template")
    shape_update_merge = pe.Node(niu.Merge(5), name="shape_update_merge")

    # Run the images through antsRegistration
    # Helpers to split forward_transforms (affine, warp) pairs per image
    def get_first(input_pairs):
        return [input_pair[0] for input_pair in input_pairs]

    def get_second(input_pairs):
        return [input_pair[1] for input_pair in input_pairs]

    iteration_wf.connect([
        (inputnode, iter_reg, [
            ('image_paths', 'moving_image'),
            ('template_image', 'fixed_image')]),
        (iter_reg, affines_to_list, [(('forward_transforms', get_first),
                                      'in1')]),
        (affines_to_list, avg_affines, [('out', 'transforms')]),
        (iter_reg, warps_to_list, [(('forward_transforms', get_second),
                                    'in1')]),
        (iter_reg, averaged_images, [('warped_image', 'images')]),
        # Average the warps, scale them, and transform to be aligned with
        # the template
        (warps_to_list, average_warps, [('out', 'images')]),
        (average_warps, scale_warp, [('output_average_image', 'first_input')]),
        (scale_warp, align_warp, [
            ('output_product_image', 'input_image')]),
        (avg_affines, align_warp, [('affine_transform', 'transforms')]),
        (inputnode, align_warp, [('template_image', 'reference_image')]),
        (avg_affines, shape_update_merge, [('affine_transform', 'in1')]),
        (align_warp, shape_update_merge, [
            ('output_image', 'in2'), ('output_image', 'in3'),
            ('output_image', 'in4'), ('output_image', 'in5')]),
        (shape_update_merge, shape_update_template, [('out', 'transforms')]),
        (averaged_images, shape_update_template, [
            ('output_average_image', 'input_image'),
            ('output_average_image', 'reference_image')]),
        (shape_update_template, outputnode, [('output_image',
                                              'updated_template')]),
        (iter_reg, outputnode, [
            ('forward_transforms', 'affine_transforms'),
            ('warped_image', 'registered_image_paths')])
    ])
    return iteration_wf
def prepro_func(i):
    """Preprocess the two functional runs of one subject (Python 2 code).

    For each session in the hard-coded list, locates the raw EPIs, the
    preprocessed anatomical and its segmentations (from session1), builds a
    nipype workflow per acquisition (trim dummies, motion+slice-timing
    correction, reorientation, skullstrip, registration to anatomy and MNI,
    nuisance regression with motion/CSF/WM/(global) signals and censoring),
    and runs it, writing results under ``results_path``.

    NOTE(review): relies on many module-level globals defined elsewhere in
    this file (``data_path``, ``anat_path``, ``results_path``,
    ``acquisitions``, ``n_dummies``, ``mytr``, ``censor_thr``, ``overwrite``,
    ``global_reg``, ``MNI_brain``, ``ventricle_mask``, ``myminf``,
    ``mymaxf``, and helper functions). The trailing bare ``except`` swallows
    every error (including KeyboardInterrupt) — it only prints a traceback.
    """
    try:
        subj = i
        for s in (['session2']):
            # Define input files: 2xfMRI + 1xMPRAGE
            func1 = data_path + subj + '/Functional_scans/' + s[:-2] + s[
                -1] + '_a/epi.nii.gz'  #choose this for patients
            func2 = data_path + subj + '/Functional_scans/' + s[:-2] + s[
                -1] + '_b/epi.nii.gz'  #choose this for patients
            #anat = glob.glob(anat_path + subj +'/'+ s + '/anat/reorient/anat_*.nii.gz') #choose this for session 1
            lesion_mask_file = anat_path + subj + '/session1/anat/reorient/lesion_seg.nii.gz'
            old_lesion_mask_file = glob.glob(
                anat_path + subj +
                '/session1/anat/reorient/old_lesion_seg.nii.gz'
            )  #choose this for ones with no old lesion
            #old_lesion_mask_file = anat_path + subj +'/session1/anat/reorient/old_lesion_seg.nii.gz' #choose this for ones with old lesion
            anat = glob.glob(anat_path + subj + '/' + s +
                             '/anat/anat2hr/anat_*.nii.gz'
                             )  #choose this for sessions 2 and 3
            anat_CSF = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_0.nii.gz'
            )  # don't change, same for all sessions
            anat_WM = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_2.nii.gz'
            )  # don't change, same for all sessions
            anat_GM = glob.glob(
                anat_path + subj +
                '/session1/seg_anat/segmentation/anat_*_pve_1.nii.gz'
            )  # don't change, same for all sessions
            anat2MNI_fieldwarp = glob.glob(
                anat_path + subj +
                '/session1/anat/nonlinear_reg/anat_*_fieldwarp.nii.gz'
            )  # don't change, same for all sessions

            # --- Input validation: skip the session on any missing file ---
            if not os.path.isdir(data_path + subj + '/' + s):  # No data exists
                continue

            if not os.path.isfile(func1):
                print '1. functional file ' + func1 + ' not found. Skipping!'
                continue

            if not os.path.isfile(func2):
                print '2. functional file ' + func2 + ' not found. Skipping!'
                continue

            if not anat:
                print 'Preprocessed anatomical file not found. Skipping!'
                continue
            if len(anat) > 1:
                print 'WARNING: found multiple files of preprocessed anatomical image!'
                continue
            anat = anat[0]

            if not anat2MNI_fieldwarp:
                print 'Anatomical registration to MNI152-space field file not found. Skipping!'
                continue
            if len(anat2MNI_fieldwarp) > 1:
                print 'WARNING: found multiple files of anat2MNI fieldwarp!'
                continue
            anat2MNI_fieldwarp = anat2MNI_fieldwarp[0]

            # Segmentations: fall back to the current session if session1's
            # file is absent.
            if not anat_CSF:
                anat_CSF = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_0.nii.gz')
                if not anat_CSF:
                    print 'Anatomical segmentation CSF file not found. Skipping!'
                    continue
            if len(anat_CSF) > 1:
                print 'WARNING: found multiple files of anatomical CSF file!'
                continue
            anat_CSF = anat_CSF[0]

            if not anat_WM:
                anat_WM = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_2.nii.gz')
                if not anat_WM:
                    print 'Anatomical segmentation WM file not found. Skipping!'
                    continue
            if len(anat_WM) > 1:
                print 'WARNING: found multiple files of anatomical WM file!'
                continue
            anat_WM = anat_WM[0]

            if not anat_GM:
                anat_GM = glob.glob(
                    anat_path + subj + '/' + s +
                    '/seg_anat/segmentation/anat_*_pve_1.nii.gz')
                if not anat_GM:
                    print 'Anatomical segmentation GM file not found. Skipping!'
                    continue
            if len(anat_GM) > 1:
                print 'WARNING: found multiple files of anatomical GM file!'
                continue
            anat_GM = anat_GM[0]

            if not os.path.isdir(results_path + subj):
                os.mkdir(results_path + subj)

            if not os.path.isdir(results_path + subj + '/' + s):
                os.mkdir(results_path + subj + '/' + s)

            # One workflow per acquisition (func1/func2)
            for data in acquisitions:

                os.chdir(results_path + subj + '/' + s)
                print "Currently processing subject: " + subj + '/' + s + ' ' + data

                #Initialize workflows
                workflow = pe.Workflow(name=data)

                workflow.base_dir = '.'
                inputnode = pe.Node(
                    interface=util.IdentityInterface(fields=['source_file']),
                    name='inputspec')
                outputnode = pe.Node(
                    interface=util.IdentityInterface(fields=['result_func']),
                    name='outputspec')

                if data == 'func1':
                    inputnode.inputs.source_file = func1
                else:
                    inputnode.inputs.source_file = func2

                # Remove n_dummies first volumes
                trim = pe.Node(interface=Trim(begin_index=n_dummies),
                               name='trim')
                workflow.connect(inputnode, 'source_file', trim, 'in_file')

                # Motion correction + slice timing correction
                realign4d = pe.Node(interface=SpaceTimeRealigner(),
                                    name='realign4d')
                #realign4d.inputs.ignore_exception=True
                realign4d.inputs.slice_times = 'asc_alt_siemens'
                realign4d.inputs.slice_info = 2  # horizontal slices
                realign4d.inputs.tr = mytr  # TR in seconds
                workflow.connect(trim, 'out_file', realign4d, 'in_file')

                # Reorient
                #deoblique = pe.Node(interface=afni.Warp(deoblique=True, outputtype='NIFTI_GZ'), name='deoblique') #leave out if you don't need this
                #workflow.connect(realign4d, 'out_file', deoblique, 'in_file')
                reorient = pe.Node(
                    interface=fsl.Reorient2Std(output_type='NIFTI_GZ'),
                    name='reorient')
                workflow.connect(realign4d, 'out_file', reorient, 'in_file')

                # AFNI skullstrip and mean image skullstrip
                tstat1 = pe.Node(interface=afni.TStat(args='-mean',
                                                      outputtype="NIFTI_GZ"),
                                 name='tstat1')
                automask = pe.Node(interface=afni.Automask(
                    dilate=1, outputtype="NIFTI_GZ"),
                                   name='automask')
                skullstrip = pe.Node(interface=afni.Calc(
                    expr='a*b', outputtype="NIFTI_GZ"),
                                     name='skullstrip')
                tstat2 = pe.Node(interface=afni.TStat(args='-mean',
                                                      outputtype="NIFTI_GZ"),
                                 name='tstat2')

                workflow.connect(reorient, 'out_file', tstat1, 'in_file')
                workflow.connect(tstat1, 'out_file', automask, 'in_file')
                workflow.connect(automask, 'out_file', skullstrip, 'in_file_b')
                workflow.connect(reorient, 'out_file', skullstrip, 'in_file_a')
                workflow.connect(skullstrip, 'out_file', tstat2, 'in_file')

                # Register to anatomical space #can be changed
                #mean2anat = pe.Node(fsl.FLIRT(bins=40, cost='normmi', dof=7, interp='nearestneighbour', searchr_x=[-180,180], searchr_y=[-180,180], searchr_z=[-180,180]), name='mean2anat')
                mean2anat = pe.Node(fsl.FLIRT(bins=40, cost='normmi', dof=7,
                                              interp='nearestneighbour'),
                                    name='mean2anat')
                #mean2anat = pe.Node(fsl.FLIRT(no_search=True), name='mean2anat')
                mean2anat.inputs.reference = anat
                workflow.connect(tstat2, 'out_file', mean2anat, 'in_file')

                # Transform mean functional image
                warpmean = pe.Node(interface=fsl.ApplyWarp(), name='warpmean')
                warpmean.inputs.ref_file = MNI_brain
                warpmean.inputs.field_file = anat2MNI_fieldwarp
                workflow.connect(mean2anat, 'out_matrix_file', warpmean,
                                 'premat')
                workflow.connect(tstat2, 'out_file', warpmean, 'in_file')

                # ----- inversion matrix and eroded brain mask for regression -----

                # create inverse matrix from mean2anat registration
                invmat = pe.Node(fsl.ConvertXFM(), name='invmat')
                invmat.inputs.invert_xfm = True
                workflow.connect(mean2anat, 'out_matrix_file', invmat,
                                 'in_file')

                # erode functional brain mask
                erode_brain = pe.Node(fsl.ImageMaths(), name='erode_brain')
                erode_brain.inputs.args = '-kernel boxv 3 -ero'
                workflow.connect(automask, 'out_file', erode_brain, 'in_file')

                # register GM mask to functional image space, this is done for quality control
                reg_GM = pe.Node(fsl.preprocess.ApplyXFM(), name='register_GM')
                reg_GM.inputs.apply_xfm = True
                reg_GM.inputs.in_file = anat_GM
                workflow.connect(tstat2, 'out_file', reg_GM, 'reference')
                workflow.connect(invmat, 'out_file', reg_GM, 'in_matrix_file')

                # --------- motion regression and censor signals ------------------

                # normalize motion parameters
                norm_motion = pe.Node(interface=Function(
                    input_names=['in_file'],
                    output_names=['out_file'],
                    function=normalize_motion_data),
                                      name='normalize_motion')
                workflow.connect(realign4d, 'par_file', norm_motion, 'in_file')

                # create censor file, for censoring motion
                get_censor = pe.Node(afni.OneDToolPy(), name='motion_censors')
                get_censor.inputs.set_nruns = 1
                get_censor.inputs.censor_motion = (censor_thr, 'motion')
                get_censor.inputs.show_censor_count = True
                if overwrite:
                    get_censor.inputs.args = '-overwrite'
                workflow.connect(norm_motion, 'out_file', get_censor,
                                 'in_file')

                # compute motion parameter derivatives (for use in regression)
                deriv_motion = pe.Node(afni.OneDToolPy(), name='deriv_motion')
                deriv_motion.inputs.set_nruns = 1
                deriv_motion.inputs.derivative = True
                if overwrite:
                    deriv_motion.inputs.args = '-overwrite'
                deriv_motion.inputs.out_file = 'motion_derivatives.txt'
                workflow.connect(norm_motion, 'out_file', deriv_motion,
                                 'in_file')

                # scale motion parameters and get quadratures
                quadr_motion = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                       name='quadr_motion')
                quadr_motion.inputs.multicol = True
                workflow.connect(norm_motion, 'out_file', quadr_motion,
                                 'in_file')

                # scale motion derivatives and get quadratures
                quadr_motion_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                             name='quadr_motion_deriv')
                quadr_motion_deriv.inputs.multicol = True
                workflow.connect(deriv_motion, 'out_file', quadr_motion_deriv,
                                 'in_file')

                # -------- CSF regression signals ---------------

                # threshold and erode CSF mask
                erode_CSF_mask = pe.Node(fsl.ImageMaths(),
                                         name='erode_CSF_mask')
                erode_CSF_mask.inputs.args = '-thr 0.5 -kernel boxv 3 -ero'
                erode_CSF_mask.inputs.in_file = anat_CSF

                # register CSF mask to functional image space
                reg_CSF_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                       name='register_CSF_mask')
                reg_CSF_mask.inputs.apply_xfm = True
                workflow.connect(tstat2, 'out_file', reg_CSF_mask, 'reference')
                workflow.connect(invmat, 'out_file', reg_CSF_mask,
                                 'in_matrix_file')

                # inverse lesion mask and remove it from CSF mask #remove this if you don't have a lesion mask
                inverse_lesion_mask = pe.Node(fsl.ImageMaths(),
                                              name='inverse_lesion_mask')
                inverse_lesion_mask.inputs.args = '-add 1 -rem 2'
                inverse_lesion_mask.inputs.in_file = lesion_mask_file
                rem_lesion = pe.Node(fsl.ImageMaths(), name='remove_lesion')
                workflow.connect(erode_CSF_mask, 'out_file', rem_lesion,
                                 'in_file')
                workflow.connect(inverse_lesion_mask, 'out_file', rem_lesion,
                                 'mask_file')
                '''
		# Transform lesion mask to MNI152 space	#remove if lesion masks are already in MNI152 space
                warp_lesion = pe.Node(interface=fsl.ApplyWarp(), name='warp_lesion')
                warp_lesion.inputs.ref_file = MNI_brain
                warp_lesion.inputs.field_file = anat2MNI_fieldwarp
                warp_lesion.inputs.in_file = lesion_mask_file
                warp_lesion.inputs.out_file = anat_path + subj +'/'+ s + '/anat/nonlinear_reg/lesion_seg_warp.nii.gz'
                warp_lesion.run()
		'''

                # inverse old lesion mask and remove it from CSF mask #remove this if you don't have a lesion mask
                if old_lesion_mask_file:
                    inverse_old_lesion_mask = pe.Node(
                        fsl.ImageMaths(), name='inverse_old_lesion_mask')
                    inverse_old_lesion_mask.inputs.args = '-add 1 -rem 3'
                    #inverse_old_lesion_mask.inputs.in_file = old_lesion_mask_file[0]
                    inverse_old_lesion_mask.inputs.in_file = old_lesion_mask_file
                    rem_old_lesion = pe.Node(fsl.ImageMaths(),
                                             name='remove_old_lesion')
                    workflow.connect(rem_lesion, 'out_file', rem_old_lesion,
                                     'in_file')
                    workflow.connect(inverse_old_lesion_mask, 'out_file',
                                     rem_old_lesion, 'mask_file')
                    workflow.connect(rem_old_lesion, 'out_file', reg_CSF_mask,
                                     'in_file')
                    '''
		    # Transform old lesion mask to MNI152 space	#remove if lesion masks are already in MNI152 space
                    warp_old_lesion = pe.Node(interface=fsl.ApplyWarp(), name='warp_old_lesion')
                    warp_old_lesion.inputs.ref_file = MNI_brain
                    warp_old_lesion.inputs.field_file = anat2MNI_fieldwarp
                    warp_old_lesion.inputs.in_file = old_lesion_mask_file
                    warp_old_lesion.inputs.out_file = anat_path + subj +'/'+ s + '/anat/nonlinear_reg/old_lesion_seg_warp.nii.gz'
                    warp_old_lesion.run()
		    '''
                else:
                    workflow.connect(rem_lesion, 'out_file', reg_CSF_mask,
                                     'in_file')

                # threshold CSF mask and intersect with functional brain mask
                thr_CSF_mask = pe.Node(fsl.ImageMaths(),
                                       name='threshold_CSF_mask')
                thr_CSF_mask.inputs.args = '-thr 0.25'
                workflow.connect(reg_CSF_mask, 'out_file', thr_CSF_mask,
                                 'in_file')
                workflow.connect(erode_brain, 'out_file', thr_CSF_mask,
                                 'mask_file')

                # extract CSF values
                get_CSF_noise = pe.Node(fsl.ImageMeants(),
                                        name='get_CSF_noise')
                workflow.connect(skullstrip, 'out_file', get_CSF_noise,
                                 'in_file')
                workflow.connect(thr_CSF_mask, 'out_file', get_CSF_noise,
                                 'mask')

                # compute CSF noise derivatives
                deriv_CSF = pe.Node(afni.OneDToolPy(), name='deriv_CSF')
                deriv_CSF.inputs.set_nruns = 1
                deriv_CSF.inputs.derivative = True
                if overwrite:
                    deriv_CSF.inputs.args = '-overwrite'
                deriv_CSF.inputs.out_file = 'CSF_derivatives.txt'
                workflow.connect(get_CSF_noise, 'out_file', deriv_CSF,
                                 'in_file')

                # scale SCF noise and get quadratures
                quadr_CSF = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                    name='quadr_CSF')
                quadr_CSF.inputs.multicol = False
                workflow.connect(get_CSF_noise, 'out_file', quadr_CSF,
                                 'in_file')

                # scale CSF noise derivatives and get quadratures
                quadr_CSF_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                          name='quadr_CSF_deriv')
                quadr_CSF_deriv.inputs.multicol = False
                workflow.connect(deriv_CSF, 'out_file', quadr_CSF_deriv,
                                 'in_file')

                # -------- WM regression signals -----------------

                # threshold and erode WM mask
                erode_WM_mask = pe.Node(fsl.ImageMaths(),
                                        name='erode_WM_mask')
                erode_WM_mask.inputs.args = '-thr 0.5 -kernel boxv 7 -ero'
                erode_WM_mask.inputs.in_file = anat_WM

                # registrer WM mask to functional image space
                reg_WM_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                      name='register_WM_mask')
                reg_WM_mask.inputs.apply_xfm = True
                workflow.connect(tstat2, 'out_file', reg_WM_mask, 'reference')
                workflow.connect(invmat, 'out_file', reg_WM_mask,
                                 'in_matrix_file')
                workflow.connect(erode_WM_mask, 'out_file', reg_WM_mask,
                                 'in_file')

                # create inverse nonlinear registration MNI2anat
                invwarp = pe.Node(fsl.InvWarp(output_type='NIFTI_GZ'),
                                  name='invwarp')
                invwarp.inputs.warp = anat2MNI_fieldwarp
                invwarp.inputs.reference = anat

                # transform ventricle mask to functional space
                reg_ventricles = pe.Node(fsl.ApplyWarp(),
                                         name='register_ventricle_mask')
                reg_ventricles.inputs.in_file = ventricle_mask
                workflow.connect(tstat2, 'out_file', reg_ventricles,
                                 'ref_file')
                workflow.connect(invwarp, 'inverse_warp', reg_ventricles,
                                 'field_file')
                workflow.connect(invmat, 'out_file', reg_ventricles, 'postmat')

                # threshold WM mask and intersect with functional brain mask
                thr_WM_mask = pe.Node(fsl.ImageMaths(),
                                      name='threshold_WM_mask')
                thr_WM_mask.inputs.args = '-thr 0.25'
                workflow.connect(reg_WM_mask, 'out_file', thr_WM_mask,
                                 'in_file')
                workflow.connect(erode_brain, 'out_file', thr_WM_mask,
                                 'mask_file')

                # remove ventricles from WM mask
                exclude_ventricles = pe.Node(fsl.ImageMaths(),
                                             name='exclude_ventricles')
                workflow.connect(thr_WM_mask, 'out_file', exclude_ventricles,
                                 'in_file')
                workflow.connect(reg_ventricles, 'out_file',
                                 exclude_ventricles, 'mask_file')

                # check that WM is collected from both hemispheres
                check_WM_bilat = pe.Node(interface=Function(
                    input_names=['in_file'],
                    output_names=['errors'],
                    function=check_bilateralism),
                                         name='check_WM_bilateralism')
                workflow.connect(exclude_ventricles, 'out_file',
                                 check_WM_bilat, 'in_file')

                # extract WM values
                get_WM_noise = pe.Node(fsl.ImageMeants(), name='get_WM_noise')
                workflow.connect(skullstrip, 'out_file', get_WM_noise,
                                 'in_file')
                workflow.connect(exclude_ventricles, 'out_file', get_WM_noise,
                                 'mask')

                # compute WM noise derivatives
                deriv_WM = pe.Node(afni.OneDToolPy(), name='deriv_WM')
                deriv_WM.inputs.set_nruns = 1
                deriv_WM.inputs.derivative = True
                if overwrite:
                    deriv_WM.inputs.args = '-overwrite'
                deriv_WM.inputs.out_file = 'WM_derivatives.txt'
                workflow.connect(get_WM_noise, 'out_file', deriv_WM, 'in_file')

                # scale WM noise and get quadratures
                quadr_WM = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                   name='quadr_WM')
                quadr_WM.inputs.multicol = False
                workflow.connect(get_WM_noise, 'out_file', quadr_WM, 'in_file')

                # scale WM noise derivatives and get quadratures
                quadr_WM_deriv = pe.Node(interface=Function(
                    input_names=['in_file', 'multicol'],
                    output_names=['out_file', 'out_quadr_file'],
                    function=scale_and_quadrature),
                                         name='quadr_WM_deriv')
                quadr_WM_deriv.inputs.multicol = False
                workflow.connect(deriv_WM, 'out_file', quadr_WM_deriv,
                                 'in_file')

                # ---------- global regression signals ----------------

                if global_reg:
                    # register anatomical whole brain mask to functional image space
                    reg_glob_mask = pe.Node(fsl.preprocess.ApplyXFM(),
                                            name='register_global_mask')
                    reg_glob_mask.inputs.apply_xfm = True
                    reg_glob_mask.inputs.in_file = anat
                    workflow.connect(tstat2, 'out_file', reg_glob_mask,
                                     'reference')
                    workflow.connect(invmat, 'out_file', reg_glob_mask,
                                     'in_matrix_file')

                    # threshold anatomical brain mask and intersect with functional brain mask
                    thr_glob_mask = pe.Node(fsl.ImageMaths(),
                                            name='threshold_global_mask')
                    thr_glob_mask.inputs.args = '-thr -0.1'
                    workflow.connect(reg_glob_mask, 'out_file', thr_glob_mask,
                                     'in_file')
                    workflow.connect(erode_brain, 'out_file', thr_glob_mask,
                                     'mask_file')

                    # extract global signal values
                    get_glob_noise = pe.Node(fsl.ImageMeants(),
                                             name='get_global_noise')
                    workflow.connect(skullstrip, 'out_file', get_glob_noise,
                                     'in_file')
                    workflow.connect(thr_glob_mask, 'out_file',
                                     get_glob_noise, 'mask')

                    # compute global noise derivative
                    deriv_glob = pe.Node(afni.OneDToolPy(),
                                         name='deriv_global')
                    deriv_glob.inputs.set_nruns = 1
                    deriv_glob.inputs.derivative = True
                    if overwrite:
                        deriv_glob.inputs.args = '-overwrite'
                    deriv_glob.inputs.out_file = 'global_derivatives.txt'
                    workflow.connect(get_glob_noise, 'out_file', deriv_glob,
                                     'in_file')

                    # scale global noise and get quadratures
                    quadr_glob = pe.Node(interface=Function(
                        input_names=['in_file', 'multicol'],
                        output_names=['out_file', 'out_quadr_file'],
                        function=scale_and_quadrature),
                                         name='quadr_glob')
                    quadr_glob.inputs.multicol = False
                    workflow.connect(get_glob_noise, 'out_file', quadr_glob,
                                     'in_file')

                    # scale global noise derivatives and get quadratures
                    quadr_glob_deriv = pe.Node(interface=Function(
                        input_names=['in_file', 'multicol'],
                        output_names=['out_file', 'out_quadr_file'],
                        function=scale_and_quadrature),
                                               name='quadr_glob_deriv')
                    quadr_glob_deriv.inputs.multicol = False
                    workflow.connect(deriv_glob, 'out_file', quadr_glob_deriv,
                                     'in_file')

                # ---------- regression matrix ----------

                # create bandpass regressors, can not be easily implemented to workflow
                get_bandpass = pe.Node(interface=Function(
                    input_names=['minf', 'maxf', 'example_file', 'tr'],
                    output_names=['out_tuple'],
                    function=bandpass),
                                       name='bandpass_regressors')
                get_bandpass.inputs.minf = myminf
                get_bandpass.inputs.maxf = mymaxf
                get_bandpass.inputs.tr = mytr
                workflow.connect(norm_motion, 'out_file', get_bandpass,
                                 'example_file')

                # concatenate regressor time series
                cat_reg_name = 'cat_regressors'
                if global_reg:
                    cat_reg_name = cat_reg_name + '_global'
                cat_reg = pe.Node(interface=Function(
                    input_names=[
                        'mot', 'motd', 'motq', 'motdq', 'CSF', 'CSFd', 'CSFq',
                        'CSFdq', 'WM', 'WMd', 'WMq', 'WMdq', 'include_global',
                        'glob', 'globd', 'globq', 'globdq'
                    ],
                    output_names=['reg_file_args'],
                    function=concatenate_regressors),
                                  name=cat_reg_name)
                cat_reg.inputs.include_global = global_reg
                workflow.connect(quadr_motion, 'out_file', cat_reg, 'mot')
                workflow.connect(quadr_motion_deriv, 'out_file', cat_reg,
                                 'motd')
                workflow.connect(quadr_motion, 'out_quadr_file', cat_reg,
                                 'motq')
                workflow.connect(quadr_motion_deriv, 'out_quadr_file',
                                 cat_reg, 'motdq')
                workflow.connect(quadr_CSF, 'out_file', cat_reg, 'CSF')
                workflow.connect(quadr_CSF_deriv, 'out_file', cat_reg, 'CSFd')
                workflow.connect(quadr_CSF, 'out_quadr_file', cat_reg, 'CSFq')
                workflow.connect(quadr_CSF_deriv, 'out_quadr_file', cat_reg,
                                 'CSFdq')
                workflow.connect(quadr_WM, 'out_file', cat_reg, 'WM')
                workflow.connect(quadr_WM_deriv, 'out_file', cat_reg, 'WMd')
                workflow.connect(quadr_WM, 'out_quadr_file', cat_reg, 'WMq')
                workflow.connect(quadr_WM_deriv, 'out_quadr_file', cat_reg,
                                 'WMdq')
                if global_reg:
                    workflow.connect(quadr_glob, 'out_file', cat_reg, 'glob')
                    workflow.connect(quadr_glob_deriv, 'out_file', cat_reg,
                                     'globd')
                    workflow.connect(quadr_glob, 'out_quadr_file', cat_reg,
                                     'globq')
                    workflow.connect(quadr_glob_deriv, 'out_quadr_file',
                                     cat_reg, 'globdq')
                else:
                    cat_reg.inputs.glob = None
                    cat_reg.inputs.globd = None
                    cat_reg.inputs.globq = None
                    cat_reg.inputs.globdq = None

                # create regression matrix
                deconvolve_name = 'deconvolve'
                if global_reg:
                    deconvolve_name = deconvolve_name + '_global'
                deconvolve = pe.Node(afni.Deconvolve(), name=deconvolve_name)
                deconvolve.inputs.polort = 2  # contstant, linear and quadratic background signals removed
                deconvolve.inputs.fout = True
                deconvolve.inputs.tout = True
                deconvolve.inputs.x1D_stop = True
                deconvolve.inputs.force_TR = mytr
                workflow.connect(cat_reg, 'reg_file_args', deconvolve, 'args')
                workflow.connect(get_bandpass, 'out_tuple', deconvolve,
                                 'ortvec')
                workflow.connect([(skullstrip, deconvolve, [(('out_file',
                                                              str2list),
                                                             'in_files')])])

                # regress out motion and other unwanted signals
                tproject_name = 'tproject'
                if global_reg:
                    tproject_name = tproject_name + '_global'
                tproject = pe.Node(afni.TProject(outputtype="NIFTI_GZ"),
                                   name=tproject_name)
                tproject.inputs.TR = mytr
                tproject.inputs.polort = 0  # use matrix created with 3dDeconvolve, higher order polynomials not needed
                tproject.inputs.cenmode = 'NTRP'  # interpolate removed time points
                workflow.connect(get_censor, 'out_file', tproject, 'censor')
                workflow.connect(skullstrip, 'out_file', tproject, 'in_file')
                workflow.connect(automask, 'out_file', tproject, 'mask')
                workflow.connect(deconvolve, 'x1D', tproject, 'ort')

                # Transform all images
                warpall_name = 'warpall'
                if global_reg:
                    warpall_name = warpall_name + '_global'
                warpall = pe.Node(interface=fsl.ApplyWarp(), name=warpall_name)
                warpall.inputs.ref_file = MNI_brain
                warpall.inputs.field_file = anat2MNI_fieldwarp
                workflow.connect(mean2anat, 'out_matrix_file', warpall,
                                 'premat')
                workflow.connect(tproject, 'out_file', warpall, 'in_file')
                workflow.connect(warpall, 'out_file', outputnode,
                                 'result_func')

                # Run workflow
                workflow.write_graph()
                workflow.run()

            print "FUNCTIONAL PREPROCESSING DONE! Results in ", results_path + subj + '/' + s
    # NOTE(review): bare except silently catches *everything* (including
    # SystemExit/KeyboardInterrupt) and only prints a traceback.
    except:
        print "Error with patient: ", subj
        traceback.print_exc()
def init_skullstrip_bold_wf(name='skullstrip_bold_wf'):
    """
    Skull-strip a single-volume BOLD image.

    Intended for images previously bias-corrected with
    :py:func:`~niworkflows.func.util.init_enhance_and_skullstrip_bold_wf`.
    Two extraction passes are combined: a liberal FSL ``BET`` pass followed
    by an AFNI ``3dAutomask`` refinement; the voxelwise product of the two
    masks is applied to the input and rendered into a reportlet.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from niworkflows.func.util import init_skullstrip_bold_wf
        wf = init_skullstrip_bold_wf()

    Inputs

        in_file
            BOLD image (single volume)

    Outputs

        skull_stripped_file
            the ``in_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping

    """
    workflow = Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=['mask_file', 'skull_stripped_file', 'out_report']),
        name='outputnode')

    # Pass 1: BET with a permissive fractional threshold, keeping its mask
    bet_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                       name='skullstrip_first_pass')
    # Pass 2: AFNI automask on the BET-extracted brain, dilated by 1 voxel
    automask_pass = pe.Node(afni.Automask(dilate=1, outputtype='NIFTI_GZ'),
                            name='skullstrip_second_pass')
    # Intersection of the two binary masks (voxelwise multiplication)
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
    mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')

    workflow.connect([
        # Chain the two skull-stripping passes
        (inputnode, bet_pass, [('in_file', 'in_file')]),
        (bet_pass, automask_pass, [('out_file', 'in_file')]),
        # Combine their masks
        (bet_pass, combine_masks, [('mask_file', 'in_file')]),
        (automask_pass, combine_masks, [('out_file', 'operand_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        # Masked file
        (inputnode, apply_mask, [('in_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        # Reportlet
        (inputnode, mask_reportlet, [('in_file', 'background_file')]),
        (combine_masks, mask_reportlet, [('out_file', 'mask_file')]),
        (mask_reportlet, outputnode, [('out_report', 'out_report')]),
    ])

    return workflow
def init_enhance_and_skullstrip_dwi_wf(name='enhance_and_skullstrip_dwi_wf',
                                       omp_nthreads=1):
    """
    Enhance a b0 template from head motion correction and compute a
    signal mask.

    Steps of this workflow are:

      1. Compute a preliminary brain mask with AFNI ``3dAutomask``,
         dilated by 3 voxels.
      2. Run ANTs' ``N4BiasFieldCorrection`` on the input b0 average to
         remove the :abbr:`INU (intensity non-uniformity)` bias field,
         using the mask generated in 1) instead of the internal Otsu
         thresholding.
      3. Apply histogram equalization (``HistEQ``) within the mask to
         enhance the contrast of the data.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from qsiprep.workflows.dwi.util import init_enhance_and_skullstrip_dwi_wf
        wf = init_enhance_and_skullstrip_dwi_wf(omp_nthreads=1)

    **Parameters**
        name : str
            Name of workflow (default: ``enhance_and_skullstrip_dwi_wf``)
        omp_nthreads : int
            Number of threads available to parallel nodes. Not currently
            consumed by any node: N4 is forced to a single thread for
            stability.

    **Inputs**
        in_file
            dwi b0 image (single volume)
        pre_mask
            A tentative brain mask.
            NOTE(review): this input is declared on the inputnode but is
            not connected to any node below — confirm whether it should
            replace or initialize ``initial_mask``.

    **Outputs**
        bias_corrected_file
            the ``in_file`` after `N4BiasFieldCorrection`_ and histogram
            equalization
        skull_stripped_file
            identical to ``bias_corrected_file``; the mask constrains the
            equalization but is not applied to zero out non-brain voxels
        mask_file
            the dilated Automask brain mask computed in step 1

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053

    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['mask_file', 'skull_stripped_file', 'bias_corrected_file']),
        name='outputnode')

    # Basic mask (step 1)
    initial_mask = pe.Node(afni.Automask(dilate=3, outputtype="NIFTI_GZ"),
                           name="initial_mask")

    # Run N4 normally, force num_threads=1 for stability
    # (images are small, no need for >1)
    n4_correct = pe.Node(
        ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
        name='n4_correct', n_procs=1)

    # Contrast enhancement within the initial mask (step 3)
    hist_eq = pe.Node(HistEQ(), name='hist_eq')

    workflow.connect([
        (inputnode, initial_mask, [('in_file', 'in_file')]),
        (initial_mask, n4_correct, [('out_file', 'mask_image')]),
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (n4_correct, hist_eq, [('output_image', 'in_file')]),
        (initial_mask, hist_eq, [('out_file', 'mask_file')]),
        (hist_eq, outputnode, [('out_file', 'bias_corrected_file'),
                               ('out_file', 'skull_stripped_file')]),
        (initial_mask, outputnode, [('out_file', 'mask_file')]),
    ])
    return workflow