def init_func_preproc_wf(bold_file):
    """
    This workflow controls the functional preprocessing stages of *fMRIPrep*.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from fmriprep.workflows.tests import mock_config
            from fmriprep import config
            from fmriprep.workflows.bold.base import init_func_preproc_wf
            with mock_config():
                bold_file = config.execution.bids_dir / 'sub-01' / 'func' \
                    / 'sub-01_task-mixedgamblestask_run-01_bold.nii.gz'
                wf = init_func_preproc_wf(str(bold_file))

    Parameters
    ----------
    bold_file
        BOLD series NIfTI file

    Inputs
    ------
    bold_file
        BOLD series NIfTI file
    t1w_preproc
        Bias-corrected structural template image
    t1w_mask
        Mask of the skull-stripped template image
    t1w_dseg
        Segmentation of preprocessed structural image, including
        gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
    t1w_aseg
        Segmentation of structural image, done with FreeSurfer.
    t1w_aparc
        Parcellation of structural image, done with FreeSurfer.
    t1w_tpms
        List of tissue probability maps in T1w space
    template
        List of templates to target
    anat2std_xfm
        List of transform files, collated with templates
    std2anat_xfm
        List of inverse transform files, collated with templates
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
    fsnative2t1w_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w

    Outputs
    -------
    bold_t1
        BOLD series, resampled to T1w space
    bold_mask_t1
        BOLD series mask in T1w space
    bold_std
        BOLD series, resampled to template space
    bold_mask_std
        BOLD series mask in template space
    confounds
        TSV of confounds
    surfaces
        BOLD series, resampled to FreeSurfer surfaces
    aroma_noise_ics
        Noise components identified by ICA-AROMA
    melodic_mix
        FSL MELODIC mixing matrix
    bold_cifti
        BOLD CIFTI image
    cifti_variant
        combination of target spaces for `bold_cifti`

    See Also
    --------

    * :py:func:`~niworkflows.func.util.init_bold_reference_wf`
    * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf`
    * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf`
    * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf`
    * :py:func:`~fmriprep.workflows.bold.registration.init_bold_t1_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf`
    * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confounds_wf`
    * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_std_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf`
    * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf`
    * :py:func:`~sdcflows.workflows.fmap.init_fmap_wf`
    * :py:func:`~sdcflows.workflows.pepolar.init_pepolar_unwarp_wf`
    * :py:func:`~sdcflows.workflows.phdiff.init_phdiff_wf`
    * :py:func:`~sdcflows.workflows.syn.init_syn_sdc_wf`
    * :py:func:`~sdcflows.workflows.unwarp.init_sdc_unwarp_wf`

    """
    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from niworkflows.func.util import init_bold_reference_wf
    from niworkflows.interfaces.nibabel import ApplyMask
    from niworkflows.interfaces.utility import KeySelect
    from niworkflows.interfaces.utils import DictMerge
    from sdcflows.workflows.base import init_sdc_estimate_wf, fieldmap_wrangler

    ref_file = bold_file
    # Conservative placeholder estimates (GB / TR count), refined below
    # whenever the reference file is actually readable on disk.
    mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1}
    bold_tlen = 10
    # A list of BOLD files signals a multi-echo acquisition.
    multiecho = isinstance(bold_file, list)

    # Have some options handy
    layout = config.execution.layout
    omp_nthreads = config.nipype.omp_nthreads
    freesurfer = config.workflow.run_reconall
    spaces = config.workflow.spaces

    if multiecho:
        # Use the echo with the shortest TE as the reference for this run.
        tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file]
        ref_file = dict(zip(tes, bold_file))[min(tes)]

    if os.path.isfile(ref_file):
        # NOTE(review): guarded so workflow construction also succeeds when the
        # file is absent (e.g., docs builds) — placeholders above are kept then.
        bold_tlen, mem_gb = _create_mem_gb(ref_file)

    wf_name = _get_wf_name(ref_file)
    config.loggers.workflow.debug(
        'Creating bold processing workflow for "%s" (%.2f GB / %d TRs. '
        'Memory resampled/largemem=%.2f/%.2f GB.',
        ref_file, mem_gb['filesize'], bold_tlen, mem_gb['resampled'],
        mem_gb['largemem'])

    sbref_file = None
    # Find associated sbref, if possible
    entities = layout.parse_file_entities(ref_file)
    entities['suffix'] = 'sbref'
    entities['extension'] = ['nii', 'nii.gz']  # Overwrite extensions
    files = layout.get(return_type='file', **entities)
    refbase = os.path.basename(ref_file)
    if 'sbref' in config.workflow.ignore:
        config.loggers.workflow.info("Single-band reference files ignored.")
    elif files and multiecho:
        # Single-band references are not wired into the multi-echo path.
        config.loggers.workflow.warning(
            "Single-band reference found, but not supported in "
            "multi-echo workflows at this time. Ignoring.")
    elif files:
        sbref_file = files[0]
        sbbase = os.path.basename(sbref_file)
        if len(files) > 1:
            config.loggers.workflow.warning(
                "Multiple single-band reference files found for {}; using "
                "{}".format(refbase, sbbase))
        else:
            config.loggers.workflow.info("Using single-band reference file %s.",
                                         sbbase)
    else:
        config.loggers.workflow.info("No single-band-reference found for %s.",
                                     refbase)

    metadata = layout.get_metadata(ref_file)

    # Find fieldmaps. Options: (phase1|phase2|phasediff|epi|fieldmap|syn)
    fmaps = None
    if 'fieldmaps' not in config.workflow.ignore:
        fmaps = fieldmap_wrangler(layout, ref_file,
                                  use_syn=config.workflow.use_syn,
                                  force_syn=config.workflow.force_syn)
    elif config.workflow.use_syn or config.workflow.force_syn:
        # If fieldmaps are not enabled, activate SyN-SDC in unforced (False) mode
        fmaps = {'syn': False}

    # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort'
    # i.e. run_stc is True, False, or the sentinel string 'TooShort'.
    run_stc = (bool(metadata.get("SliceTiming")) and
               'slicetiming' not in config.workflow.ignore and
               (_get_series_len(ref_file) > 4 or "TooShort"))

    # Check if MEEPI for T2* coregistration target
    if config.workflow.t2s_coreg and not multiecho:
        config.loggers.workflow.warning(
            "No multiecho BOLD images found for T2* coregistration. "
            "Using standard EPI-T1 coregistration.")
        config.workflow.t2s_coreg = False

    # By default, force-bbr for t2s_coreg unless user specifies otherwise
    if config.workflow.t2s_coreg and config.workflow.use_bbr is None:
        config.workflow.use_bbr = True

    # Build workflow
    workflow = Workflow(name=wf_name)
    workflow.__postdesc__ = """\
All resamplings can be performed with *a single interpolation
step* by composing all the pertinent transformations (i.e. head-motion
transform matrices, susceptibility distortion correction when available,
and co-registrations to anatomical and output spaces).
Gridded (volumetric) resamplings were performed using `antsApplyTransforms`
(ANTs), configured with Lanczos interpolation to minimize the smoothing
effects of other kernels [@lanczos].
Non-gridded (surface) resamplings were performed using `mri_vol2surf`
(FreeSurfer).
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_file', 'subjects_dir', 'subject_id',
        't1w_preproc', 't1w_mask', 't1w_dseg', 't1w_tpms',
        't1w_aseg', 't1w_aparc',
        'anat2std_xfm', 'std2anat_xfm', 'template',
        't1w2fsnative_xfm', 'fsnative2t1w_xfm']),
        name='inputnode')
    inputnode.inputs.bold_file = bold_file

    if sbref_file is not None:
        from niworkflows.interfaces.images import ValidateImage
        val_sbref = pe.Node(ValidateImage(in_file=sbref_file), name='val_sbref')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1', 'bold_aparc_t1',
        'bold_std', 'bold_std_ref', 'bold_mask_std', 'bold_aseg_std', 'bold_aparc_std',
        'bold_native', 'bold_cifti', 'cifti_variant', 'cifti_metadata', 'cifti_density',
        'surfaces', 'confounds', 'aroma_noise_ics', 'melodic_mix',
        'nonaggr_denoised_file', 'confounds_metadata']),
        name='outputnode')

    # Generate a brain-masked conversion of the t1w
    t1w_brain = pe.Node(ApplyMask(), name='t1w_brain')

    # BOLD buffer: an identity used as a pointer to either the original BOLD
    # or the STC'ed one for further use.
    boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']), name='boldbuffer')

    summary = pe.Node(
        FunctionalSummary(
            slice_timing=run_stc,
            registration=('FSL', 'FreeSurfer')[freesurfer],
            registration_dof=config.workflow.bold2t1w_dof,
            registration_init=config.workflow.bold2t1w_init,
            pe_direction=metadata.get("PhaseEncodingDirection"),
            tr=metadata.get("RepetitionTime")),
        name='summary', mem_gb=config.DEFAULT_MEMORY_MIN_GB,
        run_without_submitting=True)
    summary.inputs.dummy_scans = config.workflow.dummy_scans

    func_derivatives_wf = init_func_derivatives_wf(
        bids_root=layout.root,
        cifti_output=config.workflow.cifti_output,
        freesurfer=freesurfer,
        metadata=metadata,
        output_dir=str(config.execution.output_dir),
        spaces=spaces,
        use_aroma=config.workflow.use_aroma,
    )

    workflow.connect([
        (outputnode, func_derivatives_wf, [
            ('bold_t1', 'inputnode.bold_t1'),
            ('bold_t1_ref', 'inputnode.bold_t1_ref'),
            ('bold_aseg_t1', 'inputnode.bold_aseg_t1'),
            ('bold_aparc_t1', 'inputnode.bold_aparc_t1'),
            ('bold_mask_t1', 'inputnode.bold_mask_t1'),
            ('bold_native', 'inputnode.bold_native'),
            ('confounds', 'inputnode.confounds'),
            ('surfaces', 'inputnode.surf_files'),
            ('aroma_noise_ics', 'inputnode.aroma_noise_ics'),
            ('melodic_mix', 'inputnode.melodic_mix'),
            ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'),
            ('bold_cifti', 'inputnode.bold_cifti'),
            ('cifti_variant', 'inputnode.cifti_variant'),
            ('cifti_metadata', 'inputnode.cifti_metadata'),
            ('cifti_density', 'inputnode.cifti_density'),
            ('confounds_metadata', 'inputnode.confounds_metadata'),
        ]),
    ])

    # Generate a tentative boldref
    bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads)
    bold_reference_wf.inputs.inputnode.dummy_scans = config.workflow.dummy_scans
    if sbref_file is not None:
        workflow.connect([
            (val_sbref, bold_reference_wf, [('out_file', 'inputnode.sbref_file')]),
        ])

    # Top-level BOLD splitter
    bold_split = pe.Node(FSLSplit(dimension='t'), name='bold_split',
                         mem_gb=mem_gb['filesize'] * 3)

    # HMC on the BOLD
    bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf',
                                   mem_gb=mem_gb['filesize'],
                                   omp_nthreads=omp_nthreads)

    # calculate BOLD registration to T1w
    bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf',
                                   freesurfer=freesurfer,
                                   use_bbr=config.workflow.use_bbr,
                                   bold2t1w_dof=config.workflow.bold2t1w_dof,
                                   bold2t1w_init=config.workflow.bold2t1w_init,
                                   mem_gb=mem_gb['resampled'],
                                   omp_nthreads=omp_nthreads,
                                   use_compression=False)

    # apply BOLD registration to T1w
    bold_t1_trans_wf = init_bold_t1_trans_wf(name='bold_t1_trans_wf',
                                             freesurfer=freesurfer,
                                             use_fieldwarp=bool(fmaps),
                                             multiecho=multiecho,
                                             mem_gb=mem_gb['resampled'],
                                             omp_nthreads=omp_nthreads,
                                             use_compression=False)

    # get confounds
    bold_confounds_wf = init_bold_confs_wf(
        mem_gb=mem_gb['largemem'],
        metadata=metadata,
        regressors_all_comps=config.workflow.regressors_all_comps,
        regressors_fd_th=config.workflow.regressors_fd_th,
        regressors_dvars_th=config.workflow.regressors_dvars_th,
        name='bold_confounds_wf')
    bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False]

    # Apply transforms in 1 shot
    # Only use uncompressed output if AROMA is to be run
    bold_bold_trans_wf = init_bold_preproc_trans_wf(
        mem_gb=mem_gb['resampled'],
        omp_nthreads=omp_nthreads,
        use_compression=not config.execution.low_mem,
        use_fieldwarp=bool(fmaps),
        name='bold_bold_trans_wf')
    bold_bold_trans_wf.inputs.inputnode.name_source = ref_file

    # SLICE-TIME CORRECTION (or bypass) #############################################
    if run_stc is True:  # bool('TooShort') == True, so check True explicitly
        bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata)
        workflow.connect([
            (bold_reference_wf, bold_stc_wf, [('outputnode.skip_vols', 'inputnode.skip_vols')]),
            (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]),
        ])
        if not multiecho:
            workflow.connect([(bold_reference_wf, bold_stc_wf, [
                ('outputnode.bold_file', 'inputnode.bold_file')])])
        else:  # for meepi, iterate through stc_wf for all workflows
            meepi_echos = boldbuffer.clone(name='meepi_echos')
            meepi_echos.iterables = ('bold_file', bold_file)
            workflow.connect([(meepi_echos, bold_stc_wf, [('bold_file', 'inputnode.bold_file')])])
    elif not multiecho:  # STC is too short or False
        # bypass STC from original BOLD to the splitter through boldbuffer
        workflow.connect([(bold_reference_wf, boldbuffer, [('outputnode.bold_file', 'bold_file')])])
    else:
        # for meepi, iterate over all meepi echos to boldbuffer
        boldbuffer.iterables = ('bold_file', bold_file)

    # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ##########################
    bold_sdc_wf = init_sdc_estimate_wf(fmaps, metadata,
                                       omp_nthreads=omp_nthreads,
                                       debug=config.execution.debug)

    # MULTI-ECHO EPI DATA #############################################
    if multiecho:
        from niworkflows.func.util import init_skullstrip_bold_wf
        skullstrip_bold_wf = init_skullstrip_bold_wf(name='skullstrip_bold_wf')

        inputnode.inputs.bold_file = ref_file  # Replace reference w first echo

        join_echos = pe.JoinNode(niu.IdentityInterface(fields=['bold_files']),
                                 joinsource=('meepi_echos' if run_stc is True else 'boldbuffer'),
                                 joinfield=['bold_files'],
                                 name='join_echos')

        # create optimal combination, adaptive T2* map
        bold_t2s_wf = init_bold_t2s_wf(echo_times=tes,
                                       mem_gb=mem_gb['resampled'],
                                       omp_nthreads=omp_nthreads,
                                       t2s_coreg=config.workflow.t2s_coreg,
                                       name='bold_t2smap_wf')

        workflow.connect([
            (skullstrip_bold_wf, join_echos, [
                ('outputnode.skull_stripped_file', 'bold_files')]),
            (join_echos, bold_t2s_wf, [('bold_files', 'inputnode.bold_file')]),
        ])

    # MAIN WORKFLOW STRUCTURE #######################################################
    workflow.connect([
        (inputnode, t1w_brain, [('t1w_preproc', 'in_file'),
                                ('t1w_mask', 'in_mask')]),
        # Generate early reference
        (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]),
        # BOLD buffer has slice-time corrected if it was run, original otherwise
        (boldbuffer, bold_split, [('bold_file', 'in_file')]),
        # HMC
        (bold_reference_wf, bold_hmc_wf,
         [('outputnode.raw_ref_image', 'inputnode.raw_ref_image'),
          ('outputnode.bold_file', 'inputnode.bold_file')]),
        (bold_reference_wf, summary,
         [('outputnode.algo_dummy_scans', 'algo_dummy_scans')]),
        # EPI-T1 registration workflow
        (inputnode, bold_reg_wf, [
            ('t1w_dseg', 'inputnode.t1w_dseg'),
            # Undefined if --fs-no-reconall, but this is safe
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('subject_id', 'inputnode.subject_id'),
            ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')]),
        (t1w_brain, bold_reg_wf, [('out_file', 'inputnode.t1w_brain')]),
        (inputnode, bold_t1_trans_wf, [('bold_file', 'inputnode.name_source'),
                                       ('t1w_mask', 'inputnode.t1w_mask'),
                                       ('t1w_aseg', 'inputnode.t1w_aseg'),
                                       ('t1w_aparc', 'inputnode.t1w_aparc')]),
        (t1w_brain, bold_t1_trans_wf, [('out_file', 'inputnode.t1w_brain')]),
        # unused if multiecho, but this is safe
        (bold_hmc_wf, bold_t1_trans_wf, [
            ('outputnode.xforms', 'inputnode.hmc_xforms')]),
        (bold_reg_wf, bold_t1_trans_wf, [
            ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
        (bold_t1_trans_wf, outputnode, [('outputnode.bold_t1', 'bold_t1'),
                                        ('outputnode.bold_t1_ref', 'bold_t1_ref'),
                                        ('outputnode.bold_aseg_t1', 'bold_aseg_t1'),
                                        ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]),
        (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]),
        # SDC (or pass-through workflow)
        (t1w_brain, bold_sdc_wf, [('out_file', 'inputnode.t1w_brain')]),
        (bold_reference_wf, bold_sdc_wf, [
            ('outputnode.ref_image', 'inputnode.epi_file'),
            ('outputnode.ref_image_brain', 'inputnode.epi_brain'),
            ('outputnode.bold_mask', 'inputnode.epi_mask')]),
        (bold_sdc_wf, bold_t1_trans_wf, [
            ('outputnode.out_warp', 'inputnode.fieldwarp')]),
        (bold_sdc_wf, bold_bold_trans_wf, [
            ('outputnode.out_warp', 'inputnode.fieldwarp'),
            ('outputnode.epi_mask', 'inputnode.bold_mask')]),
        (bold_sdc_wf, summary, [('outputnode.method', 'distortion_correction')]),
        # Connect bold_confounds_wf
        (inputnode, bold_confounds_wf, [('t1w_tpms', 'inputnode.t1w_tpms'),
                                        ('t1w_mask', 'inputnode.t1w_mask')]),
        (bold_hmc_wf, bold_confounds_wf, [
            ('outputnode.movpar_file', 'inputnode.movpar_file')]),
        (bold_reg_wf, bold_confounds_wf, [
            ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
        (bold_reference_wf, bold_confounds_wf, [
            ('outputnode.skip_vols', 'inputnode.skip_vols')]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_file', 'confounds'),
        ]),
        (bold_confounds_wf, outputnode, [
            ('outputnode.confounds_metadata', 'confounds_metadata'),
        ]),
        # Connect bold_bold_trans_wf
        (bold_split, bold_bold_trans_wf, [
            ('out_files', 'inputnode.bold_file')]),
        (bold_hmc_wf, bold_bold_trans_wf, [
            ('outputnode.xforms', 'inputnode.hmc_xforms')]),
        # Summary
        (outputnode, summary, [('confounds', 'confounds_file')]),
    ])

    if not config.workflow.t2s_coreg:
        workflow.connect([
            (bold_sdc_wf, bold_reg_wf,
             [('outputnode.epi_brain', 'inputnode.ref_bold_brain')]),
            (bold_sdc_wf, bold_t1_trans_wf,
             [('outputnode.epi_brain', 'inputnode.ref_bold_brain'),
              ('outputnode.epi_mask', 'inputnode.ref_bold_mask')]),
        ])
    else:
        workflow.connect([
            # For t2s_coreg, replace EPI-to-T1w registration inputs
            (bold_t2s_wf, bold_reg_wf,
             [('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain')]),
            (bold_t2s_wf, bold_t1_trans_wf,
             [('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'),
              ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]),
        ])

    # for standard EPI data, pass along correct file
    if not multiecho:
        workflow.connect([
            (inputnode, func_derivatives_wf, [
                ('bold_file', 'inputnode.source_file')]),
            (bold_bold_trans_wf, bold_confounds_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_split, bold_t1_trans_wf, [
                ('out_files', 'inputnode.bold_split')]),
        ])
    else:  # for meepi, create and use optimal combination
        workflow.connect([
            # update name source for optimal combination
            (inputnode, func_derivatives_wf, [
                (('bold_file', combine_meepi_source), 'inputnode.source_file')]),
            (bold_bold_trans_wf, skullstrip_bold_wf, [
                ('outputnode.bold', 'inputnode.in_file')]),
            (bold_t2s_wf, bold_confounds_wf, [
                ('outputnode.bold', 'inputnode.bold'),
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_t2s_wf, bold_t1_trans_wf, [
                ('outputnode.bold', 'inputnode.bold_split')]),
        ])

    if fmaps:
        from sdcflows.workflows.outputs import init_sdc_unwarp_report_wf
        # Report on BOLD correction
        fmap_unwarp_report_wf = init_sdc_unwarp_report_wf()
        workflow.connect([
            (inputnode, fmap_unwarp_report_wf, [
                ('t1w_dseg', 'inputnode.in_seg')]),
            (bold_reference_wf, fmap_unwarp_report_wf, [
                ('outputnode.ref_image', 'inputnode.in_pre')]),
            (bold_reg_wf, fmap_unwarp_report_wf, [
                ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
            (bold_sdc_wf, fmap_unwarp_report_wf, [
                ('outputnode.epi_corrected', 'inputnode.in_post')]),
        ])

        # Overwrite ``out_path_base`` of unwarping DataSinks
        for node in fmap_unwarp_report_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                fmap_unwarp_report_wf.get_node(
                    node).interface.out_path_base = 'fmriprep'

        for node in bold_sdc_wf.list_node_names():
            if node.split('.')[-1].startswith('ds_'):
                bold_sdc_wf.get_node(node).interface.out_path_base = 'fmriprep'

        if 'syn' in fmaps:
            # SyN-SDC needs the template-to-anatomical transform of one
            # specific standard space, selected here by key.
            sdc_select_std = pe.Node(
                KeySelect(fields=['std2anat_xfm']),
                name='sdc_select_std', run_without_submitting=True)
            sdc_select_std.inputs.key = 'MNI152NLin2009cAsym'
            workflow.connect([
                (inputnode, sdc_select_std, [('std2anat_xfm', 'std2anat_xfm'),
                                             ('template', 'keys')]),
                (sdc_select_std, bold_sdc_wf, [
                    ('std2anat_xfm', 'inputnode.std2anat_xfm')]),
            ])

        if fmaps.get('syn') is True:  # SyN forced
            syn_unwarp_report_wf = init_sdc_unwarp_report_wf(
                name='syn_unwarp_report_wf', forcedsyn=True)
            workflow.connect([
                (inputnode, syn_unwarp_report_wf, [
                    ('t1w_dseg', 'inputnode.in_seg')]),
                (bold_reference_wf, syn_unwarp_report_wf, [
                    ('outputnode.ref_image', 'inputnode.in_pre')]),
                (bold_reg_wf, syn_unwarp_report_wf, [
                    ('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]),
                (bold_sdc_wf, syn_unwarp_report_wf, [
                    ('outputnode.syn_ref', 'inputnode.in_post')]),
            ])

            # Overwrite ``out_path_base`` of unwarping DataSinks
            for node in syn_unwarp_report_wf.list_node_names():
                if node.split('.')[-1].startswith('ds_'):
                    syn_unwarp_report_wf.get_node(
                        node).interface.out_path_base = 'fmriprep'

    # Map final BOLD mask into T1w space (if required)
    nonstd_spaces = set(spaces.get_nonstandard())
    if nonstd_spaces.intersection(('T1w', 'anat')):
        from niworkflows.interfaces.fixes import (
            FixHeaderApplyTransforms as ApplyTransforms)

        boldmask_to_t1w = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                  float=True),
                                  name='boldmask_to_t1w', mem_gb=0.1)
        workflow.connect([
            (bold_reg_wf, boldmask_to_t1w, [
                ('outputnode.itk_bold_to_t1', 'transforms')]),
            (bold_t1_trans_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask_t1', 'reference_image')]),
            # The source mask comes from the optimal combination in ME-EPI runs
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf, boldmask_to_t1w, [
                ('outputnode.bold_mask', 'input_image')]),
            (boldmask_to_t1w, outputnode, [
                ('output_image', 'bold_mask_t1')]),
        ])

    if nonstd_spaces.intersection(('func', 'run', 'bold', 'boldref', 'sbref')):
        workflow.connect([
            (bold_bold_trans_wf, outputnode, [
                ('outputnode.bold', 'bold_native')]),
            (bold_bold_trans_wf, func_derivatives_wf, [
                ('outputnode.bold_ref', 'inputnode.bold_native_ref'),
                ('outputnode.bold_mask', 'inputnode.bold_mask_native')]),
        ])

    if spaces.get_spaces(nonstandard=False, dim=(3,)):
        # Apply transforms in 1 shot
        # Only use uncompressed output if AROMA is to be run
        bold_std_trans_wf = init_bold_std_trans_wf(
            freesurfer=freesurfer,
            mem_gb=mem_gb['resampled'],
            omp_nthreads=omp_nthreads,
            spaces=spaces,
            name='bold_std_trans_wf',
            use_compression=not config.execution.low_mem,
            use_fieldwarp=bool(fmaps),
        )
        workflow.connect([
            (inputnode, bold_std_trans_wf, [
                ('template', 'inputnode.templates'),
                ('anat2std_xfm', 'inputnode.anat2std_xfm'),
                ('bold_file', 'inputnode.name_source'),
                ('t1w_aseg', 'inputnode.bold_aseg'),
                ('t1w_aparc', 'inputnode.bold_aparc')]),
            (bold_hmc_wf, bold_std_trans_wf, [
                ('outputnode.xforms', 'inputnode.hmc_xforms')]),
            (bold_reg_wf, bold_std_trans_wf, [
                ('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]),
            (bold_bold_trans_wf if not multiecho else bold_t2s_wf, bold_std_trans_wf, [
                ('outputnode.bold_mask', 'inputnode.bold_mask')]),
            (bold_sdc_wf, bold_std_trans_wf, [
                ('outputnode.out_warp', 'inputnode.fieldwarp')]),
            (bold_std_trans_wf, outputnode, [('outputnode.bold_std', 'bold_std'),
                                             ('outputnode.bold_std_ref', 'bold_std_ref'),
                                             ('outputnode.bold_mask_std', 'bold_mask_std')]),
        ])

        if freesurfer:
            workflow.connect([
                (bold_std_trans_wf, func_derivatives_wf, [
                    ('outputnode.bold_aseg_std', 'inputnode.bold_aseg_std'),
                    ('outputnode.bold_aparc_std', 'inputnode.bold_aparc_std'),
                ]),
                (bold_std_trans_wf, outputnode, [
                    ('outputnode.bold_aseg_std', 'bold_aseg_std'),
                    ('outputnode.bold_aparc_std', 'bold_aparc_std')]),
            ])

        if not multiecho:
            workflow.connect([
                (bold_split, bold_std_trans_wf, [
                    ('out_files', 'inputnode.bold_split')])
            ])
        else:
            split_opt_comb = bold_split.clone(name='split_opt_comb')
            workflow.connect([
                (bold_t2s_wf, split_opt_comb, [
                    ('outputnode.bold', 'in_file')]),
                (split_opt_comb, bold_std_trans_wf, [
                    ('out_files', 'inputnode.bold_split')])
            ])

        # func_derivatives_wf internally parametrizes over snapshotted spaces.
        workflow.connect([
            (bold_std_trans_wf, func_derivatives_wf, [
                ('outputnode.template', 'inputnode.template'),
                ('outputnode.spatial_reference', 'inputnode.spatial_reference'),
                ('outputnode.bold_std_ref', 'inputnode.bold_std_ref'),
                ('outputnode.bold_std', 'inputnode.bold_std'),
                ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
            ]),
        ])

        if config.workflow.use_aroma:  # ICA-AROMA workflow
            from .confounds import init_ica_aroma_wf
            ica_aroma_wf = init_ica_aroma_wf(
                mem_gb=mem_gb['resampled'],
                metadata=metadata,
                omp_nthreads=omp_nthreads,
                use_fieldwarp=bool(fmaps),
                err_on_aroma_warn=config.workflow.aroma_err_on_warn,
                aroma_melodic_dim=config.workflow.aroma_melodic_dim,
                name='ica_aroma_wf')

            join = pe.Node(niu.Function(output_names=["out_file"],
                                        function=_to_join),
                           name='aroma_confounds')

            mrg_conf_metadata = pe.Node(niu.Merge(2), name='merge_confound_metadata',
                                        run_without_submitting=True)
            mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2',
                                         run_without_submitting=True)
            # Replace the direct confounds outputs with the AROMA-augmented ones
            workflow.disconnect([
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_file', 'confounds'),
                ]),
                (bold_confounds_wf, outputnode, [
                    ('outputnode.confounds_metadata', 'confounds_metadata'),
                ]),
            ])
            workflow.connect([
                (inputnode, ica_aroma_wf, [
                    ('bold_file', 'inputnode.name_source')]),
                (bold_hmc_wf, ica_aroma_wf, [
                    ('outputnode.movpar_file', 'inputnode.movpar_file')]),
                (bold_reference_wf, ica_aroma_wf, [
                    ('outputnode.skip_vols', 'inputnode.skip_vols')]),
                (bold_confounds_wf, join, [
                    ('outputnode.confounds_file', 'in_file')]),
                (bold_confounds_wf, mrg_conf_metadata, [
                    ('outputnode.confounds_metadata', 'in1')]),
                (ica_aroma_wf, join, [
                    ('outputnode.aroma_confounds', 'join_file')]),
                (ica_aroma_wf, mrg_conf_metadata, [
                    ('outputnode.aroma_metadata', 'in2')]),
                (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]),
                (ica_aroma_wf, outputnode, [
                    ('outputnode.aroma_noise_ics', 'aroma_noise_ics'),
                    ('outputnode.melodic_mix', 'melodic_mix'),
                    ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file')]),
                (join, outputnode, [('out_file', 'confounds')]),
                (mrg_conf_metadata2, outputnode, [
                    ('out_dict', 'confounds_metadata')]),
                (bold_std_trans_wf, ica_aroma_wf, [
                    ('outputnode.bold_std', 'inputnode.bold_std'),
                    ('outputnode.bold_mask_std', 'inputnode.bold_mask_std'),
                    ('outputnode.spatial_reference', 'inputnode.spatial_reference')]),
            ])

    # SURFACES ##################################################################################
    # Freesurfer
    freesurfer_spaces = spaces.get_fs_spaces()
    if freesurfer and freesurfer_spaces:
        config.loggers.workflow.debug('Creating BOLD surface-sampling workflow.')
        bold_surf_wf = init_bold_surf_wf(
            mem_gb=mem_gb['resampled'],
            surface_spaces=freesurfer_spaces,
            medial_surface_nan=config.workflow.medial_surface_nan,
            name='bold_surf_wf')
        workflow.connect([
            (inputnode, bold_surf_wf, [
                ('t1w_preproc', 'inputnode.t1w_preproc'),
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id'),
                ('t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm')]),
            (bold_t1_trans_wf, bold_surf_wf, [
                ('outputnode.bold_t1', 'inputnode.source_file')]),
            (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]),
            (bold_surf_wf, func_derivatives_wf, [
                ('outputnode.target', 'inputnode.surf_refs')]),
        ])

        # CIFTI output
        if config.workflow.cifti_output:
            from .resampling import init_bold_grayords_wf
            bold_grayords_wf = init_bold_grayords_wf(
                grayord_density=config.workflow.cifti_output,
                mem_gb=mem_gb['resampled'],
                repetition_time=metadata['RepetitionTime'])

            workflow.connect([
                (inputnode, bold_grayords_wf, [
                    ('subjects_dir', 'inputnode.subjects_dir')]),
                (bold_std_trans_wf, bold_grayords_wf, [
                    ('outputnode.bold_std', 'inputnode.bold_std'),
                    ('outputnode.spatial_reference', 'inputnode.spatial_reference')]),
                (bold_surf_wf, bold_grayords_wf, [
                    ('outputnode.surfaces', 'inputnode.surf_files'),
                    ('outputnode.target', 'inputnode.surf_refs'),
                ]),
                (bold_grayords_wf, outputnode, [
                    ('outputnode.cifti_bold', 'bold_cifti'),
                    ('outputnode.cifti_variant', 'cifti_variant'),
                    ('outputnode.cifti_metadata', 'cifti_metadata'),
                    ('outputnode.cifti_density', 'cifti_density')]),
            ])

    if spaces.get_spaces(nonstandard=False, dim=(3,)):
        carpetplot_wf = init_carpetplot_wf(
            mem_gb=mem_gb['resampled'],
            metadata=metadata,
            cifti_output=config.workflow.cifti_output,
            name='carpetplot_wf')

        if config.workflow.cifti_output:
            workflow.connect(
                bold_grayords_wf, 'outputnode.cifti_bold',
                carpetplot_wf, 'inputnode.cifti_bold')
        else:
            # Xform to 'MNI152NLin2009cAsym' is always computed.
            carpetplot_select_std = pe.Node(
                KeySelect(fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'),
                name='carpetplot_select_std', run_without_submitting=True)

            workflow.connect([
                (inputnode, carpetplot_select_std, [
                    ('std2anat_xfm', 'std2anat_xfm'),
                    ('template', 'keys')]),
                (carpetplot_select_std, carpetplot_wf, [
                    ('std2anat_xfm', 'inputnode.std2anat_xfm')]),
                (bold_bold_trans_wf if not multiecho else bold_t2s_wf, carpetplot_wf, [
                    ('outputnode.bold', 'inputnode.bold'),
                    ('outputnode.bold_mask', 'inputnode.bold_mask')]),
                (bold_reg_wf, carpetplot_wf, [
                    ('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]),
            ])

        workflow.connect([
            (bold_confounds_wf, carpetplot_wf, [
                ('outputnode.confounds_file', 'inputnode.confounds_file')])
        ])

    # REPORTING ############################################################
    reportlets_dir = str(config.execution.work_dir / 'reportlets')
    # base_directory is filled in by the ds_report fix-up loop below
    ds_report_summary = pe.Node(
        DerivativesDataSink(desc='summary', keep_dtype=True),
        name='ds_report_summary', run_without_submitting=True,
        mem_gb=config.DEFAULT_MEMORY_MIN_GB)

    ds_report_validation = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            desc='validation', keep_dtype=True),
        name='ds_report_validation', run_without_submitting=True,
        mem_gb=config.DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (summary, ds_report_summary, [('out_report', 'in_file')]),
        (bold_reference_wf, ds_report_validation, [
            ('outputnode.validation_report', 'in_file')]),
    ])

    # Fill-in datasinks of reportlets seen so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = ref_file

    return workflow
def init_anat_preproc_wf(bids_root, freesurfer, hires, longitudinal,
                         omp_nthreads, output_dir, output_spaces, num_t1w,
                         reportlets_dir, skull_strip_template, debug=False,
                         name='anat_preproc_wf', skull_strip_fixed_seed=False):
    """
    Stage the anatomical preprocessing steps of *sMRIPrep*.

    This includes:

      - T1w reference: realigning and then averaging T1w images.
      - Brain extraction and INU (bias field) correction.
      - Brain tissue segmentation.
      - Spatial normalization to standard spaces.
      - Surface reconstruction with FreeSurfer_.

    .. include:: ../links.rst

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from collections import OrderedDict
            from smriprep.workflows.anatomical import init_anat_preproc_wf
            wf = init_anat_preproc_wf(
                bids_root='.',
                freesurfer=True,
                hires=True,
                longitudinal=False,
                num_t1w=1,
                omp_nthreads=1,
                output_dir='.',
                output_spaces=OrderedDict([
                    ('MNI152NLin2009cAsym', {}), ('fsaverage5', {})]),
                reportlets_dir='.',
                skull_strip_template=('MNI152NLin2009cAsym', {}),
            )

    Parameters
    ----------
    bids_root : str
        Path of the input BIDS dataset root
    debug : bool
        Enable debugging outputs
    freesurfer : bool
        Enable FreeSurfer surface reconstruction (increases runtime by 6h,
        at the very least)
    output_spaces : list
        List of spatial normalization targets. Some parts of pipeline will
        only be instantiated for some output spaces. Valid spaces:

          - Any template identifier from TemplateFlow
          - Path to a template folder organized following TemplateFlow's
            conventions

    hires : bool
        Enable sub-millimeter preprocessing in FreeSurfer
    longitudinal : bool
        Create unbiased structural template, regardless of number of inputs
        (may increase runtime)
    name : str, optional
        Workflow name (default: anat_preproc_wf)
    omp_nthreads : int
        Maximum number of threads an individual process may use
    output_dir : str
        Directory in which to save derivatives
    reportlets_dir : str
        Directory in which to save reportlets
    skull_strip_fixed_seed : bool
        Do not use a random seed for skull-stripping - will ensure
        run-to-run replicability when used with --omp-nthreads 1
        (default: ``False``).
    skull_strip_template : tuple
        Name of ANTs skull-stripping template and specifications.

    Inputs
    ------
    t1w
        List of T1-weighted structural images
    t2w
        List of T2-weighted structural images
    flair
        List of FLAIR images
    subjects_dir
        FreeSurfer SUBJECTS_DIR

    Outputs
    -------
    t1w_preproc
        The T1w reference map, which is calculated as the average of
        bias-corrected and preprocessed T1w images, defining the anatomical
        space.
    t1w_brain
        Skull-stripped ``t1w_preproc``
    t1w_mask
        Brain (binary) mask estimated by brain extraction.
    t1w_dseg
        Brain tissue segmentation of the preprocessed structural image,
        including gray-matter (GM), white-matter (WM) and cerebrospinal
        fluid (CSF).
    t1w_tpms
        List of tissue probability maps corresponding to ``t1w_dseg``.
    std_t1w
        T1w reference resampled in one or more standard spaces.
    std_mask
        Mask of skull-stripped template, in MNI space
    std_dseg
        Segmentation, resampled into MNI space
    std_tpms
        List of tissue probability maps in MNI space
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    anat2std_xfm
        Nonlinear spatial transform to resample imaging data given in
        anatomical space into standard space.
    std2anat_xfm
        Inverse transform of the above.
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to
        FreeSurfer-conformed subject space
    fsnative2t1w_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed
        subject space to T1w
    surfaces
        GIFTI surfaces (gray/white boundary, midthickness, pial, inflated)

    See also
    --------
    * :py:func:`~niworkflows.anat.ants.init_brain_extraction_wf`
    * :py:func:`~smriprep.workflows.surfaces.init_surface_recon_wf`

    """
    workflow = Workflow(name=name)

    # Build the boilerplate description; wording depends on whether one or
    # several T1w images were found for this subject.
    desc = """Anatomical data preprocessing

: """
    desc += """\
A total of {num_t1w} T1-weighted (T1w) images were found within the input
BIDS dataset.
All of them were corrected for intensity non-uniformity (INU)
""" if num_t1w > 1 else """\
The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU)
"""
    desc += """\
with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} \
[@ants, RRID:SCR_004757]"""
    desc += '.\n' if num_t1w > 1 else ", and used as T1w-reference throughout the workflow.\n"

    desc += """\
The T1w-reference was then skull-stripped with a *Nipype* implementation of
the `antsBrainExtraction.sh` workflow (from ANTs), using {skullstrip_tpl}
as target template.
Brain tissue segmentation of cerebrospinal fluid (CSF),
white-matter (WM) and gray-matter (GM) was performed on
the brain-extracted T1w using `fast` [FSL {fsl_ver}, RRID:SCR_002823,
@fsl_fast].
"""

    workflow.__desc__ = desc.format(
        ants_ver=ANTsInfo.version() or '(version unknown)',
        fsl_ver=fsl.FAST().version or '(version unknown)',
        num_t1w=num_t1w,
        skullstrip_tpl=skull_strip_template[0],
    )

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id']),
        name='inputnode')

    # BUG FIX: the field was declared as 't1w_realign_xfm', but the
    # connections below address this node's field as 't1w_ref_xfms'
    # (both when receiving from anat_template_wf and when feeding
    # anat_derivatives_wf). Connecting to an undeclared field of an
    # IdentityInterface raises at workflow-construction time, so the
    # declaration is renamed to match the connections.
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        't1w_preproc', 't1w_brain', 't1w_mask', 't1w_dseg', 't1w_tpms',
        'template', 'std_t1w', 'anat2std_xfm', 'std2anat_xfm',
        'joint_template', 'joint_anat2std_xfm', 'joint_std2anat_xfm',
        'std_mask', 'std_dseg', 'std_tpms', 't1w_ref_xfms',
        'subjects_dir', 'subject_id', 't1w2fsnative_xfm', 'fsnative2t1w_xfm',
        'surfaces', 't1w_aseg', 't1w_aparc']),
        name='outputnode')

    # Holds the brain/mask pair, filled in either by ANTs brain extraction
    # (when FreeSurfer is disabled) or by the FreeSurfer-refined mask.
    buffernode = pe.Node(
        niu.IdentityInterface(fields=['t1w_brain', 't1w_mask']),
        name='buffernode')

    # 1. Anatomical reference generation - average input T1w images.
    anat_template_wf = init_anat_template_wf(longitudinal=longitudinal,
                                             omp_nthreads=omp_nthreads,
                                             num_t1w=num_t1w)

    anat_validate = pe.Node(ValidateImage(), name='anat_validate',
                            run_without_submitting=True)

    # 2. Brain-extraction and INU (bias field) correction.
    brain_extraction_wf = init_brain_extraction_wf(
        in_template=skull_strip_template[0],
        template_spec=skull_strip_template[1],
        atropos_use_random_seed=not skull_strip_fixed_seed,
        omp_nthreads=omp_nthreads,
        normalization_quality='precise' if not debug else 'testing')

    # 3. Brain tissue segmentation
    t1w_dseg = pe.Node(fsl.FAST(segments=True, no_bias=True,
                                probability_maps=True),
                       name='t1w_dseg', mem_gb=3)

    workflow.connect([
        (buffernode, t1w_dseg, [('t1w_brain', 'in_files')]),
        (t1w_dseg, outputnode, [('tissue_class_map', 't1w_dseg'),
                                ('probability_maps', 't1w_tpms')]),
    ])

    # 4. Spatial normalization
    # Only volumetric (non-FreeSurfer) spaces are targets of normalization.
    vol_spaces = [k for k in output_spaces.keys() if not k.startswith('fs')]
    anat_norm_wf = init_anat_norm_wf(
        debug=debug,
        omp_nthreads=omp_nthreads,
        templates=[(v, output_spaces[v]) for v in vol_spaces],
    )

    workflow.connect([
        # Step 1.
        (inputnode, anat_template_wf, [('t1w', 'inputnode.t1w')]),
        (anat_template_wf, anat_validate, [
            ('outputnode.t1w_ref', 'in_file')]),
        (anat_validate, brain_extraction_wf, [
            ('out_file', 'inputnode.in_files')]),
        (brain_extraction_wf, outputnode, [
            ('outputnode.bias_corrected', 't1w_preproc')]),
        (anat_template_wf, outputnode, [
            ('outputnode.t1w_realign_xfm', 't1w_ref_xfms')]),
        (buffernode, outputnode, [('t1w_brain', 't1w_brain'),
                                  ('t1w_mask', 't1w_mask')]),
        # Steps 2, 3 and 4
        (inputnode, anat_norm_wf, [
            (('t1w', fix_multi_T1w_source_name), 'inputnode.orig_t1w'),
            ('roi', 'inputnode.lesion_mask')]),
        (brain_extraction_wf, anat_norm_wf, [
            (('outputnode.bias_corrected', _pop), 'inputnode.moving_image')]),
        (buffernode, anat_norm_wf, [('t1w_mask', 'inputnode.moving_mask')]),
        # Both the discrete segmentation and the tissue probability maps
        # feed spatial normalization (merged from two redundant tuples).
        (t1w_dseg, anat_norm_wf, [
            ('tissue_class_map', 'inputnode.moving_segmentation'),
            ('probability_maps', 'inputnode.moving_tpms')]),
        (anat_norm_wf, outputnode, [
            ('poutputnode.standardized', 'std_t1w'),
            ('poutputnode.template', 'template'),
            ('poutputnode.anat2std_xfm', 'anat2std_xfm'),
            ('poutputnode.std2anat_xfm', 'std2anat_xfm'),
            ('poutputnode.std_mask', 'std_mask'),
            ('poutputnode.std_dseg', 'std_dseg'),
            ('poutputnode.std_tpms', 'std_tpms'),
            ('outputnode.template', 'joint_template'),
            ('outputnode.anat2std_xfm', 'joint_anat2std_xfm'),
            ('outputnode.std2anat_xfm', 'joint_std2anat_xfm'),
        ]),
    ])

    # Write outputs ############################################3
    anat_reports_wf = init_anat_reports_wf(reportlets_dir=reportlets_dir,
                                           freesurfer=freesurfer)

    anat_derivatives_wf = init_anat_derivatives_wf(
        bids_root=bids_root,
        freesurfer=freesurfer,
        num_t1w=num_t1w,
        output_dir=output_dir,
    )

    workflow.connect([
        # Connect reportlets
        (inputnode, anat_reports_wf, [
            (('t1w', fix_multi_T1w_source_name), 'inputnode.source_file')]),
        (anat_template_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.t1w_conform_report')]),
        (outputnode, anat_reports_wf, [
            ('t1w_preproc', 'inputnode.t1w_preproc'),
            ('t1w_dseg', 'inputnode.t1w_dseg'),
            ('t1w_mask', 'inputnode.t1w_mask'),
            ('std_t1w', 'inputnode.std_t1w'),
            ('std_mask', 'inputnode.std_mask')]),
        (anat_norm_wf, anat_reports_wf, [
            ('poutputnode.template', 'inputnode.template'),
            ('poutputnode.template_spec', 'inputnode.template_spec')]),
        # Connect derivatives
        (anat_template_wf, anat_derivatives_wf, [
            ('outputnode.t1w_valid_list', 'inputnode.source_files')]),
        (anat_norm_wf, anat_derivatives_wf, [
            ('poutputnode.template', 'inputnode.template')]),
        (outputnode, anat_derivatives_wf, [
            ('std_t1w', 'inputnode.std_t1w'),
            ('anat2std_xfm', 'inputnode.anat2std_xfm'),
            ('std2anat_xfm', 'inputnode.std2anat_xfm'),
            ('t1w_ref_xfms', 'inputnode.t1w_ref_xfms'),
            ('t1w_preproc', 'inputnode.t1w_preproc'),
            ('t1w_mask', 'inputnode.t1w_mask'),
            ('t1w_dseg', 'inputnode.t1w_dseg'),
            ('t1w_tpms', 'inputnode.t1w_tpms'),
            ('std_mask', 'inputnode.std_mask'),
            ('std_dseg', 'inputnode.std_dseg'),
            ('std_tpms', 'inputnode.std_tpms'),
            ('t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'),
            ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm'),
            ('surfaces', 'inputnode.surfaces'),
        ]),
    ])

    if not freesurfer:  # Flag --fs-no-reconall is set - return
        # Without FreeSurfer, the ANTs-derived brain/mask are final.
        workflow.connect([
            (brain_extraction_wf, buffernode, [
                (('outputnode.out_file', _pop), 't1w_brain'),
                ('outputnode.out_mask', 't1w_mask')]),
        ])
        return workflow

    # 5. Surface reconstruction (--fs-no-reconall not set)
    surface_recon_wf = init_surface_recon_wf(name='surface_recon_wf',
                                             omp_nthreads=omp_nthreads,
                                             hires=hires)
    # Re-mask the bias-corrected T1w with the FreeSurfer-refined brainmask.
    applyrefined = pe.Node(fsl.ApplyMask(), name='applyrefined')
    workflow.connect([
        (inputnode, surface_recon_wf, [
            ('t2w', 'inputnode.t2w'),
            ('flair', 'inputnode.flair'),
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('subject_id', 'inputnode.subject_id')]),
        (anat_validate, surface_recon_wf, [('out_file', 'inputnode.t1w')]),
        (brain_extraction_wf, surface_recon_wf, [
            (('outputnode.out_file', _pop), 'inputnode.skullstripped_t1'),
            ('outputnode.out_segm', 'inputnode.ants_segs'),
            (('outputnode.bias_corrected', _pop), 'inputnode.corrected_t1')]),
        (brain_extraction_wf, applyrefined, [
            (('outputnode.bias_corrected', _pop), 'in_file')]),
        (surface_recon_wf, applyrefined, [
            ('outputnode.out_brainmask', 'mask_file')]),
        (surface_recon_wf, outputnode, [
            ('outputnode.subjects_dir', 'subjects_dir'),
            ('outputnode.subject_id', 'subject_id'),
            ('outputnode.t1w2fsnative_xfm', 't1w2fsnative_xfm'),
            ('outputnode.fsnative2t1w_xfm', 'fsnative2t1w_xfm'),
            ('outputnode.surfaces', 'surfaces'),
            ('outputnode.out_aseg', 't1w_aseg'),
            ('outputnode.out_aparc', 't1w_aparc')]),
        (applyrefined, buffernode, [('out_file', 't1w_brain')]),
        (surface_recon_wf, buffernode, [
            ('outputnode.out_brainmask', 't1w_mask')]),
        (surface_recon_wf, anat_reports_wf, [
            ('outputnode.subject_id', 'inputnode.subject_id'),
            ('outputnode.subjects_dir', 'inputnode.subjects_dir')]),
        (surface_recon_wf, anat_derivatives_wf, [
            ('outputnode.out_aseg', 'inputnode.t1w_fs_aseg'),
            ('outputnode.out_aparc', 'inputnode.t1w_fs_aparc'),
        ]),
    ])

    return workflow
def init_func_preproc_wf( aroma_melodic_dim, bold2t1w_dof, bold_file, cifti_output, debug, dummy_scans, err_on_aroma_warn, fmap_bspline, fmap_demean, force_syn, freesurfer, ignore, low_mem, medial_surface_nan, omp_nthreads, output_dir, output_spaces, regressors_all_comps, regressors_dvars_th, regressors_fd_th, reportlets_dir, t2s_coreg, use_aroma, use_bbr, use_syn, layout=None, num_bold=1, ): """ This workflow controls the functional preprocessing stages of FMRIPREP. .. workflow:: :graph2use: orig :simple_form: yes from fmriprep.workflows.bold import init_func_preproc_wf from collections import namedtuple, OrderedDict BIDSLayout = namedtuple('BIDSLayout', ['root']) wf = init_func_preproc_wf( aroma_melodic_dim=-200, bold2t1w_dof=9, bold_file='/completely/made/up/path/sub-01_task-nback_bold.nii.gz', cifti_output=False, debug=False, dummy_scans=None, err_on_aroma_warn=False, fmap_bspline=True, fmap_demean=True, force_syn=True, freesurfer=True, ignore=[], low_mem=False, medial_surface_nan=False, omp_nthreads=1, output_dir='.', output_spaces=OrderedDict([ ('MNI152Lin', {}), ('fsaverage', {'density': '10k'}), ('T1w', {}), ('fsnative', {})]), regressors_all_comps=False, regressors_dvars_th=1.5, regressors_fd_th=0.5, reportlets_dir='.', t2s_coreg=False, use_aroma=False, use_bbr=True, use_syn=True, layout=BIDSLayout('.'), num_bold=1, ) **Parameters** aroma_melodic_dim : int Maximum number of components identified by MELODIC within ICA-AROMA (default is -200, ie. no limitation). 
bold2t1w_dof : 6, 9 or 12 Degrees-of-freedom for BOLD-T1w registration bold_file : str BOLD series NIfTI file cifti_output : bool Generate bold CIFTI file in output spaces debug : bool Enable debugging outputs dummy_scans : int or None Number of volumes to consider as non steady state err_on_aroma_warn : bool Do not crash on ICA-AROMA errors fmap_bspline : bool **Experimental**: Fit B-Spline field using least-squares fmap_demean : bool Demean voxel-shift map during unwarp force_syn : bool **Temporary**: Always run SyN-based SDC freesurfer : bool Enable FreeSurfer functional registration (bbregister) and resampling BOLD series to FreeSurfer surface meshes. ignore : list Preprocessing steps to skip (may include "slicetiming", "fieldmaps") low_mem : bool Write uncompressed .nii files in some cases to reduce memory usage medial_surface_nan : bool Replace medial wall values with NaNs on functional GIFTI files omp_nthreads : int Maximum number of threads an individual process may use output_dir : str Directory in which to save derivatives output_spaces : OrderedDict Ordered dictionary where keys are TemplateFlow ID strings (e.g. ``MNI152Lin``, ``MNI152NLin6Asym``, ``MNI152NLin2009cAsym``, or ``fsLR``) strings designating nonstandard references (e.g. ``T1w`` or ``anat``, ``sbref``, ``run``, etc.), or paths pointing to custom templates organized in a TemplateFlow-like structure. Values of the dictionary aggregate modifiers (e.g. the value for the key ``MNI152Lin`` could be ``{'resolution': 2}`` if one wants the resampling to be done on the 2mm resolution version of the selected template). 
regressors_all_comps Return all CompCor component time series instead of the top fraction regressors_dvars_th Criterion for flagging DVARS outliers regressors_fd_th Criterion for flagging framewise displacement outliers reportlets_dir : str Absolute path of a directory in which reportlets will be temporarily stored t2s_coreg : bool For multiecho EPI, use the calculated T2*-map for T2*-driven coregistration use_aroma : bool Perform ICA-AROMA on MNI-resampled functional series use_bbr : bool or None Enable/disable boundary-based registration refinement. If ``None``, test BBR result for distortion before accepting. When using ``t2s_coreg``, BBR will be enabled by default unless explicitly specified otherwise. use_syn : bool **Experimental**: Enable ANTs SyN-based susceptibility distortion correction (SDC). If fieldmaps are present and enabled, this is not run, by default. layout : BIDSLayout BIDSLayout structure to enable metadata retrieval num_bold : int Total number of BOLD files that have been set for preprocessing (default is 1) **Inputs** bold_file BOLD series NIfTI file t1_preproc Bias-corrected structural template image t1_brain Skull-stripped ``t1_preproc`` t1_mask Mask of the skull-stripped template image t1_seg Segmentation of preprocessed structural image, including gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF) t1_tpms List of tissue probability maps in T1w space anat2std_xfm ANTs-compatible affine-and-warp transform file std2anat_xfm ANTs-compatible affine-and-warp transform file (inverse) subjects_dir FreeSurfer SUBJECTS_DIR subject_id FreeSurfer subject ID t1_2_fsnative_forward_transform LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space t1_2_fsnative_reverse_transform LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w **Outputs** bold_t1 BOLD series, resampled to T1w space bold_mask_t1 BOLD series mask in T1w space bold_std BOLD series, resampled to template space 
bold_mask_std BOLD series mask in template space confounds TSV of confounds surfaces BOLD series, resampled to FreeSurfer surfaces aroma_noise_ics Noise components identified by ICA-AROMA melodic_mix FSL MELODIC mixing matrix bold_cifti BOLD CIFTI image cifti_variant combination of target spaces for `bold_cifti` **Subworkflows** * :py:func:`~fmriprep.workflows.bold.util.init_bold_reference_wf` * :py:func:`~fmriprep.workflows.bold.stc.init_bold_stc_wf` * :py:func:`~fmriprep.workflows.bold.hmc.init_bold_hmc_wf` * :py:func:`~fmriprep.workflows.bold.t2s.init_bold_t2s_wf` * :py:func:`~fmriprep.workflows.bold.registration.init_bold_t1_trans_wf` * :py:func:`~fmriprep.workflows.bold.registration.init_bold_reg_wf` * :py:func:`~fmriprep.workflows.bold.confounds.init_bold_confounds_wf` * :py:func:`~fmriprep.workflows.bold.confounds.init_ica_aroma_wf` * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_std_trans_wf` * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_preproc_trans_wf` * :py:func:`~fmriprep.workflows.bold.resampling.init_bold_surf_wf` * :py:func:`~fmriprep.workflows.fieldmap.pepolar.init_pepolar_unwarp_wf` * :py:func:`~fmriprep.workflows.fieldmap.init_fmap_estimator_wf` * :py:func:`~fmriprep.workflows.fieldmap.init_sdc_unwarp_wf` * :py:func:`~fmriprep.workflows.fieldmap.init_nonlinear_sdc_wf` """ from .resampling import NONSTANDARD_REFERENCES from ..fieldmap.base import init_sdc_wf # Avoid circular dependency (#1066) # Filter out standard spaces to a separate dict std_spaces = OrderedDict([(key, modifiers) for key, modifiers in output_spaces.items() if key not in NONSTANDARD_REFERENCES]) volume_std_spaces = OrderedDict([(key, modifiers) for key, modifiers in std_spaces.items() if not key.startswith('fs')]) ref_file = bold_file mem_gb = {'filesize': 1, 'resampled': 1, 'largemem': 1} bold_tlen = 10 multiecho = isinstance(bold_file, list) if multiecho: tes = [layout.get_metadata(echo)['EchoTime'] for echo in bold_file] ref_file = dict(zip(tes, 
bold_file))[min(tes)] if os.path.isfile(ref_file): bold_tlen, mem_gb = _create_mem_gb(ref_file) wf_name = _get_wf_name(ref_file) LOGGER.log( 25, ('Creating bold processing workflow for "%s" (%.2f GB / %d TRs). ' 'Memory resampled/largemem=%.2f/%.2f GB.'), ref_file, mem_gb['filesize'], bold_tlen, mem_gb['resampled'], mem_gb['largemem']) sbref_file = None # For doc building purposes if not hasattr(layout, 'parse_file_entities'): LOGGER.log(25, 'No valid layout: building empty workflow.') metadata = { 'RepetitionTime': 2.0, 'SliceTiming': [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9], 'PhaseEncodingDirection': 'j', } fmaps = [{ 'suffix': 'phasediff', 'phasediff': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_phasediff.nii.gz', 'magnitude1': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude1.nii.gz', 'magnitude2': 'sub-03/ses-2/fmap/sub-03_ses-2_run-1_magnitude2.nii.gz', }] run_stc = True multiecho = False else: # Find associated sbref, if possible entities = layout.parse_file_entities(ref_file) entities['suffix'] = 'sbref' entities['extension'] = ['nii', 'nii.gz'] # Overwrite extensions files = layout.get(return_type='file', **entities) refbase = os.path.basename(ref_file) if 'sbref' in ignore: LOGGER.info("Single-band reference files ignored.") elif files and multiecho: LOGGER.warning("Single-band reference found, but not supported in " "multi-echo workflows at this time. Ignoring.") elif files: sbref_file = files[0] sbbase = os.path.basename(sbref_file) if len(files) > 1: LOGGER.warning( "Multiple single-band reference files found for {}; using " "{}".format(refbase, sbbase)) else: LOGGER.log( 25, "Using single-band reference file {}".format(sbbase)) else: LOGGER.log(25, "No single-band-reference found for {}".format(refbase)) metadata = layout.get_metadata(ref_file) # Find fieldmaps. 
Options: (phase1|phase2|phasediff|epi|fieldmap|syn) fmaps = [] if 'fieldmaps' not in ignore: for fmap in layout.get_fieldmap(ref_file, return_list=True): if fmap['suffix'] == 'phase': LOGGER.warning("""\ Found phase1/2 type of fieldmaps, which are not currently supported. \ fMRIPrep will discard them for susceptibility distortion correction. \ Please, follow up on this issue at \ https://github.com/poldracklab/fmriprep/issues/1655.""") else: fmap['metadata'] = layout.get_metadata( fmap[fmap['suffix']]) fmaps.append(fmap) # Run SyN if forced or in the absence of fieldmap correction if force_syn or (use_syn and not fmaps): fmaps.append({'suffix': 'syn'}) # Short circuits: (True and True and (False or 'TooShort')) == 'TooShort' run_stc = ("SliceTiming" in metadata and 'slicetiming' not in ignore and (_get_series_len(ref_file) > 4 or "TooShort")) # Check if MEEPI for T2* coregistration target if t2s_coreg and not multiecho: LOGGER.warning( "No multiecho BOLD images found for T2* coregistration. " "Using standard EPI-T1 coregistration.") t2s_coreg = False # By default, force-bbr for t2s_coreg unless user specifies otherwise if t2s_coreg and use_bbr is None: use_bbr = True # Build workflow workflow = Workflow(name=wf_name) workflow.__desc__ = """ Functional data preprocessing : For each of the {num_bold} BOLD runs found per subject (across all tasks and sessions), the following preprocessing was performed. """.format(num_bold=num_bold) workflow.__postdesc__ = """\ All resamplings can be performed with *a single interpolation step* by composing all the pertinent transformations (i.e. head-motion transform matrices, susceptibility distortion correction when available, and co-registrations to anatomical and output spaces). Gridded (volumetric) resamplings were performed using `antsApplyTransforms` (ANTs), configured with Lanczos interpolation to minimize the smoothing effects of other kernels [@lanczos]. 
Non-gridded (surface) resamplings were performed using `mri_vol2surf` (FreeSurfer). """ inputnode = pe.Node(niu.IdentityInterface(fields=[ 'bold_file', 'subjects_dir', 'subject_id', 't1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms', 't1_aseg', 't1_aparc', 'anat2std_xfm', 'std2anat_xfm', 'template', 'joint_anat2std_xfm', 'joint_std2anat_xfm', 'joint_template', 't1_2_fsnative_forward_transform', 't1_2_fsnative_reverse_transform' ]), name='inputnode') inputnode.inputs.bold_file = bold_file if sbref_file is not None: from niworkflows.interfaces.images import ValidateImage val_sbref = pe.Node(ValidateImage(in_file=sbref_file), name='val_sbref') outputnode = pe.Node(niu.IdentityInterface(fields=[ 'bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1', 'bold_aparc_t1', 'bold_std', 'bold_std_ref' 'bold_mask_std', 'bold_aseg_std', 'bold_aparc_std', 'bold_native', 'bold_cifti', 'cifti_variant', 'cifti_variant_key', 'surfaces', 'confounds', 'aroma_noise_ics', 'melodic_mix', 'nonaggr_denoised_file', 'confounds_metadata' ]), name='outputnode') # BOLD buffer: an identity used as a pointer to either the original BOLD # or the STC'ed one for further use. 
boldbuffer = pe.Node(niu.IdentityInterface(fields=['bold_file']), name='boldbuffer') summary = pe.Node(FunctionalSummary( slice_timing=run_stc, registration=('FSL', 'FreeSurfer')[freesurfer], registration_dof=bold2t1w_dof, pe_direction=metadata.get("PhaseEncodingDirection"), tr=metadata.get("RepetitionTime")), name='summary', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True) summary.inputs.dummy_scans = dummy_scans # CIfTI output: currently, we only support fsaverage{5,6} cifti_spaces = set(s for s in output_spaces.keys() if s in ('fsaverage5', 'fsaverage6')) fsaverage_den = output_spaces.get('fsaverage', {}).get('den') if fsaverage_den: cifti_spaces.add(FSAVERAGE_DENSITY[fsaverage_den]) cifti_output = cifti_output and cifti_spaces func_derivatives_wf = init_func_derivatives_wf( bids_root=layout.root, cifti_output=cifti_output, freesurfer=freesurfer, metadata=metadata, output_dir=output_dir, output_spaces=output_spaces, standard_spaces=list(std_spaces.keys()), use_aroma=use_aroma, ) workflow.connect([ (outputnode, func_derivatives_wf, [ ('bold_t1', 'inputnode.bold_t1'), ('bold_t1_ref', 'inputnode.bold_t1_ref'), ('bold_aseg_t1', 'inputnode.bold_aseg_t1'), ('bold_aparc_t1', 'inputnode.bold_aparc_t1'), ('bold_mask_t1', 'inputnode.bold_mask_t1'), ('bold_native', 'inputnode.bold_native'), ('confounds', 'inputnode.confounds'), ('surfaces', 'inputnode.surfaces'), ('aroma_noise_ics', 'inputnode.aroma_noise_ics'), ('melodic_mix', 'inputnode.melodic_mix'), ('nonaggr_denoised_file', 'inputnode.nonaggr_denoised_file'), ('bold_cifti', 'inputnode.bold_cifti'), ('cifti_variant', 'inputnode.cifti_variant'), ('cifti_variant_key', 'inputnode.cifti_variant_key'), ('confounds_metadata', 'inputnode.confounds_metadata'), ]), ]) # Generate a tentative boldref bold_reference_wf = init_bold_reference_wf(omp_nthreads=omp_nthreads) bold_reference_wf.inputs.inputnode.dummy_scans = dummy_scans if sbref_file is not None: workflow.connect([ (val_sbref, bold_reference_wf, [('out_file', 
'inputnode.sbref_file')]), ]) # Top-level BOLD splitter bold_split = pe.Node(FSLSplit(dimension='t'), name='bold_split', mem_gb=mem_gb['filesize'] * 3) # HMC on the BOLD bold_hmc_wf = init_bold_hmc_wf(name='bold_hmc_wf', mem_gb=mem_gb['filesize'], omp_nthreads=omp_nthreads) # calculate BOLD registration to T1w bold_reg_wf = init_bold_reg_wf(name='bold_reg_wf', freesurfer=freesurfer, use_bbr=use_bbr, bold2t1w_dof=bold2t1w_dof, mem_gb=mem_gb['resampled'], omp_nthreads=omp_nthreads, use_compression=False) # apply BOLD registration to T1w bold_t1_trans_wf = init_bold_t1_trans_wf(name='bold_t1_trans_wf', freesurfer=freesurfer, use_fieldwarp=(fmaps is not None or use_syn), multiecho=multiecho, mem_gb=mem_gb['resampled'], omp_nthreads=omp_nthreads, use_compression=False) # get confounds bold_confounds_wf = init_bold_confs_wf( mem_gb=mem_gb['largemem'], metadata=metadata, regressors_all_comps=regressors_all_comps, regressors_fd_th=regressors_fd_th, regressors_dvars_th=regressors_dvars_th, name='bold_confounds_wf') bold_confounds_wf.get_node('inputnode').inputs.t1_transform_flags = [False] # Apply transforms in 1 shot # Only use uncompressed output if AROMA is to be run bold_bold_trans_wf = init_bold_preproc_trans_wf( mem_gb=mem_gb['resampled'], omp_nthreads=omp_nthreads, use_compression=not low_mem, use_fieldwarp=(fmaps is not None or use_syn), name='bold_bold_trans_wf') bold_bold_trans_wf.inputs.inputnode.name_source = ref_file # SLICE-TIME CORRECTION (or bypass) ############################################# if run_stc is True: # bool('TooShort') == True, so check True explicitly bold_stc_wf = init_bold_stc_wf(name='bold_stc_wf', metadata=metadata) workflow.connect([ (bold_reference_wf, bold_stc_wf, [('outputnode.skip_vols', 'inputnode.skip_vols')]), (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]), ]) if not multiecho: workflow.connect([(bold_reference_wf, bold_stc_wf, [ ('outputnode.bold_file', 'inputnode.bold_file') ])]) else: # for meepi, iterate 
through stc_wf for all workflows meepi_echos = boldbuffer.clone(name='meepi_echos') meepi_echos.iterables = ('bold_file', bold_file) workflow.connect([(meepi_echos, bold_stc_wf, [('bold_file', 'inputnode.bold_file')])]) elif not multiecho: # STC is too short or False # bypass STC from original BOLD to the splitter through boldbuffer workflow.connect([(bold_reference_wf, boldbuffer, [('outputnode.bold_file', 'bold_file')])]) else: # for meepi, iterate over all meepi echos to boldbuffer boldbuffer.iterables = ('bold_file', bold_file) # SDC (SUSCEPTIBILITY DISTORTION CORRECTION) or bypass ########################## bold_sdc_wf = init_sdc_wf(fmaps, metadata, omp_nthreads=omp_nthreads, debug=debug, fmap_demean=fmap_demean, fmap_bspline=fmap_bspline) # If no standard space is given, use the default for SyN-SDC if not volume_std_spaces or 'MNI152NLin2009cAsym' in volume_std_spaces: bold_sdc_wf.inputs.inputnode.template = 'MNI152NLin2009cAsym' else: bold_sdc_wf.inputs.inputnode.template = next(iter(volume_std_spaces)) if not fmaps: LOGGER.warning('SDC: no fieldmaps found or they were ignored (%s).', ref_file) elif fmaps[0]['suffix'] == 'syn': LOGGER.warning( 'SDC: no fieldmaps found or they were ignored. 
' 'Using EXPERIMENTAL "fieldmap-less SyN" correction ' 'for dataset %s.', ref_file) else: LOGGER.log( 25, 'SDC: fieldmap estimation of type "%s" intended for %s found.', fmaps[0]['suffix'], ref_file) # Overwrite ``out_path_base`` of sdcflows' DataSinks for node in bold_sdc_wf.list_node_names(): if node.split('.')[-1].startswith('ds_'): bold_sdc_wf.get_node(node).interface.out_path_base = 'fmriprep' # MULTI-ECHO EPI DATA ############################################# if multiecho: from .util import init_skullstrip_bold_wf skullstrip_bold_wf = init_skullstrip_bold_wf(name='skullstrip_bold_wf') inputnode.inputs.bold_file = ref_file # Replace reference w first echo join_echos = pe.JoinNode( niu.IdentityInterface(fields=['bold_files']), joinsource=('meepi_echos' if run_stc is True else 'boldbuffer'), joinfield=['bold_files'], name='join_echos') # create optimal combination, adaptive T2* map bold_t2s_wf = init_bold_t2s_wf(echo_times=tes, mem_gb=mem_gb['resampled'], omp_nthreads=omp_nthreads, t2s_coreg=t2s_coreg, name='bold_t2smap_wf') workflow.connect([ (skullstrip_bold_wf, join_echos, [('outputnode.skull_stripped_file', 'bold_files')]), (join_echos, bold_t2s_wf, [('bold_files', 'inputnode.bold_file')]), ]) # MAIN WORKFLOW STRUCTURE ####################################################### workflow.connect([ # Generate early reference (inputnode, bold_reference_wf, [('bold_file', 'inputnode.bold_file')]), # BOLD buffer has slice-time corrected if it was run, original otherwise (boldbuffer, bold_split, [('bold_file', 'in_file')]), # HMC (bold_reference_wf, bold_hmc_wf, [('outputnode.raw_ref_image', 'inputnode.raw_ref_image'), ('outputnode.bold_file', 'inputnode.bold_file')]), (bold_reference_wf, summary, [('outputnode.algo_dummy_scans', 'algo_dummy_scans')]), # EPI-T1 registration workflow ( inputnode, bold_reg_wf, [ ('t1_brain', 'inputnode.t1_brain'), ('t1_seg', 'inputnode.t1_seg'), # Undefined if --no-freesurfer, but this is safe ('subjects_dir', 'inputnode.subjects_dir'), 
('subject_id', 'inputnode.subject_id'), ('t1_2_fsnative_reverse_transform', 'inputnode.t1_2_fsnative_reverse_transform') ]), (inputnode, bold_t1_trans_wf, [('bold_file', 'inputnode.name_source'), ('t1_brain', 'inputnode.t1_brain'), ('t1_mask', 'inputnode.t1_mask'), ('t1_aseg', 'inputnode.t1_aseg'), ('t1_aparc', 'inputnode.t1_aparc')]), # unused if multiecho, but this is safe (bold_hmc_wf, bold_t1_trans_wf, [('outputnode.xforms', 'inputnode.hmc_xforms')]), (bold_reg_wf, bold_t1_trans_wf, [('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]), (bold_t1_trans_wf, outputnode, [('outputnode.bold_t1', 'bold_t1'), ('outputnode.bold_t1_ref', 'bold_t1_ref'), ('outputnode.bold_aseg_t1', 'bold_aseg_t1'), ('outputnode.bold_aparc_t1', 'bold_aparc_t1')]), (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]), # SDC (or pass-through workflow) (inputnode, bold_sdc_wf, [('joint_template', 'inputnode.templates'), ('joint_std2anat_xfm', 'inputnode.std2anat_xfm')]), (inputnode, bold_sdc_wf, [('t1_brain', 'inputnode.t1_brain')]), (bold_reference_wf, bold_sdc_wf, [('outputnode.ref_image', 'inputnode.bold_ref'), ('outputnode.ref_image_brain', 'inputnode.bold_ref_brain'), ('outputnode.bold_mask', 'inputnode.bold_mask')]), # For t2s_coreg, replace EPI-to-T1w registration inputs (bold_sdc_wf if not t2s_coreg else bold_t2s_wf, bold_reg_wf, [('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain')]), (bold_sdc_wf if not t2s_coreg else bold_t2s_wf, bold_t1_trans_wf, [('outputnode.bold_ref_brain', 'inputnode.ref_bold_brain'), ('outputnode.bold_mask', 'inputnode.ref_bold_mask')]), (bold_sdc_wf, bold_t1_trans_wf, [('outputnode.out_warp', 'inputnode.fieldwarp')]), (bold_sdc_wf, bold_bold_trans_wf, [('outputnode.out_warp', 'inputnode.fieldwarp'), ('outputnode.bold_mask', 'inputnode.bold_mask')]), (bold_sdc_wf, summary, [('outputnode.method', 'distortion_correction') ]), # Connect bold_confounds_wf (inputnode, bold_confounds_wf, [('t1_tpms', 'inputnode.t1_tpms'), ('t1_mask', 
'inputnode.t1_mask')]), (bold_hmc_wf, bold_confounds_wf, [('outputnode.movpar_file', 'inputnode.movpar_file')]), (bold_reg_wf, bold_confounds_wf, [('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]), (bold_reference_wf, bold_confounds_wf, [('outputnode.skip_vols', 'inputnode.skip_vols')]), (bold_confounds_wf, outputnode, [ ('outputnode.confounds_file', 'confounds'), ]), (bold_confounds_wf, outputnode, [ ('outputnode.confounds_metadata', 'confounds_metadata'), ]), # Connect bold_bold_trans_wf (bold_split, bold_bold_trans_wf, [('out_files', 'inputnode.bold_file')] ), (bold_hmc_wf, bold_bold_trans_wf, [('outputnode.xforms', 'inputnode.hmc_xforms')]), # Summary (outputnode, summary, [('confounds', 'confounds_file')]), ]) # for standard EPI data, pass along correct file if not multiecho: workflow.connect([ (inputnode, func_derivatives_wf, [('bold_file', 'inputnode.source_file')]), (bold_bold_trans_wf, bold_confounds_wf, [('outputnode.bold', 'inputnode.bold'), ('outputnode.bold_mask', 'inputnode.bold_mask')]), (bold_split, bold_t1_trans_wf, [('out_files', 'inputnode.bold_split')]), ]) else: # for meepi, create and use optimal combination workflow.connect([ # update name source for optimal combination (inputnode, func_derivatives_wf, [(('bold_file', combine_meepi_source), 'inputnode.source_file')]), (bold_bold_trans_wf, skullstrip_bold_wf, [('outputnode.bold', 'inputnode.in_file')]), (bold_t2s_wf, bold_confounds_wf, [('outputnode.bold', 'inputnode.bold'), ('outputnode.bold_mask', 'inputnode.bold_mask')]), (bold_t2s_wf, bold_t1_trans_wf, [('outputnode.bold', 'inputnode.bold_split')]), ]) if fmaps: from ..fieldmap.unwarp import init_fmap_unwarp_report_wf # Report on BOLD correction fmap_unwarp_report_wf = init_fmap_unwarp_report_wf() workflow.connect([ (inputnode, fmap_unwarp_report_wf, [('t1_seg', 'inputnode.in_seg') ]), (bold_reference_wf, fmap_unwarp_report_wf, [('outputnode.ref_image', 'inputnode.in_pre')]), (bold_reg_wf, fmap_unwarp_report_wf, 
[('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]), (bold_sdc_wf, fmap_unwarp_report_wf, [('outputnode.bold_ref', 'inputnode.in_post')]), ]) # Overwrite ``out_path_base`` of unwarping DataSinks for node in fmap_unwarp_report_wf.list_node_names(): if node.split('.')[-1].startswith('ds_'): fmap_unwarp_report_wf.get_node( node).interface.out_path_base = 'fmriprep' if force_syn and fmaps[0]['suffix'] != 'syn': syn_unwarp_report_wf = init_fmap_unwarp_report_wf( name='syn_unwarp_report_wf', forcedsyn=True) workflow.connect([ (inputnode, syn_unwarp_report_wf, [('t1_seg', 'inputnode.in_seg')]), (bold_reference_wf, syn_unwarp_report_wf, [('outputnode.ref_image', 'inputnode.in_pre')]), (bold_reg_wf, syn_unwarp_report_wf, [('outputnode.itk_t1_to_bold', 'inputnode.in_xfm')]), (bold_sdc_wf, syn_unwarp_report_wf, [('outputnode.syn_bold_ref', 'inputnode.in_post')]), ]) # Overwrite ``out_path_base`` of unwarping DataSinks for node in syn_unwarp_report_wf.list_node_names(): if node.split('.')[-1].startswith('ds_'): syn_unwarp_report_wf.get_node( node).interface.out_path_base = 'fmriprep' # Map final BOLD mask into T1w space (if required) if 'T1w' in output_spaces or 'anat' in output_spaces: from niworkflows.interfaces.fixes import (FixHeaderApplyTransforms as ApplyTransforms) boldmask_to_t1w = pe.Node(ApplyTransforms(interpolation='MultiLabel', float=True), name='boldmask_to_t1w', mem_gb=0.1) workflow.connect([ (bold_reg_wf, boldmask_to_t1w, [('outputnode.itk_bold_to_t1', 'transforms')]), (bold_t1_trans_wf, boldmask_to_t1w, [('outputnode.bold_mask_t1', 'reference_image')]), (bold_bold_trans_wf if not multiecho else bold_t2s_wf, boldmask_to_t1w, [('outputnode.bold_mask', 'input_image')]), (boldmask_to_t1w, outputnode, [('output_image', 'bold_mask_t1')]), ]) if set(['func', 'run', 'bold', 'boldref', 'sbref']).intersection(output_spaces): workflow.connect([ (bold_bold_trans_wf, outputnode, [('outputnode.bold', 'bold_native')]), (bold_bold_trans_wf, func_derivatives_wf, 
[('outputnode.bold_ref', 'inputnode.bold_native_ref'), ('outputnode.bold_mask', 'inputnode.bold_mask_native')]), ]) if volume_std_spaces: # Apply transforms in 1 shot # Only use uncompressed output if AROMA is to be run bold_std_trans_wf = init_bold_std_trans_wf( freesurfer=freesurfer, mem_gb=mem_gb['resampled'], omp_nthreads=omp_nthreads, standard_spaces=volume_std_spaces, name='bold_std_trans_wf', use_compression=not low_mem, use_fieldwarp=fmaps is not None, ) workflow.connect([ (inputnode, bold_std_trans_wf, [('joint_template', 'inputnode.templates'), ('joint_anat2std_xfm', 'inputnode.anat2std_xfm'), ('bold_file', 'inputnode.name_source'), ('t1_aseg', 'inputnode.bold_aseg'), ('t1_aparc', 'inputnode.bold_aparc')]), (bold_hmc_wf, bold_std_trans_wf, [('outputnode.xforms', 'inputnode.hmc_xforms')]), (bold_reg_wf, bold_std_trans_wf, [('outputnode.itk_bold_to_t1', 'inputnode.itk_bold_to_t1')]), (bold_bold_trans_wf if not multiecho else bold_t2s_wf, bold_std_trans_wf, [('outputnode.bold_mask', 'inputnode.bold_mask')]), (bold_sdc_wf, bold_std_trans_wf, [('outputnode.out_warp', 'inputnode.fieldwarp')]), (bold_std_trans_wf, outputnode, [('outputnode.bold_std', 'bold_std'), ('outputnode.bold_std_ref', 'bold_std_ref'), ('outputnode.bold_mask_std', 'bold_mask_std')]), ]) if freesurfer: workflow.connect([ (bold_std_trans_wf, func_derivatives_wf, [ ('poutputnode.bold_aseg_std', 'inputnode.bold_aseg_std'), ('poutputnode.bold_aparc_std', 'inputnode.bold_aparc_std'), ]), (bold_std_trans_wf, outputnode, [('outputnode.bold_aseg_std', 'bold_aseg_std'), ('outputnode.bold_aparc_std', 'bold_aparc_std')]), ]) if 'MNI152NLin2009cAsym' in std_spaces: carpetplot_wf = init_carpetplot_wf(standard_spaces=std_spaces, mem_gb=mem_gb['resampled'], metadata=metadata, name='carpetplot_wf') workflow.connect([ (inputnode, carpetplot_wf, [('joint_std2anat_xfm', 'inputnode.std2anat_xfm')]), (bold_bold_trans_wf if not multiecho else bold_t2s_wf, carpetplot_wf, [('outputnode.bold', 'inputnode.bold'), 
('outputnode.bold_mask', 'inputnode.bold_mask')]), (bold_reg_wf, carpetplot_wf, [('outputnode.itk_t1_to_bold', 'inputnode.t1_bold_xform')]), (bold_confounds_wf, carpetplot_wf, [('outputnode.confounds_file', 'inputnode.confounds_file')]), ]) if not multiecho: workflow.connect([(bold_split, bold_std_trans_wf, [('out_files', 'inputnode.bold_split')])]) else: split_opt_comb = bold_split.clone(name='split_opt_comb') workflow.connect([(bold_t2s_wf, split_opt_comb, [('outputnode.bold', 'in_file')]), (split_opt_comb, bold_std_trans_wf, [('out_files', 'inputnode.bold_split')])]) # Artifacts resampled in MNI space can only be sinked if they # were actually generated. See #1348. # Uses the parameterized outputnode to generate all outputs workflow.connect([ (bold_std_trans_wf, func_derivatives_wf, [ ('poutputnode.templates', 'inputnode.template'), ('poutputnode.bold_std_ref', 'inputnode.bold_std_ref'), ('poutputnode.bold_std', 'inputnode.bold_std'), ('poutputnode.bold_mask_std', 'inputnode.bold_mask_std'), ]), ]) if use_aroma and 'MNI152NLin6Asym' in std_spaces: # ICA-AROMA workflow from .confounds import init_ica_aroma_wf ica_aroma_wf = init_ica_aroma_wf( metadata=metadata, mem_gb=mem_gb['resampled'], omp_nthreads=omp_nthreads, use_fieldwarp=fmaps is not None, err_on_aroma_warn=err_on_aroma_warn, aroma_melodic_dim=aroma_melodic_dim, name='ica_aroma_wf') join = pe.Node(niu.Function(output_names=["out_file"], function=_to_join), name='aroma_confounds') mrg_conf_metadata = pe.Node(niu.Merge(2), name='merge_confound_metadata', run_without_submitting=True) mrg_conf_metadata2 = pe.Node(DictMerge(), name='merge_confound_metadata2', run_without_submitting=True) workflow.disconnect([ (bold_confounds_wf, outputnode, [ ('outputnode.confounds_file', 'confounds'), ]), (bold_confounds_wf, outputnode, [ ('outputnode.confounds_metadata', 'confounds_metadata'), ]), ]) workflow.connect([ (bold_std_trans_wf, ica_aroma_wf, [('outputnode.bold_std', 'inputnode.bold_std'), 
('outputnode.bold_mask_std', 'inputnode.bold_mask_std'), ('outputnode.templates', 'inputnode.templates')]), (inputnode, ica_aroma_wf, [('bold_file', 'inputnode.name_source')]), (bold_hmc_wf, ica_aroma_wf, [('outputnode.movpar_file', 'inputnode.movpar_file')]), (bold_reference_wf, ica_aroma_wf, [('outputnode.skip_vols', 'inputnode.skip_vols')]), (bold_confounds_wf, join, [('outputnode.confounds_file', 'in_file')]), (bold_confounds_wf, mrg_conf_metadata, [('outputnode.confounds_metadata', 'in1')]), (ica_aroma_wf, join, [('outputnode.aroma_confounds', 'join_file')]), (ica_aroma_wf, mrg_conf_metadata, [('outputnode.aroma_metadata', 'in2')]), (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]), (ica_aroma_wf, outputnode, [('outputnode.aroma_noise_ics', 'aroma_noise_ics'), ('outputnode.melodic_mix', 'melodic_mix'), ('outputnode.nonaggr_denoised_file', 'nonaggr_denoised_file') ]), (join, outputnode, [('out_file', 'confounds')]), (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]), ]) # SURFACES ################################################################################## surface_spaces = [ space for space in output_spaces.keys() if space.startswith('fs') ] if freesurfer and surface_spaces: LOGGER.log(25, 'Creating BOLD surface-sampling workflow.') bold_surf_wf = init_bold_surf_wf(mem_gb=mem_gb['resampled'], output_spaces=surface_spaces, medial_surface_nan=medial_surface_nan, name='bold_surf_wf') workflow.connect([ (inputnode, bold_surf_wf, [('t1_preproc', 'inputnode.t1_preproc'), ('subjects_dir', 'inputnode.subjects_dir'), ('subject_id', 'inputnode.subject_id'), ('t1_2_fsnative_forward_transform', 'inputnode.t1_2_fsnative_forward_transform')]), (bold_t1_trans_wf, bold_surf_wf, [('outputnode.bold_t1', 'inputnode.source_file')]), (bold_surf_wf, outputnode, [('outputnode.surfaces', 'surfaces')]), ]) if cifti_output: from niworkflows.interfaces.utility import KeySelect bold_surf_wf.__desc__ += """\ *Grayordinates* files [@hcppipelines], 
which combine surface-sampled data and volume-sampled data, were also generated. """ select_std = pe.Node(KeySelect(fields=['bold_std']), name='select_std', run_without_submitting=True) select_std.inputs.key = 'MNI152NLin2009cAsym' gen_cifti = pe.MapNode(GenerateCifti(), iterfield=["surface_target", "gifti_files"], name="gen_cifti") gen_cifti.inputs.TR = metadata.get("RepetitionTime") gen_cifti.inputs.surface_target = list(cifti_spaces) workflow.connect([ (bold_std_trans_wf, select_std, [('outputnode.templates', 'keys'), ('outputnode.bold_std', 'bold_std')]), (bold_surf_wf, gen_cifti, [('outputnode.surfaces', 'gifti_files')]), (inputnode, gen_cifti, [('subjects_dir', 'subjects_dir')]), (select_std, gen_cifti, [('bold_std', 'bold_file')]), (gen_cifti, outputnode, [('out_file', 'bold_cifti'), ('variant', 'cifti_variant'), ('variant_key', 'cifti_variant_key') ]), ]) # REPORTING ############################################################ ds_report_summary = pe.Node(DerivativesDataSink(desc='summary', keep_dtype=True), name='ds_report_summary', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) ds_report_validation = pe.Node(DerivativesDataSink( base_directory=reportlets_dir, desc='validation', keep_dtype=True), name='ds_report_validation', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) workflow.connect([ (summary, ds_report_summary, [('out_report', 'in_file')]), (bold_reference_wf, ds_report_validation, [('outputnode.validation_report', 'in_file')]), ]) # Fill-in datasinks of reportlets seen so far for node in workflow.list_node_names(): if node.split('.')[-1].startswith('ds_report'): workflow.get_node(node).inputs.base_directory = reportlets_dir workflow.get_node(node).inputs.source_file = ref_file return workflow
def init_bold_reference_wf(omp_nthreads, bold_file=None, pre_mask=False,
                           name='bold_reference_wf', gen_report=False):
    """
    This workflow generates reference BOLD images for a series

    The raw reference image is the target of :abbr:`HMC (head motion
    correction)`, and a contrast-enhanced reference is the subject of
    distortion correction, as well as boundary-based registration to T1w
    and template spaces.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_reference_wf
        wf = init_bold_reference_wf(omp_nthreads=1)

    **Parameters**

        omp_nthreads : int
            Maximum number of threads an individual process may use
        bold_file : str or None
            BOLD series NIfTI file; when given, it is set directly as the
            ``bold_file`` input of the workflow's ``inputnode``
        pre_mask : bool
            Whether a tentative brain mask will be provided through the
            ``bold_mask`` input to initialize skull-stripping
        name : str
            Name of workflow (default: ``bold_reference_wf``)
        gen_report : bool
            Whether a mask report node should be appended in the end

    **Inputs**

        bold_file
            BOLD series NIfTI file
        sbref_file
            Single-band reference NIfTI file, if available
        bold_mask
            A tentative brain mask to initialize the workflow (requires
            ``pre_mask`` parameter set ``True``).

    **Outputs**

        bold_file
            Validated BOLD series NIfTI file
        raw_ref_image
            Reference image to which BOLD series is motion corrected
        skip_vols
            Number of non-steady-state volumes detected at beginning of ``bold_file``
        ref_image
            Contrast-enhanced reference image
        ref_image_brain
            Skull-stripped reference image
        bold_mask
            Skull-stripping mask of reference image
        validation_report
            HTML reportlet indicating whether ``bold_file`` had a valid affine
        mask_report
            HTML reportlet showing the brain mask (populated only when
            ``gen_report`` is ``True``)

    **Subworkflows**

        * :py:func:`~fmriprep.workflows.bold.util.init_enhance_and_skullstrip_wf`

    """
    workflow = Workflow(name=name)
    workflow.__desc__ = """\
First, a reference volume and its skull-stripped version were generated
using a custom methodology of *fMRIPrep*.
"""
    inputnode = pe.Node(niu.IdentityInterface(fields=['bold_file', 'sbref_file',
                                                      'bold_mask']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['bold_file', 'raw_ref_image', 'skip_vols',
                                      'ref_image', 'ref_image_brain',
                                      'bold_mask', 'validation_report',
                                      'mask_report']),
        name='outputnode')

    # Simplify manually setting input image
    if bold_file is not None:
        inputnode.inputs.bold_file = bold_file

    validate = pe.Node(ValidateImage(), name='validate',
                       mem_gb=DEFAULT_MEMORY_MIN_GB)
    gen_ref = pe.Node(EstimateReferenceImage(), name="gen_ref",
                      mem_gb=1)  # OE: 128x128x128x50 * 64 / 8 ~ 900MB.
    # Re-run validation; no effect if no sbref; otherwise apply same validation to sbref as bold
    validate_ref = pe.Node(ValidateImage(), name='validate_ref',
                           mem_gb=DEFAULT_MEMORY_MIN_GB)
    enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf(
        omp_nthreads=omp_nthreads, pre_mask=pre_mask)

    workflow.connect([
        (inputnode, enhance_and_skullstrip_bold_wf, [('bold_mask',
                                                      'inputnode.pre_mask')]),
        (inputnode, validate, [('bold_file', 'in_file')]),
        (inputnode, gen_ref, [('sbref_file', 'sbref_file')]),
        (validate, gen_ref, [('out_file', 'in_file')]),
        (gen_ref, validate_ref, [('ref_image', 'in_file')]),
        (validate_ref, enhance_and_skullstrip_bold_wf, [('out_file',
                                                         'inputnode.in_file')]),
        (validate, outputnode, [('out_file', 'bold_file'),
                                ('out_report', 'validation_report')]),
        (gen_ref, outputnode, [('n_volumes_to_discard', 'skip_vols')]),
        (validate_ref, outputnode, [('out_file', 'raw_ref_image')]),
        (enhance_and_skullstrip_bold_wf, outputnode, [
            ('outputnode.bias_corrected_file', 'ref_image'),
            ('outputnode.mask_file', 'bold_mask'),
            ('outputnode.skull_stripped_file', 'ref_image_brain')]),
    ])

    if gen_report:
        mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')
        workflow.connect([
            (enhance_and_skullstrip_bold_wf, mask_reportlet, [
                ('outputnode.bias_corrected_file', 'background_file'),
                ('outputnode.mask_file', 'mask_file'),
            ]),
            # BUGFIX: the reportlet was generated but its output was never
            # wired into the declared ``mask_report`` field of ``outputnode``,
            # leaving that output permanently undefined.
            (mask_reportlet, outputnode, [('out_report', 'mask_report')]),
        ])

    return workflow
def init_anat_preproc_wf(
    workdir=None,
    freesurfer=False,
    no_compose_transforms=False,
    skull_strip_algorithm="ants",
    name="anat_preproc_wf",
):
    """
    Build the anatomical (T1w) preprocessing workflow.

    Modified from ``smriprep/workflows/anatomical.py``. The pipeline
    validates the T1w image, performs brain extraction and INU correction,
    runs FSL FAST tissue segmentation, spatially normalizes to the
    configured templates and wires up reportlets/datasinks. Optionally,
    FreeSurfer surface reconstruction is appended.

    Parameters
    ----------
    workdir : str or Path
        Working directory; required (used for the report datasink and the
        FreeSurfer ``subjects_dir``).
    freesurfer : bool
        Whether to append the FreeSurfer surface-reconstruction subworkflow.
    no_compose_transforms : bool
        When True, register directly to the extra templates instead of
        composing transforms through the normalization templates.
    skull_strip_algorithm : {"ants", "none"}
        Brain-extraction strategy; ``"none"`` runs N4 bias correction only.
    name : str
        Workflow name (default: ``anat_preproc_wf``).

    Returns
    -------
    workflow : :py:class:`nipype.pipeline.engine.Workflow`
    """
    # BUGFIX: previously `assert workdir is not None` ran only *after* the
    # whole graph was built, and asserts are stripped under ``python -O``.
    # Validate the argument explicitly and up front instead.
    if workdir is None:
        raise ValueError("init_anat_preproc_wf requires a `workdir`")

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=["t1w", "metadata"]),
                        name="inputnode")
    # Buffer decoupling brain/mask producers (ANTs vs. FreeSurfer-refined)
    # from their consumers.
    buffernode = pe.Node(
        niu.IdentityInterface(fields=["t1w_brain", "t1w_mask"]),
        name="buffernode")
    outputnode = pe.Node(
        niu.IdentityInterface(fields=anat_preproc_wf_output_attrs, ),
        name="outputnode",
    )

    skull_strip_template = Reference.from_string(
        config.workflow.skull_strip_template)[0]

    # Step 1 -- validation and brain extraction
    anat_validate = pe.Node(ValidateImage(), name="anat_validate",
                            run_without_submitting=True)
    if skull_strip_algorithm == "none":
        # Data is assumed already skull-stripped; only run N4 INU correction.
        brain_extraction_wf = init_n4_only_wf(
            omp_nthreads=config.nipype.omp_nthreads,
            atropos_use_random_seed=not config.workflow.skull_strip_fixed_seed,
        )
    elif skull_strip_algorithm == "ants":
        brain_extraction_wf = init_brain_extraction_wf(
            in_template=skull_strip_template.space,
            template_spec=skull_strip_template.spec,
            atropos_use_random_seed=not config.workflow.skull_strip_fixed_seed,
            omp_nthreads=config.nipype.omp_nthreads,
            normalization_quality="precise",
        )
    else:
        raise ValueError(
            f'Unknown skull_strip_algorithm "{skull_strip_algorithm}"')
    workflow.connect([
        (inputnode, anat_validate, [("t1w", "in_file")]),
        (anat_validate, brain_extraction_wf, [("out_file",
                                               "inputnode.in_files")]),
        (
            brain_extraction_wf,
            outputnode,
            [("outputnode.bias_corrected", "t1w_preproc")],
        ),
        (
            brain_extraction_wf,
            buffernode,
            [
                # `out_file` may be a list; `first` selects the first element.
                (("outputnode.out_file", first), "t1w_brain"),
                ("outputnode.out_mask", "t1w_mask"),
            ],
        ),
        (
            buffernode,
            outputnode,
            [("t1w_brain", "t1w_brain"), ("t1w_mask", "t1w_mask")],
        ),
    ])

    # Step 2 -- brain tissue segmentation with FSL FAST
    t1w_dseg = pe.Node(
        fsl.FAST(segments=True, no_bias=True, probability_maps=True),
        name="t1w_dseg",
        mem_gb=3,
    )
    workflow.connect([
        (buffernode, t1w_dseg, [("t1w_brain", "in_files")]),
        (
            t1w_dseg,
            outputnode,
            [("tissue_class_map", "t1w_dseg"),
             ("probability_maps", "t1w_tpms")],
        ),
    ])

    # Step 3 -- spatial normalization
    anat_norm_wf = init_anat_norm_wf(
        debug=config.execution.debug,
        omp_nthreads=config.nipype.omp_nthreads,
        templates=norm_templates if not no_compose_transforms else
        norm_templates + extra_templates,
    )
    workflow.connect([
        (
            inputnode,
            anat_norm_wf,
            [("t1w", "inputnode.orig_t1w")],
        ),
        (
            brain_extraction_wf,
            anat_norm_wf,
            [(("outputnode.bias_corrected", first), "inputnode.moving_image")],
        ),
        (buffernode, anat_norm_wf, [("t1w_mask", "inputnode.moving_mask")]),
        (
            t1w_dseg,
            anat_norm_wf,
            [("tissue_class_map", "inputnode.moving_segmentation")],
        ),
        (t1w_dseg, anat_norm_wf, [("probability_maps", "inputnode.moving_tpms")
                                  ]),
    ])

    # Write outputs / reportlets
    anat_reports_wf = init_anat_reports_wf(freesurfer=freesurfer,
                                           output_dir="/")
    workflow.connect([
        (
            outputnode,
            anat_reports_wf,
            [
                ("t1w_preproc", "inputnode.t1w_preproc"),
                ("t1w_mask", "inputnode.t1w_mask"),
                ("t1w_dseg", "inputnode.t1w_dseg"),
            ],
        ),
        (inputnode, anat_reports_wf, [("t1w", "inputnode.source_file")]),
        (
            anat_norm_wf,
            anat_reports_wf,
            [
                ("poutputnode.template", "inputnode.template"),
                ("poutputnode.standardized", "inputnode.std_t1w"),
                ("poutputnode.std_mask", "inputnode.std_mask"),
            ],
        ),
    ])

    # Custom: compose transforms to the extra templates unless disabled
    add_templates_by_composing_transforms(
        workflow, templates=extra_templates if not no_compose_transforms else [])

    make_reportnode(workflow, spaces=True)
    make_reportnode_datasink(workflow, workdir)

    if freesurfer:
        def get_subject(dic):
            # Extract the subject label from the metadata dict.
            return dic.get("subject")

        # 5. Surface reconstruction (--fs-no-reconall not set)
        surface_recon_wf = init_surface_recon_wf(
            name="surface_recon_wf",
            omp_nthreads=config.nipype.omp_nthreads,
            hires=config.workflow.hires,
        )
        subjects_dir = Path(workdir) / "subjects_dir"
        subjects_dir.mkdir(parents=True, exist_ok=True)
        surface_recon_wf.get_node("inputnode").inputs.subjects_dir = str(
            subjects_dir)
        workflow.connect([
            (
                inputnode,
                surface_recon_wf,
                [(("metadata", get_subject), "inputnode.subject_id")],
            ),
            (anat_validate, surface_recon_wf, [("out_file", "inputnode.t1w")]),
            (
                brain_extraction_wf,
                surface_recon_wf,
                [
                    (("outputnode.out_file", first),
                     "inputnode.skullstripped_t1"),
                    ("outputnode.out_segm", "inputnode.ants_segs"),
                    (("outputnode.bias_corrected", first),
                     "inputnode.corrected_t1"),
                ],
            ),
            (
                surface_recon_wf,
                anat_reports_wf,
                [
                    ("outputnode.subject_id", "inputnode.subject_id"),
                    ("outputnode.subjects_dir", "inputnode.subjects_dir"),
                ],
            ),
        ])

    return workflow
def init_anat_preproc_wf(
        *,
        bids_root,
        freesurfer,
        hires,
        longitudinal,
        t1w,
        omp_nthreads,
        output_dir,
        skull_strip_mode,
        skull_strip_template,
        spaces,
        debug=False,
        existing_derivatives=None,
        name='anat_preproc_wf',
        skull_strip_fixed_seed=False,
):
    """
    Stage the anatomical preprocessing steps of *sMRIPrep*.

    This includes:

      - T1w reference: realigning and then averaging T1w images.
      - Brain extraction and INU (bias field) correction.
      - Brain tissue segmentation.
      - Spatial normalization to standard spaces.
      - Surface reconstruction with FreeSurfer_.

    .. include:: ../links.rst

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from niworkflows.utils.spaces import SpatialReferences, Reference
            from smriprep.workflows.anatomical import init_anat_preproc_wf
            wf = init_anat_preproc_wf(
                bids_root='.',
                freesurfer=True,
                hires=True,
                longitudinal=False,
                t1w=['t1w.nii.gz'],
                omp_nthreads=1,
                output_dir='.',
                skull_strip_mode='force',
                skull_strip_template=Reference('OASIS30ANTs'),
                spaces=SpatialReferences(spaces=['MNI152NLin2009cAsym', 'fsaverage5']),
            )

    Parameters
    ----------
    bids_root : :obj:`str`
        Path of the input BIDS dataset root
    existing_derivatives : :obj:`dict` or None
        Dictionary mapping output specification attribute names and
        paths to corresponding derivatives.
    freesurfer : :obj:`bool`
        Enable FreeSurfer surface reconstruction (increases runtime by 6h,
        at the very least)
    hires : :obj:`bool`
        Enable sub-millimeter preprocessing in FreeSurfer
    longitudinal : :obj:`bool`
        Create unbiased structural template, regardless of number of inputs
        (may increase runtime)
    t1w : :obj:`list`
        List of T1-weighted structural images.
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use
    output_dir : :obj:`str`
        Directory in which to save derivatives
    skull_strip_template : :py:class:`~niworkflows.utils.spaces.Reference`
        Spatial reference to use in atlas-based brain extraction.
    spaces : :py:class:`~niworkflows.utils.spaces.SpatialReferences`
        Object containing standard and nonstandard space specifications.
    debug : :obj:`bool`
        Enable debugging outputs
    name : :obj:`str`, optional
        Workflow name (default: anat_preproc_wf)
    skull_strip_mode : :obj:`str`
        Determiner for T1-weighted skull stripping (`force` ensures skull
        stripping, `skip` ignores skull stripping, and `auto` automatically
        ignores skull stripping if pre-stripped brains are detected).
    skull_strip_fixed_seed : :obj:`bool`
        Do not use a random seed for skull-stripping - will ensure
        run-to-run replicability when used with --omp-nthreads 1
        (default: ``False``).

    Inputs
    ------
    t1w
        List of T1-weighted structural images
    t2w
        List of T2-weighted structural images
    roi
        A mask to exclude regions during standardization
    flair
        List of FLAIR images
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID

    Outputs
    -------
    t1w_preproc
        The T1w reference map, which is calculated as the average of
        bias-corrected and preprocessed T1w images, defining the anatomical
        space.
    t1w_brain
        Skull-stripped ``t1w_preproc``
    t1w_mask
        Brain (binary) mask estimated by brain extraction.
    t1w_dseg
        Brain tissue segmentation of the preprocessed structural image,
        including gray-matter (GM), white-matter (WM) and cerebrospinal
        fluid (CSF).
    t1w_tpms
        List of tissue probability maps corresponding to ``t1w_dseg``.
    std_preproc
        T1w reference resampled in one or more standard spaces.
    std_mask
        Mask of skull-stripped template, in MNI space
    std_dseg
        Segmentation, resampled into MNI space
    std_tpms
        List of tissue probability maps in MNI space
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    anat2std_xfm
        Nonlinear spatial transform to resample imaging data given in
        anatomical space into standard space.
    std2anat_xfm
        Inverse transform of the above.
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to
        FreeSurfer-conformed subject space
    fsnative2t1w_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed
        subject space to T1w
    surfaces
        GIFTI surfaces (gray/white boundary, midthickness, pial, inflated)

    See Also
    --------
    * :py:func:`~niworkflows.anat.ants.init_brain_extraction_wf`
    * :py:func:`~smriprep.workflows.surfaces.init_surface_recon_wf`

    """
    workflow = Workflow(name=name)
    num_t1w = len(t1w)
    # Boilerplate description, assembled incrementally as decisions are made.
    desc = """Anatomical data preprocessing : """
    desc += """\
A total of {num_t1w} T1-weighted (T1w) images were found within the input
BIDS dataset.""".format(num_t1w=num_t1w)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['template', 'subjects_dir', 'subject_id'] + get_outputnode_spec()),
        name='outputnode')

    # Connect reportlets workflows
    anat_reports_wf = init_anat_reports_wf(
        freesurfer=freesurfer,
        output_dir=output_dir,
    )
    workflow.connect([
        (outputnode, anat_reports_wf, [
            ('t1w_preproc', 'inputnode.t1w_preproc'),
            ('t1w_mask', 'inputnode.t1w_mask'),
            ('t1w_dseg', 'inputnode.t1w_dseg')]),
    ])

    # Fast path: when precomputed derivatives are supplied, populate the
    # outputnode directly from them and return early (no processing graph).
    if existing_derivatives is not None:
        LOGGER.log(
            25, "Anatomical workflow will reuse prior derivatives found in the "
            "output folder (%s).", output_dir)
        desc += """
Anatomical preprocessing was reused from previously existing derivative objects.\n"""
        workflow.__desc__ = desc

        templates = existing_derivatives.pop('template')
        # Iterate over the templates found among the existing derivatives.
        templatesource = pe.Node(niu.IdentityInterface(fields=['template']),
                                 name='templatesource')
        templatesource.iterables = [('template', templates)]
        outputnode.inputs.template = templates

        # Every remaining derivative maps 1:1 onto an outputnode field.
        for field, value in existing_derivatives.items():
            setattr(outputnode.inputs, field, value)

        anat_reports_wf.inputs.inputnode.source_file = fix_multi_T1w_source_name(
            [existing_derivatives['t1w_preproc']])

        # Select the std-space derivatives matching the iterated template.
        stdselect = pe.Node(KeySelect(fields=['std_preproc', 'std_mask'],
                                      keys=templates),
                            name='stdselect', run_without_submitting=True)
        workflow.connect([
            (inputnode, outputnode, [('subjects_dir', 'subjects_dir'),
                                     ('subject_id', 'subject_id')]),
            (inputnode, anat_reports_wf, [
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id')]),
            (templatesource, stdselect, [('template', 'key')]),
            (outputnode, stdselect, [('std_preproc', 'std_preproc'),
                                     ('std_mask', 'std_mask')]),
            (stdselect, anat_reports_wf, [
                ('key', 'inputnode.template'),
                ('std_preproc', 'inputnode.std_t1w'),
                ('std_mask', 'inputnode.std_mask'),
            ]),
        ])
        return workflow

    # The workflow is not cached.
    desc += """
All of them were corrected for intensity non-uniformity (INU)
""" if num_t1w > 1 else """\
The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU)
"""
    desc += """\
with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} \
[@ants, RRID:SCR_004757]"""
    desc += '.\n' if num_t1w > 1 else ", and used as T1w-reference throughout the workflow.\n"

    desc += """\
The T1w-reference was then skull-stripped with a *Nipype* implementation of
the `antsBrainExtraction.sh` workflow (from ANTs), using {skullstrip_tpl}
as target template.
Brain tissue segmentation of cerebrospinal fluid (CSF),
white-matter (WM) and gray-matter (GM) was performed on
the brain-extracted T1w using `fast` [FSL {fsl_ver}, RRID:SCR_002823,
@fsl_fast].
"""

    workflow.__desc__ = desc.format(
        ants_ver=ANTsInfo.version() or '(version unknown)',
        fsl_ver=fsl.FAST().version or '(version unknown)',
        num_t1w=num_t1w,
        skullstrip_tpl=skull_strip_template.fullname,
    )

    # Buffer decoupling the t1w_brain/t1w_mask producers (which differ
    # between the FreeSurfer and non-FreeSurfer branches) from consumers.
    buffernode = pe.Node(
        niu.IdentityInterface(fields=['t1w_brain', 't1w_mask']),
        name='buffernode')

    # 1. Anatomical reference generation - average input T1w images.
    anat_template_wf = init_anat_template_wf(longitudinal=longitudinal,
                                             omp_nthreads=omp_nthreads,
                                             num_t1w=num_t1w)

    anat_validate = pe.Node(ValidateImage(), name='anat_validate',
                            run_without_submitting=True)

    # 2. Brain-extraction and INU (bias field) correction.
    if skull_strip_mode == 'auto':
        import numpy as np
        import nibabel as nb

        def _is_skull_stripped(imgs):
            """Check if T1w images are skull-stripped."""

            def _check_img(img):
                # Heuristic: a skull-stripped image has (near-)zero values on
                # all six faces of the data array.
                data = np.abs(nb.load(img).get_fdata(dtype=np.float32))
                sidevals = data[0, :, :].sum() + data[-1, :, :].sum() + \
                    data[:, 0, :].sum() + data[:, -1, :].sum() + \
                    data[:, :, 0].sum() + data[:, :, -1].sum()
                return sidevals < 10

            return all(_check_img(img) for img in imgs)

        # NOTE: rebinds ``skull_strip_mode`` from str to bool; the check
        # below accepts both (``True`` or ``'skip'`` skips extraction).
        skull_strip_mode = _is_skull_stripped(t1w)

    if skull_strip_mode in (True, 'skip'):
        # Inputs are already skull-stripped; only run INU correction.
        brain_extraction_wf = init_n4_only_wf(
            omp_nthreads=omp_nthreads,
            atropos_use_random_seed=not skull_strip_fixed_seed,
        )
    else:
        brain_extraction_wf = init_brain_extraction_wf(
            in_template=skull_strip_template.space,
            template_spec=skull_strip_template.spec,
            atropos_use_random_seed=not skull_strip_fixed_seed,
            omp_nthreads=omp_nthreads,
            normalization_quality='precise' if not debug else 'testing')

    # 4. Spatial normalization
    anat_norm_wf = init_anat_norm_wf(
        debug=debug,
        omp_nthreads=omp_nthreads,
        templates=spaces.get_spaces(nonstandard=False, dim=(3, )),
    )

    workflow.connect([
        # Step 1.
        (inputnode, anat_template_wf, [('t1w', 'inputnode.t1w')]),
        (anat_template_wf, anat_validate, [('outputnode.t1w_ref',
                                            'in_file')]),
        (anat_validate, brain_extraction_wf, [('out_file',
                                               'inputnode.in_files')]),
        # ``_pop`` extracts the single image from a one-element list output.
        (brain_extraction_wf, outputnode, [(('outputnode.bias_corrected',
                                             _pop), 't1w_preproc')]),
        (anat_template_wf, outputnode, [('outputnode.t1w_realign_xfm',
                                         't1w_ref_xfms')]),
        (buffernode, outputnode, [('t1w_brain', 't1w_brain'),
                                  ('t1w_mask', 't1w_mask')]),
        # Steps 2, 3 and 4
        (inputnode, anat_norm_wf, [
            (('t1w', fix_multi_T1w_source_name), 'inputnode.orig_t1w'),
            ('roi', 'inputnode.lesion_mask')]),
        (brain_extraction_wf, anat_norm_wf, [
            (('outputnode.bias_corrected', _pop), 'inputnode.moving_image')]),
        (buffernode, anat_norm_wf, [('t1w_mask', 'inputnode.moving_mask')]),
        (anat_norm_wf, outputnode, [
            ('poutputnode.standardized', 'std_preproc'),
            ('poutputnode.std_mask', 'std_mask'),
            ('poutputnode.std_dseg', 'std_dseg'),
            ('poutputnode.std_tpms', 'std_tpms'),
            ('outputnode.template', 'template'),
            ('outputnode.anat2std_xfm', 'anat2std_xfm'),
            ('outputnode.std2anat_xfm', 'std2anat_xfm'),
        ]),
    ])

    # Change LookUp Table - BIDS wants: 0 (bg), 1 (gm), 2 (wm), 3 (csf)
    # The ``lut`` input is set later, depending on the freesurfer branch.
    lut_t1w_dseg = pe.Node(niu.Function(function=_apply_bids_lut),
                           name='lut_t1w_dseg')

    workflow.connect([
        (lut_t1w_dseg, anat_norm_wf, [('out', 'inputnode.moving_segmentation')
                                      ]),
        (lut_t1w_dseg, outputnode, [('out', 't1w_dseg')]),
    ])

    # Connect reportlets
    workflow.connect([
        (inputnode, anat_reports_wf, [
            (('t1w', fix_multi_T1w_source_name), 'inputnode.source_file')]),
        (outputnode, anat_reports_wf, [
            ('std_preproc', 'inputnode.std_t1w'),
            ('std_mask', 'inputnode.std_mask'),
        ]),
        (anat_template_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.t1w_conform_report')]),
        (anat_norm_wf, anat_reports_wf, [('poutputnode.template',
                                          'inputnode.template')]),
    ])

    # Write outputs ############################################3
    anat_derivatives_wf = init_anat_derivatives_wf(
        bids_root=bids_root,
        freesurfer=freesurfer,
        num_t1w=num_t1w,
        output_dir=output_dir,
    )

    workflow.connect([
        # Connect derivatives
        (anat_template_wf, anat_derivatives_wf, [('outputnode.t1w_valid_list',
                                                  'inputnode.source_files')]),
        (anat_norm_wf, anat_derivatives_wf, [
            ('poutputnode.template', 'inputnode.template'),
            ('poutputnode.anat2std_xfm', 'inputnode.anat2std_xfm'),
            ('poutputnode.std2anat_xfm', 'inputnode.std2anat_xfm')]),
        (outputnode, anat_derivatives_wf, [
            ('std_preproc', 'inputnode.std_t1w'),
            ('t1w_ref_xfms', 'inputnode.t1w_ref_xfms'),
            ('t1w_preproc', 'inputnode.t1w_preproc'),
            ('t1w_mask', 'inputnode.t1w_mask'),
            ('t1w_dseg', 'inputnode.t1w_dseg'),
            ('t1w_tpms', 'inputnode.t1w_tpms'),
            ('std_mask', 'inputnode.std_mask'),
            ('std_dseg', 'inputnode.std_dseg'),
            ('std_tpms', 'inputnode.std_tpms'),
        ]),
    ])

    if not freesurfer:  # Flag --fs-no-reconall is set - return
        # Brain tissue segmentation - FAST produces: 0 (bg), 1 (wm), 2 (csf), 3 (gm)
        t1w_dseg = pe.Node(fsl.FAST(segments=True, no_bias=True,
                                    probability_maps=True),
                           name='t1w_dseg', mem_gb=3)
        lut_t1w_dseg.inputs.lut = (0, 3, 1, 2
                                   )  # Maps: 0 -> 0, 3 -> 1, 1 -> 2, 2 -> 3.
        # Reorder FAST's partial-volume maps into BIDS (GM, WM, CSF) order.
        fast2bids = pe.Node(niu.Function(function=_probseg_fast2bids),
                            name="fast2bids", run_without_submitting=True)

        workflow.connect([
            (brain_extraction_wf, buffernode, [
                (('outputnode.out_file', _pop), 't1w_brain'),
                ('outputnode.out_mask', 't1w_mask')]),
            (buffernode, t1w_dseg, [('t1w_brain', 'in_files')]),
            (t1w_dseg, lut_t1w_dseg, [('partial_volume_map', 'in_dseg')]),
            (t1w_dseg, fast2bids, [('partial_volume_files', 'inlist')]),
            (fast2bids, anat_norm_wf, [('out', 'inputnode.moving_tpms')]),
            (fast2bids, outputnode, [('out', 't1w_tpms')]),
        ])
        return workflow

    # Map FS' aseg labels onto three-tissue segmentation
    lut_t1w_dseg.inputs.lut = _aseg_to_three()
    split_seg = pe.Node(niu.Function(function=_split_segments),
                        name='split_seg')

    # check for older IsRunning files and remove accordingly
    fs_isrunning = pe.Node(niu.Function(function=_fs_isRunning),
                           overwrite=True, name='fs_isrunning')
    fs_isrunning.inputs.logger = LOGGER

    # 5. Surface reconstruction (--fs-no-reconall not set)
    surface_recon_wf = init_surface_recon_wf(name='surface_recon_wf',
                                             omp_nthreads=omp_nthreads,
                                             hires=hires)
    # Refine the brain with the FreeSurfer-derived mask.
    applyrefined = pe.Node(fsl.ApplyMask(), name='applyrefined')
    workflow.connect([
        (inputnode, fs_isrunning, [('subjects_dir', 'subjects_dir'),
                                   ('subject_id', 'subject_id')]),
        (inputnode, surface_recon_wf, [('t2w', 'inputnode.t2w'),
                                       ('flair', 'inputnode.flair'),
                                       ('subject_id', 'inputnode.subject_id')
                                       ]),
        (fs_isrunning, surface_recon_wf, [('out', 'inputnode.subjects_dir')]),
        (anat_validate, surface_recon_wf, [('out_file', 'inputnode.t1w')]),
        (brain_extraction_wf, surface_recon_wf,
         [(('outputnode.out_file', _pop), 'inputnode.skullstripped_t1'),
          ('outputnode.out_segm', 'inputnode.ants_segs'),
          (('outputnode.bias_corrected', _pop), 'inputnode.corrected_t1')]),
        (brain_extraction_wf, applyrefined, [(('outputnode.bias_corrected',
                                               _pop), 'in_file')]),
        (surface_recon_wf, applyrefined, [('outputnode.out_brainmask',
                                           'mask_file')]),
        (surface_recon_wf, lut_t1w_dseg, [('outputnode.out_aseg',
                                           'in_dseg')]),
        (lut_t1w_dseg, split_seg, [('out', 'in_file')]),
        (split_seg, anat_norm_wf, [('out', 'inputnode.moving_tpms')]),
        (split_seg, outputnode, [('out', 't1w_tpms')]),
        (surface_recon_wf, outputnode,
         [('outputnode.subjects_dir', 'subjects_dir'),
          ('outputnode.subject_id', 'subject_id'),
          ('outputnode.t1w2fsnative_xfm', 't1w2fsnative_xfm'),
          ('outputnode.fsnative2t1w_xfm', 'fsnative2t1w_xfm'),
          ('outputnode.surfaces', 'surfaces'),
          ('outputnode.out_aseg', 't1w_aseg'),
          ('outputnode.out_aparc', 't1w_aparc')]),
        # The refined (FreeSurfer) brain/mask replace the ANTs outputs.
        (applyrefined, buffernode, [('out_file', 't1w_brain')]),
        (surface_recon_wf, buffernode, [('outputnode.out_brainmask',
                                         't1w_mask')]),
        (surface_recon_wf, anat_reports_wf,
         [('outputnode.subject_id', 'inputnode.subject_id'),
          ('outputnode.subjects_dir', 'inputnode.subjects_dir')]),
        (surface_recon_wf, anat_derivatives_wf, [
            ('outputnode.out_aseg', 'inputnode.t1w_fs_aseg'),
            ('outputnode.out_aparc', 'inputnode.t1w_fs_aparc'),
        ]),
        (outputnode, anat_derivatives_wf, [
            ('t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'),
            ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm'),
            ('surfaces', 'inputnode.surfaces'),
        ]),
    ])

    return workflow
def init_infant_brain_extraction_wf(
    age_months=None,
    ants_affine_init=False,
    bspline_fitting_distance=200,
    sloppy=False,
    skull_strip_template="UNCInfant",
    template_specs=None,
    interim_checkpoints=True,
    mem_gb=3.0,
    mri_scheme="T1w",
    name="infant_brain_extraction_wf",
    atropos_model=None,
    omp_nthreads=None,
    output_dir=None,
    use_float=True,
    use_t2w=False,
):
    """
    Build an atlas-based brain extraction pipeline for infant T1w/T2w MRI data.

    The T2w image is registered to the template (where the brain mask is
    defined), the mask is mapped back into T2w space, and then carried over
    to the T1w through a within-subject T1w->T2w registration.  A final
    N4 pass refines the INU correction of the T1w using the mapped mask as
    a weight image.

    Pros/Cons of available templates
    --------------------------------
    * MNIInfant
      + More cohorts available for finer-grain control
      + T1w/T2w images available
      - Template masks are poor

    * UNCInfant
      + Accurate masks
      - No T2w image available

    Parameters
    ----------
    age_months : :obj:`int`, optional
        Subject age in months; used to select a template cohort when
        ``template_specs`` does not already carry one (required then).
    ants_affine_init : :obj:`bool` or :obj:`str`, optional
        Set-up a pre-initialization step with ``antsAI`` to account for
        mis-oriented images.  The strings ``'random'`` and ``'search'``
        select alternative ``antsAI`` sampling/search settings.
    bspline_fitting_distance : :obj:`float`, optional
        Mesh resolution passed to the N4 bias-field correction nodes.
    sloppy : :obj:`bool`, optional
        Trade accuracy for speed: lower template resolution (MNIInfant),
        fewer N4 iterations, and "testing" registration parameters.
    skull_strip_template : :obj:`str`, optional
        TemplateFlow identifier of the target template.
    template_specs : :obj:`dict`, optional
        Additional selectors forwarded to ``get_template`` (e.g. cohort).
    interim_checkpoints : :obj:`bool`, optional
        NOTE(review): accepted but never used in this function body.
    mem_gb : :obj:`float`, optional
        Memory allocated to the registration nodes.
    mri_scheme : :obj:`str`, optional
        Only used to select the default Atropos model (and in an error
        message); the template is always fetched with ``suffix='T1w'``.
    name : :obj:`str`, optional
        Workflow name.
    atropos_model : :obj:`tuple`, optional
        Atropos segmentation model; defaults to ``ATROPOS_MODELS[mri_scheme]``.
    omp_nthreads : :obj:`int`, optional
        Maximum number of threads an individual process may use.
    output_dir : optional
        NOTE(review): accepted but never used in this function body.
    use_float : :obj:`bool`, optional
        Run ANTs registrations in single precision.
    use_t2w : :obj:`bool`, optional
        Must be ``True``; a :obj:`RuntimeError` is raised otherwise.

    Inputs
    ------
    t1w
        T1-weighted image
    t2w
        T2-weighted image
    in_mask
        NOTE(review): declared on the inputnode but never connected.

    Outputs
    -------
    t1w_corrected
        INU-corrected T1w
    t1w_corrected_brain
        Skull-stripped, INU-corrected T1w
    t1w_mask
        Brain mask in T1w space

    Raises
    ------
    KeyError
        If neither ``age_months`` nor a cohort in ``template_specs`` is given.
    RuntimeError
        If ``use_t2w`` is false, or no suitable template instance is found.

    """
    # handle template specifics
    template_specs = template_specs or {}
    if skull_strip_template == 'MNIInfant':
        # MNIInfant ships multiple resolutions; degrade to 2mm when sloppy
        template_specs['resolution'] = 2 if sloppy else 1

    if not template_specs.get('cohort'):
        if age_months is None:
            raise KeyError(
                f"Age or cohort for {skull_strip_template} must be provided!")
        template_specs['cohort'] = cohort_by_months(skull_strip_template, age_months)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=["t1w", "t2w", "in_mask"]),
        name="inputnode")
    outputnode = pe.Node(niu.IdentityInterface(
        fields=["t1w_corrected", "t1w_corrected_brain", "t1w_mask"]),
        name="outputnode")

    if not use_t2w:
        raise RuntimeError("A T2w image is currently required.")

    tpl_target_path = get_template(
        skull_strip_template,
        suffix='T1w',  # no T2w template
        desc=None,
        **template_specs,
    )
    if not tpl_target_path:
        # NOTE(review): `'T1w' or mri_scheme` always evaluates to 'T1w';
        # presumably intentional since only a T1w template is fetched.
        raise RuntimeError(
            f"An instance of template <tpl-{skull_strip_template}> with MR scheme "
            f"'{'T1w' or mri_scheme}' could not be found.")

    # Prefer a probabilistic brain mask; fall back to a binary one
    tpl_brainmask_path = get_template(
        skull_strip_template, label="brain", suffix="probseg", **template_specs
    ) or get_template(
        skull_strip_template, desc="brain", suffix="mask", **template_specs)

    # Registration mask restricting the metric to brain+cerebellum (optional)
    tpl_regmask_path = get_template(
        skull_strip_template, label="BrainCerebellumExtraction", suffix="mask",
        **template_specs)

    # validate images
    val_tmpl = pe.Node(ValidateImage(), name='val_tmpl')
    val_t1w = val_tmpl.clone("val_t1w")
    val_t2w = val_tmpl.clone("val_t2w")
    val_tmpl.inputs.in_file = _pop(tpl_target_path)

    # NOTE(review): gauss_tmpl is never connected below — dead node.
    gauss_tmpl = pe.Node(niu.Function(function=_gauss_filter), name="gauss_tmpl")

    # Spatial normalization step: Laplacian channels sharpen edges for the
    # multi-channel registrations
    lap_tmpl = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"), name="lap_tmpl")
    lap_t1w = lap_tmpl.clone("lap_t1w")
    lap_t2w = lap_tmpl.clone("lap_t2w")

    # Merge image nodes (intensity channel + Laplacian channel)
    mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl")
    mrg_t2w = mrg_tmpl.clone("mrg_t2w")
    mrg_t1w = mrg_tmpl.clone("mrg_t1w")

    # Normalize the Laplacian channels into a comparable intensity range
    norm_lap_tmpl = pe.Node(niu.Function(function=_trunc), name="norm_lap_tmpl")
    norm_lap_tmpl.inputs.dtype = "float32"
    norm_lap_tmpl.inputs.out_max = 1.0
    norm_lap_tmpl.inputs.percentile = (0.01, 99.99)
    norm_lap_tmpl.inputs.clip_max = None

    norm_lap_t1w = norm_lap_tmpl.clone('norm_lap_t1w')
    norm_lap_t2w = norm_lap_t1w.clone('norm_lap_t2w')

    # Set up initial spatial normalization (T2w -> template)
    ants_params = "testing" if sloppy else "precise"
    norm = pe.Node(
        Registration(from_file=pkgr_fn(
            "niworkflows.data", f"antsBrainExtraction_{ants_params}.json")),
        name="norm",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm.inputs.float = use_float
    if tpl_regmask_path:
        norm.inputs.fixed_image_masks = tpl_regmask_path

    # Set up T2w -> T1w within-subject registration
    norm_subj = pe.Node(
        Registration(
            from_file=pkgr_fn("nibabies.data", "within_subject_t1t2.json")),
        name="norm_subj",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm_subj.inputs.float = use_float

    # main workflow
    wf = pe.Workflow(name)
    # Create a buffer interface as a cache for the actual inputs to registration
    # NOTE(review): buffernode is never connected below — dead node.
    buffernode = pe.Node(
        niu.IdentityInterface(fields=["hires_target", "smooth_target"]),
        name="buffernode")

    # truncate target intensity for N4 correction
    clip_tmpl = pe.Node(niu.Function(function=_trunc), name="clip_tmpl")
    clip_t2w = clip_tmpl.clone('clip_t2w')
    clip_t1w = clip_tmpl.clone('clip_t1w')

    # INU correction of the t1w (initial, unweighted pass; cloned for each modality)
    init_t2w_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=False,
            copy_header=True,
            n_iterations=[50] * (4 - sloppy),  # one fewer N4 level when sloppy
            convergence_threshold=1e-7,
            shrink_factor=4,
            bspline_fitting_distance=bspline_fitting_distance,
        ),
        n_procs=omp_nthreads,
        name="init_t2w_n4",
    )
    init_t1w_n4 = init_t2w_n4.clone("init_t1w_n4")

    # re-truncate after INU correction
    clip_t2w_inu = pe.Node(niu.Function(function=_trunc), name="clip_t2w_inu")
    clip_t1w_inu = clip_t2w_inu.clone("clip_t1w_inu")

    map_mask_t2w = pe.Node(ApplyTransforms(interpolation="Gaussian", float=True),
                           name="map_mask_t2w", mem_gb=1)
    map_mask_t1w = map_mask_t2w.clone("map_mask_t1w")

    # map template brainmask to t2w space
    map_mask_t2w.inputs.input_image = str(tpl_brainmask_path)

    # binarize the (probabilistic) mapped masks
    thr_t2w_mask = pe.Node(Binarize(thresh_low=0.80), name="thr_t2w_mask")
    thr_t1w_mask = thr_t2w_mask.clone('thr_t1w_mask')

    # derive a b-spline grid argument from the T1w's field of view
    bspline_grid = pe.Node(niu.Function(function=_bspline_distance), name="bspline_grid")

    # Refine INU correction (second pass, weighted by the mapped brain mask)
    final_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            bspline_fitting_distance=bspline_fitting_distance,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 5,
            convergence_threshold=1e-7,
            rescale_intensities=True,
            shrink_factor=4,
        ),
        n_procs=omp_nthreads,
        name="final_n4",
    )
    final_mask = pe.Node(ApplyMask(), name="final_mask")

    if atropos_model is None:
        atropos_model = tuple(ATROPOS_MODELS[mri_scheme].values())

    # NOTE(review): atropos_wf and sel_wm are built but never connected to
    # the workflow (see the commented-out wiring) — currently dead nodes.
    atropos_wf = init_atropos_wf(
        use_random_seed=False,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        in_segmentation_model=atropos_model,
    )
    # if tpl_regmask_path:
    #     atropos_wf.get_node('inputnode').inputs.in_mask_dilated = tpl_regmask_path

    sel_wm = pe.Node(niu.Select(index=atropos_model[-1] - 1), name='sel_wm',
                     run_without_submitting=True)

    wf.connect([
        # 1. massage template
        (val_tmpl, clip_tmpl, [("out_file", "in_file")]),
        (clip_tmpl, lap_tmpl, [("out", "op1")]),
        (clip_tmpl, mrg_tmpl, [("out", "in1")]),
        (lap_tmpl, norm_lap_tmpl, [("output_image", "in_file")]),
        (norm_lap_tmpl, mrg_tmpl, [("out", "in2")]),
        # 2. massage T2w
        (inputnode, val_t2w, [('t2w', 'in_file')]),
        (val_t2w, clip_t2w, [('out_file', 'in_file')]),
        (clip_t2w, init_t2w_n4, [('out', 'input_image')]),
        (init_t2w_n4, clip_t2w_inu, [("output_image", "in_file")]),
        (clip_t2w_inu, lap_t2w, [('out', 'op1')]),
        (clip_t2w_inu, mrg_t2w, [('out', 'in1')]),
        (lap_t2w, norm_lap_t2w, [("output_image", "in_file")]),
        (norm_lap_t2w, mrg_t2w, [("out", "in2")]),
        # 3. normalize T2w to target template (UNC)
        (mrg_t2w, norm, [("out", "moving_image")]),
        (mrg_tmpl, norm, [("out", "fixed_image")]),
        # 4. map template brainmask to T2w space
        (inputnode, map_mask_t2w, [('t2w', 'reference_image')]),
        (norm, map_mask_t2w, [
            ("reverse_transforms", "transforms"),
            ("reverse_invert_flags", "invert_transform_flags")]),
        (map_mask_t2w, thr_t2w_mask, [("output_image", "in_file")]),
        # 5. massage T1w
        (inputnode, val_t1w, [("t1w", "in_file")]),
        (val_t1w, clip_t1w, [("out_file", "in_file")]),
        (clip_t1w, init_t1w_n4, [("out", "input_image")]),
        (init_t1w_n4, clip_t1w_inu, [("output_image", "in_file")]),
        (clip_t1w_inu, lap_t1w, [('out', 'op1')]),
        (clip_t1w_inu, mrg_t1w, [('out', 'in1')]),
        (lap_t1w, norm_lap_t1w, [("output_image", "in_file")]),
        (norm_lap_t1w, mrg_t1w, [("out", "in2")]),
        # 6. normalize within subject T1w to T2w
        (mrg_t1w, norm_subj, [("out", "moving_image")]),
        (mrg_t2w, norm_subj, [("out", "fixed_image")]),
        (thr_t2w_mask, norm_subj, [("out_mask", "fixed_image_mask")]),
        # 7. map mask to T1w space
        (thr_t2w_mask, map_mask_t1w, [("out_mask", "input_image")]),
        (inputnode, map_mask_t1w, [("t1w", "reference_image")]),
        (norm_subj, map_mask_t1w, [
            ("reverse_transforms", "transforms"),
            ("reverse_invert_flags", "invert_transform_flags"),
        ]),
        (map_mask_t1w, thr_t1w_mask, [("output_image", "in_file")]),
        # 8. T1w INU
        (inputnode, final_n4, [("t1w", "input_image")]),
        (inputnode, bspline_grid, [("t1w", "in_file")]),
        (bspline_grid, final_n4, [("out", "args")]),
        (map_mask_t1w, final_n4, [("output_image", "weight_image")]),
        (final_n4, final_mask, [("output_image", "in_file")]),
        (thr_t1w_mask, final_mask, [("out_mask", "in_mask")]),
        # 9. Outputs
        (final_n4, outputnode, [("output_image", "t1w_corrected")]),
        (thr_t1w_mask, outputnode, [("out_mask", "t1w_mask")]),
        (final_mask, outputnode, [("out_file", "t1w_corrected_brain")]),
    ])

    if ants_affine_init:
        # Pre-initialize `norm` with an antsAI-estimated affine
        ants_kwargs = dict(
            metric=("Mattes", 32, "Regular", 0.2),
            transform=("Affine", 0.1),
            search_factor=(20, 0.12),
            principal_axes=False,
            convergence=(10, 1e-6, 10),
            search_grid=(40, (0, 40, 40)),
            verbose=True,
        )
        if ants_affine_init == 'random':
            ants_kwargs['metric'] = ("Mattes", 32, "Random", 0.2)
        if ants_affine_init == 'search':
            ants_kwargs['search_grid'] = (20, (20, 40, 40))

        init_aff = pe.Node(
            AI(**ants_kwargs),
            name="init_aff",
            n_procs=omp_nthreads,
        )
        if tpl_regmask_path:
            init_aff.inputs.fixed_image_mask = _pop(tpl_regmask_path)

        wf.connect([
            (clip_tmpl, init_aff, [("out", "fixed_image")]),
            (clip_t2w_inu, init_aff, [("out", "moving_image")]),
            (init_aff, norm, [("output_transform", "initial_moving_transform")]),
        ])

    return wf
def init_dwi_reference_wf(mem_gb, omp_nthreads, name='dwi_reference_wf'):
    """
    Build a workflow computing a reference *b0* (and its mask) from a DWI series.

    The DWI dataset is validated, its b0 volumes are extracted and aligned
    to each other (FSL ``MCFLIRT``), intensity-rescaled within a quick
    automask, and collapsed into a single reference image.  That reference
    is then contrast-enhanced and skull-stripped with the methodology
    adapted from *niworkflows*.

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from dmriprep.workflows.dwi.util import init_dwi_reference_wf
            wf = init_dwi_reference_wf(mem_gb=0.01, omp_nthreads=1)
            wf.inputs.inputnode.b0_ixs=[0]

    Parameters
    ----------
    mem_gb : float
        Memory (GB) allocated to the image-validation node
    omp_nthreads : int
        Maximum number of threads an individual process may use
    name : str
        Name of workflow (default: ``dwi_reference_wf``)

    Inputs
    ------
    dwi_file
        dwi NIfTI file
    b0_ixs : list
        index of b0s in dwi NIfTI file

    Outputs
    -------
    dwi_file
        Validated dwi NIfTI file
    raw_ref_image
        Reference image
    ref_image
        Contrast-enhanced reference image
    ref_image_brain
        Skull-stripped reference image
    dwi_mask
        Skull-stripping mask of reference image
    validation_report
        HTML reportlet indicating whether ``dwi_file`` had a valid affine

    See Also
    --------
    * :py:func:`~dmriprep.workflows.dwi.util.init_enhance_and_skullstrip_wf`

    """
    wf = Workflow(name=name)

    # Workflow I/O boundary
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['dwi_file', 'b0_ixs']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            'dwi_file', 'raw_ref_image', 'ref_image', 'ref_image_brain',
            'dwi_mask', 'validation_report']),
        name='outputnode')

    # Sanity-check the input affine (produces an HTML reportlet)
    validate = pe.Node(ValidateImage(), name='validate', mem_gb=mem_gb)
    # Pull the b0 volumes out of the series
    extract_b0 = pe.Node(ExtractB0(), name='extract_b0')
    # Register the b0s to the first one
    reg_b0 = pe.Node(fsl.MCFLIRT(ref_vol=0, interpolation='sinc'), name='reg_b0')
    # Rough mask used to drive the intensity rescaling
    pre_mask = pe.Node(afni.Automask(dilate=1, outputtype='NIFTI_GZ'),
                       name='pre_mask')
    # Rescale signal intensities and collapse into a median reference
    rescale_b0 = pe.Node(RescaleB0(), name='rescale_b0')
    # Contrast enhancement + skull-stripping of the reference
    enhance_and_skullstrip_dwi_wf = init_enhance_and_skullstrip_dwi_wf(
        omp_nthreads=omp_nthreads)

    wf.connect([
        # validate -> extract b0s -> co-register them
        (inputnode, validate, [('dwi_file', 'in_file')]),
        (inputnode, extract_b0, [('b0_ixs', 'b0_ixs')]),
        (validate, extract_b0, [('out_file', 'in_file')]),
        (extract_b0, reg_b0, [('out_file', 'in_file')]),
        # rough masking + rescaling of the registered b0s
        (reg_b0, pre_mask, [('out_file', 'in_file')]),
        (reg_b0, rescale_b0, [('out_file', 'in_file')]),
        (pre_mask, rescale_b0, [('out_file', 'mask_file')]),
        # enhance and skull-strip the collapsed reference
        (rescale_b0, enhance_and_skullstrip_dwi_wf, [
            ('out_ref', 'inputnode.in_file')]),
        (pre_mask, enhance_and_skullstrip_dwi_wf, [
            ('out_file', 'inputnode.pre_mask')]),
        # expose results
        (validate, outputnode, [('out_file', 'dwi_file'),
                                ('out_report', 'validation_report')]),
        (rescale_b0, outputnode, [('out_ref', 'raw_ref_image')]),
        (enhance_and_skullstrip_dwi_wf, outputnode, [
            ('outputnode.bias_corrected_file', 'ref_image'),
            ('outputnode.mask_file', 'dwi_mask'),
            ('outputnode.skull_stripped_file', 'ref_image_brain')]),
    ])
    return wf
def init_anat_preproc_wf(bids_root, freesurfer, hires, longitudinal,
                         omp_nthreads, output_dir, output_spaces, num_t1w,
                         reportlets_dir, skull_strip_template, debug=False,
                         name='anat_preproc_wf', skull_strip_fixed_seed=False):
    """
    This workflow controls the anatomical preprocessing stages of smriprep.

    This includes:

     - Creation of a structural template
     - Skull-stripping and bias correction
     - Tissue segmentation
     - Normalization
     - Surface reconstruction with FreeSurfer

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from collections import OrderedDict
        from smriprep.workflows.anatomical import init_anat_preproc_wf
        wf = init_anat_preproc_wf(
            bids_root='.',
            freesurfer=True,
            hires=True,
            longitudinal=False,
            num_t1w=1,
            omp_nthreads=1,
            output_dir='.',
            output_spaces=OrderedDict([
                ('MNI152NLin2009cAsym', {}), ('fsaverage5', {})]),
            reportlets_dir='.',
            skull_strip_template=('MNI152NLin2009cAsym', {}),
        )

    **Parameters**

        bids_root : str
            Path of the input BIDS dataset root
        debug : bool
            Enable debugging outputs
        freesurfer : bool
            Enable FreeSurfer surface reconstruction (increases runtime by 6h,
            at the very least)
        output_spaces : OrderedDict
            Ordered mapping of spatial normalization targets to their
            specifications. Some parts of pipeline will only be instantiated
            for some output spaces. Valid spaces:

              - Any template identifier from TemplateFlow
              - Path to a template folder organized following TemplateFlow's
                conventions

        hires : bool
            Enable sub-millimeter preprocessing in FreeSurfer
        longitudinal : bool
            Create unbiased structural template, regardless of number of inputs
            (may increase runtime)
        name : str, optional
            Workflow name (default: anat_preproc_wf)
        omp_nthreads : int
            Maximum number of threads an individual process may use
        output_dir : str
            Directory in which to save derivatives
        reportlets_dir : str
            Directory in which to save reportlets
        skull_strip_fixed_seed : bool
            Do not use a random seed for skull-stripping - will ensure
            run-to-run replicability when used with --omp-nthreads 1
            (default: ``False``).
        skull_strip_template : tuple
            Name of ANTs skull-stripping template and specifications.


    **Inputs**

        t1w
            List of T1-weighted structural images
        t2w
            List of T2-weighted structural images
        flair
            List of FLAIR images
        subjects_dir
            FreeSurfer SUBJECTS_DIR


    **Outputs**

        t1_preproc
            Bias-corrected structural template, defining T1w space
        t1_brain
            Skull-stripped ``t1_preproc``
        t1_mask
            Mask of the skull-stripped template image
        t1_seg
            Segmentation of preprocessed structural image, including
            gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF)
        t1_tpms
            List of tissue probability maps in T1w space
        t1_2_tpl
            T1w template, normalized to MNI space
        t1_2_tpl_forward_transform
            ANTs-compatible affine-and-warp transform file
        t1_2_tpl_reverse_transform
            ANTs-compatible affine-and-warp transform file (inverse)
        tpl_mask
            Mask of skull-stripped template, in MNI space
        tpl_seg
            Segmentation, resampled into MNI space
        tpl_tpms
            List of tissue probability maps in MNI space
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to
            FreeSurfer-conformed subject space
        t1_2_fsnative_reverse_transform
            LTA-style affine matrix translating from FreeSurfer-conformed
            subject space to T1w
        surfaces
            GIFTI surfaces (gray/white boundary, midthickness, pial, inflated)

    **Subworkflows**

        * :py:func:`~niworkflows.anat.ants.init_brain_extraction_wf`
        * :py:func:`~smriprep.workflows.surfaces.init_surface_recon_wf`

    """
    workflow = Workflow(name=name)
    # Boilerplate methods description, formatted below once tool versions
    # are known
    desc = """Anatomical data preprocessing

: """
    desc += """\
A total of {num_t1w} T1-weighted (T1w) images were found within the input
BIDS dataset.
All of them were corrected for intensity non-uniformity (INU)
""" if num_t1w > 1 else """\
The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU)
"""
    desc += """\
with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} \
[@ants, RRID:SCR_004757]"""
    desc += '.\n' if num_t1w > 1 else ", and used as T1w-reference throughout the workflow.\n"

    desc += """\
The T1w-reference was then skull-stripped with a *Nipype* implementation of
the `antsBrainExtraction.sh` workflow (from ANTs), using {skullstrip_tpl}
as target template.
Brain tissue segmentation of cerebrospinal fluid (CSF),
white-matter (WM) and gray-matter (GM) was performed on
the brain-extracted T1w using `fast` [FSL {fsl_ver}, RRID:SCR_002823,
@fsl_fast].
"""

    workflow.__desc__ = desc.format(
        ants_ver=ANTsInfo.version() or '(version unknown)',
        fsl_ver=fsl.FAST().version or '(version unknown)',
        num_t1w=num_t1w,
        skullstrip_tpl=skull_strip_template[0],
    )

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id']),
        name='inputnode')

    # FIX: the field was declared as 'template_transforms', but connections
    # below (and the anat_derivatives_wf wiring) use 't1_template_transforms';
    # the mismatched name would make workflow.connect fail at build time.
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        't1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms',
        'template', 'warped', 'forward_transform', 'reverse_transform',
        'joint_template', 'joint_forward_transform', 'joint_reverse_transform',
        'tpl_mask', 'tpl_seg', 'tpl_tpms', 't1_template_transforms',
        'subjects_dir', 'subject_id',
        't1_2_fsnative_forward_transform', 't1_2_fsnative_reverse_transform',
        'surfaces', 't1_aseg', 't1_aparc']),
        name='outputnode')

    # Holds the brain/mask actually used downstream (source depends on the
    # freesurfer flag)
    buffernode = pe.Node(niu.IdentityInterface(fields=['t1_brain', 't1_mask']),
                         name='buffernode')

    anat_template_wf = init_anat_template_wf(longitudinal=longitudinal,
                                             omp_nthreads=omp_nthreads,
                                             num_t1w=num_t1w)

    anat_validate = pe.Node(ValidateImage(), name='anat_validate',
                            run_without_submitting=True)

    # 3. Skull-stripping
    # Bias field correction is handled in skull strip workflows.
    brain_extraction_wf = init_brain_extraction_wf(
        in_template=skull_strip_template[0],
        template_spec=skull_strip_template[1],
        atropos_use_random_seed=not skull_strip_fixed_seed,
        omp_nthreads=omp_nthreads,
        normalization_quality='precise' if not debug else 'testing')

    workflow.connect([
        (inputnode, anat_template_wf, [('t1w', 'inputnode.t1w')]),
        (anat_template_wf, anat_validate, [
            ('outputnode.t1_template', 'in_file')]),
        (anat_validate, brain_extraction_wf, [
            ('out_file', 'inputnode.in_files')]),
        (brain_extraction_wf, outputnode, [
            ('outputnode.bias_corrected', 't1_preproc')]),
        (anat_template_wf, outputnode, [
            ('outputnode.template_transforms', 't1_template_transforms')]),
        (buffernode, outputnode, [('t1_brain', 't1_brain'),
                                  ('t1_mask', 't1_mask')]),
    ])

    # 4. Surface reconstruction
    if freesurfer:
        surface_recon_wf = init_surface_recon_wf(name='surface_recon_wf',
                                                 omp_nthreads=omp_nthreads,
                                                 hires=hires)
        # refine the ANTs brain mask with the FreeSurfer one
        applyrefined = pe.Node(fsl.ApplyMask(), name='applyrefined')
        workflow.connect([
            (inputnode, surface_recon_wf, [
                ('t2w', 'inputnode.t2w'),
                ('flair', 'inputnode.flair'),
                ('subjects_dir', 'inputnode.subjects_dir'),
                ('subject_id', 'inputnode.subject_id')]),
            (anat_validate, surface_recon_wf, [('out_file', 'inputnode.t1w')]),
            (brain_extraction_wf, surface_recon_wf, [
                (('outputnode.out_file', _pop), 'inputnode.skullstripped_t1'),
                ('outputnode.out_segm', 'inputnode.ants_segs'),
                (('outputnode.bias_corrected', _pop), 'inputnode.corrected_t1')
            ]),
            (brain_extraction_wf, applyrefined, [
                (('outputnode.bias_corrected', _pop), 'in_file')]),
            (surface_recon_wf, applyrefined, [
                ('outputnode.out_brainmask', 'mask_file')]),
            (surface_recon_wf, outputnode, [
                ('outputnode.subjects_dir', 'subjects_dir'),
                ('outputnode.subject_id', 'subject_id'),
                ('outputnode.t1_2_fsnative_forward_transform',
                 't1_2_fsnative_forward_transform'),
                ('outputnode.t1_2_fsnative_reverse_transform',
                 't1_2_fsnative_reverse_transform'),
                ('outputnode.surfaces', 'surfaces'),
                ('outputnode.out_aseg', 't1_aseg'),
                ('outputnode.out_aparc', 't1_aparc')]),
            (applyrefined, buffernode, [('out_file', 't1_brain')]),
            (surface_recon_wf, buffernode, [
                ('outputnode.out_brainmask', 't1_mask')]),
        ])
    else:
        # No FreeSurfer: take brain/mask straight from ANTs brain extraction
        workflow.connect([
            (brain_extraction_wf, buffernode, [
                (('outputnode.out_file', _pop), 't1_brain'),
                ('outputnode.out_mask', 't1_mask')]),
        ])

    # 5. Segmentation
    t1_seg = pe.Node(fsl.FAST(segments=True, no_bias=True,
                              probability_maps=True),
                     name='t1_seg', mem_gb=3)

    workflow.connect([
        (buffernode, t1_seg, [('t1_brain', 'in_files')]),
        (t1_seg, outputnode, [('tissue_class_map', 't1_seg'),
                              ('probability_maps', 't1_tpms')]),
    ])

    seg_rpt = pe.Node(ROIsPlot(colors=['magenta', 'b'], levels=[1.5, 2.5]),
                      name='seg_rpt')

    # FreeSurfer spaces (fs*) are handled by the surface workflows
    vol_spaces = [k for k in output_spaces.keys() if not k.startswith('fs')]
    # 6. Spatial normalization
    anat_norm_wf = init_anat_norm_wf(
        debug=debug,
        omp_nthreads=omp_nthreads,
        reportlets_dir=reportlets_dir,
        template_list=vol_spaces,
        template_specs=[output_spaces[k] for k in vol_spaces])

    workflow.connect([
        (inputnode, anat_norm_wf, [
            (('t1w', fix_multi_T1w_source_name), 'inputnode.orig_t1w'),
            ('roi', 'inputnode.lesion_mask')]),
        (brain_extraction_wf, anat_norm_wf, [
            (('outputnode.bias_corrected', _pop), 'inputnode.moving_image')]),
        (buffernode, anat_norm_wf, [('t1_mask', 'inputnode.moving_mask')]),
        (t1_seg, anat_norm_wf, [
            ('tissue_class_map', 'inputnode.moving_segmentation')]),
        (t1_seg, anat_norm_wf, [('probability_maps', 'inputnode.moving_tpms')]),
        (anat_norm_wf, outputnode, [
            # 'poutputnode.*' fans out per-template; 'outputnode.*' is joined
            ('poutputnode.warped', 'warped'),
            ('poutputnode.template', 'template'),
            ('poutputnode.forward_transform', 'forward_transform'),
            ('poutputnode.reverse_transform', 'reverse_transform'),
            ('poutputnode.tpl_mask', 'tpl_mask'),
            ('poutputnode.tpl_seg', 'tpl_seg'),
            ('poutputnode.tpl_tpms', 'tpl_tpms'),
            ('outputnode.template', 'joint_template'),
            ('outputnode.forward_transform', 'joint_forward_transform'),
            ('outputnode.reverse_transform', 'joint_reverse_transform'),
        ]),
    ])

    anat_reports_wf = init_anat_reports_wf(reportlets_dir=reportlets_dir,
                                           freesurfer=freesurfer)
    workflow.connect([
        (inputnode, anat_reports_wf, [
            (('t1w', fix_multi_T1w_source_name), 'inputnode.source_file')]),
        (anat_template_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.t1_conform_report')]),
        (anat_template_wf, seg_rpt, [('outputnode.t1_template', 'in_file')]),
        (t1_seg, seg_rpt, [('tissue_class_map', 'in_rois')]),
        (outputnode, seg_rpt, [('t1_mask', 'in_mask')]),
        (seg_rpt, anat_reports_wf, [('out_report', 'inputnode.seg_report')]),
    ])

    if freesurfer:
        workflow.connect([
            (surface_recon_wf, anat_reports_wf, [
                ('outputnode.out_report', 'inputnode.recon_report')]),
        ])

    anat_derivatives_wf = init_anat_derivatives_wf(
        bids_root=bids_root,
        freesurfer=freesurfer,
        output_dir=output_dir,
    )

    workflow.connect([
        (anat_template_wf, anat_derivatives_wf, [
            ('outputnode.t1w_valid_list', 'inputnode.source_files')]),
        (anat_norm_wf, anat_derivatives_wf, [
            ('poutputnode.template', 'inputnode.template')]),
        (outputnode, anat_derivatives_wf, [
            ('warped', 'inputnode.t1_2_tpl'),
            ('forward_transform', 'inputnode.t1_2_tpl_forward_transform'),
            ('reverse_transform', 'inputnode.t1_2_tpl_reverse_transform'),
            ('t1_template_transforms', 'inputnode.t1_template_transforms'),
            ('t1_preproc', 'inputnode.t1_preproc'),
            ('t1_mask', 'inputnode.t1_mask'),
            ('t1_seg', 'inputnode.t1_seg'),
            ('t1_tpms', 'inputnode.t1_tpms'),
            ('tpl_mask', 'inputnode.tpl_mask'),
            ('tpl_seg', 'inputnode.tpl_seg'),
            ('tpl_tpms', 'inputnode.tpl_tpms'),
            ('t1_2_fsnative_forward_transform',
             'inputnode.t1_2_fsnative_forward_transform'),
            ('surfaces', 'inputnode.surfaces'),
        ]),
    ])

    if freesurfer:
        workflow.connect([
            (surface_recon_wf, anat_derivatives_wf, [
                ('outputnode.out_aseg', 'inputnode.t1_fs_aseg'),
                ('outputnode.out_aparc', 'inputnode.t1_fs_aparc'),
            ]),
        ])

    return workflow
def init_infant_brain_extraction_wf(
    ants_affine_init=False,
    bspline_fitting_distance=200,
    debug=False,
    in_template="MNIInfant",
    template_specs=None,
    interim_checkpoints=True,
    mem_gb=3.0,
    mri_scheme="T2w",
    name="infant_brain_extraction_wf",
    atropos_model=None,
    omp_nthreads=None,
    output_dir=None,
    use_float=True,
):
    """
    Build an atlas-based brain extraction pipeline for infant T2w MRI data.

    The target image is registered to a TemplateFlow template, the template
    brain mask is mapped back into subject space and binarized, and a
    mask-weighted N4 pass produces the final INU-corrected image and its
    skull-stripped version.

    Parameters
    ----------
    ants_affine_init : :obj:`bool`, optional
        Set-up a pre-initialization step with ``antsAI`` to account for
        mis-oriented images.
    bspline_fitting_distance : :obj:`float`, optional
        Mesh resolution passed to the initial N4 bias-field correction.
    debug : :obj:`bool`, optional
        Use "testing" registration parameters and fewer N4 iterations.
    in_template : :obj:`str`, optional
        TemplateFlow identifier of the target template.
    template_specs : :obj:`dict`, optional
        Additional selectors forwarded to ``get_template``.
    interim_checkpoints : :obj:`bool`, optional
        Generate before/after SVG reportlets of the registrations.
    mem_gb : :obj:`float`, optional
        Memory allocated to the registration node.
    mri_scheme : :obj:`str`, optional
        Modality suffix used to fetch the template and to pick the default
        Atropos model.
    name : :obj:`str`, optional
        Workflow name.
    atropos_model : :obj:`tuple`, optional
        Atropos segmentation model; defaults to ``ATROPOS_MODELS[mri_scheme]``.
    omp_nthreads : :obj:`int`, optional
        Maximum number of threads an individual process may use.
    output_dir : :obj:`pathlib.Path`, optional
        If set, sink final outputs/reports via DataSink (``.parent`` and
        ``.name`` are used, so a Path-like object is expected).
    use_float : :obj:`bool`, optional
        Run the ANTs registration in single precision.

    Inputs
    ------
    in_files
        Target image(s); only the first one is used (via ``_pop``).
    in_mask
        NOTE(review): declared on the inputnode but never connected.

    Outputs
    -------
    out_corrected
        INU-corrected target image
    out_brain
        Skull-stripped, INU-corrected target image
    out_mask
        Brain mask in target space

    Raises
    ------
    RuntimeError
        If no suitable template instance can be found in TemplateFlow.

    """
    inputnode = pe.Node(niu.IdentityInterface(fields=["in_files", "in_mask"]),
                        name="inputnode")
    outputnode = pe.Node(niu.IdentityInterface(
        fields=["out_corrected", "out_brain", "out_mask"]),
        name="outputnode")

    template_specs = template_specs or {}
    # Find a suitable target template in TemplateFlow
    tpl_target_path = get_template(in_template, suffix=mri_scheme, **template_specs)
    if not tpl_target_path:
        raise RuntimeError(
            f"An instance of template <tpl-{in_template}> with MR scheme '{mri_scheme}'"
            " could not be found.")

    # tpl_brainmask_path = get_template(
    #     in_template, desc="brain", suffix="probseg", **template_specs
    # )
    # if not tpl_brainmask_path:
    # ignore probseg for the time being
    tpl_brainmask_path = get_template(
        in_template, desc="brain", suffix="mask", **template_specs)

    # Registration mask restricting the metric to brain+cerebellum (optional)
    tpl_regmask_path = get_template(
        in_template, desc="BrainCerebellumExtraction", suffix="mask",
        **template_specs)

    # validate images
    val_tmpl = pe.Node(ValidateImage(), name='val_tmpl')
    val_tmpl.inputs.in_file = _pop(tpl_target_path)

    val_target = pe.Node(ValidateImage(), name='val_target')

    # Resample both target and template to a controlled, isotropic resolution
    res_tmpl = pe.Node(RegridToZooms(zooms=HIRES_ZOOMS), name="res_tmpl")  # testing
    res_target = pe.Node(RegridToZooms(zooms=HIRES_ZOOMS), name="res_target")  # testing
    # Smoothed copy of the template, used for the low-res antsAI branch below
    gauss_tmpl = pe.Node(niu.Function(function=_gauss_filter), name="gauss_tmpl")

    # Spatial normalization step: Laplacian channels sharpen edges
    lap_tmpl = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"),
                       name="lap_tmpl")
    lap_target = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"),
                         name="lap_target")

    # Merge image nodes (intensity channel + Laplacian channel)
    mrg_target = pe.Node(niu.Merge(2), name="mrg_target")
    mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl")

    # Normalize the Laplacian channels into a comparable intensity range
    norm_lap_tmpl = pe.Node(niu.Function(function=_trunc), name="norm_lap_tmpl")
    norm_lap_tmpl.inputs.dtype = "float32"
    norm_lap_tmpl.inputs.out_max = 1.0
    norm_lap_tmpl.inputs.percentile = (0.01, 99.99)
    norm_lap_tmpl.inputs.clip_max = None

    norm_lap_target = pe.Node(niu.Function(function=_trunc), name="norm_lap_target")
    norm_lap_target.inputs.dtype = "float32"
    norm_lap_target.inputs.out_max = 1.0
    norm_lap_target.inputs.percentile = (0.01, 99.99)
    norm_lap_target.inputs.clip_max = None

    # Set up initial spatial normalization
    ants_params = "testing" if debug else "precise"
    norm = pe.Node(
        Registration(from_file=pkgr_fn(
            "niworkflows.data", f"antsBrainExtraction_{ants_params}.json")),
        name="norm",
        n_procs=omp_nthreads,
        mem_gb=mem_gb,
    )
    norm.inputs.float = use_float

    # main workflow
    wf = pe.Workflow(name)
    # Create a buffer interface as a cache for the actual inputs to registration
    buffernode = pe.Node(
        niu.IdentityInterface(fields=["hires_target", "smooth_target"]),
        name="buffernode")

    # truncate target intensity for N4 correction
    clip_target = pe.Node(
        niu.Function(function=_trunc),
        name="clip_target",
    )
    clip_tmpl = pe.Node(
        niu.Function(function=_trunc),
        name="clip_tmpl",
    )
    # clip_tmpl.inputs.in_file = _pop(tpl_target_path)

    # INU correction of the target image (initial, unweighted pass)
    init_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=False,
            copy_header=True,
            n_iterations=[50] * (4 - debug),  # one fewer N4 level when debugging
            convergence_threshold=1e-7,
            shrink_factor=4,
            bspline_fitting_distance=bspline_fitting_distance,
        ),
        n_procs=omp_nthreads,
        name="init_n4",
    )
    # re-truncate after INU correction
    clip_inu = pe.Node(
        niu.Function(function=_trunc),
        name="clip_inu",
    )
    gauss_target = pe.Node(niu.Function(function=_gauss_filter),
                           name="gauss_target")
    wf.connect([
        # truncation, resampling, and initial N4
        (inputnode, val_target, [(("in_files", _pop), "in_file")]),
        # (inputnode, res_target, [(("in_files", _pop), "in_file")]),
        (val_target, res_target, [("out_file", "in_file")]),
        (res_target, clip_target, [("out_file", "in_file")]),
        (val_tmpl, clip_tmpl, [("out_file", "in_file")]),
        (clip_tmpl, res_tmpl, [("out", "in_file")]),
        (clip_target, init_n4, [("out", "input_image")]),
        (init_n4, clip_inu, [("output_image", "in_file")]),
        (clip_inu, gauss_target, [("out", "in_file")]),
        (clip_inu, buffernode, [("out", "hires_target")]),
        (gauss_target, buffernode, [("out", "smooth_target")]),
        (res_tmpl, gauss_tmpl, [("out_file", "in_file")]),
        # (clip_tmpl, gauss_tmpl, [("out", "in_file")]),
    ])

    # Graft a template registration-mask if present
    if tpl_regmask_path:
        hires_mask = pe.Node(ApplyTransforms(
            input_image=_pop(tpl_regmask_path),
            transforms="identity",
            interpolation="NearestNeighbor",
            float=True),
            name="hires_mask", mem_gb=1)
        wf.connect([
            (res_tmpl, hires_mask, [("out_file", "reference_image")]),
        ])

    # Map the template brain mask into subject space with the inverse warp
    map_brainmask = pe.Node(
        ApplyTransforms(interpolation="Gaussian", float=True),
        name="map_brainmask", mem_gb=1)
    map_brainmask.inputs.input_image = str(tpl_brainmask_path)

    thr_brainmask = pe.Node(Binarize(thresh_low=0.80), name="thr_brainmask")

    # derive a b-spline grid argument from the target's field of view
    bspline_grid = pe.Node(niu.Function(function=_bspline_distance),
                           name="bspline_grid")

    # Refine INU correction (second pass, weighted by the mapped brain mask)
    final_n4 = pe.Node(
        N4BiasFieldCorrection(
            dimension=3,
            save_bias=True,
            copy_header=True,
            n_iterations=[50] * 5,
            convergence_threshold=1e-7,
            rescale_intensities=True,
            shrink_factor=4,
        ),
        n_procs=omp_nthreads,
        name="final_n4",
    )
    final_mask = pe.Node(ApplyMask(), name="final_mask")

    if atropos_model is None:
        atropos_model = tuple(ATROPOS_MODELS[mri_scheme].values())

    # NOTE(review): atropos_wf and sel_wm are built but never connected to
    # the workflow (see the commented-out wiring below) — currently dead nodes.
    atropos_wf = init_atropos_wf(
        use_random_seed=False,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        in_segmentation_model=atropos_model,
    )
    # if tpl_regmask_path:
    #     atropos_wf.get_node('inputnode').inputs.in_mask_dilated = tpl_regmask_path
    sel_wm = pe.Node(niu.Select(index=atropos_model[-1] - 1), name='sel_wm',
                     run_without_submitting=True)

    wf.connect([
        (inputnode, map_brainmask, [(("in_files", _pop), "reference_image")]),
        (inputnode, final_n4, [(("in_files", _pop), "input_image")]),
        (inputnode, bspline_grid, [(("in_files", _pop), "in_file")]),
        # (bspline_grid, final_n4, [("out", "bspline_fitting_distance")]),
        (bspline_grid, final_n4, [("out", "args")]),
        # merge laplacian and original images
        (buffernode, lap_target, [("smooth_target", "op1")]),
        (buffernode, mrg_target, [("hires_target", "in1")]),
        (lap_target, norm_lap_target, [("output_image", "in_file")]),
        (norm_lap_target, mrg_target, [("out", "in2")]),
        # Template massaging
        (res_tmpl, lap_tmpl, [("out_file", "op1")]),
        (res_tmpl, mrg_tmpl, [("out_file", "in1")]),
        (lap_tmpl, norm_lap_tmpl, [("output_image", "in_file")]),
        (norm_lap_tmpl, mrg_tmpl, [("out", "in2")]),
        # spatial normalization
        (mrg_target, norm, [("out", "moving_image")]),
        (mrg_tmpl, norm, [("out", "fixed_image")]),
        (norm, map_brainmask, [
            ("reverse_transforms", "transforms"),
            ("reverse_invert_flags", "invert_transform_flags")]),
        (map_brainmask, thr_brainmask, [("output_image", "in_file")]),
        # take a second pass of N4
        (map_brainmask, final_n4, [("output_image", "weight_image")]),
        (final_n4, final_mask, [("output_image", "in_file")]),
        (thr_brainmask, final_mask, [("out_mask", "in_mask")]),
        (final_n4, outputnode, [("output_image", "out_corrected")]),
        (thr_brainmask, outputnode, [("out_mask", "out_mask")]),
        (final_mask, outputnode, [("out_file", "out_brain")]),
    ])

    # wf.disconnect([
    #     (get_brainmask, apply_mask, [('output_image', 'mask_file')]),
    #     (copy_xform, outputnode, [('out_mask', 'out_mask')]),
    # ])
    # wf.connect([
    #     (init_n4, atropos_wf, [
    #         ('output_image', 'inputnode.in_files')]),  # intensity image
    #     (thr_brainmask, atropos_wf, [
    #         ('out_mask', 'inputnode.in_mask')]),
    #     (atropos_wf, sel_wm, [('outputnode.out_tpms', 'inlist')]),
    #     (sel_wm, final_n4, [('out', 'weight_image')]),
    # ])
    # wf.connect([
    #     (atropos_wf, outputnode, [
    #         ('outputnode.out_mask', 'out_mask'),
    #         ('outputnode.out_segm', 'out_segm'),
    #         ('outputnode.out_tpms', 'out_tpms')]),
    # ])

    if tpl_regmask_path:
        wf.connect([
            (hires_mask, norm, [("output_image", "fixed_image_masks")]),
            # (hires_mask, atropos_wf, [
            #     ("output_image", "inputnode.in_mask_dilated")]),
        ])

    if interim_checkpoints:
        # Before/after reportlet of the final registration + mask
        final_apply = pe.Node(
            ApplyTransforms(interpolation="BSpline", float=True),
            name="final_apply", mem_gb=1)
        final_report = pe.Node(SimpleBeforeAfter(
            before_label=f"tpl-{in_template}",
            after_label="target",
            out_report="final_report.svg"),
            name="final_report")
        wf.connect([
            (inputnode, final_apply, [(("in_files", _pop), "reference_image")]),
            (res_tmpl, final_apply, [("out_file", "input_image")]),
            (norm, final_apply, [
                ("reverse_transforms", "transforms"),
                ("reverse_invert_flags", "invert_transform_flags")]),
            (final_apply, final_report, [("output_image", "before")]),
            (outputnode, final_report, [("out_corrected", "after"),
                                        ("out_mask", "wm_seg")]),
        ])

    if output_dir:
        from nipype.interfaces.io import DataSink
        ds_final_inu = pe.Node(DataSink(base_directory=str(output_dir.parent)),
                               name="ds_final_inu")
        ds_final_msk = pe.Node(DataSink(base_directory=str(output_dir.parent)),
                               name="ds_final_msk")
        ds_report = pe.Node(DataSink(base_directory=str(output_dir.parent)),
                            name="ds_report")

        # NOTE(review): final_report is only defined when interim_checkpoints
        # is truthy; output_dir without interim_checkpoints would raise a
        # NameError here — presumably output_dir implies interim_checkpoints.
        wf.connect([
            (outputnode, ds_final_inu, [
                ("out_corrected", f"{output_dir.name}.@inu_corrected")]),
            (outputnode, ds_final_msk, [
                ("out_mask", f"{output_dir.name}.@brainmask")]),
            (final_report, ds_report, [
                ("out_report", f"{output_dir.name}.@report")]),
        ])

    if not ants_affine_init:
        return wf

    # Initialize transforms with antsAI, run on smoothed, low-res copies
    lowres_tmpl = pe.Node(RegridToZooms(zooms=LOWRES_ZOOMS), name="lowres_tmpl")
    lowres_target = pe.Node(RegridToZooms(zooms=LOWRES_ZOOMS),
                            name="lowres_target")

    init_aff = pe.Node(
        AI(
            metric=("Mattes", 32, "Regular", 0.25),
            transform=("Affine", 0.1),
            search_factor=(15, 0.1),
            principal_axes=False,
            convergence=(10, 1e-6, 10),
            search_grid=(40, (0, 40, 40)),
            verbose=True,
        ),
        name="init_aff",
        n_procs=omp_nthreads,
    )
    wf.connect([
        (gauss_tmpl, lowres_tmpl, [("out", "in_file")]),
        (lowres_tmpl, init_aff, [("out_file", "fixed_image")]),
        (gauss_target, lowres_target, [("out", "in_file")]),
        (lowres_target, init_aff, [("out_file", "moving_image")]),
        (init_aff, norm, [("output_transform", "initial_moving_transform")]),
    ])

    if tpl_regmask_path:
        # low-res copy of the registration mask for antsAI
        lowres_mask = pe.Node(ApplyTransforms(
            input_image=_pop(tpl_regmask_path),
            transforms="identity",
            interpolation="MultiLabel",
            float=True),
            name="lowres_mask", mem_gb=1)
        wf.connect([
            (lowres_tmpl, lowres_mask, [("out_file", "reference_image")]),
            (lowres_mask, init_aff, [("output_image", "fixed_image_mask")]),
        ])

    if interim_checkpoints:
        # Before/after reportlet of the antsAI initialization
        init_apply = pe.Node(
            ApplyTransforms(interpolation="BSpline", float=True),
            name="init_apply", mem_gb=1)
        init_report = pe.Node(SimpleBeforeAfter(
            before_label=f"tpl-{in_template}",
            after_label="target",
            out_report="init_report.svg"),
            name="init_report")
        wf.connect([
            (lowres_target, init_apply, [("out_file", "input_image")]),
            (res_tmpl, init_apply, [("out_file", "reference_image")]),
            (init_aff, init_apply, [("output_transform", "transforms")]),
            (init_apply, init_report, [("output_image", "after")]),
            (res_tmpl, init_report, [("out_file", "before")]),
        ])

        if output_dir:
            ds_init_report = pe.Node(
                DataSink(base_directory=str(output_dir.parent)),
                name="ds_init_report")
            wf.connect(init_report, "out_report", ds_init_report,
                       f"{output_dir.name}.@init_report")

    return wf
def init_infant_anat_wf(
    *,
    age_months,
    ants_affine_init,
    t1w,
    t2w,
    anat_modality,
    bids_root,
    existing_derivatives,
    freesurfer,
    longitudinal,
    omp_nthreads,
    output_dir,
    segmentation_atlases,
    skull_strip_mode,
    skull_strip_template,
    sloppy,
    spaces,
    name="infant_anat_wf",
):
    """
    Stage the anatomical preprocessing steps of the infant pipeline:

    - T1w reference: realigning and then averaging anatomical images.
    - Brain extraction and INU (bias field) correction.
    - Brain tissue segmentation.
    - Spatial normalization to standard spaces.
    - Surface reconstruction with FreeSurfer_.

    Parameters
    ----------
    age_months
        Participant age in months; forwarded to brain extraction, segmentation
        and surface reconstruction sub-workflows (presumably for age-specific
        template selection — confirm in those workflows).
    t1w, t2w
        Lists of T1w / T2w image files (T2w is optional; an empty/None value
        disables the T2w template branch).
    skull_strip_mode
        Must be ``'force'``; any other value raises ``NotImplementedError``.
    existing_derivatives
        Must be falsy; reusing derivatives raises ``NotImplementedError``.
    name : str
        Workflow name (default: ``infant_anat_wf``).

    Outputs
    -------
    anat_preproc
        The anatomical reference map, which is calculated as the average of
        bias-corrected and preprocessed anatomical images, defining the anatomical space.
    anat_brain
        Skull-stripped ``anat_preproc``
    anat_mask
        Brain (binary) mask estimated by brain extraction.
    anat_dseg
        Brain tissue segmentation of the preprocessed structural image, including
        gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF).
    anat_tpms
        List of tissue probability maps corresponding to ``t1w_dseg``.
    std_preproc
        T1w reference resampled in one or more standard spaces.
    std_mask
        Mask of skull-stripped template, in MNI space
    std_dseg
        Segmentation, resampled into MNI space
    std_tpms
        List of tissue probability maps in MNI space
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    anat2std_xfm
        Nonlinear spatial transform to resample imaging data given in anatomical space
        into standard space.
    std2anat_xfm
        Inverse transform of the above.
    subject_id
        FreeSurfer subject ID
    anat2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space
    fsnative2anat_xfm
        LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w
    surfaces
        GIFTI surfaces (gray/white boundary, midthickness, pial, inflated)
    """
    from nipype.interfaces.base import Undefined
    from nipype.interfaces.ants.base import Info as ANTsInfo
    from niworkflows.interfaces.images import ValidateImage
    from smriprep.workflows.anatomical import init_anat_template_wf, _pop
    from smriprep.workflows.norm import init_anat_norm_wf
    from smriprep.workflows.outputs import (
        init_anat_reports_wf,
        init_anat_derivatives_wf,
    )

    from ...utils.misc import fix_multi_source_name
    from .brain_extraction import init_infant_brain_extraction_wf
    from .segmentation import init_anat_seg_wf
    from .surfaces import init_infant_surface_recon_wf

    # for now, T1w only
    num_t1w = len(t1w) if t1w else 0
    num_t2w = len(t2w) if t2w else 0

    wf = pe.Workflow(name=name)
    desc = """Anatomical data preprocessing

: """
    desc += f"""\
A total of {num_t1w} T1w and {num_t2w} T2w images were found within the input
BIDS dataset."""

    inputnode = pe.Node(
        niu.IdentityInterface(fields=["t1w", "t2w", "subject_id", "subjects_dir"
                                      ]),  # FLAIR / ROI?
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "anat_preproc",
            "anat_brain",
            "anat_mask",
            "anat_dseg",
            "anat_tpms",
            "anat_ref_xfms",
            "std_preproc",
            "std_brain",
            # BUGFIX: "std_mask" and "template" were missing although
            # connections below target them — nipype rejects connections
            # to undeclared IdentityInterface fields.
            "std_mask",
            "std_dseg",
            "std_tpms",
            "template",
            "subjects_dir",
            "subject_id",
            "anat2std_xfm",
            "std2anat_xfm",
            "anat2fsnative_xfm",
            "fsnative2anat_xfm",
            "surfaces",
            "anat_aseg",
            "anat_aparc",
        ]),
        name="outputnode",
    )

    if existing_derivatives:
        raise NotImplementedError("Reusing derivatives is not yet supported.")

    desc += """
All of the T1-weighted images were corrected for intensity non-uniformity (INU)
""" if num_t1w > 1 else """\
The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU)
"""
    desc += """\
with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} \
[@ants, RRID:SCR_004757]"""
    desc += '.\n' if num_t1w > 1 else ", and used as T1w-reference throughout the workflow.\n"

    desc += """\
The T1w-reference was then skull-stripped with a modified implementation of
the `antsBrainExtraction.sh` workflow (from ANTs), using {skullstrip_tpl}
as target template.
Brain tissue segmentation of cerebrospinal fluid (CSF),
white-matter (WM) and gray-matter (GM) was performed on
the brain-extracted T1w using ANTs JointFusion, distributed with ANTs {ants_ver}.
"""

    wf.__desc__ = desc.format(
        ants_ver=ANTsInfo.version() or '(version unknown)',
        skullstrip_tpl=skull_strip_template.fullname,
    )

    # Define output workflows
    # BUGFIX: a first, identical anat_reports_wf used to be created earlier
    # ("Connect reportlets workflows") and silently shadowed by this one,
    # so its TFSelect hack never applied to the live instance. Build it once.
    anat_reports_wf = init_anat_reports_wf(freesurfer=freesurfer, output_dir=output_dir)
    # HACK: remove resolution from TFSelect
    anat_reports_wf.get_node('tf_select').inputs.resolution = Undefined

    anat_derivatives_wf = init_anat_derivatives_wf(
        bids_root=bids_root,
        freesurfer=freesurfer,
        num_t1w=num_t1w,
        output_dir=output_dir,
        spaces=spaces,
    )
    # HACK: remove resolution from TFSelect
    anat_derivatives_wf.get_node('select_tpl').inputs.resolution = Undefined

    # Multiple T1w files -> generate average reference
    t1w_template_wf = init_anat_template_wf(
        longitudinal=False,
        omp_nthreads=omp_nthreads,
        num_t1w=num_t1w,
    )

    use_t2w = False
    if num_t2w:
        # NOTE(review): ``init_t2w_template_wf`` is not imported in this
        # function — verify it is imported at module level, otherwise this
        # branch raises NameError.
        t2w_template_wf = init_t2w_template_wf(
            longitudinal=longitudinal,
            omp_nthreads=omp_nthreads,
            num_t2w=num_t2w,
        )
        wf.connect(inputnode, 't2w', t2w_template_wf, 'inputnode.t2w')
        # TODO: determine cutoff (< 8 months)
        use_t2w = True

    anat_validate = pe.Node(
        ValidateImage(),
        name='anat_validate',
        run_without_submitting=True,
    )

    # INU + Brain Extraction
    if skull_strip_mode != 'force':
        raise NotImplementedError("Skull stripping is currently required.")

    brain_extraction_wf = init_infant_brain_extraction_wf(
        age_months=age_months,
        mri_scheme=anat_modality.capitalize(),
        ants_affine_init=ants_affine_init,
        skull_strip_template=skull_strip_template.space,
        template_specs=skull_strip_template.spec,
        omp_nthreads=omp_nthreads,
        output_dir=Path(output_dir),
        sloppy=sloppy,
        use_t2w=use_t2w,
    )
    # Ensure single outputs
    # BUGFIX: "anat_mask" was missing from the buffer's fields although
    # three connections below read/write it.
    be_buffer = pe.Node(
        niu.IdentityInterface(fields=["anat_preproc", "anat_brain", "anat_mask"]),
        name='be_buffer')

    # Segmentation - initial implementation should be simple: JLF
    anat_seg_wf = init_anat_seg_wf(
        age_months=age_months,
        anat_modality=anat_modality.capitalize(),
        template_dir=segmentation_atlases,
        sloppy=sloppy,
        omp_nthreads=omp_nthreads,
    )

    # Spatial normalization (requires segmentation)
    anat_norm_wf = init_anat_norm_wf(
        debug=sloppy,
        omp_nthreads=omp_nthreads,
        templates=spaces.get_spaces(nonstandard=False, dim=(3, )),
    )
    # HACK: remove resolution from TFSelect
    anat_norm_wf.get_node('tf_select').inputs.resolution = Undefined
    # HACK: requires patched niworkflows to allow setting resolution to none
    anat_norm_wf.get_node('registration').inputs.template_resolution = None

    # fmt: off
    if use_t2w:
        wf.connect(t2w_template_wf, 'outputnode.t2w_ref', brain_extraction_wf, 'inputnode.t2w')

    wf.connect([
        (inputnode, t1w_template_wf, [
            ('t1w', 'inputnode.t1w'),
        ]),
        (t1w_template_wf, outputnode, [
            ('outputnode.t1w_realign_xfm', 'anat_ref_xfms'),
        ]),
        (t1w_template_wf, anat_validate, [
            ('outputnode.t1w_ref', 'in_file'),
        ]),
        (anat_validate, brain_extraction_wf, [
            ('out_file', 'inputnode.t1w'),
        ]),
        # brain extraction may emit lists; _pop reduces them to single outputs
        (brain_extraction_wf, be_buffer, [
            (('outputnode.t1w_corrected', _pop), 'anat_preproc'),
            (('outputnode.t1w_corrected_brain', _pop), 'anat_brain'),
            (('outputnode.t1w_mask', _pop), 'anat_mask'),
        ]),
        (be_buffer, outputnode, [
            ('anat_preproc', 'anat_preproc'),
            ('anat_brain', 'anat_brain'),
            ('anat_mask', 'anat_mask'),
        ]),
        (be_buffer, anat_seg_wf, [
            ('anat_brain', 'inputnode.anat_brain'),
        ]),
        (anat_seg_wf, outputnode, [
            ('outputnode.anat_dseg', 'anat_dseg'),
        ]),
        (anat_seg_wf, anat_norm_wf, [
            ('outputnode.anat_dseg', 'inputnode.moving_segmentation'),
            ('outputnode.anat_tpms', 'inputnode.moving_tpms'),
        ]),
        (be_buffer, anat_norm_wf, [
            ('anat_preproc', 'inputnode.moving_image'),
            ('anat_mask', 'inputnode.moving_mask'),
        ]),
        (anat_norm_wf, outputnode, [
            ('poutputnode.standardized', 'std_preproc'),
            ('poutputnode.std_mask', 'std_mask'),
            ('poutputnode.std_dseg', 'std_dseg'),
            ('poutputnode.std_tpms', 'std_tpms'),
            ('outputnode.template', 'template'),
            ('outputnode.anat2std_xfm', 'anat2std_xfm'),
            ('outputnode.std2anat_xfm', 'std2anat_xfm'),
        ]),
        (inputnode, anat_norm_wf, [
            (('t1w', fix_multi_source_name), 'inputnode.orig_t1w'),  # anat_validate? not used...
        ]),
    ])

    wf.connect([
        # reports
        (inputnode, anat_reports_wf, [
            ('t1w', 'inputnode.source_file'),
        ]),
        (outputnode, anat_reports_wf, [
            ('anat_preproc', 'inputnode.t1w_preproc'),
            ('anat_mask', 'inputnode.t1w_mask'),
            ('anat_dseg', 'inputnode.t1w_dseg'),
            ('std_preproc', 'inputnode.std_t1w'),
            ('std_mask', 'inputnode.std_mask'),
        ]),
        (t1w_template_wf, anat_reports_wf, [
            ('outputnode.out_report', 'inputnode.t1w_conform_report'),
        ]),
        (anat_norm_wf, anat_reports_wf, [
            ('poutputnode.template', 'inputnode.template'),
        ]),
        # derivatives
        (t1w_template_wf, anat_derivatives_wf, [
            ('outputnode.t1w_valid_list', 'inputnode.source_files'),
            ('outputnode.t1w_realign_xfm', 'inputnode.t1w_ref_xfms'),
        ]),
        (be_buffer, anat_derivatives_wf, [
            ('anat_mask', 'inputnode.t1w_mask'),
            ('anat_preproc', 'inputnode.t1w_preproc'),
        ]),
        (anat_norm_wf, anat_derivatives_wf, [
            ('outputnode.template', 'inputnode.template'),
            ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'),
            ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'),
        ]),
        (anat_seg_wf, anat_derivatives_wf, [
            ('outputnode.anat_dseg', 'inputnode.t1w_dseg'),
            ('outputnode.anat_tpms', 'inputnode.t1w_tpms'),
        ]),
    ])

    if not freesurfer:
        return wf

    # FreeSurfer surfaces
    surface_recon_wf = init_infant_surface_recon_wf(
        age_months=age_months,
        use_aseg=bool(segmentation_atlases),
    )

    wf.connect([
        (anat_seg_wf, surface_recon_wf, [
            ('outputnode.anat_aseg', 'inputnode.anat_aseg'),
        ]),
        (inputnode, surface_recon_wf, [
            ('subject_id', 'inputnode.subject_id'),
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('t2w', 'inputnode.t2w'),
        ]),
        (anat_validate, surface_recon_wf, [
            ('out_file', 'inputnode.anat_orig'),
        ]),
        (be_buffer, surface_recon_wf, [
            ('anat_brain', 'inputnode.anat_skullstripped'),
            ('anat_preproc', 'inputnode.anat_preproc'),
        ]),
        (surface_recon_wf, outputnode, [
            ('outputnode.subjects_dir', 'subjects_dir'),
            ('outputnode.subject_id', 'subject_id'),
            ('outputnode.anat2fsnative_xfm', 'anat2fsnative_xfm'),
            ('outputnode.fsnative2anat_xfm', 'fsnative2anat_xfm'),
            ('outputnode.surfaces', 'surfaces'),
            ('outputnode.anat_aparc', 'anat_aparc'),
            ('outputnode.anat_aseg', 'anat_aseg'),
        ]),
        (surface_recon_wf, anat_reports_wf, [
            ('outputnode.subject_id', 'inputnode.subject_id'),
            ('outputnode.subjects_dir', 'inputnode.subjects_dir'),
        ]),
        (surface_recon_wf, anat_derivatives_wf, [
            ('outputnode.anat_aseg', 'inputnode.t1w_fs_aseg'),
            ('outputnode.anat_aparc', 'inputnode.t1w_fs_aparc'),
            ('outputnode.anat2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'),
            ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'),
            ('outputnode.surfaces', 'inputnode.surfaces'),
        ]),
    ])
    # fmt: on

    return wf