def init_anat_norm_wf(
    *,
    debug,
    omp_nthreads,
    templates,
    name="anat_norm_wf",
):
    """
    Build an individual spatial normalization workflow using ``antsRegistration``.

    Workflow Graph
        .. workflow ::
            :graph2use: orig
            :simple_form: yes

            from smriprep.workflows.norm import init_anat_norm_wf
            wf = init_anat_norm_wf(
                debug=False,
                omp_nthreads=1,
                templates=['MNI152NLin2009cAsym', 'MNI152NLin6Asym'],
            )

    .. important::
        This workflow defines an iterable input over the input parameter ``templates``,
        so Nipype will produce one copy of the downstream workflows which connect
        ``poutputnode.template`` or ``poutputnode.template_spec`` to their inputs
        (``poutputnode`` stands for *parametric output node*).
        Nipype refers to this expansion of the graph as *parameterized execution*.
        If a joint list of values is required (and thus cutting off parameterization),
        please use the equivalent outputs of ``outputnode`` (which *joins* all the
        parameterized execution paths).

    Parameters
    ----------
    debug : :obj:`bool`
        Apply sloppy arguments to speed up processing. Use with caution,
        registration processes will be very inaccurate.
    omp_nthreads : :obj:`int`
        Maximum number of threads an individual process may use.
    templates : :obj:`list` of :obj:`str`
        List of standard space fullnames (e.g., ``MNI152NLin6Asym``
        or ``MNIPediatricAsym:cohort-4``) which are targets for spatial normalization.

    Inputs
    ------
    moving_image
        The input image that will be normalized to standard space.
    moving_mask
        A precise brain mask separating skull/skin/fat from brain structures.
    moving_segmentation
        A brain tissue segmentation of the ``moving_image``.
    moving_tpms
        tissue probability maps (TPMs) corresponding to the ``moving_segmentation``.
    lesion_mask
        (optional) A mask to exclude regions from the cost-function input domain
        to enable standardization of lesioned brains.
    orig_t1w
        The original T1w image from the BIDS structure.
    template
        Template name and specification

    Outputs
    -------
    standardized
        The T1w after spatial normalization, in template space.
    anat2std_xfm
        The T1w-to-template transform.
    std2anat_xfm
        The template-to-T1w transform.
    std_mask
        The ``moving_mask`` in template space (matches ``standardized`` output).
    std_dseg
        The ``moving_segmentation`` in template space (matches ``standardized`` output).
    std_tpms
        The ``moving_tpms`` in template space (matches ``standardized`` output).
    template
        Template name extracted from the input parameter ``template``, for further
        use in downstream nodes.
    template_spec
        Template specifications extracted from the input parameter ``template``, for
        further use in downstream nodes.

    """
    ntpls = len(templates)
    workflow = Workflow(name=name)

    if templates:
        workflow.__desc__ = """\
Volume-based spatial normalization to {targets} ({targets_id}) was performed through
nonlinear registration with `antsRegistration` (ANTs {ants_ver}),
using brain-extracted versions of both T1w reference and the T1w template.
The following template{tpls} selected for spatial normalization:
""".format(
            ants_ver=ANTsInfo.version() or "(version unknown)",
            targets="%s standard space%s" % (
                defaultdict("several".format, {1: "one", 2: "two", 3: "three", 4: "four"})[ntpls],
                "s" * (ntpls != 1),
            ),
            targets_id=", ".join(templates),
            tpls=(" was", "s were")[ntpls != 1],
        )

        # Append template citations to description
        for template in templates:
            template_meta = get_metadata(template.split(":")[0])
            template_refs = ["@%s" % template.split(":")[0].lower()]

            if template_meta.get("RRID", None):
                template_refs += ["RRID:%s" % template_meta["RRID"]]

            workflow.__desc__ += """\
*{template_name}* [{template_refs}; TemplateFlow ID: {template}]""".format(
                template=template,
                template_name=template_meta["Name"],
                template_refs=", ".join(template_refs),
            )
            workflow.__desc__ += ".\n" if template == templates[-1] else ", "

    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            "lesion_mask",
            "moving_image",
            "moving_mask",
            "moving_segmentation",
            "moving_tpms",
            "orig_t1w",
            "template",
        ]),
        name="inputnode",
    )
    inputnode.iterables = [("template", templates)]

    out_fields = [
        "anat2std_xfm",
        "standardized",
        "std2anat_xfm",
        "std_dseg",
        "std_mask",
        "std_tpms",
        "template",
        "template_spec",
    ]
    poutputnode = pe.Node(niu.IdentityInterface(fields=out_fields), name="poutputnode")

    split_desc = pe.Node(TemplateDesc(), run_without_submitting=True, name="split_desc")

    tf_select = pe.Node(
        TemplateFlowSelect(resolution=1 + debug),
        name="tf_select",
        run_without_submitting=True,
    )

    # With the improvements from nipreps/niworkflows#342 this truncation is now necessary
    trunc_mov = pe.Node(
        ants.ImageMath(operation="TruncateImageIntensity", op2="0.01 0.999 256"),
        name="trunc_mov",
    )

    registration = pe.Node(
        SpatialNormalization(
            float=True,
            flavor=["precise", "testing"][debug],
        ),
        name="registration",
        n_procs=omp_nthreads,
        mem_gb=2,
    )

    # Resample T1w-space inputs
    tpl_moving = pe.Node(
        ApplyTransforms(
            dimension=3,
            default_value=0,
            float=True,
            interpolation="LanczosWindowedSinc",
        ),
        name="tpl_moving",
    )

    std_mask = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name="std_mask")
    std_dseg = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name="std_dseg")

    std_tpms = pe.MapNode(
        ApplyTransforms(dimension=3, default_value=0, float=True, interpolation="Gaussian"),
        iterfield=["input_image"],
        name="std_tpms",
    )

    # fmt:off
    workflow.connect([
        (inputnode, split_desc, [('template', 'template')]),
        (inputnode, poutputnode, [('template', 'template')]),
        (inputnode, trunc_mov, [('moving_image', 'op1')]),
        (inputnode, registration, [('moving_mask', 'moving_mask'),
                                   ('lesion_mask', 'lesion_mask')]),
        (inputnode, tpl_moving, [('moving_image', 'input_image')]),
        (inputnode, std_mask, [('moving_mask', 'input_image')]),
        (split_desc, tf_select, [('name', 'template'),
                                 ('spec', 'template_spec')]),
        (split_desc, registration, [('name', 'template'),
                                    ('spec', 'template_spec')]),
        (tf_select, tpl_moving, [('t1w_file', 'reference_image')]),
        (tf_select, std_mask, [('t1w_file', 'reference_image')]),
        (tf_select, std_dseg, [('t1w_file', 'reference_image')]),
        (tf_select, std_tpms, [('t1w_file', 'reference_image')]),
        (trunc_mov, registration, [('output_image', 'moving_image')]),
        (registration, tpl_moving, [('composite_transform', 'transforms')]),
        (registration, std_mask, [('composite_transform', 'transforms')]),
        (inputnode, std_dseg, [('moving_segmentation', 'input_image')]),
        (registration, std_dseg, [('composite_transform', 'transforms')]),
        (inputnode, std_tpms, [('moving_tpms', 'input_image')]),
        (registration, std_tpms, [('composite_transform', 'transforms')]),
        (registration, poutputnode, [
            ('composite_transform', 'anat2std_xfm'),
            ('inverse_composite_transform', 'std2anat_xfm')]),
        (tpl_moving, poutputnode, [('output_image', 'standardized')]),
        (std_mask, poutputnode, [('output_image', 'std_mask')]),
        (std_dseg, poutputnode, [('output_image', 'std_dseg')]),
        (std_tpms, poutputnode, [('output_image', 'std_tpms')]),
        (split_desc, poutputnode, [('spec', 'template_spec')]),
    ])
    # fmt:on

    # Provide synchronized output
    outputnode = pe.JoinNode(
        niu.IdentityInterface(fields=out_fields),
        name="outputnode",
        joinsource="inputnode",
    )
    # fmt:off
    workflow.connect([
        (poutputnode, outputnode, [(f, f) for f in out_fields]),
    ])
    # fmt:on

    return workflow
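

# Hedged usage sketch (not part of the original module): it builds the workflow
# defined above for two standard spaces and fills the documented ``inputnode``
# fields directly. The file names are placeholders; in sMRIPrep these inputs are
# normally connected from the brain-extraction and segmentation workflows.
def _example_anat_norm_usage():
    wf = init_anat_norm_wf(
        debug=False,
        omp_nthreads=8,
        templates=["MNI152NLin2009cAsym", "MNI152NLin6Asym"],
    )
    # ``template`` is an iterable input, so Nipype expands one registration branch
    # per requested template and ``outputnode`` joins the results back together.
    wf.inputs.inputnode.moving_image = "sub-01_desc-preproc_T1w.nii.gz"
    wf.inputs.inputnode.moving_mask = "sub-01_desc-brain_mask.nii.gz"
    wf.inputs.inputnode.moving_segmentation = "sub-01_dseg.nii.gz"
    wf.inputs.inputnode.moving_tpms = [
        "sub-01_label-GM_probseg.nii.gz",
        "sub-01_label-WM_probseg.nii.gz",
        "sub-01_label-CSF_probseg.nii.gz",
    ]
    wf.inputs.inputnode.orig_t1w = "sub-01_T1w.nii.gz"
    return wf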
def init_anat_norm_wf( *, debug, omp_nthreads, templates, name="anat_norm_wf", ): """ Build an individual spatial normalization workflow using ``antsRegistration``. Workflow Graph .. workflow :: :graph2use: orig :simple_form: yes from fmriprep_rodents.patch.workflows.anatomical import init_anat_norm_wf wf = init_anat_norm_wf( debug=False, omp_nthreads=1, templates=['Fischer344'], ) .. important:: This workflow defines an iterable input over the input parameter ``templates``, so Nipype will produce one copy of the downstream workflows which connect ``poutputnode.template`` or ``poutputnode.template_spec`` to their inputs (``poutputnode`` stands for *parametric output node*). Nipype refers to this expansion of the graph as *parameterized execution*. If a joint list of values is required (and thus cutting off parameterization), please use the equivalent outputs of ``outputnode`` (which *joins* all the parameterized execution paths). Parameters ---------- debug : :obj:`bool` Apply sloppy arguments to speed up processing. Use with caution, registration processes will be very inaccurate. omp_nthreads : :obj:`int` Maximum number of threads an individual process may use. templates : :obj:`list` of :obj:`str` List of standard space fullnames (e.g., ``MNI152NLin6Asym`` or ``MNIPediatricAsym:cohort-4``) which are targets for spatial normalization. Inputs ------ moving_image The input image that will be normalized to standard space. moving_mask A precise brain mask separating skull/skin/fat from brain structures. moving_segmentation A brain tissue segmentation of the ``moving_image``. moving_tpms tissue probability maps (TPMs) corresponding to the ``moving_segmentation``. lesion_mask (optional) A mask to exclude regions from the cost-function input domain to enable standardization of lesioned brains. orig_t1w The original T1w image from the BIDS structure. template Template name and specification Outputs ------- standardized The T1w after spatial normalization, in template space. anat2std_xfm The T1w-to-template transform. std2anat_xfm The template-to-T1w transform. std_mask The ``moving_mask`` in template space (matches ``standardized`` output). std_dseg The ``moving_segmentation`` in template space (matches ``standardized`` output). std_tpms The ``moving_tpms`` in template space (matches ``standardized`` output). template Template name extracted from the input parameter ``template``, for further use in downstream nodes. template_spec Template specifications extracted from the input parameter ``template``, for further use in downstream nodes. """ from collections import defaultdict from nipype.interfaces.ants import ImageMath from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from smriprep.interfaces.templateflow import TemplateDesc from ..interfaces import RobustMNINormalization ntpls = len(templates) workflow = Workflow(name=name) if templates: workflow.__desc__ = """\ Volume-based spatial normalization to {targets} ({targets_id}) was performed through nonlinear registration with `antsRegistration` (ANTs {ants_ver}), using brain-extracted versions of both T1w reference and the T1w template. 
The following template{tpls} selected for spatial normalization:
""".format(
        ants_ver=ANTsInfo.version() or '(version unknown)',
        targets='%s standard space%s' % (defaultdict(
            'several'.format, {1: 'one', 2: 'two', 3: 'three', 4: 'four'})[ntpls],
            's' * (ntpls != 1)),
        targets_id=', '.join(templates),
        tpls=(' was', 's were')[ntpls != 1]
    )

    # Append template citations to description
    for template in templates:
        template_meta = get_metadata(template.split(':')[0])
        template_refs = ['@%s' % template.split(':')[0].lower()]

        if template_meta.get('RRID', None):
            template_refs += ['RRID:%s' % template_meta['RRID']]

        workflow.__desc__ += """\
*{template_name}* [{template_refs}; TemplateFlow ID: {template}]""".format(
            template=template,
            template_name=template_meta['Name'],
            template_refs=', '.join(template_refs)
        )
        # ``templates`` holds plain identifiers here, so compare against the last
        # element itself; indexing ``templates[-1][0]`` compared against its first
        # character and never terminated the sentence with a period.
        workflow.__desc__ += (', ', '.')[template == templates[-1]]

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'lesion_mask',
        'moving_image',
        'moving_mask',
        'moving_segmentation',
        'moving_tpms',
        'orig_t1w',
        'template',
    ]), name='inputnode')
    inputnode.iterables = [('template', templates)]

    out_fields = [
        'anat2std_xfm',
        'standardized',
        'std2anat_xfm',
        'std_dseg',
        'std_mask',
        'std_tpms',
        'template',
        'template_spec',
    ]
    poutputnode = pe.Node(niu.IdentityInterface(fields=out_fields), name='poutputnode')

    split_desc = pe.Node(TemplateDesc(), run_without_submitting=True, name='split_desc')

    tf_select = pe.Node(TemplateFlowSelect(),
                        name='tf_select', run_without_submitting=True)

    # With the improvements from nipreps/niworkflows#342 this truncation is now necessary
    trunc_mov = pe.Node(ImageMath(operation='TruncateImageIntensity', op2='0.01 0.999 256'),
                        name='trunc_mov')

    registration = pe.Node(RobustMNINormalization(
        float=True,
        flavor=['precise', 'testing'][debug],
    ), name='registration', n_procs=omp_nthreads, mem_gb=2)

    # Resample T1w-space inputs
    tpl_moving = pe.Node(ApplyTransforms(
        dimension=3, default_value=0, float=True,
        interpolation='LanczosWindowedSinc'), name='tpl_moving')

    std_mask = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='std_mask')
    std_dseg = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='std_dseg')

    std_tpms = pe.MapNode(ApplyTransforms(dimension=3, default_value=0, float=True,
                                          interpolation='Gaussian'),
                          iterfield=['input_image'], name='std_tpms')

    workflow.connect([
        (inputnode, split_desc, [('template', 'template')]),
        (inputnode, poutputnode, [('template', 'template')]),
        (inputnode, trunc_mov, [('moving_image', 'op1')]),
        (inputnode, registration, [
            ('moving_mask', 'moving_mask'),
            ('lesion_mask', 'lesion_mask')]),
        (inputnode, tpl_moving, [('moving_image', 'input_image')]),
        (inputnode, std_mask, [('moving_mask', 'input_image')]),
        (split_desc, tf_select, [('name', 'template'),
                                 ('spec', 'template_spec')]),
        (split_desc, registration, [('name', 'template'),
                                    (('spec', _no_atlas), 'template_spec')]),
        (tf_select, tpl_moving, [('t2w_file', 'reference_image')]),
        (tf_select, std_mask, [('t2w_file', 'reference_image')]),
        (tf_select, std_dseg, [('t2w_file', 'reference_image')]),
        (tf_select, std_tpms, [('t2w_file', 'reference_image')]),
        (trunc_mov, registration, [
            ('output_image', 'moving_image')]),
        (registration, tpl_moving, [('composite_transform', 'transforms')]),
        (registration, std_mask, [('composite_transform', 'transforms')]),
        (inputnode, std_dseg, [('moving_segmentation', 'input_image')]),
        (registration, std_dseg, [('composite_transform', 'transforms')]),
        (inputnode, std_tpms, [('moving_tpms', 'input_image')]),
        (registration, std_tpms, [('composite_transform', 'transforms')]),
        (registration, poutputnode, [
            ('composite_transform', 'anat2std_xfm'),
            ('inverse_composite_transform', 'std2anat_xfm')]),
        (tpl_moving, poutputnode, [('output_image', 'standardized')]),
        (std_mask, poutputnode, [('output_image', 'std_mask')]),
        (std_dseg, poutputnode, [('output_image', 'std_dseg')]),
        (std_tpms, poutputnode, [('output_image', 'std_tpms')]),
        (split_desc, poutputnode, [('spec', 'template_spec')]),
    ])

    # Provide synchronized output
    outputnode = pe.JoinNode(niu.IdentityInterface(fields=out_fields),
                             name='outputnode',
                             joinsource='inputnode')
    workflow.connect([
        (poutputnode, outputnode, [(f, f) for f in out_fields]),
    ])

    return workflow
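

# Hedged sketch (not in the original sources): the rodent-oriented variant above
# resamples everything onto the template's T2w reference (note the ``t2w_file``
# connections), so it is typically instantiated with a rodent TemplateFlow
# identifier such as Fischer344. Paths are placeholders.
def _example_rodent_anat_norm_usage():
    wf = init_anat_norm_wf(debug=False, omp_nthreads=4, templates=['Fischer344'])
    wf.inputs.inputnode.moving_image = 'sub-01_desc-preproc_T2w.nii.gz'
    wf.inputs.inputnode.moving_mask = 'sub-01_desc-brain_mask.nii.gz'
    return wf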
def init_anat_norm_wf( debug, omp_nthreads, templates, ): """ Build an individual spatial normalization workflow using ``antsRegistration``. .. workflow :: :graph2use: orig :simple_form: yes from smriprep.workflows.norm import init_anat_norm_wf wf = init_anat_norm_wf( debug=False, omp_nthreads=1, templates=[('MNI152NLin2009cAsym', {}), ('MNI152NLin6Asym', {})], ) **Parameters** debug : bool Apply sloppy arguments to speed up processing. Use with caution, registration processes will be very inaccurate. omp_nthreads : int Maximum number of threads an individual process may use. templates : list of tuples List of tuples containing TemplateFlow identifiers (e.g. ``MNI152NLin6Asym``) and corresponding specs, which specify target templates for spatial normalization. **Inputs** moving_image The input image that will be normalized to standard space. moving_mask A precise brain mask separating skull/skin/fat from brain structures. moving_segmentation A brain tissue segmentation of the ``moving_image``. moving_tpms tissue probability maps (TPMs) corresponding to the ``moving_segmentation``. lesion_mask (optional) A mask to exclude regions from the cost-function input domain to enable standardization of lesioned brains. orig_t1w The original T1w image from the BIDS structure. **Outputs** standardized The T1w after spatial normalization, in template space. anat2std_xfm The T1w-to-template transform. std2anat_xfm The template-to-T1w transform. std_mask The ``moving_mask`` in template space (matches ``standardized`` output). std_dseg The ``moving_segmentation`` in template space (matches ``standardized`` output). std_tpms The ``moving_tpms`` in template space (matches ``standardized`` output). template The input parameter ``template`` for further use in nodes depending on this workflow. """ templateflow = get_templates() missing_tpls = [ template for template, _ in templates if template not in templateflow ] if missing_tpls: raise ValueError("""\ One or more templates were not found (%s). Please make sure TemplateFlow is \ correctly installed and contains the given template identifiers.""" % ', '.join(missing_tpls)) ntpls = len(templates) workflow = Workflow('anat_norm_wf') workflow.__desc__ = """\ Volume-based spatial normalization to {targets} ({targets_id}) was performed through nonlinear registration with `antsRegistration` (ANTs {ants_ver}), using brain-extracted versions of both T1w reference and the T1w template. 
The following template{tpls} selected for spatial normalization: """.format(ants_ver=ANTsInfo.version() or '(version unknown)', targets='%s standard space%s' % (defaultdict('several'.format, { 1: 'one', 2: 'two', 3: 'three', 4: 'four' })[ntpls], 's' * (ntpls != 1)), targets_id=', '.join((t for t, _ in templates)), tpls=(' was', 's were')[ntpls != 1]) # Append template citations to description for template, _ in templates: template_meta = get_metadata(template) template_refs = ['@%s' % template.lower()] if template_meta.get('RRID', None): template_refs += ['RRID:%s' % template_meta['RRID']] workflow.__desc__ += """\ *{template_name}* [{template_refs}; TemplateFlow ID: {template}]""".format( template=template, template_name=template_meta['Name'], template_refs=', '.join(template_refs)) workflow.__desc__ += (', ', '.')[template == templates[-1][0]] inputnode = pe.Node(niu.IdentityInterface(fields=[ 'moving_image', 'moving_mask', 'moving_segmentation', 'moving_tpms', 'lesion_mask', 'orig_t1w', 'template' ]), name='inputnode') inputnode.iterables = [('template', templates)] out_fields = [ 'standardized', 'anat2std_xfm', 'std2anat_xfm', 'std_mask', 'std_dseg', 'std_tpms', 'template' ] poutputnode = pe.Node(niu.IdentityInterface(fields=out_fields), name='poutputnode') tf_select = pe.Node(TemplateFlowSelect(resolution=1 + debug), name='tf_select', run_without_submitting=True) # With the improvements from poldracklab/niworkflows#342 this truncation is now necessary trunc_mov = pe.Node(ImageMath(operation='TruncateImageIntensity', op2='0.01 0.999 256'), name='trunc_mov') registration = pe.Node(RobustMNINormalization( float=True, flavor=['precise', 'testing'][debug], ), name='registration', n_procs=omp_nthreads, mem_gb=2) # Resample T1w-space inputs tpl_moving = pe.Node(ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='LanczosWindowedSinc'), name='tpl_moving') std_mask = pe.Node(ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='MultiLabel'), name='std_mask') std_dseg = pe.Node(ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='MultiLabel'), name='std_dseg') std_tpms = pe.MapNode(ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='Gaussian'), iterfield=['input_image'], name='std_tpms') workflow.connect([ (inputnode, tf_select, [(('template', _get_name), 'template'), (('template', _get_spec), 'template_spec')]), (inputnode, registration, [(('template', _get_name), 'template'), (('template', _get_spec), 'template_spec') ]), (inputnode, trunc_mov, [('moving_image', 'op1')]), (inputnode, registration, [('moving_mask', 'moving_mask'), ('lesion_mask', 'lesion_mask')]), (inputnode, tpl_moving, [('moving_image', 'input_image')]), (inputnode, std_mask, [('moving_mask', 'input_image')]), (tf_select, tpl_moving, [('t1w_file', 'reference_image')]), (tf_select, std_mask, [('t1w_file', 'reference_image')]), (tf_select, std_dseg, [('t1w_file', 'reference_image')]), (tf_select, std_tpms, [('t1w_file', 'reference_image')]), (trunc_mov, registration, [('output_image', 'moving_image')]), (registration, tpl_moving, [('composite_transform', 'transforms')]), (registration, std_mask, [('composite_transform', 'transforms')]), (inputnode, std_dseg, [('moving_segmentation', 'input_image')]), (registration, std_dseg, [('composite_transform', 'transforms')]), (inputnode, std_tpms, [('moving_tpms', 'input_image')]), (registration, std_tpms, [('composite_transform', 'transforms')]), (registration, poutputnode, [('composite_transform', 
'anat2std_xfm'), ('inverse_composite_transform', 'std2anat_xfm')]), (tpl_moving, poutputnode, [('output_image', 'standardized')]), (std_mask, poutputnode, [('output_image', 'std_mask')]), (std_dseg, poutputnode, [('output_image', 'std_dseg')]), (std_tpms, poutputnode, [('output_image', 'std_tpms')]), (inputnode, poutputnode, [('template', 'template')]), ]) # Provide synchronized output outputnode = pe.JoinNode(niu.IdentityInterface(fields=out_fields), name='outputnode', joinsource='inputnode') workflow.connect([ (poutputnode, outputnode, [(f, f) for f in out_fields]), ]) return workflow
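

# Hedged sketch: unlike the keyword-only variants above, this older signature takes
# ``templates`` as a list of (name, spec) tuples. The spec dictionaries are handed to
# TemplateFlow via ``_get_spec``; the ``resolution`` key shown here is an illustrative
# assumption rather than a documented requirement.
def _example_tuple_template_usage():
    wf = init_anat_norm_wf(
        debug=False,
        omp_nthreads=1,
        templates=[('MNI152NLin2009cAsym', {}), ('MNI152NLin6Asym', {'resolution': 2})],
    )
    return wf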
from typing import Any, Iterable

import numpy as np
from nibabel.spatialimages import HeaderDataError
from templateflow import api

from ...model.metadata import axis_codes, templates
from ...utils import logger
from .direction import canonicalize_direction_code
from .niftiheader import NiftiheaderLoader
from .slicetiming import str_slice_timing

template_origin_sets = {
    template: set(
        tuple(value["origin"])
        for value in api.get_metadata(template).get("res", dict()).values()
    )
    for template in templates
}


class NiftiheaderMetadataLoader:
    cache: dict[str, Any] = dict()

    @staticmethod
    def load(niftifile):
        return NiftiheaderLoader.load(niftifile)

    def __init__(self, loader):
        self.loader = loader

    def fill(self, fileobj, key):
def init_anat_preproc_wf( bids_root, freesurfer, fs_spaces, hires, longitudinal, omp_nthreads, output_dir, num_t1w, reportlets_dir, skull_strip_template, template, debug=False, name='anat_preproc_wf', skull_strip_fixed_seed=False): r""" This workflow controls the anatomical preprocessing stages of smriprep. This includes: - Creation of a structural template - Skull-stripping and bias correction - Tissue segmentation - Normalization - Surface reconstruction with FreeSurfer .. workflow:: :graph2use: orig :simple_form: yes from smriprep.workflows.anatomical import init_anat_preproc_wf wf = init_anat_preproc_wf( bids_root='.', freesurfer=True, fs_spaces=['T1w', 'fsnative', 'template', 'fsaverage5'], hires=True, longitudinal=False, omp_nthreads=1, output_dir='.', num_t1w=1, reportlets_dir='.', skull_strip_template='MNI152NLin2009cAsym', template='MNI152NLin2009cAsym', ) **Parameters** bids_root : str Path of the input BIDS dataset root debug : bool Enable debugging outputs freesurfer : bool Enable FreeSurfer surface reconstruction (increases runtime by 6h, at the very least) fs_spaces : list List of output spaces functional images are to be resampled to. Some pipeline components will only be instantiated for some output spaces. Valid spaces: - T1w - template - fsnative - fsaverage (or other pre-existing FreeSurfer templates) hires : bool Enable sub-millimeter preprocessing in FreeSurfer longitudinal : bool Create unbiased structural template, regardless of number of inputs (may increase runtime) name : str, optional Workflow name (default: anat_preproc_wf) omp_nthreads : int Maximum number of threads an individual process may use output_dir : str Directory in which to save derivatives reportlets_dir : str Directory in which to save reportlets skull_strip_fixed_seed : bool Do not use a random seed for skull-stripping - will ensure run-to-run replicability when used with --omp-nthreads 1 (default: ``False``) skull_strip_template : str Name of ANTs skull-stripping template ('MNI152NLin2009cAsym', 'OASIS30ANTs' or 'NKI') template : str Name of template targeted by ``template`` output space **Inputs** t1w List of T1-weighted structural images t2w List of T2-weighted structural images flair List of FLAIR images subjects_dir FreeSurfer SUBJECTS_DIR **Outputs** t1_preproc Bias-corrected structural template, defining T1w space t1_brain Skull-stripped ``t1_preproc`` t1_mask Mask of the skull-stripped template image t1_seg Segmentation of preprocessed structural image, including gray-matter (GM), white-matter (WM) and cerebrospinal fluid (CSF) t1_tpms List of tissue probability maps in T1w space t1_2_mni T1w template, normalized to MNI space t1_2_mni_forward_transform ANTs-compatible affine-and-warp transform file t1_2_mni_reverse_transform ANTs-compatible affine-and-warp transform file (inverse) mni_mask Mask of skull-stripped template, in MNI space mni_seg Segmentation, resampled into MNI space mni_tpms List of tissue probability maps in MNI space subjects_dir FreeSurfer SUBJECTS_DIR subject_id FreeSurfer subject ID t1_2_fsnative_forward_transform LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space t1_2_fsnative_reverse_transform LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w surfaces GIFTI surfaces (gray/white boundary, midthickness, pial, inflated) **Subworkflows** * :py:func:`~niworkflows.anat.ants.init_brain_extraction_wf` * :py:func:`~smriprep.workflows.surfaces.init_surface_recon_wf` """ if isinstance(template, list): # THIS SHOULD 
BE DELETED template = template[0] template_meta = get_metadata(template) template_refs = ['@%s' % template.lower()] if template_meta.get('RRID', None): template_refs += ['RRID:%s' % template_meta['RRID']] workflow = Workflow(name=name) workflow.__postdesc__ = """\ Spatial normalization to the *{template_name}* [{template_refs}] was performed through nonlinear registration with `antsRegistration` (ANTs {ants_ver}), using brain-extracted versions of both T1w volume and template. Brain tissue segmentation of cerebrospinal fluid (CSF), white-matter (WM) and gray-matter (GM) was performed on the brain-extracted T1w using `fast` [FSL {fsl_ver}, RRID:SCR_002823, @fsl_fast]. """.format( ants_ver=ANTsInfo.version() or '<ver>', fsl_ver=fsl.FAST().version or '<ver>', template_name=template_meta['Name'], template_refs=', '.join(template_refs), ) desc = """Anatomical data preprocessing : """ desc += """\ A total of {num_t1w} T1-weighted (T1w) images were found within the input BIDS dataset. All of them were corrected for intensity non-uniformity (INU) """ if num_t1w > 1 else """\ The T1-weighted (T1w) image was corrected for intensity non-uniformity (INU) """ desc += """\ with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} \ [@ants, RRID:SCR_004757]""" desc += '.\n' if num_t1w > 1 else ", and used as T1w-reference throughout the workflow.\n" desc += """\ The T1w-reference was then skull-stripped with a *Nipype* implementation of the `antsBrainExtraction.sh` workflow (from ANTs), using {skullstrip_tpl} as target template. """.format(skullstrip_tpl=skull_strip_template) workflow.__desc__ = desc.format( num_t1w=num_t1w, ants_ver=ANTsInfo.version() or '<ver>' ) inputnode = pe.Node( niu.IdentityInterface(fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id']), name='inputnode') outputnode = pe.Node(niu.IdentityInterface( fields=['t1_preproc', 't1_brain', 't1_mask', 't1_seg', 't1_tpms', 't1_2_mni', 't1_2_mni_forward_transform', 't1_2_mni_reverse_transform', 'mni_mask', 'mni_seg', 'mni_tpms', 'template_transforms', 'subjects_dir', 'subject_id', 't1_2_fsnative_forward_transform', 't1_2_fsnative_reverse_transform', 'surfaces', 't1_aseg', 't1_aparc']), name='outputnode') buffernode = pe.Node(niu.IdentityInterface( fields=['t1_brain', 't1_mask']), name='buffernode') anat_template_wf = init_anat_template_wf(longitudinal=longitudinal, omp_nthreads=omp_nthreads, num_t1w=num_t1w) # 3. Skull-stripping # Bias field correction is handled in skull strip workflows. brain_extraction_wf = init_brain_extraction_wf( in_template=skull_strip_template, atropos_use_random_seed=not skull_strip_fixed_seed, omp_nthreads=omp_nthreads, normalization_quality='precise' if not debug else 'testing') workflow.connect([ (inputnode, anat_template_wf, [('t1w', 'inputnode.t1w')]), (anat_template_wf, brain_extraction_wf, [ ('outputnode.t1_template', 'inputnode.in_files')]), (brain_extraction_wf, outputnode, [ ('outputnode.bias_corrected', 't1_preproc')]), (anat_template_wf, outputnode, [ ('outputnode.template_transforms', 't1_template_transforms')]), (buffernode, outputnode, [('t1_brain', 't1_brain'), ('t1_mask', 't1_mask')]), ]) # 4. 
Surface reconstruction if freesurfer: surface_recon_wf = init_surface_recon_wf(name='surface_recon_wf', omp_nthreads=omp_nthreads, hires=hires) applyrefined = pe.Node(fsl.ApplyMask(), name='applyrefined') workflow.connect([ (inputnode, surface_recon_wf, [ ('t2w', 'inputnode.t2w'), ('flair', 'inputnode.flair'), ('subjects_dir', 'inputnode.subjects_dir'), ('subject_id', 'inputnode.subject_id')]), (anat_template_wf, surface_recon_wf, [('outputnode.t1_template', 'inputnode.t1w')]), (brain_extraction_wf, surface_recon_wf, [ (('outputnode.out_file', _pop), 'inputnode.skullstripped_t1'), ('outputnode.out_segm', 'inputnode.ants_segs'), (('outputnode.bias_corrected', _pop), 'inputnode.corrected_t1')]), (brain_extraction_wf, applyrefined, [ (('outputnode.bias_corrected', _pop), 'in_file')]), (surface_recon_wf, applyrefined, [ ('outputnode.out_brainmask', 'mask_file')]), (surface_recon_wf, outputnode, [ ('outputnode.subjects_dir', 'subjects_dir'), ('outputnode.subject_id', 'subject_id'), ('outputnode.t1_2_fsnative_forward_transform', 't1_2_fsnative_forward_transform'), ('outputnode.t1_2_fsnative_reverse_transform', 't1_2_fsnative_reverse_transform'), ('outputnode.surfaces', 'surfaces'), ('outputnode.out_aseg', 't1_aseg'), ('outputnode.out_aparc', 't1_aparc')]), (applyrefined, buffernode, [('out_file', 't1_brain')]), (surface_recon_wf, buffernode, [ ('outputnode.out_brainmask', 't1_mask')]), ]) else: workflow.connect([ (brain_extraction_wf, buffernode, [ (('outputnode.out_file', _pop), 't1_brain'), ('outputnode.out_mask', 't1_mask')]), ]) # 5. Segmentation t1_seg = pe.Node(fsl.FAST(segments=True, no_bias=True, probability_maps=True), name='t1_seg', mem_gb=3) workflow.connect([ (buffernode, t1_seg, [('t1_brain', 'in_files')]), (t1_seg, outputnode, [('tissue_class_map', 't1_seg'), ('probability_maps', 't1_tpms')]), ]) # 6. 
Spatial normalization (T1w to MNI registration) t1_2_mni = pe.Node( RobustMNINormalizationRPT( float=True, generate_report=True, flavor='testing' if debug else 'precise', ), name='t1_2_mni', n_procs=omp_nthreads, mem_gb=2 ) # Resample the brain mask and the tissue probability maps into mni space mni_mask = pe.Node( ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='MultiLabel'), name='mni_mask' ) mni_seg = pe.Node( ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='MultiLabel'), name='mni_seg' ) mni_tpms = pe.MapNode( ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='Linear'), iterfield=['input_image'], name='mni_tpms' ) # TODO isolate the spatial normalization workflow ############# ref_img = str(get_template(template, resolution=1, desc=None, suffix='T1w', extensions=['.nii', '.nii.gz'])) t1_2_mni.inputs.template = template mni_mask.inputs.reference_image = ref_img mni_seg.inputs.reference_image = ref_img mni_tpms.inputs.reference_image = ref_img workflow.connect([ (inputnode, t1_2_mni, [('roi', 'lesion_mask')]), (brain_extraction_wf, t1_2_mni, [ (('outputnode.bias_corrected', _pop), 'moving_image')]), (buffernode, t1_2_mni, [('t1_mask', 'moving_mask')]), (buffernode, mni_mask, [('t1_mask', 'input_image')]), (t1_2_mni, mni_mask, [('composite_transform', 'transforms')]), (t1_seg, mni_seg, [('tissue_class_map', 'input_image')]), (t1_2_mni, mni_seg, [('composite_transform', 'transforms')]), (t1_seg, mni_tpms, [('probability_maps', 'input_image')]), (t1_2_mni, mni_tpms, [('composite_transform', 'transforms')]), (t1_2_mni, outputnode, [ ('warped_image', 't1_2_mni'), ('composite_transform', 't1_2_mni_forward_transform'), ('inverse_composite_transform', 't1_2_mni_reverse_transform')]), (mni_mask, outputnode, [('output_image', 'mni_mask')]), (mni_seg, outputnode, [('output_image', 'mni_seg')]), (mni_tpms, outputnode, [('output_image', 'mni_tpms')]), ]) # spatial normalization ends here ############################### seg_rpt = pe.Node(ROIsPlot(colors=['magenta', 'b'], levels=[1.5, 2.5]), name='seg_rpt') anat_reports_wf = init_anat_reports_wf( reportlets_dir=reportlets_dir, template=template, freesurfer=freesurfer) workflow.connect([ (inputnode, anat_reports_wf, [ (('t1w', fix_multi_T1w_source_name), 'inputnode.source_file')]), (anat_template_wf, anat_reports_wf, [ ('outputnode.out_report', 'inputnode.t1_conform_report')]), (anat_template_wf, seg_rpt, [ ('outputnode.t1_template', 'in_file')]), (t1_seg, seg_rpt, [('tissue_class_map', 'in_rois')]), (outputnode, seg_rpt, [('t1_mask', 'in_mask')]), (seg_rpt, anat_reports_wf, [('out_report', 'inputnode.seg_report')]), (t1_2_mni, anat_reports_wf, [('out_report', 'inputnode.t1_2_mni_report')]), ]) if freesurfer: workflow.connect([ (surface_recon_wf, anat_reports_wf, [ ('outputnode.out_report', 'inputnode.recon_report')]), ]) anat_derivatives_wf = init_anat_derivatives_wf( bids_root=bids_root, freesurfer=freesurfer, output_dir=output_dir, template=template, ) workflow.connect([ (anat_template_wf, anat_derivatives_wf, [ ('outputnode.t1w_valid_list', 'inputnode.source_files')]), (outputnode, anat_derivatives_wf, [ ('t1_template_transforms', 'inputnode.t1_template_transforms'), ('t1_preproc', 'inputnode.t1_preproc'), ('t1_mask', 'inputnode.t1_mask'), ('t1_seg', 'inputnode.t1_seg'), ('t1_tpms', 'inputnode.t1_tpms'), ('t1_2_mni_forward_transform', 'inputnode.t1_2_mni_forward_transform'), ('t1_2_mni_reverse_transform', 'inputnode.t1_2_mni_reverse_transform'), ('t1_2_mni', 'inputnode.t1_2_mni'), 
('mni_mask', 'inputnode.mni_mask'), ('mni_seg', 'inputnode.mni_seg'), ('mni_tpms', 'inputnode.mni_tpms'), ('t1_2_fsnative_forward_transform', 'inputnode.t1_2_fsnative_forward_transform'), ('surfaces', 'inputnode.surfaces'), ]), ]) if freesurfer: workflow.connect([ (surface_recon_wf, anat_derivatives_wf, [ ('outputnode.out_aseg', 'inputnode.t1_fs_aseg'), ('outputnode.out_aparc', 'inputnode.t1_fs_aparc'), ]), ]) return workflow
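

# Hedged sketch: minimal standalone instantiation of the anatomical preprocessing
# workflow above, without FreeSurfer. Directories and the BIDS path are placeholders
# and would normally be supplied by the command-line wrapper.
def _example_anat_preproc_usage():
    anat_wf = init_anat_preproc_wf(
        bids_root='.',
        freesurfer=False,
        fs_spaces=['T1w', 'template'],
        hires=False,
        longitudinal=False,
        omp_nthreads=4,
        output_dir='derivatives',
        num_t1w=1,
        reportlets_dir='reportlets',
        skull_strip_template='OASIS30ANTs',
        template='MNI152NLin2009cAsym',
    )
    anat_wf.inputs.inputnode.t1w = ['sub-01/anat/sub-01_T1w.nii.gz']
    return anat_wf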
def init_anat_norm_wf( debug, omp_nthreads, reportlets_dir, template_list, template_specs=None, ): """ An individual spatial normalization workflow using ``antsRegistration``. .. workflow :: :graph2use: orig :simple_form: yes from smriprep.workflows.norm import init_anat_norm_wf wf = init_anat_norm_wf( debug=False, omp_nthreads=1, reportlets_dir='.', template_list=['MNI152NLin2009cAsym', 'MNI152NLin6Asym'], ) **Parameters** debug : bool Apply sloppy arguments to speed up processing. Use with caution, registration processes will be very inaccurate. omp_nthreads : int Maximum number of threads an individual process may use. reportlets_dir : str Directory in which to save reportlets. template_list : list of str List of TemplateFlow identifiers (e.g. ``MNI152NLin6Asym``) that specifies the target template for spatial normalization. In the future, this parameter should accept also paths to custom/private templates with TemplateFlow's organization. **Inputs** moving_image The input image that will be normalized to standard space. moving_mask A precise brain mask separating skull/skin/fat from brain structures. moving_segmentation A brain tissue segmentation of the ``moving_image``. moving_tpms tissue probability maps (TPMs) corresponding to the ``moving_segmentation``. lesion_mask (optional) A mask to exclude regions from the cost-function input domain to enable standardization of lesioned brains. orig_t1w The original T1w image from the BIDS structure. **Outputs** warped The T1w after spatial normalization, in template space. forward_transform The T1w-to-template transform. reverse_transform The template-to-T1w transform. tpl_mask The ``moving_mask`` in template space (matches ``warped`` output). tpl_seg The ``moving_segmentation`` in template space (matches ``warped`` output). tpl_tpms The ``moving_tpms`` in template space (matches ``warped`` output). template The input parameter ``template`` for further use in nodes depending on this workflow. """ if not isinstance(template_list, (list, tuple)): template_list = [template_list] templateflow = templates() if any(template not in templateflow for template in template_list): raise NotImplementedError( 'This is embarrassing - custom templates are not (yet) supported.' 'Please make sure none of the options already available via TemplateFlow ' 'fit your needs.') workflow = Workflow('anat_norm_wf') workflow.__desc__ = """\ Volume-based spatial normalization to {targets} ({targets_id}) was performed through nonlinear registration with `antsRegistration` (ANTs {ants_ver}), using brain-extracted versions of both T1w reference and the T1w template. The following template{tpls} selected for spatial normalization: """.format(ants_ver=ANTsInfo.version() or '(version unknown)', targets='%s standard space%s' % (defaultdict('several'.format, { 1: 'one', 2: 'two', 3: 'three', 4: 'four' })[len(template_list)], 's' * (len(template_list) != 1)), targets_id=', '.join(template_list), tpls=(' was', 's were')[len(template_list) != 1]) if not template_specs: template_specs = [{}] * len(template_list) if len(template_list) != len(template_specs): raise RuntimeError( 'Number of templates (%d) doesn\'t match the number of specs ' '(%d) provided.' 
% (len(template_list), len(template_specs))) # Append template citations to description for template in template_list: template_meta = get_metadata(template) template_refs = ['@%s' % template.lower()] if template_meta.get('RRID', None): template_refs += ['RRID:%s' % template_meta['RRID']] workflow.__desc__ += """\ *{template_name}* [{template_refs}; TemplateFlow ID: {template}]""".format( template=template, template_name=template_meta['Name'], template_refs=', '.join(template_refs)) workflow.__desc__ += (', ', '.')[template == template_list[-1]] inputnode = pe.Node(niu.IdentityInterface(fields=[ 'moving_image', 'moving_mask', 'moving_segmentation', 'moving_tpms', 'lesion_mask', 'orig_t1w', 'template' ]), name='inputnode') inputnode.iterables = [('template', template_list)] out_fields = [ 'warped', 'forward_transform', 'reverse_transform', 'tpl_mask', 'tpl_seg', 'tpl_tpms', 'template' ] poutputnode = pe.Node(niu.IdentityInterface(fields=out_fields), name='poutputnode') tpl_specs = pe.Node(niu.Function(function=_select_specs), name='tpl_specs', run_without_submitting=True) tpl_specs.inputs.template_list = template_list tpl_specs.inputs.template_specs = template_specs tpl_select = pe.Node(niu.Function(function=_get_template), name='tpl_select', run_without_submitting=True) # With the improvements from poldracklab/niworkflows#342 this truncation is now necessary trunc_mov = pe.Node(ImageMath(operation='TruncateImageIntensity', op2='0.01 0.999 256'), name='trunc_mov') registration = pe.Node(RobustMNINormalization( float=True, flavor=['precise', 'testing'][debug], ), name='registration', n_procs=omp_nthreads, mem_gb=2) # Resample T1w-space inputs tpl_moving = pe.Node(ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='LanczosWindowedSinc'), name='tpl_moving') tpl_mask = pe.Node(ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='MultiLabel'), name='tpl_mask') tpl_seg = pe.Node(ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='MultiLabel'), name='tpl_seg') tpl_tpms = pe.MapNode(ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='Gaussian'), iterfield=['input_image'], name='tpl_tpms') workflow.connect([ (inputnode, tpl_specs, [('template', 'template')]), (inputnode, tpl_select, [('template', 'template')]), (inputnode, registration, [('template', 'template')]), (inputnode, trunc_mov, [('moving_image', 'op1')]), (inputnode, registration, [('moving_mask', 'moving_mask'), ('lesion_mask', 'lesion_mask')]), (inputnode, tpl_moving, [('moving_image', 'input_image')]), (inputnode, tpl_mask, [('moving_mask', 'input_image')]), (tpl_specs, tpl_select, [('out', 'template_spec')]), (tpl_specs, registration, [(('out', _drop_res), 'template_spec')]), (tpl_select, tpl_moving, [('out', 'reference_image')]), (tpl_select, tpl_mask, [('out', 'reference_image')]), (tpl_select, tpl_seg, [('out', 'reference_image')]), (tpl_select, tpl_tpms, [('out', 'reference_image')]), (trunc_mov, registration, [('output_image', 'moving_image')]), (registration, tpl_moving, [('composite_transform', 'transforms')]), (registration, tpl_mask, [('composite_transform', 'transforms')]), (inputnode, tpl_seg, [('moving_segmentation', 'input_image')]), (registration, tpl_seg, [('composite_transform', 'transforms')]), (inputnode, tpl_tpms, [('moving_tpms', 'input_image')]), (registration, tpl_tpms, [('composite_transform', 'transforms')]), (registration, poutputnode, [('composite_transform', 'forward_transform'), ('inverse_composite_transform', 
'reverse_transform')]), (tpl_moving, poutputnode, [('output_image', 'warped')]), (tpl_mask, poutputnode, [('output_image', 'tpl_mask')]), (tpl_seg, poutputnode, [('output_image', 'tpl_seg')]), (tpl_tpms, poutputnode, [('output_image', 'tpl_tpms')]), (inputnode, poutputnode, [('template', 'template')]), ]) # Generate and store report msk_select = pe.Node(niu.Function( function=_get_template, input_names=['template', 'template_spec', 'suffix', 'desc']), name='msk_select', run_without_submitting=True) msk_select.inputs.desc = 'brain' msk_select.inputs.suffix = 'mask' norm_msk = pe.Node(niu.Function( function=_rpt_masks, output_names=['before', 'after'], input_names=['mask_file', 'before', 'after', 'after_mask']), name='norm_msk') norm_rpt = pe.Node(SimpleBeforeAfter(), name='norm_rpt', mem_gb=0.1) norm_rpt.inputs.after_label = 'Participant' # after ds_t1_2_tpl_report = pe.Node(DerivativesDataSink( base_directory=reportlets_dir, keep_dtype=True), name='ds_t1_2_tpl_report', run_without_submitting=True) workflow.connect([ (inputnode, msk_select, [('template', 'template')]), (inputnode, norm_rpt, [('template', 'before_label')]), (tpl_mask, norm_msk, [('output_image', 'after_mask')]), (tpl_specs, msk_select, [('out', 'template_spec')]), (msk_select, norm_msk, [('out', 'mask_file')]), (tpl_select, norm_msk, [('out', 'before')]), (tpl_moving, norm_msk, [('output_image', 'after')]), (norm_msk, norm_rpt, [('before', 'before'), ('after', 'after')]), (inputnode, ds_t1_2_tpl_report, [('template', 'space'), ('orig_t1w', 'source_file')]), (norm_rpt, ds_t1_2_tpl_report, [('out_report', 'in_file')]), ]) # Provide synchronized output outputnode = pe.JoinNode(niu.IdentityInterface(fields=out_fields), name='outputnode', joinsource='inputnode') workflow.connect([ (poutputnode, outputnode, [(f, f) for f in out_fields]), ]) return workflow
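

# Hedged sketch: this oldest variant pairs ``template_list`` with per-template spec
# dictionaries and raises a RuntimeError when their lengths differ; empty dicts fall
# back to the defaults. The reportlets directory is a placeholder.
def _example_norm_with_specs():
    wf = init_anat_norm_wf(
        debug=False,
        omp_nthreads=1,
        reportlets_dir='.',
        template_list=['MNI152NLin2009cAsym', 'MNI152NLin6Asym'],
        template_specs=[{}, {}],
    )
    return wf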