def structural_registration(template, num_threads=4):
    """Set up ANTs nodes for structural-to-template registration.

    Parameters
    ----------
    template : str
        Path to the fixed (template) image; ``~`` is expanded.
    num_threads : int, optional
        Thread count applied to the registration and both warp nodes.

    Returns
    -------
    registration, s_warp, f_warp : nipype.pipeline.engine.Node
        The Registration node plus ApplyTransforms nodes for the
        structural (``s_warp``) and functional (``f_warp``) images.
    """
    registration = pe.Node(ants.Registration(), name="s_register")
    registration.inputs.fixed_image = path.abspath(path.expanduser(template))
    registration.inputs.output_transform_prefix = "output_"
    registration.inputs.transforms = ['Affine', 'SyN']
    registration.inputs.transform_parameters = [(1.0,), (1.0, 3.0, 5.0)]
    registration.inputs.number_of_iterations = [[2000, 1000, 500], [100, 100, 100]]
    registration.inputs.dimension = 3
    registration.inputs.write_composite_transform = True
    registration.inputs.collapse_output_transforms = True
    registration.inputs.initial_moving_transform_com = True
    # Tested on Affine transform: CC takes too long; Demons does not tilt,
    # but moves the slices too far caudally; GC tilts too much; MI and
    # MeanSquares seem equally good.
    registration.inputs.metric = ['MeanSquares', 'Mattes']
    registration.inputs.metric_weight = [1, 1]
    registration.inputs.radius_or_number_of_bins = [16, 32]
    registration.inputs.sampling_strategy = ['Random', None]
    registration.inputs.sampling_percentage = [0.3, 0.3]
    registration.inputs.convergence_threshold = [1.e-11, 1.e-8]
    registration.inputs.convergence_window_size = [20, 20]
    registration.inputs.smoothing_sigmas = [[4, 2, 1], [4, 2, 1]]
    registration.inputs.sigma_units = ['vox', 'vox']
    registration.inputs.shrink_factors = [[3, 2, 1], [3, 2, 1]]
    registration.inputs.use_estimate_learning_rate_once = [True, True]
    # If the fixed_image is not acquired similarly to the moving_image
    # (e.g. RARE to histological (e.g. AMBMC)) this should be False.
    registration.inputs.use_histogram_matching = [False, False]
    registration.inputs.winsorize_lower_quantile = 0.005
    registration.inputs.winsorize_upper_quantile = 0.995
    registration.inputs.args = '--float'
    registration.inputs.num_threads = num_threads

    f_warp = pe.Node(ants.ApplyTransforms(), name="f_warp")
    f_warp.inputs.reference_image = path.abspath(path.expanduser(template))
    f_warp.inputs.input_image_type = 3
    f_warp.inputs.interpolation = 'Linear'
    f_warp.inputs.invert_transform_flags = [False]
    f_warp.inputs.terminal_output = 'file'
    f_warp.num_threads = num_threads

    s_warp = pe.Node(ants.ApplyTransforms(), name="s_warp")
    s_warp.inputs.reference_image = path.abspath(path.expanduser(template))
    s_warp.inputs.input_image_type = 3
    s_warp.inputs.interpolation = 'Linear'
    s_warp.inputs.invert_transform_flags = [False]
    s_warp.inputs.terminal_output = 'file'
    s_warp.num_threads = num_threads

    return registration, s_warp, f_warp
def _compose_tfms(args):
    """Create a composite transform from inputs."""
    in_file, in_xform, ifargs, index, newpath = args
    out_file = fname_presuffix(in_file, suffix='_xform-%05d' % index,
                               newpath=newpath, use_ext=True)

    xfm = ants.ApplyTransforms(
        input_image=in_file,
        transforms=in_xform,
        output_image=out_file,
        print_out_composite_warp_file=True,
        interpolation='LanczosWindowedSinc',
        **ifargs)
    xfm.terminal_output = 'allatonce'
    xfm.resource_monitor = False
    runtime = xfm.run().runtime
    LOGGER.info(runtime.cmdline)

    # Force floating point precision
    nii = nb.load(out_file, mmap=False)
    nii.set_data_dtype(np.dtype('float32'))
    nii.to_filename(out_file)

    # Keep only the affine transforms (anything that is not a NIfTI warp)
    affines = [xf for xf in in_xform if '.nii' not in xf]
    out_affine = fname_presuffix(in_file,
                                 suffix='_affine_xform-%05d.mat' % index,
                                 newpath=newpath, use_ext=False)
    affine_file, affine_cmd = compose_affines(
        ifargs['reference_image'], affines, out_affine)

    return (out_file, runtime.cmdline, affine_file, affine_cmd)
def create_ants_registration_pipeline(name='ants_registration'):
    """Build a workflow applying a composite ANTs transform to a time series.

    Inputs (inputnode): denoised_ts, composite_transform, ref.
    Outputs (outputnode): ants_reg_ts (warped time series).
    """
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # Fix: honor the ``name`` argument — it was previously ignored and the
    # workflow was always named 'ants_registration'.
    ants_registration = Workflow(name=name)

    # inputnode
    inputnode = Node(util.IdentityInterface(
        fields=['denoised_ts', 'composite_transform', 'ref']),
        name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ants_reg_ts', ]),
                      name='outputnode')

    ants_reg = Node(ants.ApplyTransforms(input_image_type=3,
                                         dimension=3,
                                         interpolation='Linear'),
                    name='ants_reg')

    ants_registration.connect([
        (inputnode, ants_reg, [('denoised_ts', 'input_image')]),
        (inputnode, ants_reg, [('ref', 'reference_image')]),
        (inputnode, ants_reg, [('composite_transform', 'transforms')]),
        (ants_reg, outputnode, [('output_image', 'ants_reg_ts')]),
    ])
    return ants_registration
def pe2mni_ants(
        pe, mni2anat_hd5, affine_matrix, workdir,
        standard='/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz'):
    """
    Transform a 3D statistical image to MNI space with ANTS given
    pre-existing transformation matrices (in our case, from GLM/nipype
    working directory).
    This is very much mimicking the behavior of the 'warpall' node in our
    analysis pipeline.
    """
    # path to output file
    outfile = join(workdir, 'pe2mni.nii.gz')

    # apply both affine and non-linear transform to parameter estimate file
    # TODO: Not sure about the order in which ANTS wants these
    # transformation files ...
    warper = ants.ApplyTransforms(
        input_image=pe,
        reference_image=standard,
        output_image=outfile,
        transforms=[mni2anat_hd5, affine_matrix],
        dimension=3,
        interpolation='Linear',
        terminal_output='file')
    warper.run()

    # return path to resulting image
    return outfile
def airmsk_wf(name='AirMaskWorkflow'):
    """Implements the Step 1 of [Mortamet2009]_."""
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_mask', 'head_mask', 'reverse_transforms',
        'reverse_invert_flags']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'artifact_msk']), name='outputnode')

    # Resample the MNI head mask into subject space via the reverse transforms
    invt = pe.Node(ants.ApplyTransforms(dimension=3, default_value=0,
                                        interpolation='NearestNeighbor'),
                   name='invert_xfm')
    invt.inputs.input_image = op.join(get_mni_icbm152_nlin_asym_09c(),
                                      '1mm_headmask.nii.gz')

    qi1 = pe.Node(ArtifactMask(), name='ArtifactMask')

    workflow.connect([
        (inputnode, qi1, [('in_file', 'in_file'),
                          ('head_mask', 'head_mask')]),
        (inputnode, invt, [('in_mask', 'reference_image'),
                           ('reverse_transforms', 'transforms'),
                           ('reverse_invert_flags', 'invert_transform_flags')]),
        (invt, qi1, [('output_image', 'nasion_post_mask')]),
        (qi1, outputnode, [('out_air_msk', 'out_file'),
                           ('out_art_msk', 'artifact_msk')]),
    ])
    return workflow
def create_ants_registration_pipeline(name='ants_registration'):
    """Workflow warping correlation Z-maps to a reference via affine + warp.

    Inputs (inputnode): corr_Z (list of maps), ants_affine, ants_warp, ref.
    Outputs (outputnode): ants_reg_corr_Z.
    """
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # Fix: honor the ``name`` argument — it was previously ignored and the
    # workflow was always named 'ants_registration'.
    ants_registration = Workflow(name=name)

    # inputnode
    inputnode = Node(util.IdentityInterface(
        fields=['corr_Z', 'ants_affine', 'ants_warp', 'ref']),
        name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ants_reg_corr_Z', ]),
                      name='outputnode')

    # also transform to mni space
    collect_transforms = Node(interface=util.Merge(2),
                              name='collect_transforms')
    ants_reg = MapNode(ants.ApplyTransforms(input_image_type=3,
                                            dimension=3,
                                            interpolation='Linear'),
                       name='ants_reg', iterfield='input_image')

    ants_registration.connect([
        (inputnode, ants_reg, [('corr_Z', 'input_image')]),
        (inputnode, ants_reg, [('ref', 'reference_image')]),
        (inputnode, collect_transforms, [('ants_affine', 'in1')]),
        (inputnode, collect_transforms, [('ants_warp', 'in2')]),
        (collect_transforms, ants_reg, [('out', 'transforms')]),
        (ants_reg, outputnode, [('output_image', 'ants_reg_corr_Z')]),
    ])
    return ants_registration
def transform_volume(vol, transform, label_img=False, outputpath=None,
                     ref_img=None):
    """Resample ``vol`` through one or more transforms.

    Uses nearest-neighbour interpolation for label images (so label values
    stay intact) and linear interpolation otherwise. Falls back to
    ``TEMPLATE_VOLUME`` as reference and a temp-folder output name when not
    given explicitly.
    """
    if not ref_img:
        ref_img = TEMPLATE_VOLUME
    transforms = [decompress_file(tf) for tf in ensure_list(transform)]
    if outputpath:
        result = outputpath
    else:
        result = TEMP_FOLDER_PATH + get_basename(basename(vol)) + '_reg.nii.gz'

    warper = ants.ApplyTransforms()
    warper.inputs.interpolation = 'NearestNeighbor' if label_img else 'Linear'
    warper.inputs.dimension = 3
    warper.inputs.input_image = vol
    warper.inputs.reference_image = ref_img
    warper.inputs.output_image = result
    warper.inputs.default_value = 0
    warper.inputs.transforms = transforms
    warper.inputs.invert_transform_flags = [False] * len(transforms)
    warper.run()
    return warper.inputs.output_image
def copes1_2_anat_func(fixed, cope1_10Hz_r1, cope1_10Hz_r2, cope1_10Hz_r3,
                       func_2_anat_trans_10Hz_r1, func_2_anat_trans_10Hz_r2,
                       func_2_anat_trans_10Hz_r3, mask_brain):
    """Warp per-run cope1 images into anatomical space and merge across runs.

    For each of the three 10 Hz runs, the cope image is resampled into the
    space of ``fixed`` with ANTs, and a matching constant DOF image
    (value 147 — presumably the fixed-effects DOF, confirm against the
    first-level design — masked by ``mask_brain``) is created for FLAME.

    Returns
    -------
    copes1_2_anat, dofs_t1_2_anat : str
        Absolute paths to the 4D merged cope and DOF images (written to the
        current working directory).
    """
    # Fix: removed unused ``import re`` and unused ``cwd`` local.
    import os
    import nipype.interfaces.ants as ants
    import nipype.interfaces.fsl as fsl

    copes1 = [cope1_10Hz_r1, cope1_10Hz_r2, cope1_10Hz_r3]
    trans = [func_2_anat_trans_10Hz_r1, func_2_anat_trans_10Hz_r2,
             func_2_anat_trans_10Hz_r3]
    copes1_2_anat = []
    FEtdof_t1_2_anat = []
    for i, (moving, transform) in enumerate(zip(copes1, trans), start=1):
        cope_name = 'cope1_2_anat_10Hz_r{0}.nii.gz'.format(i)
        ants_apply = ants.ApplyTransforms()
        ants_apply.inputs.dimension = 3
        ants_apply.inputs.input_image = moving
        ants_apply.inputs.reference_image = fixed
        ants_apply.inputs.transforms = transform
        ants_apply.inputs.output_image = cope_name
        ants_apply.run()
        copes1_2_anat.append(os.path.abspath(cope_name))

        # Constant-valued DOF volume restricted to the brain mask
        dof_name = 'FEtdof_t1_2_anat_10Hz_r{0}.nii.gz'.format(i)
        dof = fsl.ImageMaths()
        dof.inputs.in_file = cope_name
        dof.inputs.op_string = '-mul 0 -add 147 -mas'
        dof.inputs.in_file2 = mask_brain
        dof.inputs.out_file = dof_name
        dof.run()
        FEtdof_t1_2_anat.append(os.path.abspath(dof_name))

    # Concatenate the per-run images along time
    merge = fsl.Merge()
    merge.inputs.dimension = 't'
    merge.inputs.in_files = copes1_2_anat
    merge.inputs.merged_file = 'copes1_2_anat_10Hz.nii.gz'
    merge.run()
    merge.inputs.in_files = FEtdof_t1_2_anat
    merge.inputs.merged_file = 'dofs_t1_2_anat_10Hz.nii.gz'
    merge.run()

    copes1_2_anat = os.path.abspath('copes1_2_anat_10Hz.nii.gz')
    dofs_t1_2_anat = os.path.abspath('dofs_t1_2_anat_10Hz.nii.gz')
    return copes1_2_anat, dofs_t1_2_anat
def _resample_atlas(input_atlas, output_atlas, transform, ref_image):
    """Resample a label atlas into ``ref_image`` space.

    MultiLabel interpolation preserves discrete label values. Returns the
    ANTs command line that was executed.
    """
    resampler = ants.ApplyTransforms(
        transforms=[transform],
        reference_image=ref_image,
        input_image=input_atlas,
        output_image=output_atlas,
        interpolation="MultiLabel",
    )
    return resampler.run().runtime.cmdline
def register(warped_dir, atlas_image, moving_images, n_jobs):
    """Register each moving image to ``atlas_image`` (Affine + SyN) and
    resample it through the resulting forward transforms.

    NOTE(review): ``moving_image`` is both an iterable and a statically set
    input on ``input_spec`` — the static assignment appears redundant given
    the iterables; verify against nipype's iterables semantics.
    """
    input_spec = pe.Node(
        utility.IdentityInterface(fields=['moving_image', 'fixed_image']),
        iterables=[('moving_image', moving_images)],
        synchronize=True,
        name='input_spec')
    # set input_spec
    input_spec.inputs.moving_image = moving_images
    input_spec.inputs.fixed_image = atlas_image
    '''
    CC[x, x, 1, 8]: [fixed, moving, weight, radius]
    -t SyN[0.25]: Syn transform with a gradient step of 0.25
    -r Gauss[3, 0]: sigma 0
    -I 30x50x20 use - Histogram - Matching
    number - of - affine - iterations 10000x10000x10000x10000:
    4 level image pyramid with 10000 iterations at each level
    MI - option 32x16000: 32 bins, 16000 samples
    '''
    reg = pe.Node(
        ants.Registration(
            dimension=3,
            output_transform_prefix="output_",
            #interpolation='BSpline',
            transforms=['Affine', 'SyN'],
            transform_parameters=[(2.0, ), (0.25, )],  # default values syn
            shrink_factors=[[8, 4, 2, 1], [4, 2, 1]],
            smoothing_sigmas=[[3, 2, 1, 0], [2, 1, 0]],  # None for Syn?
            sigma_units=['vox'] * 2,
            sampling_percentage=[0.05, None],  # just use default?
            sampling_strategy=['Random', 'None'],
            number_of_iterations=[[10000, 10000, 10000, 10000],
                                  [30, 50, 20]],
            metric=['MI', 'CC'],
            metric_weight=[1, 1],
            radius_or_number_of_bins=[(32), (8)],
            #winsorize_lower_quantile=0.05,
            #winsorize_upper_quantile=0.95,
            verbose=True,
            use_histogram_matching=[True, True]),
        name='calc_registration')

    applytransforms = pe.Node(ants.ApplyTransforms(dimension=3),
                              name='apply_warpfield')

    wf = pe.Workflow(name='wf', base_dir=warped_dir)
    wf.connect([(input_spec, reg, [('fixed_image', 'fixed_image'),
                                   ('moving_image', 'moving_image')]),
                (input_spec, applytransforms,
                 [('moving_image', 'input_image'),
                  ('fixed_image', 'reference_image')])])
    wf.connect(reg, 'forward_transforms', applytransforms, 'transforms')
    wf.config['execution']['parameterize_dirs'] = False
    wf.write_graph()
    output = wf.run(plugin='MultiProc', plugin_args={'n_procs': n_jobs})
def _downsample(in_file):
    """Resample ``in_file`` onto the downsampled MNI grid.

    Nearest-neighbour interpolation with an identity transform — this is a
    pure grid resampling, no spatial warping.
    """
    resample = ants.ApplyTransforms(
        dimension=3,
        input_image_type=0,
        input_image=in_file,
        reference_image=mni_downsampled,
        interpolation="NearestNeighbor",
        transforms=["identity"],
    )
    return resample.run().outputs.output_image
def mask2subjspace_real(mnimask, anat, pe, mni2anat_hd5, affine_matrix,
                        workdir):
    """
    Simulate data based on transformation matrices already obtained from
    analysis of real data.
    (This is the same as mask2pe_ants in extract_pe.py for extracting
    parameter estimates).
    """
    # paths for temporary files and output file
    mni2anat_out_name = join(workdir, 'mnimask2anat.nii.gz')  # temporary nifti
    affine_matrix_inverse = join(workdir, 'anat2pe.txt')  # inverse affine
    outfile = join(workdir, 'mask2pe.nii.gz')  # final output

    # invert affine transmatrix (output from glm analysis)
    inverse = ConvertXFM(
        in_file=affine_matrix,
        out_file=affine_matrix_inverse,
        invert_xfm=True)
    inverse.run()

    # apply inverse transform from mni to anat space
    # (output from glm analysis)
    mni2anat = ants.ApplyTransforms(
        input_image=mnimask,
        reference_image=anat,
        output_image=mni2anat_out_name,
        transforms=[mni2anat_hd5],
        dimension=3,
        interpolation='NearestNeighbor',
        terminal_output='file')
    mni2anat.run()

    # apply inverse affine transform from anat to pe estimate space
    mni2pe = fsl.FLIRT(
        interp='nearestneighbour',
        apply_xfm=True,
        in_matrix_file=affine_matrix_inverse,
        out_matrix_file=join(workdir, 'anat2pe_flirt.mat'),
        # the above output matrix is redundant. But if we don't specify,
        # it just gets stored next to the script (and overwritten when
        # iterating over subjects!). We want it tidy.
        in_file=mni2anat_out_name,
        reference=pe,
        out_file=outfile)
    mni2pe.run()

    # return path to projected mask
    return outfile
def applytransform(in_file, reference, out_file, transformfile,
                   interpolation='Linear'):
    """Apply ANTs transform(s) to ``in_file`` and write ``out_file``.

    Returns the output image path that was configured on the interface.
    """
    warper = ants.ApplyTransforms()
    warper.inputs.dimension = 3
    warper.inputs.input_image = in_file
    warper.inputs.reference_image = reference
    warper.inputs.output_image = out_file
    warper.inputs.interpolation = interpolation
    warper.inputs.transforms = transformfile
    warper.run()
    return warper.inputs.output_image
def apply_warp_ants(strWarpPath, strInFile=None, strOutPath=None,
                    bIsTimeseries=False, strRefFile=None, strAntsPath=None):
    '''
    Apply a warp field (in ITK displacement field format) to an image

    :param strWarpPath: path to a warp field (.nii.gz)
    :type strWarpPath: str
    :param strInFile: path to input image. Defaults to MNI T1 template.
    :type strInFile: str
    :param strOutPath: output path for warped image. Will create a temp
        path if not specified.
    :type strOutPath: str
    :param bIsTimeseries: True if the input is a 4D time series
    :type bIsTimeseries: bool
    :param strRefFile: reference image; defaults to the input image
    :type strRefFile: str
    :param strAntsPath: path to ANTS binary installation
    :type strAntsPath: str
    :return: if strOutPath is not specified, returns the warped image as a
        Nibabel Nifti1 object; otherwise None
    :rtype:
    '''
    if strInFile is None:
        strInFile = strTemplatePath
    if strOutPath is None:
        bTempDir = True
        tempdir = tempfile.TemporaryDirectory()
        strOutPath = os.path.join(str(tempdir.name), 'warped.nii.gz')
    else:
        bTempDir = False
    # Fix: 'is None' instead of '== None' (identity check for None)
    if strAntsPath is None:
        strAntsPath = strAntsInstallPath

    # ANTs --input-image-type: 3 = time series, 0 = scalar image
    nImType = 3 if bIsTimeseries else 0
    if strRefFile is None:
        strRefFile = strInFile

    transform = ants.ApplyTransforms(
        input_image=strInFile,
        reference_image=strRefFile,
        transforms=strWarpPath,
        input_image_type=nImType,
        dimension=3,
        interpolation='BSpline',
        output_image=strOutPath,
        environ={'PATH': '$PATH:{}'.format(strAntsPath)})
    # Fix: dropped the unused ``out`` binding of the run result.
    transform.run()

    if bTempDir:
        img = nil.image.load_img(strOutPath)
        tempdir.cleanup()
        return img
def airmsk_wf(name='AirMaskWorkflow'):
    """
    Implements the Step 1 of [Mortamet2009]_.

    .. workflow::

        from mriqc.workflows.anatomical import airmsk_wf
        from mriqc.testing import mock_config
        with mock_config():
            wf = airmsk_wf()

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_mask', 'head_mask', 'inverse_composite_transform']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['hat_mask', 'air_mask', 'art_mask', 'rot_mask']),
        name='outputnode')

    rotmsk = pe.Node(RotationMask(), name='RotationMask')

    # Bring the template head mask into subject space
    invt = pe.Node(ants.ApplyTransforms(dimension=3, default_value=0,
                                        interpolation='MultiLabel',
                                        float=True),
                   name='invert_xfm')
    invt.inputs.input_image = str(get_template(
        'MNI152NLin2009cAsym', resolution=1, desc='head', suffix='mask'))

    qi1 = pe.Node(ArtifactMask(), name='ArtifactMask')

    workflow.connect([
        (inputnode, rotmsk, [('in_file', 'in_file')]),
        (inputnode, qi1, [('in_file', 'in_file'),
                          ('head_mask', 'head_mask')]),
        (rotmsk, qi1, [('out_file', 'rot_mask')]),
        (inputnode, invt, [('in_mask', 'reference_image'),
                           ('inverse_composite_transform', 'transforms')]),
        (invt, qi1, [('output_image', 'nasion_post_mask')]),
        (qi1, outputnode, [('out_hat_msk', 'hat_mask'),
                           ('out_air_msk', 'air_mask'),
                           ('out_art_msk', 'art_mask')]),
        (rotmsk, outputnode, [('out_file', 'rot_mask')]),
    ])
    return workflow
def functional_registration(template,
        mask="/usr/share/mouse-brain-atlases/dsurqec_200micron_mask.nii",
        num_threads=4,
        phase_dictionary=GENERIC_PHASES,
        f_phases=["s_rigid", "affine", "syn"],
        ):
    """Set up ANTs functional registration and warp nodes.

    Registration phases are looked up in ``phase_dictionary`` by the names
    in ``f_phases``; each phase entry supplies the per-stage parameters.

    Returns
    -------
    f_registration, warp : nipype.pipeline.engine.Node
    """
    template = path.abspath(path.expanduser(template))
    f_parameters = [phase_dictionary[selection] for selection in f_phases]

    f_registration = pe.Node(ants.Registration(), name="f_register")
    f_registration.inputs.fixed_image = template
    f_registration.inputs.output_transform_prefix = "output_"
    f_registration.inputs.transforms = [i["transforms"] for i in f_parameters]
    f_registration.inputs.transform_parameters = [i["transform_parameters"] for i in f_parameters]
    f_registration.inputs.number_of_iterations = [i["number_of_iterations"] for i in f_parameters]
    f_registration.inputs.dimension = 3
    f_registration.inputs.write_composite_transform = True
    f_registration.inputs.collapse_output_transforms = True
    f_registration.inputs.initial_moving_transform_com = True
    f_registration.inputs.metric = [i["metric"] for i in f_parameters]
    f_registration.inputs.metric_weight = [i["metric_weight"] for i in f_parameters]
    f_registration.inputs.radius_or_number_of_bins = [i["radius_or_number_of_bins"] for i in f_parameters]
    f_registration.inputs.sampling_strategy = [i["sampling_strategy"] for i in f_parameters]
    f_registration.inputs.sampling_percentage = [i["sampling_percentage"] for i in f_parameters]
    f_registration.inputs.convergence_threshold = [i["convergence_threshold"] for i in f_parameters]
    f_registration.inputs.convergence_window_size = [i["convergence_window_size"] for i in f_parameters]
    f_registration.inputs.smoothing_sigmas = [i["smoothing_sigmas"] for i in f_parameters]
    f_registration.inputs.sigma_units = [i["sigma_units"] for i in f_parameters]
    f_registration.inputs.shrink_factors = [i["shrink_factors"] for i in f_parameters]
    f_registration.inputs.use_estimate_learning_rate_once = [i["use_estimate_learning_rate_once"] for i in f_parameters]
    f_registration.inputs.use_histogram_matching = [i["use_histogram_matching"] for i in f_parameters]
    f_registration.inputs.winsorize_lower_quantile = 0.05
    f_registration.inputs.winsorize_upper_quantile = 0.95
    f_registration.inputs.args = '--float'
    if mask:
        f_registration.inputs.fixed_image_masks = [path.abspath(path.expanduser(mask))]
    f_registration.inputs.num_threads = num_threads

    warp = pe.Node(ants.ApplyTransforms(), name="f_warp")
    warp.inputs.reference_image = template
    warp.inputs.input_image_type = 3
    warp.inputs.interpolation = 'NearestNeighbor'
    warp.inputs.invert_transform_flags = [False]
    warp.inputs.terminal_output = 'file'
    # Fix: previously hard-coded to 4, ignoring the ``num_threads`` argument.
    warp.num_threads = num_threads

    return f_registration, warp
def init_fmap_unwarp_report_wf(reportlets_dir, name='fmap_unwarp_report_wf'):
    """Build a reportlet workflow comparing pre-/post-SDC EPI images.

    The segmentation is mapped into the post-correction EPI space, its white
    matter class extracted, and a before/after SVG reportlet is written via
    a DerivativesDataSink.
    """
    from nipype.interfaces import ants
    from nipype.interfaces import utility as niu
    from niworkflows.interfaces import SimpleBeforeAfter

    def _getwm(in_seg, wm_label=3):
        # Binarize the white-matter label of a segmentation into wm.nii.gz
        import os.path as op
        import nibabel as nb
        import numpy as np

        nii = nb.load(in_seg)
        data = np.zeros(nii.shape, dtype=np.uint8)
        data[nii.get_data() == wm_label] = 1
        hdr = nii.header.copy()
        hdr.set_data_dtype(np.uint8)
        nb.Nifti1Image(data, nii.affine, hdr).to_filename('wm.nii.gz')
        return op.abspath('wm.nii.gz')

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_pre', 'in_post', 'in_seg', 'in_xfm', 'name_source']),
        name='inputnode')

    map_seg = pe.Node(ants.ApplyTransforms(
        dimension=3, float=True, interpolation='NearestNeighbor'),
        name='map_seg')
    sel_wm = pe.Node(niu.Function(function=_getwm), name='sel_wm')
    epi_rpt = pe.Node(SimpleBeforeAfter(), name='epi_rpt')
    epi_rpt_ds = pe.Node(
        DerivativesDataSink(base_directory=reportlets_dir,
                            suffix='variant-hmcsdc_preproc'),
        name='epi_rpt_ds')

    workflow.connect([
        (inputnode, epi_rpt, [('in_post', 'after'),
                              ('in_pre', 'before')]),
        (inputnode, epi_rpt_ds, [('name_source', 'source_file')]),
        (epi_rpt, epi_rpt_ds, [('out_report', 'in_file')]),
        (inputnode, map_seg, [('in_post', 'reference_image'),
                              ('in_seg', 'input_image'),
                              ('in_xfm', 'transforms')]),
        (map_seg, sel_wm, [('output_image', 'in_seg')]),
        (sel_wm, epi_rpt, [('out', 'wm_seg')]),
    ])

    return workflow
def airmsk_wf(name='AirMaskWorkflow'):
    """
    Implements the Step 1 of [Mortamet2009]_.

    .. workflow::

        from mriqc.workflows.anatomical import airmsk_wf
        wf = airmsk_wf()

    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'in_file', 'in_mask', 'head_mask', 'inverse_composite_transform']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['hat_mask', 'air_mask', 'art_mask', 'rot_mask']),
        name='outputnode')

    rotmsk = pe.Node(RotationMask(), name='RotationMask')

    # Map the template head mask into subject space; the linearly
    # interpolated result is binarized afterwards.
    invt = pe.Node(ants.ApplyTransforms(dimension=3, default_value=0,
                                        interpolation='Linear', float=True),
                   name='invert_xfm')
    invt.inputs.input_image = str(
        Path(get_mni_icbm152_nlin_asym_09c()) / '1mm_headmask.nii.gz')

    binarize = pe.Node(niu.Function(function=_binarize), name='Binarize')
    qi1 = pe.Node(ArtifactMask(), name='ArtifactMask')

    workflow.connect([
        (inputnode, rotmsk, [('in_file', 'in_file')]),
        (inputnode, qi1, [('in_file', 'in_file'),
                          ('head_mask', 'head_mask')]),
        (rotmsk, qi1, [('out_file', 'rot_mask')]),
        (inputnode, invt, [('in_mask', 'reference_image'),
                           ('inverse_composite_transform', 'transforms')]),
        (invt, binarize, [('output_image', 'in_file')]),
        (binarize, qi1, [('out', 'nasion_post_mask')]),
        (qi1, outputnode, [('out_hat_msk', 'hat_mask'),
                           ('out_air_msk', 'air_mask'),
                           ('out_art_msk', 'art_mask')]),
        (rotmsk, outputnode, [('out_file', 'rot_mask')]),
    ])
    return workflow
def ants_apply_transform(ffMOV, ffREF, ffOUT, transformations,
                         interpolation='Linear', run=False):
    """
    Apply transformations to a volume using given transformations.

    Parameters
    ----------
    ffMOV: str
    ffREF: str
    ffOUT: str
    transformations: list of str
        full file paths to all transformation that should be applied
        Note: transformations are applied from right to left (like matrix
        multiplication)
    interpolation: str
        'Linear' or 'NearestNeighbor' or 'CosineWindowedSinc' or
        'WelchWindowedSinc' or 'HammingWindowedSinc' or
        'LanczosWindowedSinc' or 'MultiLabel' or 'Gaussian' or 'BSpline',
        nipype default value: Linear
    run: bool
        If True, files will be written to disk at specified target location.

    Returns
    -------
    at: ants output at ffOUT
    """
    at = ants.ApplyTransforms()  # FILTER / nipype module
    at.inputs.dimension = 3
    at.inputs.input_image = ffMOV
    at.inputs.reference_image = ffREF
    at.inputs.output_image = ffOUT
    # Fix: honor the ``interpolation`` argument — it was documented but
    # previously ignored ('MultiLabel' was always hard-coded). Callers that
    # relied on the implicit MultiLabel behavior must now pass it explicitly.
    at.inputs.interpolation = interpolation
    # at.inputs.interpolation_parameters = (5,)
    # at.inputs.default_value = 0
    at.inputs.num_threads = 4
    at.inputs.transforms = transformations
    # at.inputs.invert_transform_flags = [False]
    if run:
        at.run()
    return at
def anat_apply_transforms(moving_fn, ref_fn, transform_fn, trans_type='desc'):
    '''
    Applies transforms calculated by ANTS

    Input:
        moving_fn - Path to file to be transformed
        ref_fn - Path to reference image (eg. if transform is from
            subj > MNI, reference is MNI template)
        transform_fn - The transform output from ANTs (usually in h5 format)
        trans_type - String of either desc or label. Desc denotes an
            anatomical scan, label a labelled image (eg. lesion mask,
            tissue prob mask)
    Output:
        out_fn - Path to transformed image
    '''
    # Target space is encoded in the transform filename (…to-<space>_…)
    space = re.findall('to-((.*)_(.*_))', transform_fn)[0][1]
    out_path, trans_name = os.path.split(
        transform_fn)  # Places output in same directory as transform file
    in_fn = os.path.split(moving_fn)[-1]
    if trans_type == 'desc':
        pre_split, post_split = in_fn.split('desc-')
        out_fn = pre_split + 'space-' + space + '_desc-' + post_split
    elif trans_type == 'label':
        pre_split, post_split = in_fn.split('label-')
        out_fn = pre_split + 'space-' + space + '_label-' + post_split
    else:
        raise Exception(
            'Only desc or label identifiers are currently supported')
    out_name = os.path.join(out_path, out_fn)
    print(out_name)

    subj2mni = ants.ApplyTransforms()
    subj2mni.inputs.input_image = moving_fn
    subj2mni.inputs.reference_image = ref_fn
    subj2mni.inputs.transforms = transform_fn
    if trans_type == 'desc':
        subj2mni.inputs.interpolation = 'Linear'
    else:
        subj2mni.inputs.interpolation = 'NearestNeighbor'  # Don't want mask smoothed!
    subj2mni.inputs.output_image = out_name
    # Fix: the interface was configured but never executed, so the promised
    # output image was never actually written.
    subj2mni.run()
    return (out_name)
def varcopes1_2_anat_func(fixed, varcope1_10Hz_r1, varcope1_10Hz_r2,
                          varcope1_10Hz_r3, func_2_anat_trans_10Hz_r1,
                          func_2_anat_trans_10Hz_r2,
                          func_2_anat_trans_10Hz_r3):
    """Warp per-run varcope1 images into anatomical space and merge them.

    Each of the three 10 Hz runs is resampled into the space of ``fixed``
    with ANTs, then the results are concatenated along time with fslmerge.

    Returns
    -------
    varcopes1_2_anat : str
        Absolute path to the 4D merged varcope image (written to the
        current working directory).
    """
    # Fix: removed unused ``import re`` and unused ``cwd`` local.
    import os
    import nipype.interfaces.ants as ants
    import nipype.interfaces.fsl as fsl

    varcopes1 = [varcope1_10Hz_r1, varcope1_10Hz_r2, varcope1_10Hz_r3]
    trans = [func_2_anat_trans_10Hz_r1, func_2_anat_trans_10Hz_r2,
             func_2_anat_trans_10Hz_r3]
    varcopes1_2_anat = []
    for i, (moving, transform) in enumerate(zip(varcopes1, trans), start=1):
        out_name = 'varcope1_2_anat_10Hz_r{0}.nii.gz'.format(i)
        ants_apply = ants.ApplyTransforms()
        ants_apply.inputs.dimension = 3
        ants_apply.inputs.input_image = moving
        ants_apply.inputs.reference_image = fixed
        ants_apply.inputs.transforms = transform
        ants_apply.inputs.output_image = out_name
        ants_apply.run()
        varcopes1_2_anat.append(os.path.abspath(out_name))

    merge = fsl.Merge()
    merge.inputs.dimension = 't'
    merge.inputs.in_files = varcopes1_2_anat
    merge.inputs.merged_file = 'varcopes1_2_anat_10Hz.nii.gz'
    merge.run()

    varcopes1_2_anat = os.path.abspath('varcopes1_2_anat_10Hz.nii.gz')
    return varcopes1_2_anat
def functional_registration(template):
    """Set up ANTs registration-to-template and warp nodes for fMRI data.

    NOTE(review): ``transforms`` lists two stages ('Affine', 'SyN') but the
    metric/sampling/convergence settings below describe three stages
    (``[...] * 2 + [...]``) — this looks inconsistent and should be checked
    against nipype's Registration interface before use.
    """
    registration = pe.Node(ants.Registration(), name="register")
    registration.inputs.fixed_image = template
    registration.inputs.output_transform_prefix = "output_"
    registration.inputs.transforms = ['Affine', 'SyN']
    registration.inputs.transform_parameters = [(0.1, ), (3.0, 3.0, 5.0)]
    registration.inputs.number_of_iterations = [[10000, 10000, 10000],
                                                [100, 100, 100]]
    registration.inputs.dimension = 3
    registration.inputs.write_composite_transform = True
    registration.inputs.collapse_output_transforms = True
    registration.inputs.initial_moving_transform_com = True
    registration.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
    registration.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
    registration.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
    registration.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
    registration.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
    registration.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
    registration.inputs.convergence_window_size = [20] * 2 + [5]
    registration.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
    registration.inputs.sigma_units = ['vox'] * 3
    registration.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
    registration.inputs.use_estimate_learning_rate_once = [True] * 3
    registration.inputs.use_histogram_matching = [False] * 2 + [True]
    registration.inputs.winsorize_lower_quantile = 0.005
    registration.inputs.winsorize_upper_quantile = 0.995
    registration.inputs.args = '--float'
    registration.inputs.output_warped_image = 'output_warped_image.nii.gz'
    registration.inputs.num_threads = 4
    registration.plugin_args = {
        'qsub_args': '-pe orte 4',
        'sbatch_args': '--mem=6G -c 4',
    }

    warp = pe.Node(ants.ApplyTransforms(), name="f_warp")
    warp.inputs.reference_image = template
    warp.inputs.input_image_type = 3
    warp.inputs.interpolation = 'Linear'
    warp.inputs.invert_transform_flags = [False]
    warp.inputs.terminal_output = 'file'
    warp.num_threads = 4

    return registration, warp
def mask_transform(mask_list, ref, transmat, output_dir):
    '''
    Transforms masks from MNI to subj space
    masks = list of eye + temporal mask images
    transmat = mapping from MNI > subj space
    '''
    subj = os.path.split(output_dir)[-1]
    dir_files = os.listdir(output_dir)
    if fnmatch.filter(dir_files, '*mask_subj*'):
        # Results from a previous invocation are on disk — reuse them.
        print('Mask transforms already run. Not re-running for subj {}'.format(
            subj))
        subj_trans_masks = glob.glob(
            os.path.join(output_dir, '*mask_subj.nii.gz'))
        subj_trans_masks.sort(key=len)
        print(subj, 'subj masks', subj_trans_masks)
    else:
        subj_trans_masks = []
        for mask in mask_list:
            image_file = os.path.split(mask)[1]
            image_name = image_file.split('.')[0]
            print(image_name)
            mni2subj = ants.ApplyTransforms()
            mni2subj.inputs.input_image = mask
            mni2subj.inputs.reference_image = ref
            mni2subj.inputs.transforms = transmat
            # NearestNeighbor keeps the mask binary
            mni2subj.inputs.interpolation = 'NearestNeighbor'
            mni2subj.inputs.output_image = os.path.join(
                output_dir, image_name + '_subj.nii.gz')
            mni2subj_results = mni2subj.run()
            output_image = mni2subj_results.outputs.get()['output_image']
            subj_trans_masks.append(output_image)
    return (subj_trans_masks)
def create_warp_transform(name='warpmultitransform'):
    """Workflow chaining four transforms to warp images into target space.

    Inputs (inputnode, iterated over input_image/ref): input_image,
    atlas_aff2template, atlas_warp2template, atlas2target_composite,
    template2target_inverse, ref.
    Outputs (outputnode): ants_reg.
    """
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # Fix: honor the ``name`` argument — the workflow name was previously
    # hard-coded to 'warp'.
    warp = Workflow(name=name)

    # inputnode
    inputnode = MapNode(util.IdentityInterface(fields=[
        'input_image', 'atlas_aff2template', 'atlas_warp2template',
        'atlas2target_composite', 'template2target_inverse', 'ref']),
        name='inputnode', iterfield=['input_image', 'ref'])
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ants_reg', ]),
                      name='outputnode')

    collect_transforms = Node(interface=util.Merge(4),
                              name='collect_transforms')
    ants_reg = MapNode(ants.ApplyTransforms(input_image_type=3,
                                            dimension=3,
                                            interpolation='Linear'),
                       name='apply_ants_reg',
                       iterfield=['input_image', 'reference_image'])
    ants_reg.inputs.invert_transform_flags = [False, False, False, False]

    warp.connect([
        (inputnode, ants_reg, [('input_image', 'input_image')]),
        (inputnode, ants_reg, [('ref', 'reference_image')]),
        # Order matters: ANTs applies the transform list right-to-left,
        # hence the reversed in4..in1 assignment.
        (inputnode, collect_transforms, [('atlas_aff2template', 'in4')]),
        (inputnode, collect_transforms, [('atlas_warp2template', 'in3')]),
        (inputnode, collect_transforms, [('atlas2target_composite', 'in2')]),
        (inputnode, collect_transforms, [('template2target_inverse', 'in1')]),
        (collect_transforms, ants_reg, [
            ('out', 'transforms')
        ]),  # for WarpImageMultiTransform:transformation_series
        (ants_reg, outputnode, [('output_image', 'ants_reg')]),
    ])
    return warp
def create_ants_registration_pipeline(name='ants_registration'):
    """Workflow warping a denoised time series via affine + warp field,
    followed by a TR-fixing step.

    Inputs (inputnode): denoised_ts, ants_affine, ants_warp, ref, tr_sec.
    Outputs (outputnode): ants_reg_ts.
    """
    # set fsl output type
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
    # initiate workflow
    ants_registration = Workflow(name=name)

    # inputnode
    inputnode = Node(util.IdentityInterface(
        fields=['denoised_ts', 'ants_affine', 'ants_warp', 'ref', 'tr_sec']),
        name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=['ants_reg_ts', ]),
                      name='outputnode')

    # also transform to mni space
    collect_transforms = Node(interface=util.Merge(2),
                              name='collect_transforms')
    ants_reg = Node(ants.ApplyTransforms(input_image_type=3,
                                         dimension=3,
                                         interpolation='Linear'),
                    name='ants_reg')

    # ants does something strange with headers. TR seems to be preserved
    # (nibabel, fslinfo) but mri_info does not display correct TR
    fix_tr = Node(util.Function(input_names=['in_file', 'TR_sec'],
                                output_names=['out_file'],
                                function=fix_TR_fs),
                  name='fix_tr')
    ants_registration.connect(inputnode, 'tr_sec', fix_tr, 'TR_sec')
    ants_registration.connect(ants_reg, 'output_image', fix_tr, 'in_file')

    ants_registration.connect([
        (inputnode, ants_reg, [('denoised_ts', 'input_image')]),
        (inputnode, ants_reg, [('ref', 'reference_image')]),
        (inputnode, collect_transforms, [('ants_affine', 'in1')]),
        (inputnode, collect_transforms, [('ants_warp', 'in2')]),
        (collect_transforms, ants_reg, [('out', 'transforms')]),
        #(ants_reg, outputnode, [('output_image', 'ants_reg_ts')])
        (fix_tr, outputnode, [('out_file', 'ants_reg_ts')]),
    ])
    return ants_registration
def init_transform_to_first_image_wf(name='transform_images', n_images=2):
    """Resample a stack of images onto the grid of its first image.

    The first of ``inputnode.in_files`` is kept untouched and used as the
    reference; the remaining ``n_images - 1`` files are resampled onto its
    grid with the supplied transforms (Lanczos-windowed-sinc interpolation).
    Reference plus resampled images are concatenated along time and a
    temporal mean is computed.

    Parameters
    ----------
    name : str
        Workflow name.
    n_images : int
        Total number of input files (1 reference + n_images - 1 moving).

    Returns
    -------
    Workflow
        Exposes ``outputnode.mean_image`` and
        ``outputnode.transformed_images``.
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_files', 'transforms']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['mean_image', 'transformed_images']),
        name='outputnode')

    # separate the reference (first file) from the images to resample
    split = pe.Node(niu.Split(splits=[1, n_images - 1]), name='split')

    apply_sinc = pe.MapNode(
        ants.ApplyTransforms(interpolation='LanczosWindowedSinc'),
        iterfield=['input_image'],
        name='apply_sinc')

    # reference first, then the transformed images, in one flat list
    merge_lists = pe.Node(niu.Merge(2), name='merge_lists')
    merge_niftis = pe.Node(fsl.Merge(dimension='t'), name='merge_niftis')
    mean_image = pe.Node(fsl.MeanImage(dimension='T'), name='mean_image')

    workflow.connect([
        (inputnode, split, [('in_files', 'inlist')]),
        (inputnode, apply_sinc, [('transforms', 'transforms')]),
        (split, apply_sinc, [(('out1', _pickone), 'reference_image'),
                             ('out2', 'input_image')]),
        (split, merge_lists, [('out1', 'in1')]),
        (apply_sinc, merge_lists, [('output_image', 'in2')]),
        (merge_lists, merge_niftis, [('out', 'in_files')]),
        (merge_lists, outputnode, [('out', 'transformed_images')]),
        (merge_niftis, mean_image, [('merged_file', 'in_file')]),
        (mean_image, outputnode, [('out_file', 'mean_image')]),
    ])
    return workflow
def composite_registration(template, num_threads=4):
    """Build a rigid registration node and the node applying its transforms.

    Parameters
    ----------
    template : str
        Path (``~`` allowed) to the reference image for the ApplyTransforms
        node.
    num_threads : int
        Thread count for both ANTs processes.

    Returns
    -------
    tuple
        ``(f_registration, f_warp)`` nipype Nodes.
    """
    reference = path.abspath(path.expanduser(template))

    # rigid-only registration; all traits set at construction time.
    # NOTE: sigma_units is deliberately left at its default (it was
    # commented out in the original configuration).
    f_registration = pe.Node(
        ants.Registration(
            output_transform_prefix="output_",
            transforms=['Rigid'],
            transform_parameters=[(0.1, )],
            number_of_iterations=[[40, 20, 10]],
            dimension=3,
            write_composite_transform=True,
            collapse_output_transforms=True,
            initial_moving_transform_com=True,
            metric=['MeanSquares'],
            metric_weight=[1],
            radius_or_number_of_bins=[16],
            sampling_strategy=["Regular"],
            sampling_percentage=[0.3],
            convergence_threshold=[1.e-2],
            convergence_window_size=[8],
            smoothing_sigmas=[[4, 2, 1]],
            shrink_factors=[[3, 2, 1]],
            use_estimate_learning_rate_once=[True],
            use_histogram_matching=[False],
            winsorize_lower_quantile=0.005,
            winsorize_upper_quantile=0.995,
            args='--float',
            num_threads=num_threads,
        ),
        name="f_register")

    # apply the (affine + warp) pair to the functional data
    f_warp = pe.Node(
        ants.ApplyTransforms(
            reference_image=reference,
            input_image_type=3,
            interpolation='Linear',
            invert_transform_flags=[False, False],
            terminal_output='file',
        ),
        name="f_warp")
    f_warp.num_threads = num_threads

    return f_registration, f_warp
def subj2mni(moving=None, ref=None, transmat=None, output_dir=None):
    """Warp a subject-space image into MNI space with ANTs ApplyTransforms.

    Parameters
    ----------
    moving : str
        Path to the image to warp.
    ref : str
        Path to the MNI-space reference image defining the output grid.
    transmat : str or list
        Transform(s) passed straight through to ApplyTransforms
        ``transforms``.
    output_dir : str or None
        Directory for ``myelin_map_mni.nii.gz``; when None the file is
        written to the current working directory.

    Returns
    -------
    tuple
        ``(outputs_dict, warped_image_path)`` from the interface run.
    """
    # BUG FIX: the original called os.path.split(output_dir) before checking
    # for None, so the documented output_dir=None default raised TypeError.
    if output_dir is not None:
        subj = os.path.split(output_dir)[-1]
        out_image = os.path.join(output_dir, 'myelin_map_mni.nii.gz')
    else:
        subj = 'subject'
        out_image = 'myelin_map_mni.nii.gz'

    print('Warping {} to MNI space'.format(subj))

    xfm = ants.ApplyTransforms()
    xfm.inputs.dimension = 3
    xfm.inputs.input_image = moving
    xfm.inputs.reference_image = ref
    xfm.inputs.transforms = transmat
    xfm.inputs.output_image = out_image
    # NearestNeighbor keeps values unblended across voxels
    xfm.inputs.interpolation = 'NearestNeighbor'

    results = xfm.run()
    subj2mni_output = results.outputs.get()
    subj2mni_im = subj2mni_output['output_image']
    return (subj2mni_output, subj2mni_im)
# composite transform dictWarps = {'transforms': [], 'outpath': []} for strWarpDir in lsWarpDirs: strAffinePath = glob.glob(os.path.join(strWarpDir, 'out_matrix', '*.mat'))[0] # Remove rigid body components (translation and rotation) which don't # contribute meaningful variation strAffinePath = remove_rigidbody(strAffinePath) # We use the inverse warp field, which contains the nonlinear transformation from MNI->subject strNonlinearPath = glob.glob( os.path.join(strWarpDir, 'inverse_warp_field', '*.nii.gz'))[0] dictWarps['transforms'].append([strNonlinearPath, strAffinePath]) dictWarps['outpath'].append( os.path.join(strWarpDir, 'composite_to_mni.nii.gz')) # Use ANTs ApplyTransforms to compose the transforms antstool = MapNode(ants.ApplyTransforms(input_image=TEMPLATE, reference_image=TEMPLATE, interpolation='BSpline', invert_transform_flags=[False, True], print_out_composite_warp_file=True), name='applytransforms', iterfield=['output_image', 'transforms']) antstool.inputs.output_image = dictWarps['outpath'] antstool.inputs.transforms = dictWarps['transforms'] # Create and run nipype workflow wf = Workflow('composite_transforms') wf.add_nodes([antstool]) wf.run(plugin='MultiProc', plugin_args={'n_procs': PIPELINE_JOBS})
def blip_distcor_wf(wf_name='blip_distcor'):
    """Execute AFNI 3dQWarp to calculate the distortion "unwarp" for phase
    encoding direction EPI field map distortion correction.

    1. Skull-strip the opposite-direction phase encoding EPI.
    2. Transform the opposite-direction phase encoding EPI to the
       skull-stripped functional and pass this as the base_file to
       AFNI 3dQWarp (plus-minus).
    3. If there is a same-direction phase encoding EPI, also skull-strip
       this, and transform it to the skull-stripped functional. Then, pass
       this as the in_file to AFNI 3dQWarp (plus-minus).
    4. If there isn't a same-direction, pass the functional in as the
       in_file of AFNI 3dQWarp (plus-minus).
    5. Convert the 3dQWarp transforms to ANTs/ITK format.
    6. Use antsApplyTransforms, with the original functional as both the
       input and the reference, and apply the warp from 3dQWarp.

    The output of this can then proceed to func_preproc.

    :param wf_name: name for the returned nipype workflow
    :return: the assembled nipype Workflow
    """
    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(
        fields=['func_mean', 'opposite_pe_epi', 'same_pe_epi']),
        name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=[
        'blip_warp',
        'blip_warp_inverse',
        'new_func_mean',
        'new_func_mask'
    ]),
        name='outputspec')

    # step 1: skull-strip the opposite-PE EPI (file-local helper workflow)
    skullstrip_opposite_pe = skullstrip_functional(
        skullstrip_tool='afni',
        wf_name="{0}_skullstrip_opp_pe".format(wf_name))

    wf.connect(input_node, 'opposite_pe_epi',
               skullstrip_opposite_pe, 'inputspec.func')

    # step 2: rigid-align the stripped opposite-PE EPI to the functional mean
    opp_pe_to_func = pe.Node(interface=fsl.FLIRT(), name='opp_pe_to_func')
    opp_pe_to_func.inputs.cost = 'corratio'

    wf.connect(skullstrip_opposite_pe, 'outputspec.func_brain',
               opp_pe_to_func, 'in_file')
    wf.connect(input_node, 'func_mean', opp_pe_to_func, 'reference')

    # steps 3/4: same_pe_direction_prep decides whether the same-PE EPI or
    # the functional mean itself becomes the QWarp in_file
    prep_qwarp_input_imports = ['import os', 'import subprocess']
    prep_qwarp_input = \
        pe.Node(function.Function(input_names=['same_pe_epi',
                                               'func_mean'],
                                  output_names=['qwarp_input'],
                                  function=same_pe_direction_prep,
                                  imports=prep_qwarp_input_imports),
                name='prep_qwarp_input')

    wf.connect(input_node, 'same_pe_epi', prep_qwarp_input, 'same_pe_epi')
    wf.connect(input_node, 'func_mean', prep_qwarp_input, 'func_mean')

    # estimate the unwarping field with AFNI 3dQWarp in plus-minus mode
    calc_blip_warp = pe.Node(afni.QwarpPlusMinus(), name='calc_blip_warp')
    calc_blip_warp.inputs.plusminus = True
    calc_blip_warp.inputs.outputtype = "NIFTI_GZ"
    calc_blip_warp.inputs.out_file = os.path.abspath("Qwarp.nii.gz")

    wf.connect(opp_pe_to_func, 'out_file', calc_blip_warp, 'base_file')
    wf.connect(prep_qwarp_input, 'qwarp_input', calc_blip_warp, 'in_file')

    # step 5: convert the AFNI warp to an ANTs/ITK-compatible field
    convert_afni_warp_imports = ['import os', 'import nibabel as nb']
    convert_afni_warp = \
        pe.Node(function.Function(input_names=['afni_warp'],
                                  output_names=['ants_warp'],
                                  function=convert_afni_to_ants,
                                  imports=convert_afni_warp_imports),
                name='convert_afni_warp')

    wf.connect(calc_blip_warp, 'source_warp',
               convert_afni_warp, 'afni_warp')

    # TODO: inverse source_warp (node:source_warp_inverse)
    #       blip_warp_inverse on the outputnode is currently never populated
    # wf.connect(###
    # output_node, 'blip_warp_inverse')

    # step 6: apply the warp to the functional mean, which serves as both
    # input and reference so only the distortion is removed
    undistort_func_mean = pe.Node(interface=ants.ApplyTransforms(),
                                  name='undistort_func_mean',
                                  mem_gb=.1)

    undistort_func_mean.inputs.out_postfix = '_antswarp'
    undistort_func_mean.interface.num_threads = 1
    undistort_func_mean.inputs.interpolation = "LanczosWindowedSinc"
    undistort_func_mean.inputs.dimension = 3
    undistort_func_mean.inputs.input_image_type = 0

    wf.connect(input_node, 'func_mean',
               undistort_func_mean, 'input_image')
    wf.connect(input_node, 'func_mean',
               undistort_func_mean, 'reference_image')
    wf.connect(convert_afni_warp, 'ants_warp',
               undistort_func_mean, 'transforms')

    # re-derive the brain mask from the undistorted mean
    create_new_mask = skullstrip_functional(
        skullstrip_tool='afni',
        wf_name="{0}_new_func_mask".format(wf_name))

    wf.connect(undistort_func_mean, 'output_image',
               create_new_mask, 'inputspec.func')

    wf.connect(convert_afni_warp, 'ants_warp',
               output_node, 'blip_warp')
    wf.connect(undistort_func_mean, 'output_image',
               output_node, 'new_func_mean')
    wf.connect(create_new_mask, 'outputspec.func_brain_mask',
               output_node, 'new_func_mask')

    return wf