def test_dartelnorm2mni():
    """Generative test for the DARTELNorm2MNI interface.

    Yields one check for the SPM job type, one for the job name, then one
    check per (input trait, metadata key) pair against the expected values.
    """
    yield assert_equal, spm.DARTELNorm2MNI._jobtype, 'tools'
    yield assert_equal, spm.DARTELNorm2MNI._jobname, 'dartel'
    # Expected trait metadata, keyed by input-trait name.
    expected_traits = dict(
        apply_to_files=dict(
            copyfile=False,
            mandatory=True,
            field='mni_norm.data.subjs.images',
        ),
        bounding_box=dict(field='mni_norm.bb', ),
        flowfield_files=dict(
            field='mni_norm.data.subjs.flowfields',
            mandatory=True,
        ),
        fwhm=dict(field='mni_norm.fwhm', ),
        ignore_exception=dict(usedefault=True, ),
        matlab_cmd=dict(),
        mfile=dict(usedefault=True, ),
        modulate=dict(field='mni_norm.preserve', ),
        paths=dict(),
        template_file=dict(
            copyfile=False,
            mandatory=True,
            field='mni_norm.template',
        ),
        use_mcr=dict(),
        voxel_size=dict(field='mni_norm.vox', ),
    )
    interface = spm.DARTELNorm2MNI()
    all_traits = interface.inputs.traits()
    for trait_name, metadata in expected_traits.items():
        trait = all_traits[trait_name]
        for meta_name, expected_value in metadata.items():
            yield assert_equal, getattr(trait, meta_name), expected_value
def build_core_nodes(self):
    """Build and connect an output node to the pipeline.

    Wires the PET-volume processing graph:
      - unzip all inputs (PET, native T1, DARTEL flow fields/template,
        reference-region mask, tissue masks);
      - coregister PET onto the subject's T1;
      - normalize the coregistered PET to MNI with DARTELNorm2MNI;
      - reslice the reference-region mask and intensity-normalize the PET
        to it (SUVR);
      - build a binary brain mask from the tissue maps and apply it;
      - optionally smooth the masked SUVR image and compute atlas
        statistics;
      - if ``self.parameters["apply_pvc"]`` is set, run a parallel PVC
        (partial-volume correction) branch mirroring the same steps on the
        RBV-corrected PET.
    """
    import nipype.interfaces.spm as spm
    import nipype.interfaces.spm.utils as spmutils
    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    from nipype.interfaces.petpvc import PETPVC

    from clinica.utils.filemanip import unzip_nii
    from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

    from .pet_volume_utils import (
        apply_binary_mask,
        atlas_statistics,
        create_binary_mask,
        create_pvc_mask,
        get_from_list,
        init_input_node,
        normalize_to_reference,
        pet_pvc_name,
    )

    # Prefer the standalone (compiled) SPM when it is available.
    if spm_standalone_is_available():
        use_spm_standalone()

    # Initialize pipeline
    # ===================
    init_node = npe.Node(
        interface=nutil.Function(
            input_names=["pet_nii"],
            output_names=["pet_nii"],
            function=init_input_node,
        ),
        name="init_pipeline",
    )

    # Unzipping
    # =========
    # SPM cannot read .nii.gz, so every compressed input is unzipped first.
    unzip_pet_image = npe.Node(
        nutil.Function(input_names=["in_file"],
                       output_names=["out_file"],
                       function=unzip_nii),
        name="unzip_pet_image",
    )
    unzip_t1_image_native = npe.Node(
        nutil.Function(input_names=["in_file"],
                       output_names=["out_file"],
                       function=unzip_nii),
        name="unzip_t1_image_native",
    )
    unzip_flow_fields = npe.Node(
        nutil.Function(input_names=["in_file"],
                       output_names=["out_file"],
                       function=unzip_nii),
        name="unzip_flow_fields",
    )
    unzip_dartel_template = npe.Node(
        nutil.Function(input_names=["in_file"],
                       output_names=["out_file"],
                       function=unzip_nii),
        name="unzip_dartel_template",
    )
    unzip_reference_mask = npe.Node(
        nutil.Function(input_names=["in_file"],
                       output_names=["out_file"],
                       function=unzip_nii),
        name="unzip_reference_mask",
    )
    unzip_mask_tissues = npe.MapNode(
        nutil.Function(input_names=["in_file"],
                       output_names=["out_file"],
                       function=unzip_nii),
        name="unzip_mask_tissues",
        iterfield=["in_file"],
    )

    # Coregister PET into T1 native space
    # ===================================
    coreg_pet_t1 = npe.Node(spm.Coregister(), name="coreg_pet_t1")

    # Spatially normalize PET into MNI
    # ================================
    dartel_mni_reg = npe.Node(spm.DARTELNorm2MNI(), name="dartel_mni_reg")
    dartel_mni_reg.inputs.modulate = False
    # fwhm=0: no smoothing at normalization time; smoothing (if any) is done
    # by the dedicated smoothing node below.
    dartel_mni_reg.inputs.fwhm = 0

    # Reslice reference region mask into PET
    # ======================================
    reslice = npe.Node(spmutils.Reslice(), name="reslice")

    # Normalize PET values according to reference region
    # ==================================================
    norm_to_ref = npe.Node(
        nutil.Function(
            input_names=["pet_image", "region_mask"],
            output_names=["suvr_pet_path"],
            function=normalize_to_reference,
        ),
        name="norm_to_ref",
    )

    # Create binary mask from segmented tissues
    # =========================================
    binary_mask = npe.Node(
        nutil.Function(
            input_names=["tissues", "threshold"],
            output_names=["out_mask"],
            function=create_binary_mask,
        ),
        name="binary_mask",
    )
    binary_mask.inputs.threshold = self.parameters["mask_threshold"]

    # Mask PET image
    # ==============
    apply_mask = npe.Node(
        nutil.Function(
            input_names=["image", "binary_mask"],
            output_names=["masked_image_path"],
            function=apply_binary_mask,
        ),
        name="apply_mask",
    )

    # Smoothing
    # =========
    # One Smooth run per requested FWHM value; output prefix encodes the FWHM.
    if self.parameters["smooth"] is not None and len(
            self.parameters["smooth"]) > 0:
        smoothing_node = npe.MapNode(spm.Smooth(),
                                     name="smoothing_node",
                                     iterfield=["fwhm", "out_prefix"])
        smoothing_node.inputs.fwhm = [[x, x, x]
                                      for x in self.parameters["smooth"]]
        smoothing_node.inputs.out_prefix = [
            "fwhm-" + str(x) + "mm_" for x in self.parameters["smooth"]
        ]
        # fmt: off
        self.connect([
            (apply_mask, smoothing_node, [("masked_image_path", "in_files")]),
            (smoothing_node, self.output_node, [("smoothed_files", "pet_suvr_masked_smoothed")]),
        ])
        # fmt: on
    else:
        self.output_node.inputs.pet_suvr_masked_smoothed = [[]]

    # Atlas Statistics
    # ================
    atlas_stats_node = npe.MapNode(
        nutil.Function(
            input_names=["in_image", "in_atlas_list"],
            output_names=["atlas_statistics"],
            function=atlas_statistics,
        ),
        name="atlas_stats_node",
        iterfield=["in_image"],
    )
    atlas_stats_node.inputs.in_atlas_list = self.parameters["atlases"]

    # Connection
    # ==========
    # fmt: off
    self.connect([
        (self.input_node, init_node, [("pet_image", "pet_nii")]),
        (init_node, unzip_pet_image, [("pet_nii", "in_file")]),
        (self.input_node, unzip_t1_image_native, [("t1_image_native", "in_file")]),
        (self.input_node, unzip_flow_fields, [("flow_fields", "in_file")]),
        (self.input_node, unzip_dartel_template, [("dartel_template", "in_file")]),
        (self.input_node, unzip_reference_mask, [("reference_mask", "in_file")]),
        (self.input_node, unzip_mask_tissues, [("mask_tissues", "in_file")]),
        (unzip_pet_image, coreg_pet_t1, [("out_file", "source")]),
        (unzip_t1_image_native, coreg_pet_t1, [("out_file", "target")]),
        (unzip_flow_fields, dartel_mni_reg, [("out_file", "flowfield_files")]),
        (unzip_dartel_template, dartel_mni_reg, [("out_file", "template_file")]),
        (unzip_reference_mask, reslice, [("out_file", "in_file")]),
        (unzip_mask_tissues, binary_mask, [("out_file", "tissues")]),
        (coreg_pet_t1, dartel_mni_reg, [("coregistered_source", "apply_to_files")]),
        (dartel_mni_reg, reslice, [("normalized_files", "space_defining")]),
        (dartel_mni_reg, norm_to_ref, [("normalized_files", "pet_image")]),
        (reslice, norm_to_ref, [("out_file", "region_mask")]),
        (norm_to_ref, apply_mask, [("suvr_pet_path", "image")]),
        (binary_mask, apply_mask, [("out_mask", "binary_mask")]),
        (norm_to_ref, atlas_stats_node, [("suvr_pet_path", "in_image")]),
        (coreg_pet_t1, self.output_node, [("coregistered_source", "pet_t1_native")]),
        (dartel_mni_reg, self.output_node, [("normalized_files", "pet_mni")]),
        (norm_to_ref, self.output_node, [("suvr_pet_path", "pet_suvr")]),
        (binary_mask, self.output_node, [("out_mask", "binary_mask")]),
        (apply_mask, self.output_node, [("masked_image_path", "pet_suvr_masked")]),
        (atlas_stats_node, self.output_node, [("atlas_statistics", "atlas_statistics")]),
    ])
    # fmt: on

    # PVC
    # ==========
    # Optional partial-volume-correction branch: same normalize/SUVR/mask/
    # smooth/statistics chain, applied to the RBV-corrected PET.
    if self.parameters["apply_pvc"]:
        # Unzipping
        # =========
        unzip_pvc_mask_tissues = npe.MapNode(
            nutil.Function(
                input_names=["in_file"],
                output_names=["out_file"],
                function=unzip_nii,
            ),
            name="unzip_pvc_mask_tissues",
            iterfield=["in_file"],
        )

        # Creating Mask to use in PVC
        # ===========================
        pvc_mask = npe.Node(
            nutil.Function(
                input_names=["tissues"],
                output_names=["out_mask"],
                function=create_pvc_mask,
            ),
            name="pvc_mask",
        )

        # PET PVC
        # =======
        petpvc = npe.Node(PETPVC(), name="pvc")
        petpvc.inputs.pvc = "RBV"
        petpvc.inputs.out_file = "pvc.nii"

        # Spatially normalize PET into MNI
        # ================================
        dartel_mni_reg_pvc = npe.Node(spm.DARTELNorm2MNI(),
                                      name="dartel_mni_reg_pvc")
        dartel_mni_reg_pvc.inputs.modulate = False
        dartel_mni_reg_pvc.inputs.fwhm = 0

        # Reslice reference region mask into PET
        # ======================================
        reslice_pvc = npe.Node(spmutils.Reslice(), name="reslice_pvc")

        # Normalize PET values according to reference region
        # ==================================================
        norm_to_ref_pvc = npe.Node(
            nutil.Function(
                input_names=["pet_image", "region_mask"],
                output_names=["suvr_pet_path"],
                function=normalize_to_reference,
            ),
            name="norm_to_ref_pvc",
        )

        # Mask PET image
        # ==============
        apply_mask_pvc = npe.Node(
            nutil.Function(
                input_names=["image", "binary_mask"],
                output_names=["masked_image_path"],
                function=apply_binary_mask,
            ),
            name="apply_mask_pvc",
        )

        # Smoothing
        # =========
        if (self.parameters["smooth"] is not None
                and len(self.parameters["smooth"]) > 0):
            smoothing_pvc = npe.MapNode(spm.Smooth(),
                                        name="smoothing_pvc",
                                        iterfield=["fwhm", "out_prefix"])
            smoothing_pvc.inputs.fwhm = [[x, x, x]
                                         for x in self.parameters["smooth"]]
            smoothing_pvc.inputs.out_prefix = [
                "fwhm-" + str(x) + "mm_" for x in self.parameters["smooth"]
            ]
            # fmt: off
            self.connect([
                (apply_mask_pvc, smoothing_pvc, [("masked_image_path", "in_files")]),
                (smoothing_pvc, self.output_node, [("smoothed_files", "pet_pvc_suvr_masked_smoothed")]),
            ])
            # fmt: on
        else:
            self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]

        # Atlas Statistics
        # ================
        atlas_stats_pvc = npe.MapNode(
            nutil.Function(
                input_names=["in_image", "in_atlas_list"],
                output_names=["atlas_statistics"],
                function=atlas_statistics,
            ),
            name="atlas_stats_pvc",
            iterfield=["in_image"],
        )
        atlas_stats_pvc.inputs.in_atlas_list = self.parameters["atlases"]

        # Connection
        # ==========
        # fmt: off
        self.connect([
            (self.input_node, unzip_pvc_mask_tissues, [("pvc_mask_tissues", "in_file")]),
            (unzip_pvc_mask_tissues, pvc_mask, [("out_file", "tissues")]),
            (unzip_flow_fields, dartel_mni_reg_pvc, [("out_file", "flowfield_files")]),
            (unzip_dartel_template, dartel_mni_reg_pvc, [("out_file", "template_file")]),
            (unzip_reference_mask, reslice_pvc, [("out_file", "in_file")]),
            (coreg_pet_t1, petpvc, [("coregistered_source", "in_file"),
                                    (("coregistered_source", pet_pvc_name, "RBV"), "out_file")]),
            (pvc_mask, petpvc, [("out_mask", "mask_file")]),
            (self.input_node, petpvc, [(("psf", get_from_list, 0), "fwhm_x"),
                                       (("psf", get_from_list, 1), "fwhm_y"),
                                       (("psf", get_from_list, 2), "fwhm_z")]),
            (petpvc, dartel_mni_reg_pvc, [("out_file", "apply_to_files")]),
            (dartel_mni_reg_pvc, reslice_pvc, [("normalized_files", "space_defining")]),
            (dartel_mni_reg_pvc, norm_to_ref_pvc, [("normalized_files", "pet_image")]),
            (reslice_pvc, norm_to_ref_pvc, [("out_file", "region_mask")]),
            (norm_to_ref_pvc, apply_mask_pvc, [("suvr_pet_path", "image")]),
            (binary_mask, apply_mask_pvc, [("out_mask", "binary_mask")]),
            (norm_to_ref_pvc, atlas_stats_pvc, [("suvr_pet_path", "in_image")]),
            (petpvc, self.output_node, [("out_file", "pet_pvc")]),
            (dartel_mni_reg_pvc, self.output_node, [("normalized_files", "pet_pvc_mni")]),
            (norm_to_ref_pvc, self.output_node, [("suvr_pet_path", "pet_pvc_suvr")]),
            (apply_mask_pvc, self.output_node, [("masked_image_path", "pet_pvc_suvr_masked")]),
            (atlas_stats_pvc, self.output_node, [("atlas_statistics", "pvc_atlas_statistics")]),
        ])
        # fmt: on
    else:
        # PVC disabled: fill all PVC outputs with empty placeholders.
        self.output_node.inputs.pet_pvc = [[]]
        self.output_node.inputs.pet_pvc_mni = [[]]
        self.output_node.inputs.pet_pvc_suvr = [[]]
        self.output_node.inputs.pet_pvc_suvr_masked = [[]]
        self.output_node.inputs.pvc_atlas_statistics = [[]]
        self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
"""Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. """ skullstrip = pe.Node(fsl.BET(), name="skullstrip") skullstrip.inputs.mask = True """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' """Normalize and smooth functional data using DARTEL template """ normalize_and_smooth_func = pe.Node(spm.DARTELNorm2MNI(modulate=True), name='normalize_and_smooth_func') fwhmlist = [4] normalize_and_smooth_func.iterables = ('fwhm', fwhmlist) """Normalize structural data using DARTEL template """ normalize_struct = pe.Node(spm.DARTELNorm2MNI(modulate=True), name='normalize_struct') normalize_struct.inputs.fwhm = 2 preproc.connect([ (realign, coregister, [('mean_image', 'source'), ('realigned_files', 'apply_to_files')]), (coregister, normalize_and_smooth_func, [('coregistered_files', 'apply_to_files')]),
def build_core_nodes(self):
    """Build and connect the core nodes of the pipelines.

    T1-volume graph: locates SPM (standalone or MATLAB-based) and its TPM
    tissue-probability map, runs Unified Segmentation, optionally warps the
    T1 to MNI, builds a DARTEL template, normalizes tissue maps to MNI with
    DARTELNorm2MNI, optionally smooths, and computes atlas statistics.
    """
    import os
    import platform
    import nipype.interfaces.spm as spm
    import nipype.interfaces.matlab as mlab
    import nipype.pipeline.engine as npe
    import nipype.interfaces.utility as nutil
    import clinica.pipelines.t1_volume_tissue_segmentation.t1_volume_tissue_segmentation_utils as seg_utils
    import clinica.pipelines.t1_volume_create_dartel.t1_volume_create_dartel_utils as dartel_utils
    import clinica.pipelines.t1_volume_dartel2mni.t1_volume_dartel2mni_utils as dartel2mni_utils
    from clinica.utils.io import unzip_nii

    # Configure nipype's MATLAB/SPM defaults from environment variables.
    spm_home = os.getenv("SPM_HOME")
    mlab_home = os.getenv("MATLABCMD")
    mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
    mlab.MatlabCommand.set_default_paths(spm_home)

    # Detect SPM: standalone (MCR) build takes precedence when configured.
    if 'SPMSTANDALONE_HOME' in os.environ:
        if 'MCR_HOME' in os.environ:
            matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'], 'run_spm12.sh') \
                + ' ' + os.environ['MCR_HOME'] \
                + ' script'
            spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
            version = spm.SPMCommand().version
        else:
            # NOTE(review): message has typos ("environnement", "Althought");
            # left byte-identical here.
            raise EnvironmentError('MCR_HOME variable not in environnement. Althought, '
                                   + 'SPMSTANDALONE_HOME has been found')
    else:
        version = spm.Info.getinfo()

    # Resolve the TPM.nii path from the detected SPM version.
    # dict -> MATLAB-based SPM; str -> standalone version number.
    if version:
        if isinstance(version, dict):
            spm_path = version['path']
            if version['name'] == 'SPM8':
                print('You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                      + 'Please upgrade your SPM toolbox.')
                tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
            elif version['name'] == 'SPM12':
                tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
            else:
                raise RuntimeError('SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.')
        if isinstance(version, str):
            if float(version) >= 12.7169:
                # macOS standalone layout differs from the Linux one.
                if platform.system() == 'Darwin':
                    tissue_map = os.path.join(str(spm_home), 'spm12.app/Contents/MacOS/spm12_mcr/spm12/spm12/tpm/TPM.nii')
                else:
                    tissue_map = os.path.join(str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
            else:
                raise RuntimeError('SPM standalone version not supported. Please upgrade SPM standalone.')
    else:
        raise RuntimeError('SPM could not be found. Please verify your SPM_HOME environment variable.')

    # Unzipping
    # ===============================
    unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                            output_names=['out_file'],
                                            function=unzip_nii),
                             name='unzip_node',
                             iterfield=['in_file'])

    # Unified Segmentation
    # ===============================
    new_segment = npe.MapNode(spm.NewSegment(),
                              name='new_segment',
                              iterfield=['channel_files'])

    # Forward only the segmentation parameters that were explicitly set.
    if self.parameters['affine_regularization'] is not None:
        new_segment.inputs.affine_regularization = self.parameters['affine_regularization']
    if self.parameters['channel_info'] is not None:
        new_segment.inputs.channel_info = self.parameters['channel_info']
    if self.parameters['sampling_distance'] is not None:
        new_segment.inputs.sampling_distance = self.parameters['sampling_distance']
    if self.parameters['warping_regularization'] is not None:
        new_segment.inputs.warping_regularization = self.parameters['warping_regularization']

    # Check if we need to save the forward transformation for registering the T1 to the MNI space
    # NOTE(review): this mutates self.parameters['write_deformation_fields']
    # in place (index 1 = forward field).
    if self.parameters['save_t1_mni'] is not None and self.parameters['save_t1_mni']:
        if self.parameters['write_deformation_fields'] is not None:
            self.parameters['write_deformation_fields'][1] = True
        else:
            self.parameters['write_deformation_fields'] = [False, True]

    if self.parameters['write_deformation_fields'] is not None:
        new_segment.inputs.write_deformation_fields = self.parameters['write_deformation_fields']

    # A user-supplied TPM overrides the one resolved from the SPM install.
    if self.parameters['tpm'] is not None:
        tissue_map = self.parameters['tpm']

    new_segment.inputs.tissues = seg_utils.get_tissue_tuples(tissue_map,
                                                             self.parameters['tissue_classes'],
                                                             self.parameters['dartel_tissues'],
                                                             self.parameters['save_warped_unmodulated'],
                                                             self.parameters['save_warped_modulated'])

    # Apply segmentation deformation to T1 (into MNI space)
    # ========================================================
    if self.parameters['save_t1_mni'] is not None and self.parameters['save_t1_mni']:
        t1_to_mni = npe.MapNode(seg_utils.ApplySegmentationDeformation(),
                                name='t1_to_mni',
                                iterfield=['deformation_field', 'in_files'])
        self.connect([
            (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
            (new_segment, t1_to_mni, [('forward_deformation_field', 'deformation_field')]),
            (t1_to_mni, self.output_node, [('out_files', 't1_mni')])
        ])

    # DARTEL template
    # ===============================
    dartel_template = npe.Node(spm.DARTEL(), name='dartel_template')
    if self.parameters['iteration_parameters'] is not None:
        dartel_template.inputs.iteration_parameters = self.parameters['iteration_parameters']
    if self.parameters['optimization_parameters'] is not None:
        dartel_template.inputs.optimization_parameters = self.parameters['optimization_parameters']
    if self.parameters['regularization_form'] is not None:
        dartel_template.inputs.regularization_form = self.parameters['regularization_form']

    # DARTEL2MNI Registration
    # =======================
    dartel2mni_node = npe.MapNode(spm.DARTELNorm2MNI(),
                                  name='dartel2MNI',
                                  iterfield=['apply_to_files', 'flowfield_files'])
    if self.parameters['bounding_box'] is not None:
        dartel2mni_node.inputs.bounding_box = self.parameters['bounding_box']
    if self.parameters['voxel_size'] is not None:
        dartel2mni_node.inputs.voxel_size = self.parameters['voxel_size']
    dartel2mni_node.inputs.modulate = self.parameters['modulation']
    # fwhm=0: smoothing is handled by the dedicated node below, not here.
    dartel2mni_node.inputs.fwhm = 0

    # Smoothing
    # =========
    # Iterables (fwhm + matching out_prefix) run synchronized, then the
    # per-FWHM outputs are joined back into a single list.
    if self.parameters['fwhm'] is not None and len(self.parameters['fwhm']) > 0:
        smoothing_node = npe.MapNode(spm.Smooth(),
                                     name='smoothing_node',
                                     iterfield=['in_files'])
        smoothing_node.iterables = [('fwhm', [[x, x, x] for x in self.parameters['fwhm']]),
                                    ('out_prefix', ['fwhm-' + str(x) + 'mm_' for x in self.parameters['fwhm']])]
        smoothing_node.synchronize = True
        join_smoothing_node = npe.JoinNode(interface=nutil.Function(input_names=['smoothed_normalized_files'],
                                                                    output_names=['smoothed_normalized_files'],
                                                                    function=dartel2mni_utils.join_smoothed_files),
                                           joinsource='smoothing_node',
                                           joinfield='smoothed_normalized_files',
                                           name='join_smoothing_node')
        self.connect([
            (dartel2mni_node, smoothing_node, [('normalized_files', 'in_files')]),
            (smoothing_node, join_smoothing_node, [('smoothed_files', 'smoothed_normalized_files')]),
            (join_smoothing_node, self.output_node, [('smoothed_normalized_files', 'smoothed_normalized_files')])
        ])
    else:
        self.output_node.inputs.smoothed_normalized_files = []

    # Atlas Statistics
    # ================
    atlas_stats_node = npe.MapNode(nutil.Function(input_names=['in_image', 'in_atlas_list'],
                                                  output_names=['atlas_statistics'],
                                                  function=dartel2mni_utils.atlas_statistics),
                                   name='atlas_stats_node',
                                   iterfield=['in_image'])
    atlas_stats_node.inputs.in_atlas_list = self.parameters['atlas_list']

    # Connection
    # ==========
    self.connect([
        (self.input_node, unzip_node, [('input_images', 'in_file')]),
        (unzip_node, new_segment, [('out_file', 'channel_files')]),
        (new_segment, self.output_node, [('bias_corrected_images', 'bias_corrected_images'),
                                         ('bias_field_images', 'bias_field_images'),
                                         ('dartel_input_images', 'dartel_input_images'),
                                         ('forward_deformation_field', 'forward_deformation_field'),
                                         ('inverse_deformation_field', 'inverse_deformation_field'),
                                         ('modulated_class_images', 'modulated_class_images'),
                                         ('native_class_images', 'native_class_images'),
                                         ('normalized_class_images', 'normalized_class_images'),
                                         ('transformation_mat', 'transformation_mat')]),
        (new_segment, dartel_template, [(('dartel_input_images', dartel_utils.get_class_images,
                                          self.parameters['dartel_tissues']), 'image_files')]),
        (dartel_template, self.output_node, [('dartel_flow_fields', 'dartel_flow_fields'),
                                             ('final_template_file', 'final_template_file'),
                                             ('template_files', 'template_files')]),
        (new_segment, dartel2mni_node, [(('native_class_images',
                                          seg_utils.group_nested_images_by_subject), 'apply_to_files')]),
        (dartel_template, dartel2mni_node, [(('dartel_flow_fields', dartel2mni_utils.prepare_flowfields,
                                              self.parameters['tissue_classes']), 'flowfield_files')]),
        (dartel_template, dartel2mni_node, [('final_template_file', 'template_file')]),
        (dartel2mni_node, self.output_node, [('normalized_files', 'normalized_files')]),
        (dartel2mni_node, atlas_stats_node, [(('normalized_files',
                                               dartel2mni_utils.select_gm_images), 'in_image')]),
        (atlas_stats_node, self.output_node, [('atlas_statistics', 'atlas_statistics')])
    ])
def build_core_nodes(self):
    """Build and connect the core nodes of the pipelines.

    DARTEL-to-MNI graph: locates SPM (standalone or MATLAB-based), unzips
    tissue maps, flow fields and the DARTEL template, normalizes tissues to
    MNI with DARTELNorm2MNI, optionally smooths over several FWHM values,
    and computes atlas statistics on the grey-matter images.
    """
    import os
    import nipype.interfaces.spm as spm
    import nipype.interfaces.matlab as mlab
    import nipype.pipeline.engine as npe
    import nipype.interfaces.utility as nutil
    from clinica.utils.io import unzip_nii
    from clinica.pipelines.t1_volume_dartel2mni.t1_volume_dartel2mni_utils import prepare_flowfields, join_smoothed_files, atlas_statistics, select_gm_images

    # Configure nipype's MATLAB/SPM defaults from environment variables.
    spm_home = os.getenv("SPM_HOME")
    mlab_home = os.getenv("MATLABCMD")
    mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
    mlab.MatlabCommand.set_default_paths(spm_home)

    # Detect SPM: standalone (MCR) build takes precedence when configured.
    if 'SPMSTANDALONE_HOME' in os.environ:
        if 'MCR_HOME' in os.environ:
            matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'], 'run_spm12.sh') \
                + ' ' + os.environ['MCR_HOME'] \
                + ' script'
            spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
            version = spm.SPMCommand().version
        else:
            # NOTE(review): message has typos ("environnement", "Althought");
            # left byte-identical here.
            raise EnvironmentError(
                'MCR_HOME variable not in environnement. Althought, '
                + 'SPMSTANDALONE_HOME has been found')
    else:
        version = spm.Info.getinfo()

    # Validate the detected SPM and resolve tissue_map (TPM.nii path).
    # dict -> MATLAB-based SPM; str -> standalone version number.
    # NOTE(review): tissue_map is computed but not used later in this
    # method — presumably kept for parity with the segmentation pipeline.
    if version:
        if isinstance(version, dict):
            spm_path = version['path']
            if version['name'] == 'SPM8':
                print(
                    'You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                    + 'Please upgrade your SPM toolbox.')
                tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
            elif version['name'] == 'SPM12':
                tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
            else:
                raise RuntimeError(
                    'SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.'
                )
        if isinstance(version, str):
            if float(version) >= 12.7169:
                tissue_map = os.path.join(
                    str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
            else:
                raise RuntimeError(
                    'SPM standalone version not supported. Please upgrade SPM standalone.'
                )
    else:
        raise RuntimeError(
            'SPM could not be found. Please verify your SPM_HOME environment variable.'
        )

    # Unzipping
    # =========
    # SPM cannot read .nii.gz, so compressed inputs are unzipped first.
    unzip_tissues_node = npe.MapNode(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_tissues_node',
        iterfield=['in_file'])
    unzip_flowfields_node = npe.MapNode(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_flowfields_node',
        iterfield=['in_file'])
    unzip_template_node = npe.Node(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_template_node')

    # DARTEL2MNI Registration
    # =======================
    dartel2mni_node = npe.MapNode(
        spm.DARTELNorm2MNI(),
        name='dartel2MNI',
        iterfield=['apply_to_files', 'flowfield_files'])
    if self.parameters['bounding_box'] is not None:
        dartel2mni_node.inputs.bounding_box = self.parameters[
            'bounding_box']
    if self.parameters['voxel_size'] is not None:
        dartel2mni_node.inputs.voxel_size = self.parameters['voxel_size']
    dartel2mni_node.inputs.modulate = self.parameters['modulation']
    # fwhm=0: smoothing is handled by the dedicated node below, not here.
    dartel2mni_node.inputs.fwhm = 0

    # Smoothing
    # =========
    # Iterables (fwhm + matching out_prefix) run synchronized, then the
    # per-FWHM outputs are joined back into a single list.
    if self.parameters['fwhm'] is not None and len(
            self.parameters['fwhm']) > 0:
        smoothing_node = npe.MapNode(spm.Smooth(),
                                     name='smoothing_node',
                                     iterfield=['in_files'])
        smoothing_node.iterables = [
            ('fwhm', [[x, x, x] for x in self.parameters['fwhm']]),
            ('out_prefix',
             ['fwhm-' + str(x) + 'mm_' for x in self.parameters['fwhm']])
        ]
        smoothing_node.synchronize = True
        join_smoothing_node = npe.JoinNode(
            interface=nutil.Function(
                input_names=['smoothed_normalized_files'],
                output_names=['smoothed_normalized_files'],
                function=join_smoothed_files),
            joinsource='smoothing_node',
            joinfield='smoothed_normalized_files',
            name='join_smoothing_node')
        self.connect([(dartel2mni_node, smoothing_node,
                       [('normalized_files', 'in_files')]),
                      (smoothing_node, join_smoothing_node,
                       [('smoothed_files', 'smoothed_normalized_files')]),
                      (join_smoothing_node, self.output_node,
                       [('smoothed_normalized_files',
                         'smoothed_normalized_files')])])
    else:
        self.output_node.inputs.smoothed_normalized_files = []

    # Atlas Statistics
    # ================
    atlas_stats_node = npe.MapNode(nutil.Function(
        input_names=['in_image', 'in_atlas_list'],
        output_names=['atlas_statistics'],
        function=atlas_statistics),
        name='atlas_stats_node',
        iterfield=['in_image'])
    atlas_stats_node.inputs.in_atlas_list = self.parameters['atlas_list']

    # Connection
    # ==========
    self.connect([(self.input_node, unzip_tissues_node,
                   [('apply_to_files', 'in_file')]),
                  (self.input_node, unzip_flowfields_node,
                   [('flowfield_files', 'in_file')]),
                  (self.input_node, unzip_template_node,
                   [('template_file', 'in_file')]),
                  (unzip_tissues_node, dartel2mni_node,
                   [('out_file', 'apply_to_files')]),
                  (unzip_flowfields_node, dartel2mni_node,
                   [(('out_file', prepare_flowfields,
                      self.parameters['tissues']), 'flowfield_files')]),
                  (unzip_template_node, dartel2mni_node,
                   [('out_file', 'template_file')]),
                  (dartel2mni_node, self.output_node,
                   [('normalized_files', 'normalized_files')]),
                  (dartel2mni_node, atlas_stats_node,
                   [(('normalized_files', select_gm_images), 'in_image')]),
                  (atlas_stats_node, self.output_node,
                   [('atlas_statistics', 'atlas_statistics')])])
def build_core_nodes(self):
    """Build and connect an output node to the pipeline.

    PET-volume graph: unzip inputs, coregister PET to T1, normalize to MNI
    with DARTELNorm2MNI, compute the SUVR against a resliced reference
    region, mask, optionally smooth and compute atlas statistics; when
    ``self.parameters['apply_pvc']`` is set, also run a parallel
    partial-volume-corrected (RBV) branch of the same steps.
    """
    import nipype.interfaces.spm as spm
    import nipype.interfaces.spm.utils as spmutils
    from nipype.interfaces.petpvc import PETPVC
    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe
    from clinica.utils.filemanip import unzip_nii
    from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone
    import clinica.pipelines.pet_volume.pet_volume_utils as utils

    # Prefer the standalone (compiled) SPM when it is available.
    if spm_standalone_is_available():
        use_spm_standalone()

    # Initialize pipeline
    # ===================
    init_node = npe.Node(interface=nutil.Function(
        input_names=['pet_nii'],
        output_names=['pet_nii'],
        function=utils.init_input_node),
        name='init_pipeline')

    # Unzipping
    # =========
    # SPM cannot read .nii.gz, so every compressed input is unzipped first.
    unzip_pet_image = npe.Node(nutil.Function(input_names=['in_file'],
                                              output_names=['out_file'],
                                              function=unzip_nii),
                               name='unzip_pet_image')
    unzip_t1_image_native = npe.Node(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_t1_image_native')
    unzip_flow_fields = npe.Node(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_flow_fields')
    unzip_dartel_template = npe.Node(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_dartel_template')
    unzip_reference_mask = npe.Node(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_reference_mask')
    unzip_mask_tissues = npe.MapNode(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_mask_tissues',
        iterfield=['in_file'])

    # Coregister PET into T1 native space
    # ===================================
    coreg_pet_t1 = npe.Node(spm.Coregister(), name='coreg_pet_t1')

    # Spatially normalize PET into MNI
    # ================================
    dartel_mni_reg = npe.Node(spm.DARTELNorm2MNI(), name='dartel_mni_reg')
    dartel_mni_reg.inputs.modulate = False
    # fwhm=0: smoothing, if requested, is done by the dedicated node below.
    dartel_mni_reg.inputs.fwhm = 0

    # Reslice reference region mask into PET
    # ======================================
    reslice = npe.Node(spmutils.Reslice(), name='reslice')

    # Normalize PET values according to reference region
    # ==================================================
    norm_to_ref = npe.Node(nutil.Function(
        input_names=['pet_image', 'region_mask'],
        output_names=['suvr_pet_path'],
        function=utils.normalize_to_reference),
        name='norm_to_ref')

    # Create binary mask from segmented tissues
    # =========================================
    binary_mask = npe.Node(nutil.Function(
        input_names=['tissues', 'threshold'],
        output_names=['out_mask'],
        function=utils.create_binary_mask),
        name='binary_mask')
    binary_mask.inputs.threshold = self.parameters['mask_threshold']

    # Mask PET image
    # ==============
    apply_mask = npe.Node(nutil.Function(
        input_names=['image', 'binary_mask'],
        output_names=['masked_image_path'],
        function=utils.apply_binary_mask),
        name='apply_mask')

    # Smoothing
    # =========
    # One Smooth run per requested FWHM value; out_prefix encodes the FWHM.
    if self.parameters['smooth'] is not None and len(
            self.parameters['smooth']) > 0:
        smoothing_node = npe.MapNode(spm.Smooth(),
                                     name='smoothing_node',
                                     iterfield=['fwhm', 'out_prefix'])
        smoothing_node.inputs.fwhm = [[x, x, x]
                                      for x in self.parameters['smooth']]
        smoothing_node.inputs.out_prefix = [
            'fwhm-' + str(x) + 'mm_' for x in self.parameters['smooth']
        ]
        self.connect([(apply_mask, smoothing_node,
                       [('masked_image_path', 'in_files')]),
                      (smoothing_node, self.output_node,
                       [('smoothed_files', 'pet_suvr_masked_smoothed')])])
    else:
        self.output_node.inputs.pet_suvr_masked_smoothed = [[]]

    # Atlas Statistics
    # ================
    atlas_stats_node = npe.MapNode(nutil.Function(
        input_names=['in_image', 'in_atlas_list'],
        output_names=['atlas_statistics'],
        function=utils.atlas_statistics),
        name='atlas_stats_node',
        iterfield=['in_image'])
    atlas_stats_node.inputs.in_atlas_list = self.parameters['atlases']

    # Connection
    # ==========
    self.connect([
        (self.input_node, init_node, [('pet_image', 'pet_nii')]),
        (init_node, unzip_pet_image, [('pet_nii', 'in_file')]),
        (self.input_node, unzip_t1_image_native, [('t1_image_native',
                                                   'in_file')]),
        (self.input_node, unzip_flow_fields, [('flow_fields', 'in_file')]),
        (self.input_node, unzip_dartel_template, [('dartel_template',
                                                   'in_file')]),
        (self.input_node, unzip_reference_mask, [('reference_mask',
                                                  'in_file')]),
        (self.input_node, unzip_mask_tissues, [('mask_tissues', 'in_file')]),
        (unzip_pet_image, coreg_pet_t1, [('out_file', 'source')]),
        (unzip_t1_image_native, coreg_pet_t1, [('out_file', 'target')]),
        (unzip_flow_fields, dartel_mni_reg, [('out_file',
                                              'flowfield_files')]),
        (unzip_dartel_template, dartel_mni_reg, [('out_file',
                                                  'template_file')]),
        (unzip_reference_mask, reslice, [('out_file', 'in_file')]),
        (unzip_mask_tissues, binary_mask, [('out_file', 'tissues')]),
        (coreg_pet_t1, dartel_mni_reg, [('coregistered_source',
                                         'apply_to_files')]),
        (dartel_mni_reg, reslice, [('normalized_files', 'space_defining')]),
        (dartel_mni_reg, norm_to_ref, [('normalized_files', 'pet_image')]),
        (reslice, norm_to_ref, [('out_file', 'region_mask')]),
        (norm_to_ref, apply_mask, [('suvr_pet_path', 'image')]),
        (binary_mask, apply_mask, [('out_mask', 'binary_mask')]),
        (norm_to_ref, atlas_stats_node, [('suvr_pet_path', 'in_image')]),
        (coreg_pet_t1, self.output_node, [('coregistered_source',
                                           'pet_t1_native')]),
        (dartel_mni_reg, self.output_node, [('normalized_files', 'pet_mni')]),
        (norm_to_ref, self.output_node, [('suvr_pet_path', 'pet_suvr')]),
        (binary_mask, self.output_node, [('out_mask', 'binary_mask')]),
        (apply_mask, self.output_node, [('masked_image_path',
                                         'pet_suvr_masked')]),
        (atlas_stats_node, self.output_node, [('atlas_statistics',
                                               'atlas_statistics')])
    ])

    # PVC
    # ==========
    # Optional partial-volume-correction branch mirroring the main chain
    # on the RBV-corrected PET image.
    if self.parameters['apply_pvc']:
        # Unzipping
        # =========
        unzip_pvc_mask_tissues = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
            name='unzip_pvc_mask_tissues',
            iterfield=['in_file'])

        # Creating Mask to use in PVC
        # ===========================
        pvc_mask = npe.Node(nutil.Function(input_names=['tissues'],
                                           output_names=['out_mask'],
                                           function=utils.create_pvc_mask),
                            name='pvc_mask')

        # PET PVC
        # =======
        petpvc = npe.Node(PETPVC(), name='pvc')
        petpvc.inputs.pvc = 'RBV'
        petpvc.inputs.out_file = 'pvc.nii'

        # Spatially normalize PET into MNI
        # ================================
        dartel_mni_reg_pvc = npe.Node(spm.DARTELNorm2MNI(),
                                      name='dartel_mni_reg_pvc')
        dartel_mni_reg_pvc.inputs.modulate = False
        dartel_mni_reg_pvc.inputs.fwhm = 0

        # Reslice reference region mask into PET
        # ======================================
        reslice_pvc = npe.Node(spmutils.Reslice(), name='reslice_pvc')

        # Normalize PET values according to reference region
        # ==================================================
        norm_to_ref_pvc = npe.Node(nutil.Function(
            input_names=['pet_image', 'region_mask'],
            output_names=['suvr_pet_path'],
            function=utils.normalize_to_reference),
            name='norm_to_ref_pvc')

        # Mask PET image
        # ==============
        apply_mask_pvc = npe.Node(nutil.Function(
            input_names=['image', 'binary_mask'],
            output_names=['masked_image_path'],
            function=utils.apply_binary_mask),
            name='apply_mask_pvc')

        # Smoothing
        # =========
        if self.parameters['smooth'] is not None and len(
                self.parameters['smooth']) > 0:
            smoothing_pvc = npe.MapNode(spm.Smooth(),
                                        name='smoothing_pvc',
                                        iterfield=['fwhm', 'out_prefix'])
            smoothing_pvc.inputs.fwhm = [[x, x, x]
                                         for x in self.parameters['smooth']]
            smoothing_pvc.inputs.out_prefix = [
                'fwhm-' + str(x) + 'mm_' for x in self.parameters['smooth']
            ]
            self.connect([(apply_mask_pvc, smoothing_pvc,
                           [('masked_image_path', 'in_files')]),
                          (smoothing_pvc, self.output_node,
                           [('smoothed_files',
                             'pet_pvc_suvr_masked_smoothed')])])
        else:
            self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]

        # Atlas Statistics
        # ================
        atlas_stats_pvc = npe.MapNode(nutil.Function(
            input_names=['in_image', 'in_atlas_list'],
            output_names=['atlas_statistics'],
            function=utils.atlas_statistics),
            name='atlas_stats_pvc',
            iterfield=['in_image'])
        atlas_stats_pvc.inputs.in_atlas_list = self.parameters['atlases']

        # Connection
        # ==========
        self.connect([
            (self.input_node, unzip_pvc_mask_tissues, [('pvc_mask_tissues',
                                                        'in_file')]),
            (unzip_pvc_mask_tissues, pvc_mask, [('out_file', 'tissues')]),
            (unzip_flow_fields, dartel_mni_reg_pvc, [('out_file',
                                                      'flowfield_files')]),
            (unzip_dartel_template, dartel_mni_reg_pvc, [('out_file',
                                                          'template_file')]),
            (unzip_reference_mask, reslice_pvc, [('out_file', 'in_file')]),
            (coreg_pet_t1, petpvc, [('coregistered_source', 'in_file'),
                                    (('coregistered_source',
                                      utils.pet_pvc_name, 'RBV'),
                                     'out_file')]),
            (pvc_mask, petpvc, [('out_mask', 'mask_file')]),
            (self.input_node, petpvc, [(('psf', utils.get_from_list, 0),
                                        'fwhm_x'),
                                       (('psf', utils.get_from_list, 1),
                                        'fwhm_y'),
                                       (('psf', utils.get_from_list, 2),
                                        'fwhm_z')]),
            (petpvc, dartel_mni_reg_pvc, [('out_file', 'apply_to_files')]),
            (dartel_mni_reg_pvc, reslice_pvc, [('normalized_files',
                                                'space_defining')]),
            (dartel_mni_reg_pvc, norm_to_ref_pvc, [('normalized_files',
                                                    'pet_image')]),
            (reslice_pvc, norm_to_ref_pvc, [('out_file', 'region_mask')]),
            (norm_to_ref_pvc, apply_mask_pvc, [('suvr_pet_path', 'image')]),
            (binary_mask, apply_mask_pvc, [('out_mask', 'binary_mask')]),
            (norm_to_ref_pvc, atlas_stats_pvc, [('suvr_pet_path',
                                                 'in_image')]),
            (petpvc, self.output_node, [('out_file', 'pet_pvc')]),
            (dartel_mni_reg_pvc, self.output_node, [('normalized_files',
                                                     'pet_pvc_mni')]),
            (norm_to_ref_pvc, self.output_node, [('suvr_pet_path',
                                                  'pet_pvc_suvr')]),
            (apply_mask_pvc, self.output_node, [('masked_image_path',
                                                 'pet_pvc_suvr_masked')]),
            (atlas_stats_pvc, self.output_node, [('atlas_statistics',
                                                  'pvc_atlas_statistics')])
        ])
    else:
        # PVC disabled: fill all PVC outputs with empty placeholders.
        self.output_node.inputs.pet_pvc = [[]]
        self.output_node.inputs.pet_pvc_mni = [[]]
        self.output_node.inputs.pet_pvc_suvr = [[]]
        self.output_node.inputs.pet_pvc_suvr_masked = [[]]
        self.output_node.inputs.pvc_atlas_statistics = [[]]
        self.output_node.inputs.pet_pvc_suvr_masked_smoothed = [[]]
def build_core_nodes(self):
    """Build and connect the core nodes of the pipeline."""
    import nipype.interfaces.spm as spm
    import nipype.pipeline.engine as npe
    import nipype.interfaces.utility as nutil
    from clinica.utils.filemanip import unzip_nii
    from ..t1_volume_dartel2mni import t1_volume_dartel2mni_utils as dartel2mni_utils

    def unzip_interface():
        # Factory so each unzip node gets its own Function interface
        # wrapping the shared ``unzip_nii`` helper.
        return nutil.Function(input_names=['in_file'],
                              output_names=['out_file'],
                              function=unzip_nii)

    # Unzipping
    # =========
    tissues_unzipper = npe.MapNode(unzip_interface(),
                                   name='unzip_tissues_node',
                                   iterfield=['in_file'])
    flowfields_unzipper = npe.MapNode(unzip_interface(),
                                      name='unzip_flowfields_node',
                                      iterfield=['in_file'])
    template_unzipper = npe.Node(unzip_interface(),
                                 name='unzip_template_node')

    # DARTEL2MNI Registration
    # =======================
    dartel_to_mni = npe.MapNode(
        spm.DARTELNorm2MNI(),
        name='dartel2MNI',
        iterfield=['apply_to_files', 'flowfield_files'])
    if self.parameters['voxel_size'] is not None:
        dartel_to_mni.inputs.voxel_size = tuple(self.parameters['voxel_size'])
    dartel_to_mni.inputs.modulate = self.parameters['modulate']
    # Smoothing, if requested, is handled by the dedicated node below.
    dartel_to_mni.inputs.fwhm = 0

    # Smoothing
    # =========
    fwhm_values = self.parameters['smooth']
    if fwhm_values is not None and len(fwhm_values) > 0:
        smoother = npe.MapNode(spm.Smooth(),
                               name='smoothing_node',
                               iterfield=['in_files'])
        # One synchronized smoothing run per requested FWHM, each tagged
        # with a matching output prefix.
        smoother.iterables = [
            ('fwhm', [[fwhm] * 3 for fwhm in fwhm_values]),
            ('out_prefix', ['fwhm-' + str(fwhm) + 'mm_' for fwhm in fwhm_values]),
        ]
        smoother.synchronize = True

        smoothing_joiner = npe.JoinNode(
            interface=nutil.Function(
                input_names=['smoothed_normalized_files'],
                output_names=['smoothed_normalized_files'],
                function=dartel2mni_utils.join_smoothed_files),
            joinsource='smoothing_node',
            joinfield='smoothed_normalized_files',
            name='join_smoothing_node')

        self.connect([
            (dartel_to_mni, smoother,
             [('normalized_files', 'in_files')]),
            (smoother, smoothing_joiner,
             [('smoothed_files', 'smoothed_normalized_files')]),
            (smoothing_joiner, self.output_node,
             [('smoothed_normalized_files', 'smoothed_normalized_files')]),
        ])
    else:
        self.output_node.inputs.smoothed_normalized_files = []

    # Connection
    # ==========
    self.connect([
        (self.input_node, tissues_unzipper,
         [('native_segmentations', 'in_file')]),
        (self.input_node, flowfields_unzipper,
         [('flowfield_files', 'in_file')]),
        (self.input_node, template_unzipper,
         [('template_file', 'in_file')]),
        (tissues_unzipper, dartel_to_mni,
         [('out_file', 'apply_to_files')]),
        (flowfields_unzipper, dartel_to_mni,
         [(('out_file', dartel2mni_utils.prepare_flowfields,
            self.parameters['tissues']), 'flowfield_files')]),
        (template_unzipper, dartel_to_mni,
         [('out_file', 'template_file')]),
        (dartel_to_mni, self.output_node,
         [('normalized_files', 'normalized_files')]),
    ])
def create_vbm_preproc(name='vbmpreproc'):
    """Create a VBM workflow generating DARTEL-based warps to MNI space.

    Based on: http://www.fil.ion.ucl.ac.uk/~john/misc/VBMclass10.pdf

    Example
    -------
    >>> preproc = create_vbm_preproc()
    >>> preproc.inputs.inputspec.fwhm = 8
    >>> preproc.inputs.inputspec.structural_files = [
    ...     os.path.abspath('s1.nii'), os.path.abspath('s3.nii')]
    >>> preproc.inputs.inputspec.template_prefix = 'Template'
    >>> preproc.run() # doctest: +SKIP

    Inputs::

         inputspec.structural_files : structural data to be used to create templates
         inputspec.fwhm: single of triplet for smoothing when normalizing to MNI space
         inputspec.template_prefix : prefix for dartel template

    Outputs::

         outputspec.normalized_files : normalized gray matter files
         outputspec.template_file : DARTEL template
         outputspec.icv : intracranial volume (cc - assuming dimensions in mm)
    """
    workflow = pe.Workflow(name=name)

    # Workflow inputs.
    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['structural_files', 'fwhm', 'template_prefix']),
        name='inputspec')

    # Build the study-specific DARTEL template from the structural images.
    dartel_template = create_DARTEL_template()
    workflow.connect([
        (inputnode, dartel_template,
         [('template_prefix', 'inputspec.template_prefix'),
          ('structural_files', 'inputspec.structural_files')]),
    ])

    # Warp (modulated) gray matter into MNI space via the flow fields.
    norm2mni = pe.Node(spm.DARTELNorm2MNI(modulate=True), name='norm2mni')

    def getclass1images(class_images):
        # Flatten the per-session class-1 (gray matter) image lists.
        return [image for session in class_images for image in session[0]]

    workflow.connect([
        (dartel_template, norm2mni,
         [('outputspec.template_file', 'template_file'),
          ('outputspec.flow_fields', 'flowfield_files'),
          (('segment.native_class_images', getclass1images),
           'apply_to_files')]),
        (inputnode, norm2mni, [('fwhm', 'fwhm')]),
    ])

    def compute_icv(class_images):
        # ICV per session: voxels where GM+WM+CSF > 0.5, times the voxel
        # volume, scaled from mm^3 to cc.
        from nibabel import load
        from numpy import prod
        icv = []
        for session in class_images:
            voxel_volume = prod(load(session[0][0]).header.get_zooms())
            img = sum(load(session[tissue][0]).get_data()
                      for tissue in range(3))
            icv.append((img > 0.5).astype(int).sum() * voxel_volume * 1e-3)
        return icv

    calc_icv = pe.Node(
        niu.Function(function=compute_icv,
                     input_names=['class_images'],
                     output_names=['icv']),
        name='calc_icv')
    workflow.connect([
        (dartel_template, calc_icv,
         [('segment.native_class_images', 'class_images')]),
    ])
    """
    Define the outputs of the workflow and connect the nodes to the outputnode
    """
    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["normalized_files", "template_file", "icv"]),
        name="outputspec")
    workflow.connect([
        (dartel_template, outputnode,
         [('outputspec.template_file', 'template_file')]),
        (norm2mni, outputnode, [("normalized_files", "normalized_files")]),
        (calc_icv, outputnode, [("icv", "icv")]),
    ])

    return workflow
def build_core_nodes(self):
    """Build and connect the core nodes of the pipeline."""
    import nipype.interfaces.spm as spm
    import nipype.interfaces.utility as nutil
    import nipype.pipeline.engine as npe

    from clinica.utils.filemanip import unzip_nii
    from clinica.utils.spm import spm_standalone_is_available, use_spm_standalone

    from ..t1_volume_dartel2mni import (
        t1_volume_dartel2mni_utils as dartel2mni_utils,
    )

    # Prefer the standalone SPM/MCR distribution when one is installed.
    if spm_standalone_is_available():
        use_spm_standalone()

    def make_unzip(node_name, mapped):
        # Build an (optionally mapped) node wrapping ``unzip_nii``.
        interface = nutil.Function(
            input_names=["in_file"],
            output_names=["out_file"],
            function=unzip_nii,
        )
        if mapped:
            return npe.MapNode(interface, name=node_name, iterfield=["in_file"])
        return npe.Node(interface, name=node_name)

    # Unzipping
    # =========
    unzip_tissues = make_unzip("unzip_tissues_node", True)
    unzip_flowfields = make_unzip("unzip_flowfields_node", True)
    unzip_template = make_unzip("unzip_template_node", False)

    # DARTEL2MNI Registration
    # =======================
    registration = npe.MapNode(
        spm.DARTELNorm2MNI(),
        name="dartel2MNI",
        iterfield=["apply_to_files", "flowfield_files"],
    )
    voxel_size = self.parameters["voxel_size"]
    if voxel_size is not None:
        registration.inputs.voxel_size = tuple(voxel_size)
    registration.inputs.modulate = self.parameters["modulate"]
    # Smoothing is delegated to the dedicated node below.
    registration.inputs.fwhm = 0

    # Smoothing
    # =========
    smooth_fwhm = self.parameters["smooth"]
    if smooth_fwhm is not None and len(smooth_fwhm) > 0:
        smoothing = npe.MapNode(
            spm.Smooth(), name="smoothing_node", iterfield=["in_files"])
        # One synchronized smoothing run per requested FWHM, each with a
        # matching output prefix.
        smoothing.iterables = [
            ("fwhm", [[value] * 3 for value in smooth_fwhm]),
            (
                "out_prefix",
                ["fwhm-" + str(value) + "mm_" for value in smooth_fwhm],
            ),
        ]
        smoothing.synchronize = True

        join_smoothing = npe.JoinNode(
            interface=nutil.Function(
                input_names=["smoothed_normalized_files"],
                output_names=["smoothed_normalized_files"],
                function=dartel2mni_utils.join_smoothed_files,
            ),
            joinsource="smoothing_node",
            joinfield="smoothed_normalized_files",
            name="join_smoothing_node",
        )
        # fmt: off
        self.connect([
            (registration, smoothing, [("normalized_files", "in_files")]),
            (smoothing, join_smoothing, [("smoothed_files", "smoothed_normalized_files")]),
            (join_smoothing, self.output_node, [("smoothed_normalized_files", "smoothed_normalized_files")]),
        ])
        # fmt: on
    else:
        self.output_node.inputs.smoothed_normalized_files = []

    # Connection
    # ==========
    # fmt: off
    self.connect([
        (self.input_node, unzip_tissues, [("native_segmentations", "in_file")]),
        (self.input_node, unzip_flowfields, [("flowfield_files", "in_file")]),
        (self.input_node, unzip_template, [("template_file", "in_file")]),
        (unzip_tissues, registration, [("out_file", "apply_to_files")]),
        (unzip_flowfields, registration,
         [(("out_file", dartel2mni_utils.prepare_flowfields,
            self.parameters["tissues"]), "flowfield_files")]),
        (unzip_template, registration, [("out_file", "template_file")]),
        (registration, self.output_node, [("normalized_files", "normalized_files")]),
    ])
    # fmt: on
def build_core_nodes(self):
    """Build and connect the core nodes of the pipelines.

    Configures nipype's Matlab/SPM interface from the environment, checks
    the installed SPM version, then wires together the unzip nodes, the
    DARTEL-to-MNI registration, the optional smoothing branch, and the
    atlas-statistics node.

    Raises:
        RuntimeError: If SPM cannot be found, or is neither SPM8 nor SPM12.
    """
    import os
    import nipype.interfaces.spm as spm
    import nipype.interfaces.matlab as mlab
    import nipype.pipeline.engine as npe
    import nipype.interfaces.utility as nutil
    from clinica.utils.io import unzip_nii
    from t1_volume_dartel2mni_utils import prepare_flowfields, join_smoothed_files, atlas_statistics, select_gm_images

    # Point nipype's Matlab interface at the local Matlab/SPM install.
    spm_home = os.getenv("SPM_HOME")
    mlab_home = os.getenv("MATLABCMD")
    mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
    mlab.MatlabCommand.set_default_paths(spm_home)

    # Sanity-check the SPM version before building anything.
    version = spm.Info.version()
    if version:
        if version['name'] == 'SPM8':
            # Fixed: this was a Python 2 `print` statement, a syntax error
            # under Python 3. Single-argument print() behaves identically
            # on both interpreters.
            print('You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                  'Please upgrade your SPM toolbox.')
        elif version['name'] != 'SPM12':
            raise RuntimeError(
                'SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.'
            )
    else:
        raise RuntimeError(
            'SPM could not be found. Please verify your SPM_HOME environment variable.'
        )

    # Unzipping
    # =========
    unzip_tissues_node = npe.MapNode(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_tissues_node',
        iterfield=['in_file'])
    unzip_flowfields_node = npe.MapNode(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_flowfields_node',
        iterfield=['in_file'])
    unzip_template_node = npe.Node(nutil.Function(
        input_names=['in_file'],
        output_names=['out_file'],
        function=unzip_nii),
        name='unzip_template_node')

    # DARTEL2MNI Registration
    # =======================
    dartel2mni_node = npe.MapNode(
        spm.DARTELNorm2MNI(),
        name='dartel2MNI',
        iterfield=['apply_to_files', 'flowfield_files'])
    if self.parameters['bounding_box'] is not None:
        dartel2mni_node.inputs.bounding_box = self.parameters[
            'bounding_box']
    if self.parameters['voxel_size'] is not None:
        dartel2mni_node.inputs.voxel_size = self.parameters['voxel_size']
    dartel2mni_node.inputs.modulate = self.parameters['modulation']
    # Smoothing is handled by the dedicated node below, not by
    # DARTELNorm2MNI itself.
    dartel2mni_node.inputs.fwhm = 0

    # Smoothing
    # =========
    if self.parameters['fwhm'] is not None and len(
            self.parameters['fwhm']) > 0:
        smoothing_node = npe.MapNode(spm.Smooth(),
                                     name='smoothing_node',
                                     iterfield=['in_files'])
        # One synchronized smoothing run per requested FWHM, each tagged
        # with a matching output-file prefix.
        smoothing_node.iterables = [
            ('fwhm', [[x, x, x] for x in self.parameters['fwhm']]),
            ('out_prefix',
             ['fwhm-' + str(x) + 'mm_' for x in self.parameters['fwhm']])
        ]
        smoothing_node.synchronize = True
        join_smoothing_node = npe.JoinNode(
            interface=nutil.Function(
                input_names=['smoothed_normalized_files'],
                output_names=['smoothed_normalized_files'],
                function=join_smoothed_files),
            joinsource='smoothing_node',
            joinfield='smoothed_normalized_files',
            name='join_smoothing_node')
        self.connect([(dartel2mni_node, smoothing_node,
                       [('normalized_files', 'in_files')]),
                      (smoothing_node, join_smoothing_node,
                       [('smoothed_files', 'smoothed_normalized_files')]),
                      (join_smoothing_node, self.output_node,
                       [('smoothed_normalized_files',
                         'smoothed_normalized_files')])])
    else:
        self.output_node.inputs.smoothed_normalized_files = []

    # Atlas Statistics
    # ================
    atlas_stats_node = npe.MapNode(nutil.Function(
        input_names=['in_image', 'in_atlas_list'],
        output_names=['atlas_statistics'],
        function=atlas_statistics),
        name='atlas_stats_node',
        iterfield=['in_image'])
    atlas_stats_node.inputs.in_atlas_list = self.parameters['atlas_list']

    # Connection
    # ==========
    self.connect([(self.input_node, unzip_tissues_node,
                   [('apply_to_files', 'in_file')]),
                  (self.input_node, unzip_flowfields_node,
                   [('flowfield_files', 'in_file')]),
                  (self.input_node, unzip_template_node,
                   [('template_file', 'in_file')]),
                  (unzip_tissues_node, dartel2mni_node,
                   [('out_file', 'apply_to_files')]),
                  (unzip_flowfields_node, dartel2mni_node,
                   [(('out_file', prepare_flowfields,
                      self.parameters['tissues']), 'flowfield_files')]),
                  (unzip_template_node, dartel2mni_node,
                   [('out_file', 'template_file')]),
                  (dartel2mni_node, self.output_node,
                   [('normalized_files', 'normalized_files')]),
                  (dartel2mni_node, atlas_stats_node,
                   [(('normalized_files', select_gm_images), 'in_image')]),
                  (atlas_stats_node, self.output_node,
                   [('atlas_statistics', 'atlas_statistics')])])