def test_Normalize12_inputs():
    """Verify the metadata declared on every Normalize12 input trait."""
    expected = {
        "affine_regularization_type": {"field": "eoptions.affreg"},
        "apply_to_files": {"copyfile": True, "field": "subj.resample"},
        "bias_fwhm": {"field": "eoptions.biasfwhm"},
        "bias_regularization": {"field": "eoptions.biasreg"},
        "deformation_file": {
            "copyfile": False,
            "field": "subj.def",
            "mandatory": True,
            "xor": ["image_to_align", "tpm"],
        },
        "ignore_exception": {"nohash": True, "usedefault": True},
        "image_to_align": {
            "copyfile": True,
            "field": "subj.vol",
            "mandatory": True,
            "xor": ["deformation_file"],
        },
        "jobtype": {"usedefault": True},
        "matlab_cmd": {},
        "mfile": {"usedefault": True},
        "paths": {},
        "sampling_distance": {"field": "eoptions.samp"},
        "smoothness": {"field": "eoptions.fwhm"},
        "tpm": {
            "copyfile": False,
            "field": "eoptions.tpm",
            "mandatory": False,
            "xor": ["deformation_file"],
        },
        "use_mcr": {},
        "use_v8struct": {"min_ver": "8", "usedefault": True},
        "warping_regularization": {"field": "eoptions.reg"},
        "write_bounding_box": {"field": "woptions.bb"},
        "write_interp": {"field": "woptions.interp"},
        "write_voxel_sizes": {"field": "woptions.vox"},
    }
    traits = Normalize12.input_spec().traits()
    # Yield one assertion per (trait, metadata key) pair so nose reports
    # each mismatch individually.
    for trait_name, meta in expected.items():
        for meta_key, meta_value in meta.items():
            yield assert_equal, getattr(traits[trait_name], meta_key), meta_value
def test_Normalize12_outputs():
    """Verify the metadata declared on every Normalize12 output trait."""
    expected = {
        "deformation_field": {},
        "normalized_files": {},
        "normalized_image": {},
    }
    traits = Normalize12.output_spec().traits()
    # One yielded assertion per (trait, metadata key) pair.
    for trait_name, meta in expected.items():
        for meta_key, meta_value in meta.items():
            yield assert_equal, getattr(traits[trait_name], meta_key), meta_value
def send_atlases_to_single_subject_space(self, original_atlases):
    """Send atlas(es) into single-subject space using previously calculated
    SPM-like inverse deformation fields ("iy_*" files in ``self.volume_dir``).

    Parameters
    ----------
    original_atlases : list
        Absolute path(s) of the NIfTI file(s) of the atlas(es) to be
        registered.

    Notes
    -----
    For each (atlas, deformation field) pair this writes one NIfTI file named
    ``<atlas>_<subject>.nii`` into ``self.atlas_dir``, resampled onto the
    subject's T1 grid with nearest-neighbour interpolation (labels must not
    be blended).  Subjects whose T1 cannot be identified unambiguously are
    skipped.
    """
    nrm = Normalize12()
    nrm.inputs.apply_to_files = original_atlases
    # Sort for a deterministic subject order.
    def_fields = sorted(glob("{}/iy*".format(self.volume_dir)))
    t1_dir = str(Path(self.volume_dir).parents[0])
    for n, def_field in enumerate(def_fields):
        # Deformation files are named "iy_<subject>.nii".
        subject_name = Path(def_field).stem.split("iy_")[1]
        t1_file = check_for_multiple_match_ask_input(
            "{}/*{}*".format(t1_dir, subject_name))
        if t1_file is None:
            # Ambiguous/missing T1 for this subject: skip it.
            continue
        print("working on subject {}, {}% treated".format(
            subject_name, round(((n + 1) / len(def_fields)) * 100, 2)))
        nrm.inputs.deformation_file = def_field
        nrm.inputs.jobtype = "write"
        nrm.inputs.write_interp = 0  # nearest neighbour: preserve label values
        nrm.run()
        for atlas in original_atlases:
            atlas_path = Path(atlas)
            # BUG FIX: the previous code called stem.split("[.]"), which
            # splits on the literal three-character string "[.]" (a regex
            # class pasted into str.split) and so never did anything;
            # Path.stem already drops the last extension.
            atlas_name = atlas_path.stem
            # BUG FIX: '/'.join(parts[:-1]) doubled the leading slash on
            # absolute POSIX paths; Path.parent is the correct spelling.
            atlas_dir = str(atlas_path.parent)
            # SPM writes the warped image next to the atlas with a "w" prefix.
            warped = load_img("{}/w{}.nii".format(atlas_dir, atlas_name))
            res_atlas = resample_to_img(
                warped, load_img(t1_file), interpolation="nearest")
            res_atlas.to_filename("{}/{}_{}.nii".format(
                self.atlas_dir, atlas_name, subject_name))
def test_Normalize12_outputs():
    """Check expected metadata on each Normalize12 output trait."""
    output_map = {
        "deformation_field": {},
        "normalized_files": {},
        "normalized_image": {},
    }
    spec = Normalize12.output_spec()
    all_traits = spec.traits()
    for key in output_map:
        for metakey, value in output_map[key].items():
            yield assert_equal, getattr(all_traits[key], metakey), value
def test_Normalize12_inputs():
    """Check expected metadata on each Normalize12 input trait."""
    input_map = {
        'affine_regularization_type': {'field': 'eoptions.affreg'},
        'apply_to_files': {'copyfile': True, 'field': 'subj.resample'},
        'bias_fwhm': {'field': 'eoptions.biasfwhm'},
        'bias_regularization': {'field': 'eoptions.biasreg'},
        'deformation_file': {
            'copyfile': False,
            'field': 'subj.def',
            'mandatory': True,
            'xor': ['image_to_align', 'tpm'],
        },
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'image_to_align': {
            'copyfile': True,
            'field': 'subj.vol',
            'mandatory': True,
            'xor': ['deformation_file'],
        },
        'jobtype': {'usedefault': True},
        'matlab_cmd': {},
        'mfile': {'usedefault': True},
        'paths': {},
        'sampling_distance': {'field': 'eoptions.samp'},
        'smoothness': {'field': 'eoptions.fwhm'},
        'tpm': {
            'copyfile': False,
            'field': 'eoptions.tpm',
            'mandatory': False,
            'xor': ['deformation_file'],
        },
        'use_mcr': {},
        'use_v8struct': {'min_ver': '8', 'usedefault': True},
        'warping_regularization': {'field': 'eoptions.reg'},
        'write_bounding_box': {'field': 'woptions.bb'},
        'write_interp': {'field': 'woptions.interp'},
        'write_voxel_sizes': {'field': 'woptions.vox'},
    }
    spec = Normalize12.input_spec()
    all_traits = spec.traits()
    for key in input_map:
        for metakey, value in input_map[key].items():
            yield assert_equal, getattr(all_traits[key], metakey), value
def send_voxel_to_voxel_to_single_subject_space(self):
    """Warp the first-component BETA map of every voxel-to-voxel analysis
    back into each subject's native space.

    For every analysis directory in ``self.voxel_to_voxel_dirs``, pairs each
    ``BETA_*_Component001.nii`` file with a subject name and an inverse
    deformation field, applies SPM's Normalize12 in 'write' mode, and renames
    the resulting "w"-prefixed image to
    ``<subject>_<analysis>_single_subject.nii`` in the same directory.
    """
    for analysis in self.voxel_to_voxel_dirs:
        analysis_name = Path(analysis).stem
        # BUG FIX: glob() returns files in arbitrary, filesystem-dependent
        # order; sort so the zip below pairs each file with the right
        # subject deterministically.
        # NOTE(review): this assumes self.subjects_name and
        # self.inverse_warp are ordered consistently with the sorted BETA
        # filenames — confirm against how those attributes are built.
        files_to_treat = sorted(
            glob("{}/BETA_*_Component001.nii".format(analysis)))
        for name, img, iy in zip(self.subjects_name, files_to_treat,
                                 self.inverse_warp):
            print("Treating analysis {} of subject {}".format(
                analysis_name, name))
            nrml = Normalize12()
            nrml.inputs.jobtype = "write"
            nrml.inputs.deformation_file = iy
            nrml.inputs.apply_to_files = img
            nrml.run()
            # SPM writes the warped image next to the input with a "w"
            # prefix; rename it to a subject/analysis-specific filename.
            warped = "{}/w{}.nii".format(Path(img).parents[0], Path(img).stem)
            target = "{}/{}_{}_single_subject.nii".format(
                Path(img).parents[0], name, analysis_name)
            move(warped, target)
def genSpmNormalizeDwiWF(name='spmNormalizeDwi', spm_standalone=None, mcr=None):
    """Build a nipype workflow that coregisters DWI-derived images to a
    reference T1 and warps them (plus the b0) to standard space with SPM12.

    Parameters
    ----------
    name : str
        Name of the returned workflow.
    spm_standalone : str
        Path to the SPM standalone run script.
    mcr : str
        Path to the MATLAB Compiler Runtime.

    Returns
    -------
    nipype Workflow with inputs ``ref_T1``, ``forward_deformation_field``,
    ``ref_dwi``, ``apply_to`` and outputs ``normalized_files``,
    ``normalized_b0``.

    Notes
    -----
    ``spm_standalone`` and ``mcr`` are effectively required: leaving them at
    their ``None`` defaults makes ``' '.join`` raise a TypeError below.
    """
    # Setup for SPM standalone
    matlab_cmd = ' '.join([spm_standalone, mcr, 'batch', 'script'])
    spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)

    def _makeWarpNode(node_name):
        """One Normalize12 node in 'write' mode: standard SPM bounding box,
        1 mm isotropic voxels, 3rd-degree B-spline interpolation."""
        node = Node(Normalize12(), name=node_name)
        node.inputs.ignore_exception = False
        node.inputs.jobtype = 'write'
        node.inputs.use_v8struct = True
        node.inputs.write_bounding_box = [[-90.0, -126.0, -72.0],
                                          [90.0, 90.0, 108.0]]
        node.inputs.write_interp = 3
        node.inputs.use_mcr = True
        node.inputs.mfile = False
        node.inputs.write_voxel_sizes = [1.0, 1.0, 1.0]
        return node

    # Generate WF
    wf = Workflow(name=name)

    # InputNode
    inputNode = Node(IdentityInterface(
        fields=['ref_T1', 'forward_deformation_field', 'ref_dwi',
                'apply_to']),
        name='inputNode')

    # spm Coregister: rigid alignment of the DWI (and attached files) onto
    # the reference T1, estimate-only so headers are updated in place.
    coreg = Node(Coregister(), name='coreg')
    coreg.inputs.use_mcr = True
    coreg.inputs.mfile = False
    coreg.inputs.cost_function = 'nmi'
    coreg.inputs.jobtype = 'estimate'
    wf.connect(inputNode, "ref_T1", coreg, "target")
    wf.connect(inputNode, "ref_dwi", coreg, "source")
    wf.connect(inputNode, "apply_to", coreg, "apply_to_files")

    # spm Normalize12 — the two warp nodes share all their settings, so they
    # are built by _makeWarpNode instead of duplicating nine assignments.
    # Node: spmWarpDtiMetricsToStandard
    spmWarpToStd111 = _makeWarpNode("spmWarpToStd111")
    wf.connect(inputNode, "forward_deformation_field",
               spmWarpToStd111, "deformation_file")
    wf.connect(coreg, "coregistered_files",
               spmWarpToStd111, "apply_to_files")

    # Node: spmWarpB0ToStandard
    spmWarpb0ToStd111 = _makeWarpNode("spmWarpb0ToStd111")
    wf.connect(inputNode, "forward_deformation_field",
               spmWarpb0ToStd111, "deformation_file")
    wf.connect(coreg, "coregistered_source",
               spmWarpb0ToStd111, "apply_to_files")

    # OutputNode
    outputNode = Node(
        IdentityInterface(fields=["normalized_files", "normalized_b0"]),
        name="outputNode")
    wf.connect(spmWarpToStd111, "normalized_files",
               outputNode, "normalized_files")
    wf.connect(spmWarpb0ToStd111, "normalized_files",
               outputNode, "normalized_b0")

    return wf