def get_resamplers(static_image, dynamic_array, dvf_array, output_path):
    resamplers = []

    static_image_path = "{0}/temp_static.nii".format(output_path)
    dynamic_array_path = "{0}/temp_dynamic.nii".format(output_path)
    dvf_array_path = "{0}/temp_dvf.nii".format(output_path)

    for j in range(len(dynamic_array)):
        resampler = reg.NiftyResample()

        static_image.write(static_image_path)
        dynamic_array[j].write(dynamic_array_path)
        dvf_array[j].write(dvf_array_path)

        temp_static = reg.NiftiImageData(static_image_path)
        temp_dynamic = reg.NiftiImageData(dynamic_array_path)
        temp_dvf = reg.NiftiImageData3DDeformation(dvf_array_path)

        resampler.set_reference_image(temp_static)
        resampler.set_floating_image(temp_dynamic)
        resampler.add_transformation(temp_dvf)
        resampler.set_interpolation_type_to_linear()

        resamplers.append(resampler)

    return resamplers
def objective_function(optimise_array, static_image, dynamic_path, dvf_path,
                       weighted_normalise, dynamic_data_magnitude):
    static_image.fill(
        np.reshape(optimise_array,
                   static_image.as_array().astype(np.double).shape))

    objective_value = 0.0

    for i in range(len(dynamic_path)):
        dynamic_image = reg.NiftiImageData(dynamic_path[i])
        dvf_image = reg.NiftiImageData3DDeformation(dvf_path[i])

        resampler = reg.NiftyResample()

        resampler.set_reference_image(static_image)
        resampler.set_floating_image(dynamic_image)
        resampler.add_transformation(dvf_image)
        resampler.set_interpolation_type_to_cubic_spline()

        objective_value = objective_value + (np.nansum(
            np.square(dynamic_image.as_array().astype(np.double) -
                      ((np.nansum(dynamic_image.as_array().astype(np.double),
                                  dtype=np.double) / dynamic_data_magnitude) *
                       warp_image_forward(resampler, static_image)),
                      dtype=np.double),
            dtype=np.double) * weighted_normalise[i])

    print("Objective function value: {0}".format(str(objective_value)))

    return objective_value
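# warp_image_forward() and warp_image_adjoint() are called throughout this section
# but are not defined in it. The minimal sketch below shows what they are assumed
# to do, returning plain NumPy arrays; it assumes SIRF's reg.NiftyResample exposes
# forward() and adjoint(), so check it against the project's actual helper module.


def warp_image_forward(resampler, image):
    # Resample the given image through the resampler's transformation(s)
    # onto the reference grid and return the result as a double array.
    return resampler.forward(image).as_array().astype(np.double)


def warp_image_adjoint(resampler, image):
    # Apply the adjoint of the resampling operator to the given image
    # and return the result as a double array.
    return resampler.adjoint(image).as_array().astype(np.double)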
def get_dynamic_data_magnitude(dynamic_path):
    dynamic_data_magnitude = 0.0

    for i in range(len(dynamic_path)):
        dynamic_data_magnitude = dynamic_data_magnitude + np.nansum(
            reg.NiftiImageData(dynamic_path[i]).as_array().astype(np.double),
            dtype=np.double)

    return dynamic_data_magnitude
def op_test(static_image, output_path):
    static_image_path = "{0}/temp_static.nii".format(output_path)

    static_image.write(static_image_path)
    temp_static = reg.NiftiImageData(static_image_path)

    temp_at = reg.AffineTransformation()
    temp_at_array = temp_at.as_array()

    temp_at_array[0][0] = 1.25
    temp_at_array[1][1] = 1.25
    temp_at_array[2][2] = 1.25
    temp_at_array[3][3] = 1.25

    temp_at = reg.AffineTransformation(temp_at_array)

    resampler = reg.NiftyResample()

    resampler.set_reference_image(temp_static)
    resampler.set_floating_image(temp_static)
    resampler.add_transformation(temp_at)
    resampler.set_interpolation_type_to_linear()

    warp = warp_image_forward(resampler, temp_static)

    warped_image = static_image.clone()
    warped_image.fill(warp)
    warped_image.write("{0}/op_test_warp_forward.nii".format(output_path))

    difference = temp_static.as_array().astype(np.double) - warp

    difference_image = temp_static.clone()
    difference_image.fill(difference)
    difference_image.write(
        "{0}/op_test_warp_forward_difference.nii".format(output_path))

    warp = warp_image_adjoint(resampler, temp_static)

    warped_image = temp_static.clone()
    warped_image.fill(warp)
    warped_image.write("{0}/op_test_warp_adjoint.nii".format(output_path))

    difference = temp_static.as_array().astype(np.double) - warp

    difference_image = temp_static.clone()
    difference_image.fill(difference)
    difference_image.write(
        "{0}/warp_adjoint_difference.nii".format(output_path))

    return True
def gradient_function(optimise_array, resampler, dynamic_images, static_image,
                      output_path):
    static_image.fill(
        np.reshape(optimise_array,
                   static_image.as_array().astype(np.double).shape))

    gradient_value = static_image.clone()
    gradient_value.fill(0.0)

    adjoint_image = static_image.clone()

    for i in range(len(dynamic_images)):
        static_image.write("{0}/temp_static.nii".format(output_path))
        dynamic_images[i].write("{0}/temp_dynamic.nii".format(output_path))

        temp_static = reg.NiftiImageData(
            "{0}/temp_static.nii".format(output_path))
        temp_dynamic = reg.NiftiImageData(
            "{0}/temp_dynamic.nii".format(output_path))

        adjoint_image.fill(
            warp_image_forward(resampler[i], temp_static) -
            temp_dynamic.as_array().astype(np.double))

        gradient_value.fill(gradient_value.as_array().astype(np.double) +
                            warp_image_adjoint(resampler[i], adjoint_image))

    gradient_value.write("{0}/gradient.nii".format(output_path))

    print(
        "Max gradient value: {0}, Min gradient value: {1}, Mean gradient value: {2}, Gradient norm: {3}"
        .format(
            str(gradient_value.as_array().astype(np.double).max()),
            str(gradient_value.as_array().astype(np.double).min()),
            str(
                np.nanmean(gradient_value.as_array().astype(np.double),
                           dtype=np.double)),
            str(np.linalg.norm(gradient_value.as_array().astype(np.double)))))

    return np.ravel(gradient_value.as_array().astype(np.double)).astype(
        np.double)
def output_input(static_image, dynamic_path, dvf_path, output_path):
    static_image.write("{0}/static_image.nii".format(output_path))

    for i in range(len(dynamic_path)):
        dynamic_image = reg.NiftiImageData(dynamic_path[i])
        dvf_image = reg.NiftiImageData3DDeformation(dvf_path[i])

        dynamic_image.write("{0}/dynamic_image_{1}.nii".format(
            output_path, str(i)))
        dvf_image.write("{0}/dvf_image_{1}.nii".format(output_path, str(i)))

    return True
def test_for_adj(static_image, dvf_array, output_path):
    static_image_path = "{0}/temp_static.nii".format(output_path)
    dvf_array_path = "{0}/temp_dvf.nii".format(output_path)

    for i in range(len(dvf_array)):
        static_image.write(static_image_path)
        dvf_array[i].write(dvf_array_path)

        temp_static = reg.NiftiImageData(static_image_path)
        temp_dvf = reg.NiftiImageData3DDeformation(dvf_array_path)

        resampler = reg.NiftyResample()

        resampler.set_reference_image(temp_static)
        resampler.set_floating_image(temp_static)
        resampler.add_transformation(temp_dvf)
        resampler.set_interpolation_type_to_linear()

        warp = warp_image_forward(resampler, temp_static)

        warped_image = static_image.clone()
        warped_image.fill(warp)
        warped_image.write("{0}/warp_forward_{1}.nii".format(
            output_path, str(i)))

        difference = temp_static.as_array().astype(np.double) - warp

        difference_image = temp_static.clone()
        difference_image.fill(difference)
        difference_image.write("{0}/warp_forward_difference_{1}.nii".format(
            output_path, str(i)))

        warp = warp_image_adjoint(resampler, temp_static)

        warped_image = temp_static.clone()
        warped_image.fill(warp)
        warped_image.write("{0}/warp_adjoint_{1}.nii".format(
            output_path, str(i)))

        difference = temp_static.as_array().astype(np.double) - warp

        difference_image = temp_static.clone()
        difference_image.fill(difference)
        difference_image.write("{0}/warp_adjoint_difference_{1}.nii".format(
            output_path, str(i)))

    return True
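# Hypothetical addition (not in the original source): a numerical dot-product check,
# <A x, y> ~= <x, A' y>, to complement the difference images written by test_for_adj().
# It reuses the warp_image_forward / warp_image_adjoint helpers assumed earlier and
# expects a resampler whose reference and floating images share a grid, as above.
def dot_product_adjoint_test(resampler, image, tolerance=1e-3):
    x = image.as_array().astype(np.double)
    y = np.random.uniform(size=x.shape)

    y_image = image.clone()
    y_image.fill(y)

    forward_x = warp_image_forward(resampler, image)    # A x
    adjoint_y = warp_image_adjoint(resampler, y_image)  # A' y

    lhs = np.nansum(forward_x * y, dtype=np.double)     # <A x, y>
    rhs = np.nansum(x * adjoint_y, dtype=np.double)     # <x, A' y>

    print("<Ax, y> = {0}, <x, A'y> = {1}".format(str(lhs), str(rhs)))

    return abs(lhs - rhs) <= tolerance * max(abs(lhs), abs(rhs), 1.0)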
def gradient_function(optimise_array, static_image, dynamic_path, dvf_path,
                      weighted_normalise, dynamic_data_magnitude):
    static_image.fill(
        np.reshape(optimise_array,
                   static_image.as_array().astype(np.double).shape))

    gradient_value = static_image.clone()
    gradient_value.fill(0.0)

    adjoint_image = static_image.clone()

    for i in range(len(dynamic_path)):
        dynamic_image = reg.NiftiImageData(dynamic_path[i])
        dvf_image = reg.NiftiImageData3DDeformation(dvf_path[i])

        resampler = reg.NiftyResample()

        resampler.set_reference_image(static_image)
        resampler.set_floating_image(dynamic_image)
        resampler.add_transformation(dvf_image)
        resampler.set_interpolation_type_to_cubic_spline()

        adjoint_image.fill(
            ((np.nansum(dynamic_image.as_array().astype(np.double),
                        dtype=np.double) / dynamic_data_magnitude) *
             warp_image_forward(resampler, static_image)) -
            dynamic_image.as_array().astype(np.double))

        gradient_value.fill(gradient_value.as_array().astype(np.double) +
                            (warp_image_adjoint(resampler, adjoint_image) *
                             weighted_normalise[i]))

    # gradient_value.write("{0}/gradient.nii".format(output_path))

    print(
        "Max gradient value: {0}, Mean gradient value: {1}, Gradient norm: {2}"
        .format(
            str(np.amax(gradient_value.as_array().astype(np.double))),
            str(
                np.nanmean(
                    np.abs(gradient_value.as_array().astype(np.double),
                           dtype=np.double))),
            str(np.linalg.norm(gradient_value.as_array().astype(np.double)))))

    return np.ravel(gradient_value.as_array().astype(np.double)).astype(
        np.double)
def back_warp(static_path, dvf_path, output_path):
    if not os.path.exists(output_path):
        os.makedirs(output_path, mode=0o770)

    for i in range(len(dvf_path)):
        static_image = reg.NiftiImageData(static_path)
        dvf_image = reg.NiftiImageData3DDeformation(dvf_path[i])

        resampler = reg.NiftyResample()

        resampler.set_reference_image(static_image)
        resampler.set_floating_image(static_image)
        resampler.add_transformation(dvf_image)
        resampler.set_interpolation_type_to_cubic_spline()

        warped_static_image = warp_image_forward(resampler, static_image)

        static_image.fill(warped_static_image)
        static_image.write("{0}/back_warped_{1}.nii".format(
            output_path, str(i)))

    return True
def optimise(input_data_path, data_split, weighted_normalise_path,
             input_dvf_path, dvf_split, output_path, do_op_test, do_reg,
             do_test_for_adj, do_blind_start, do_opt, do_back_warp, prefix):
    if not os.path.exists(output_path):
        os.makedirs(output_path, mode=0o770)

    new_dvf_path = "{0}/new_dvfs/".format(output_path)

    if not os.path.exists(new_dvf_path):
        os.makedirs(new_dvf_path, mode=0o770)

    # get static and dynamic paths
    dynamic_path = get_data_path(input_data_path, data_split)
    dynamic_data_magnitude = get_dynamic_data_magnitude(dynamic_path)

    static_path = "{0}/static_image.nii".format(output_path)

    # load static object for dvf registration
    static_image = reg.NiftiImageData(dynamic_path[0])
    static_image.write(static_path)

    if do_op_test:
        op_test(static_image, output_path)

    dvf_path = None

    if do_test_for_adj or do_opt or do_back_warp:
        # if do reg then calc dvf, if not load
        if do_reg:
            dvf_path = register_data(static_path, dynamic_path, output_path)
        else:
            dvf_path = get_dvf_path(input_dvf_path, dvf_split)

        # fix dvf header and load dvf objects
        dvf_path = edit_header(dvf_path, new_dvf_path)

    # sum the dynamic data into the static data
    for i in range(1, len(dynamic_path)):
        static_image.fill(
            static_image.as_array().astype(np.double) +
            reg.NiftiImageData(dynamic_path[i]).as_array().astype(np.double))

    static_image.write(static_path)

    # test for adj
    if do_test_for_adj:
        test_for_adj(static_image, dvf_path, output_path)
        output_input(static_image, dynamic_path, dvf_path, output_path)

    # initial static image
    initial_static_image = static_image.clone()

    if do_blind_start:
        initial_static_image.fill(1.0)

    initial_static_image.write("{0}/initial_static_image_{1}.nii".format(
        output_path, prefix))

    # array to optimise
    optimise_array = initial_static_image.as_array().astype(np.double)

    # array bounds
    bounds = [(0.01, 10.0) for _ in range(len(np.ravel(optimise_array)))]

    tol = 9e-12

    if do_opt:
        weighted_normalise = parser.parser(weighted_normalise_path,
                                           "weighted_normalise:=")

        if weighted_normalise is None:
            weighted_normalise = parser.parser(weighted_normalise_path,
                                               "normalise_array:=")

        for i in range(len(weighted_normalise)):
            weighted_normalise[i] = float(weighted_normalise[i])

        # optimise
        optimise_array = np.reshape(
            scipy.optimize.minimize(objective_function,
                                    np.ravel(optimise_array),
                                    args=(static_image, dynamic_path,
                                          dvf_path, weighted_normalise,
                                          dynamic_data_magnitude),
                                    method="L-BFGS-B",
                                    jac=gradient_function,
                                    bounds=bounds,
                                    tol=tol,
                                    options={"disp": True}).x,
            optimise_array.shape)

        # output
        static_image.fill(optimise_array)
        static_image.write("{0}/optimiser_output_{1}.nii".format(
            output_path, prefix))

        difference = static_image.as_array().astype(
            np.double) - initial_static_image.as_array().astype(np.double)

        difference_image = initial_static_image.clone()
        difference_image.fill(difference)
        difference_image.write(
            "{0}/optimiser_output_difference_{1}.nii".format(
                output_path, prefix))

    if do_back_warp:
        back_warp(static_path, dvf_path, "{0}/back_warp/".format(output_path))

    multiple = 1.0

    # copy so that the NaN masking below does not also modify optimise_array
    nan_optimise_array = optimise_array.copy()
    nan_optimise_array[nan_optimise_array < 0.01] = np.nan
    nan_optimise_array = nan_optimise_array - np.nanmin(nan_optimise_array)

    # array bounds
    bounds = [(0.01, 10.0)]

    # optimise
    multiple = scipy.optimize.minimize(suv_objective_function,
                                       np.asarray(multiple),
                                       args=(nan_optimise_array,),
                                       method="L-BFGS-B",
                                       tol=tol,
                                       bounds=bounds,
                                       options={"disp": True}).x[0]

    # output
    nan_optimise_array = nan_optimise_array - np.nanmin(nan_optimise_array)
    nan_optimise_array = np.nan_to_num(nan_optimise_array)
    nan_optimise_array[nan_optimise_array < 0.01] = 0.0
    nan_optimise_array = nan_optimise_array * multiple

    static_image.fill(nan_optimise_array)
    static_image.write("{0}/suv_optimiser_output_{1}.nii".format(
        output_path, prefix))

    naive_suv_optimise_array = optimise_array / 0.25

    static_image.fill(naive_suv_optimise_array)
    static_image.write("{0}/naive_suv_optimiser_output_{1}.nii".format(
        output_path, prefix))
def main():
    # Make output folder if necessary
    if not os.path.isdir(data_path):
        os.makedirs(data_path)
    os.chdir(data_path)

    # Download the data
    print("downloading brainweb data...")
    [FDG_arr, uMap_arr, T1_arr] = download_data()

    # Get template PET image from template raw
    template_PET_raw = pet.AcquisitionData(template_PET_raw_path)
    template_PET_im = pet.ImageData(template_PET_raw)

    # Get template MR image from template raw
    template_MR_raw = mr.AcquisitionData(template_MR_raw_path)
    template_MR_raw.sort_by_time()
    template_MR_raw = mr.preprocess_acquisition_data(template_MR_raw)
    template_MR_im = simple_mr_recon(template_MR_raw)

    # Number of voxels in the (x,y) directions - nxy (dictated by the MR image)
    nxy = template_MR_im.get_geometrical_info().get_size()[0]
    if nxy != template_MR_im.get_geometrical_info().get_size()[1]:
        raise AssertionError("Expected square image in (x,y) direction")
    if template_MR_im.get_geometrical_info().get_size()[2] > 1:
        raise AssertionError("Only currently designed for 2D image")

    # Create PET image
    dim = (1, nxy, nxy)
    size = FDG_arr.shape
    z_slice = size[0] // 2
    xy_min = (size[1] - nxy) // 2
    xy_max = xy_min + nxy
    voxel_size = template_PET_im.voxel_sizes()
    template_PET_im.initialise(dim, voxel_size)

    # Reorient the template MR image with the template PET image so that it is
    # compatible with both
    template_MR_im.reorient(template_PET_im.get_geometrical_info())

    ############################################################################################
    # Crop brainweb image to right size
    ############################################################################################

    # Convert brainweb's (127,344,344) to desired size
    print("Cropping brainweb images to size...")
    [FDG, uMap, T1] = [
        crop_brainweb(template_MR_im, im_arr, z_slice, xy_min, xy_max)
        for im_arr in [FDG_arr, uMap_arr, T1_arr]
    ]

    ############################################################################################
    # Apply motion
    ############################################################################################

    print("Resampling images to different motion states...")
    FDGs = [0] * num_ms
    uMaps = [0] * num_ms
    T1s = [0] * num_ms

    for ind in range(num_ms):
        # Get TM for given motion state
        tm = get_and_save_tm(ind)

        # Get resampler
        res = get_resampler_from_tm(tm, template_MR_im)

        # Resample
        for im, modality in zip([FDG, uMap, T1], ['FDG', 'uMap', 'T1']):
            resampled = res.forward(im)
            if modality == 'FDG':
                FDGs[ind] = resampled
            elif modality == 'uMap':
                uMaps[ind] = resampled
            elif modality == 'T1':
                T1s[ind] = resampled
            else:
                raise AssertionError("Unknown modality")
            reg.NiftiImageData(resampled).write(modality + '_ms' + str(ind))

    ############################################################################################
    # MR: create k-space data for motion states
    ############################################################################################

    # Create coil sensitivity data
    print("Calculating coil sensitivity map...")
    csm = mr.CoilSensitivityData()
    csm.smoothness = 500
    csm.calculate(template_MR_raw)

    # Create interleaved sampling
    print("Creating raw k-space data for MR motion states...")
    mvec = []
    for ind in range(num_ms):
        mvec.append(np.arange(ind, template_MR_raw.number(), num_ms))

    # Go through motion states and create k-space
    for ind in range(num_ms):
        acq_ms = template_MR_raw.new_acquisition_data(empty=True)

        # Set the first two (??) acquisitions
        acq_ms.append_acquisition(template_MR_raw.acquisition(0))
        acq_ms.append_acquisition(template_MR_raw.acquisition(1))

        # Add motion resolved data
        for jnd in range(len(mvec[ind])):
            # Ensure first and last are not added twice
            if 1 < mvec[ind][jnd] < template_MR_raw.number() - 1:
                cacq = template_MR_raw.acquisition(mvec[ind][jnd])
                acq_ms.append_acquisition(cacq)

        # Set last acquisition
        acq_ms.append_acquisition(
            template_MR_raw.acquisition(template_MR_raw.number() - 1))

        # Create acquisition model
        AcqMod = mr.AcquisitionModel(acq_ms, T1s[ind])
        AcqMod.set_coil_sensitivity_maps(csm)

        # Forward project!
        acq_ms_sim = AcqMod.forward(T1s[ind])

        # Save
        print("writing: " + 'raw_T1_ms' + str(ind) + '.h5')
        acq_ms_sim.write('raw_T1_ms' + str(ind) + '.h5')

    ############################################################################################
    # PET: create sinograms
    ############################################################################################

    print("Creating sinograms for PET motion states...")
    stir_uMap = template_PET_im.clone()
    stir_FDG = template_PET_im.clone()

    for ind in range(num_ms):
        stir_uMap.fill(uMaps[ind].as_array())
        stir_FDG.fill(FDGs[ind].as_array())

        am = get_acquisition_model(stir_uMap, template_PET_raw)
        FDG_sino = am.forward(stir_FDG)
        FDG_sino = add_noise(0.25, FDG_sino)
        FDG_sino.write('raw_FDG_ms' + str(ind))
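# add_noise() is used above but not defined in this section. A minimal sketch of a
# plausible implementation is given below, assuming the first argument scales the
# mean counts before Poisson noise is drawn; the project's actual helper may differ.
def add_noise(noise_factor, sinogram):
    # Scale the noise-free sinogram, draw Poisson counts and return a new
    # AcquisitionData object containing the noisy values.
    noisy_sino = sinogram.clone()
    scaled = np.maximum(sinogram.as_array() * noise_factor, 0.0)
    noisy_sino.fill(np.random.poisson(scaled).astype(np.double))
    return noisy_sino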
def main():
    # file paths to data
    input_data_path = parser.parser(sys.argv[1], "data_path:=")
    data_split = parser.parser(sys.argv[1], "data_split:=")
    input_dvf_path = parser.parser(sys.argv[1], "dvf_path:=")
    dvf_split = parser.parser(sys.argv[1], "dvf_split:=")
    output_path = parser.parser(sys.argv[1], "output_path:=")
    do_op_test = parser.parser(sys.argv[1], "do_op_test:=")
    do_reg = parser.parser(sys.argv[1], "do_reg:=")
    do_test_for_adj = parser.parser(sys.argv[1], "do_test_for_adj:=")

    for i in range(len(input_data_path)):
        if not os.path.exists(output_path[i]):
            os.makedirs(output_path[i], mode=0o770)

        new_dvf_path = "{0}/new_dvfs/".format(output_path[i])

        if not os.path.exists(new_dvf_path):
            os.makedirs(new_dvf_path, mode=0o770)

        # get static and dynamic paths
        dynamic_path = get_data_path(input_data_path[i], data_split[i])

        # load dynamic objects
        dynamic_array = []

        for j in range(len(dynamic_path)):
            dynamic_array.append(reg.NiftiImageData(dynamic_path[j]))

        static_path = "{0}/static_path.nii".format(output_path[i])

        # load static objects
        static_image = reg.NiftiImageData(dynamic_path[0])

        for j in range(1, len(dynamic_path)):
            static_image.fill(static_image.as_array().astype(np.double) +
                              dynamic_array[j].as_array().astype(np.double))

        static_image.write(static_path)

        if bool(distutils.util.strtobool(do_op_test[i])):
            op_test(static_image, output_path[i])

        # if do reg then calc dvf, if not load
        if bool(distutils.util.strtobool(do_reg[i])):
            dvf_path = register_data(static_path, dynamic_path,
                                     output_path[i])
        else:
            dvf_path = get_dvf_path(input_dvf_path[i], dvf_split[i])

        # fix dvf header and load dvf objects
        dvf_path = edit_header(dvf_path, new_dvf_path)

        dvf_array = []

        for j in range(len(dvf_path)):
            dvf_array.append(reg.NiftiImageData3DDeformation(dvf_path[j]))

        # create object to get forward and adj
        resamplers = get_resamplers(static_image, dynamic_array, dvf_array,
                                    output_path[i])

        # test for adj
        if bool(distutils.util.strtobool(do_test_for_adj[i])):
            test_for_adj(static_image, dvf_array, output_path[i])
            output_input(static_image, dynamic_array, dvf_array,
                         output_path[i])

        # initial static image
        initial_static_image = static_image.clone()

        # array to optimise
        optimise_array = static_image.as_array().astype(np.double)

        # array bounds
        bounds = []

        for j in range(len(np.ravel(optimise_array))):
            bounds.append((-np.inf, np.inf))

        # optimise
        optimise_array = np.reshape(
            scipy.optimize.minimize(objective_function,
                                    np.ravel(optimise_array).astype(
                                        np.double),
                                    args=(resamplers, dynamic_array,
                                          static_image, output_path[i]),
                                    method="L-BFGS-B",
                                    jac=gradient_function,
                                    bounds=bounds,
                                    tol=0.0000000001,
                                    options={"disp": True}).x,
            optimise_array.shape)

        # output
        static_image.fill(optimise_array)
        static_image.write("{0}/optimiser_output_{1}.nii".format(
            output_path[i], str(i)))

        difference = static_image.as_array().astype(
            np.double) - initial_static_image.as_array().astype(np.double)

        difference_image = initial_static_image.clone()
        difference_image.fill(difference)
        difference_image.write(
            "{0}/optimiser_output_difference_{1}.nii".format(
                output_path[i], str(i)))
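# Note: the parameter file passed as sys.argv[1] is read with the project's custom
# parser module using "key:=" prefixes. Judging from the calls above, it is expected
# to supply, one entry per dataset, values for data_path:=, data_split:=, dvf_path:=,
# dvf_split:=, output_path:=, do_op_test:=, do_reg:= and do_test_for_adj:=. The exact
# file syntax is defined by parser.parser() and is not shown in this section.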
randoms_pet = Pet.AcquisitionData(path_rando + random)
print(random)

# reconstruct the data (without mu-map)
obj_fun = Pet.make_Poisson_loglikelihood(sino_pet)
acq_model.set_background_term(randoms_pet)
recon.set_objective_function(obj_fun)
initial_image = sino_pet.create_uniform_image(1.0, nxny)
image = initial_image
recon.set_up(image)
recon.set_current_estimate(image)
recon.process()

# save recon images
recon_image = Reg.NiftiImageData(recon.get_output())
recon_image.write(path_NAC + 'NAC_' + str(i))

image = recon.get_output()

# apply gaussian filter with 3mm fwhm
gaussian_filter = Pet.SeparableGaussianImageFilter()
gaussian_filter.set_fwhms((3, 3, 3))
# gaussian_filter.set_max_kernel_sizes((10, 10, 2))
gaussian_filter.set_normalise()
gaussian_filter.set_up(image)
gaussian_filter.apply(image)

# save Image as .nii
smoothed_image = Reg.NiftiImageData(image)
smoothed_image.write(path_smooth + 'smooth_' + str(i))
def callback_save(iteration, objective_value, solution):
    """Callback function to save images"""
    if (iteration + 1) % save_interval == 0:
        out = solution if not nifti else reg.NiftiImageData(solution)
        out.write(outp_file + "_iters" + str(iteration + 1))
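# Note: callback_save relies on the surrounding script defining save_interval,
# nifti and outp_file at module level before the reconstruction loop invokes it
# with (iteration, objective_value, solution).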
print('Begin resampling: {}'.format(image))

resampler_im.clear_transformations()
resampler_im.set_floating_image(flo)
resampler_im.add_transformation(tm)
new_im = resampler_im.forward(flo)
new_im.write(working_folder + '/moco/moco_' + str(num))

print('Resampling successful: {}'.format(image))

tprint('Finish Resampling')

#%% define RTA method
# define initial image (first image, first frame)
initial = Reg.NiftiImageData(working_folder + '/moco/moco_0.nii')
initial_array = initial.as_array()

# sum over all images (as array)
for image in sorted_alphanumeric(os.listdir(path_moco))[1:]:
    print(image)
    array = Reg.NiftiImageData(path_moco + image).as_array()
    initial_array += array

# create image
final_image = Reg.NiftiImageData(working_folder + '/moco/moco_0.nii')
final_image.fill(initial_array)
final_image.write('final_image_RTA.nii')

tprint('DONE!')
acq_model.set_background_term(randoms)
obj_fun.set_acquisition_model(acq_model)
recon.set_objective_function(obj_fun)
initial_image = acq_data.create_uniform_image(1.0)
image = initial_image
recon.set_up(image)
recon.set_current_estimate(image)
recon.process()

# save recon images
recon_image = recon.get_output()
recon_image.write(working_folder + '/recon/recon' + str(i))

# save Image as .nii
recon_image = Reg.NiftiImageData(recon.get_output())
recon_image.write(working_folder + '/floates/recon' + str(i))

print('Reconstruction successful: Frame {}'.format(i))

tprint('Finish Recon')

#%% create folder for motion corrected images
path_moco = working_folder + '/moco/'
if not os.path.exists(path_moco):
    os.makedirs(path_moco, mode=0o770)
    print('Create Folder: {}'.format(path_moco))

#%% convert an array to a SIRF transformation matrix and then resample the float image
for i, sino, random in zip(range(len(path_sino)),
                           sorted_alphanumeric(list_sino),
                           sorted_alphanumeric(list_rando)):
    sino_pet = Pet.AcquisitionData(path_sino + sino)
    print(sino)
    randoms_pet = Pet.AcquisitionData(path_rando + random)
    print(random)

    # reconstruct the data (without mu-map)
    obj_fun = Pet.make_Poisson_loglikelihood(sino_pet)
    acq_model.set_background_term(randoms_pet)
    recon.set_objective_function(obj_fun)
    initial_image = sino_pet.create_uniform_image(1.0)
    image = initial_image
    recon.set_up(image)
    recon.set_current_estimate(image)
    recon.process()

    # save recon images
    recon_image = recon.get_output()
    recon_image.write(path_NAC + 'NAC_' + str(i))

    # save Image as .nii
    recon_image = Reg.NiftiImageData(recon.get_output())
    recon_image.write(path_NAC + 'NAC_' + str(i))

    print('Reconstruction successful: Frame {}'.format(i))

tprint('Finish NAC Recon')
# reconstruct the data (without mu-map)
obj_fun = Pet.make_Poisson_loglikelihood(acq_data)
recon.set_objective_function(obj_fun)
initial_image = acq_data.create_uniform_image(1.0)
image = initial_image
recon.set_up(image)
recon.set_current_estimate(image)
recon.process()

# save recon images
recon_image = recon.get_output()
recon_image.write(path_NAC + '/NAC_' + str(i))

# save Image as .nii
recon_image = Reg.NiftiImageData(recon.get_output())
recon_image.write(path_NAC + '/nii/NAC_' + str(i))

print('Reconstruction successful: Frame {}'.format(i))

tprint('Finish Recon NAC')

#%% SPM registration NAC
# define reference image (first image) and float-path, NAC
ref_file = path_NAC + '/nii/' + 'NAC_0.nii'
ref = Eng_ref.ImageData(ref_file)
flo_path = path_NAC + '/nii/'

tprint('Start Reg for NAC')