def evaluate(reg):
    # Tikhonov-regularized division as the starting point
    tikhonov_kernel = kernel + 1e6
    preconditioner = np.abs(np.divide(1, tikhonov_kernel))
    preconditioner /= preconditioner.max()
    tikhonov = np.divide(complex_data, tikhonov_kernel)
    reco = np.copy(tikhonov)

    # The scales produce gradients of order 1
    ADVERSARIAL_SCALE = 96**(-0.5)
    DATA_SCALE = 1 / (10 * 96**3)
    IMAGING_SCALE = 96

    for k in range(70):
        # Decaying step size
        STEP_SIZE = 1.0 / np.sqrt(1 + k / 20)
        gradient = regularizer.evaluate(reco)
        g1 = reg * gradient * ADVERSARIAL_SCALE
        g2 = DATA_SCALE * (np.multiply(reco, tikhonov_kernel) - complex_data)
        g = g1 + g2
        reco = reco - STEP_SIZE * preconditioner * g
        # Positivity projection: clip negative values in real space
        reco = np.fft.rfftn(np.maximum(0, np.fft.irfftn(reco)))
    return l2_gt(irfft(reco))
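# Hypothetical usage sketch (not part of the original script): sweep the
# adversarial-regularization weight and keep the one with the lowest
# ground-truth L2 error returned by evaluate(). The candidate weights are
# assumptions for illustration only.
best_reg, best_err = None, np.inf
for reg in [1e-2, 1e-1, 1.0, 10.0]:
    err = evaluate(reg)
    print('reg={:.2e}  l2_gt={:.4e}'.format(reg, err))
    if err < best_err:
        best_reg, best_err = reg, err
print('best regularization weight:', best_reg)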
def get_image(noise_level, method, data_dict, eval_data):
    data_list = data_dict[noise_level][method]
    adv_path = random.choice(data_list)
    if method == 'div':
        # Results of this method are stored as RELION external-reconstruct
        # star files; load the Fourier data and weight kernel from MRC files.
        star_file = load_star(adv_path)
        with mrcfile.open(
                cleanStarPath(
                    adv_path, star_file['external_reconstruct_general']
                    ['rlnExtReconsDataReal'])) as mrc:
            data_real = mrc.data
        with mrcfile.open(
                cleanStarPath(
                    adv_path, star_file['external_reconstruct_general']
                    ['rlnExtReconsDataImag'])) as mrc:
            data_im = mrc.data
        with mrcfile.open(
                cleanStarPath(
                    adv_path, star_file['external_reconstruct_general']
                    ['rlnExtReconsWeight'])) as mrc:
            kernel = mrc.data
        # Tikhonov-regularized division in Fourier space, then back to real space
        adv = np.divide(data_real + 1j * data_im, kernel + 1e-3)
        adv = irfft(adv)
    else:
        with mrcfile.open(adv_path) as mrc:
            adv = mrc.data
    with mrcfile.open(locate_gt(adv_path, noise_level,
                                eval_data=eval_data)) as mrc:
        gt = mrc.data
    return gt, adv
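# Illustrative usage (hypothetical key values; the noise level and method keys
# are assumptions, masked_L2 is the project metric used further below):
gt, adv = get_image(noise_level='01', method='div',
                    data_dict=data_dict, eval_data=True)
print('masked L2 of reconstruction vs. ground truth:', masked_L2(adv, gt))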
# Gradient-descent refinement loop (disabled): same update as in evaluate()
# above, with ADVERSARIAL_REGULARIZATION as the regularization weight.

# write final reconstruction to file
reco_real = irfft(reco)
with mrcfile.new(target_path, overwrite=True) as mrc:
    mrc.set_data(reco_real.astype(np.float32))
    mrc.voxel_size = 1.5
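# Optional sanity check (illustrative only, not part of the original script):
# re-open the file just written and confirm shape, dtype and voxel size.
with mrcfile.open(target_path) as mrc:
    print('written volume:', mrc.data.shape, mrc.data.dtype,
          'voxel size:', mrc.voxel_size)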
def vis(data, fourier=True):
    if fourier:
        data = irfft(data, scaling=NUM_VOX**2)
    slice_n = int(data.shape[0] // 2)
    plt.imshow(IMAGING_SCALE * data.squeeze()[..., slice_n])
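# The rfft/irfft helpers used throughout are defined elsewhere in the project.
# A minimal sketch of the assumed behaviour (half-complex 3D FFT with an
# optional output scaling); the actual helpers may normalize differently:
def rfft(x):
    return np.fft.rfftn(x)

def irfft(x, scaling=1.0):
    return scaling * np.fft.irfftn(x)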
file = load_star(path)
with mrcfile.open(
        file['external_reconstruct_general']['rlnExtReconsDataReal']) as mrc:
    data_real = mrc.data.copy()
with mrcfile.open(
        file['external_reconstruct_general']['rlnExtReconsDataImag']) as mrc:
    data_im = mrc.data.copy()
with mrcfile.open(
        file['external_reconstruct_general']['rlnExtReconsWeight']) as mrc:
    kernel = mrc.data.copy()

complex_data = data_real + 1j * data_im

# Rescale data and kernel so the real-space mean of the data is 1
complex_data_norm = np.mean(irfft(complex_data, scaling=NUM_VOX**2))
complex_data /= complex_data_norm
kernel /= complex_data_norm

tikhonov_kernel = kernel + TIKHONOV_REGULARIZATION
# Diagnostic output of kernel magnitudes
print(np.max(np.abs(kernel)), np.min(np.abs(kernel)))
print(np.max(np.abs(tikhonov_kernel)), np.min(np.abs(tikhonov_kernel)))
print(np.mean(np.abs(tikhonov_kernel)), np.mean(np.abs(kernel)))

tk_ini = np.divide(complex_data, tikhonov_kernel)
# Positivity projection of the Tikhonov initialization
tk_pos = np.fft.rfftn(np.maximum(0, np.fft.irfftn(tk_ini)))
# Much weaker regularization for the nearly unregularized initialization
unreg_ini = np.divide(complex_data, kernel + TIKHONOV_REGULARIZATION / 100.0)
with mrcfile.open(
        file['external_reconstruct_general']['rlnExtReconsDataReal']) as mrc:
    data_real = mrc.data.copy()
with mrcfile.open(
        file['external_reconstruct_general']['rlnExtReconsDataImag']) as mrc:
    data_im = mrc.data.copy()
with mrcfile.open(
        file['external_reconstruct_general']['rlnExtReconsWeight']) as mrc:
    kernel = mrc.data.copy()
target_path = file['external_reconstruct_general']['rlnExtReconsResult']

regularizer = AdversarialRegulariser(SAVES_PATH)

complex_data = data_real + 1j * data_im
complex_data_norm = np.mean(irfft(complex_data, scaling=NUM_VOX**2))
complex_data /= complex_data_norm
kernel /= complex_data_norm

tikhonov_kernel = kernel + TIKHONOV_REGULARIZATION
# precond = np.abs(np.divide(1, tikhonov_kernel))
# precond /= precond.max()
precond = 1
tikhonov = np.divide(complex_data, tikhonov_kernel)
reco = np.copy(tikhonov)

for k in range(150):
    STEP_SIZE = STEP_SIZE_NOMINAL / np.sqrt(1 + k / 20)

    ###############
    # DOWNSAMPLING
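    # NOTE: the remainder of the loop body is not shown in this snippet. Based
    # on evaluate() above, a plausible (assumed) update step would be:
    #   gradient = regularizer.evaluate(reco)
    #   g1 = ADVERSARIAL_REGULARIZATION * gradient * ADVERSARIAL_SCALE
    #   g2 = DATA_SCALE * (np.multiply(reco, tikhonov_kernel) - complex_data)
    #   reco = reco - STEP_SIZE * precond * (g1 + g2)
    #   reco = np.fft.rfftn(np.maximum(0, np.fft.irfftn(reco)))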
def vis(data, fourier=True, SCALE=100):
    if fourier:
        data = irfft(data)
    plt.imshow(SCALE * data.squeeze()[..., 45])
if INI_POINT == 'tikhonov':  # assumed branch name; the snippet starts mid-if
    init = np.divide(complex_data, tikhonov_kernel)
elif INI_POINT == 'classical':
    init = classical_reco.copy()
    init = rfft(init)

if POSITIVITY:
    # Positivity projection of the initialization in real space
    init = np.fft.rfftn(np.maximum(0, np.fft.irfftn(init)))
reco = init.copy()

print('####################')
print(PDB_ID, NOISE_LEVEL, IT, INI_POINT, AR_REG_TYPE, POSITIVITY,
      NUM_GRAD_STEPS, STEP_SIZE_NOMINAL)
print('####################')

# if EVAL_METRIC == 'masked_FSC':
#     print('INIT FSC 0.5 crossing: ',
#           fscPointFiveCrossing(irfft(init), gt_path))
if EVAL_METRIC == 'masked_L2':
    print('INIT L2 (masked): ', masked_L2(irfft(init), gt))
elif EVAL_METRIC == 'L2':
    print('INIT L2: ', L2(irfft(init), gt))
elif EVAL_METRIC == 'L2_and_SSIM':
    print('INIT L2: ', L2(irfft(init), gt), '. INIT SSIM: ',
          ssim(irfft(init), gt, win_size=WIN_SIZE))

if PLOT:
    plt.figure(0, figsize=(10, 3))
    plt.subplot(121)
    vis(gt, fourier=False)
    plt.colorbar()
    plt.subplot(122)
    vis(init)
    plt.colorbar()
    plt.show()
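# L2 and masked_L2 are defined elsewhere in the project; a minimal sketch of
# what they are assumed to compute (the actual masking strategy may differ,
# e.g. a solvent mask loaded from file):
def L2(x, y):
    return np.sqrt(np.sum((x - y) ** 2))

def masked_L2(x, y, threshold=1e-3):
    mask = np.abs(y) > threshold  # assumption: mask from ground-truth support
    return np.sqrt(np.sum(((x - y) * mask) ** 2))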
def vis(data, fourier=True):
    if fourier:
        data = irfft(data)
    plt.imshow(data.squeeze()[..., 45])