def _load_ds_if_none(self):
    """Lazily load any volume that was not handed to the dataset."""
    hilbert_volume = self.hilbert_volume
    if hilbert_volume is None:
        # self.filename ends in '.nii.gz'; strip it and append the plane suffix
        hilbert_volume = load_nib(
            pjoin(
                DATA_DIRS['views_36'],
                f'{self.filename[:-7]}_{self.plane.name.lower()}.nii.gz',
            ))
        hilbert_volume = MyDataset._reduce_zdim(hilbert_volume)

    sparse_volume = self.sparse_volume
    if sparse_volume is None:
        # FDK reconstruction, with a trailing channel axis
        sparse_volume = load_nib(
            pjoin(
                DATA_DIRS['fdk_360'],
                self.filename,
            ))[..., None]
        sparse_volume = MyDataset._reduce_zdim(sparse_volume)

    image_volume = self.image_volume
    if image_volume is None:
        # ground-truth image volume, also with a trailing channel axis
        image_volume = load_nib(
            pjoin(
                DATA_DIRS['datasets'],
                self.filename,
            ))[..., None]
        image_volume = MyDataset._reduce_zdim(image_volume)

    return hilbert_volume, sparse_volume, image_volume
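# The loaders in this section rely on two helpers that are not shown here:
# load_nib, which reads a NIfTI volume into a numpy array, and
# MyDataset._reduce_zdim, which trims the volume along z. Below is a minimal
# sketch, assuming load_nib wraps nibabel and _reduce_zdim keeps a central
# block of slices; the *_sketch names, the z_keep default, and the crop
# strategy are illustrative assumptions, not the actual implementation.
import nibabel as nib
import numpy as np


def load_nib_sketch(path):
    # read the NIfTI file and return its voxel data as float32
    return nib.load(path).get_fdata(dtype=np.float32)


def reduce_zdim_sketch(volume, z_keep=256):
    # crop the third (z) axis to a centred block of z_keep slices
    z_start = (volume.shape[2] - z_keep) // 2
    return volume[:, :, z_start:z_start + z_keep]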
def blend_method(method: str, blur: int = 90, save: bool = True):
    assert method in ['fdkconv', 's2f_inv', 's2f_inv3', 'inv_sp', 'inv_sp3']

    reco_coronal = load_nib(f'testing/{method}_coronal.nii.gz')
    reco_sagittal = load_nib(f'testing/{method}_sagittal.nii.gz')

    # non-negativity constraint
    reco_coronal[reco_coronal < 0] = 0
    reco_sagittal[reco_sagittal < 0] = 0

    # [x, y, z] -> [z, y, x]
    reco_coronal = reco_coronal.transpose()
    reco_sagittal = reco_sagittal.transpose()

    wedge = create_wedge((512, 512), blur)[None, ...]

    # spectral blending: combine the two reconstructions slice-wise in the
    # 2-D frequency domain, weighting each spectrum with the wedge mask
    reco_coronal_fft = np.fft.fftshift(np.fft.fft2(reco_coronal))
    reco_sagittal_fft = np.fft.fftshift(np.fft.fft2(reco_sagittal))
    blended_fft = wedge * reco_sagittal_fft + (1 - wedge) * reco_coronal_fft
    # fftshift and ifftshift coincide for even-sized (512 x 512) slices
    blended = np.real(np.fft.ifft2(np.fft.fftshift(blended_fft)))

    if save:
        img = nib.Nifti1Image(blended.transpose(), np.eye(4))
        nib.save(img, f'testing/{method}_blended.nii.gz')
    return blended
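# create_wedge is not defined in this section. The sketch below shows one
# plausible construction, assuming the wedge is a soft double-wedge mask in
# the DC-centred 2-D frequency plane: 1 where |ky| > |kx| (those frequencies
# are taken from the sagittal spectrum), 0 elsewhere (coronal spectrum), with
# the hard edge smoothed by a Gaussian. The mask geometry and the
# interpretation of `blur` as a pixel-sized sigma are assumptions made for
# illustration only.
import numpy as np
from scipy.ndimage import gaussian_filter


def create_wedge_sketch(shape, blur):
    ky = np.fft.fftshift(np.fft.fftfreq(shape[0]))[:, None]
    kx = np.fft.fftshift(np.fft.fftfreq(shape[1]))[None, :]
    # binary double wedge: predominantly vertical frequencies get weight 1
    mask = (np.abs(ky) > np.abs(kx)).astype(np.float64)
    # soften the transition so the blended spectrum has no sharp seams
    return gaussian_filter(mask, sigma=blur)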
def calculate_z_depending_metrics():
    with open('train_valid.json', 'r') as json_file:
        json_dict = json.load(json_file)
    test_files = json_dict['test_files']

    gt = load_nib(pjoin(DATA_DIRS['datasets'], test_files[0]))
    gt = reduce_zdim(gt).transpose()

    prediction = load_nib('testing/inv_sp3_blended.nii.gz')
    prediction = prediction.transpose()

    fdkconv = load_nib('testing/fdkconv_blended.nii.gz')
    fdkconv = reduce_zdim(fdkconv).transpose()

    # identity reduction: keep the per-slice (z-dependent) metric profiles
    pred_nmse = nmse(prediction, gt, lambda _: _)
    pred_psnr = psnr(prediction, gt, lambda _: _)
    pred_ssim = ssim(prediction, gt, lambda _: _)
    fdkconv_nmse = nmse(fdkconv, gt, lambda _: _)
    fdkconv_psnr = psnr(fdkconv, gt, lambda _: _)
    fdkconv_ssim = ssim(fdkconv, gt, lambda _: _)

    # plot the slice-wise ratio of the prediction to the fdkconv baseline
    from matplotlib import pyplot as plt
    plt.figure()
    plt.plot(pred_nmse / fdkconv_nmse)
    plt.figure()
    plt.plot(pred_psnr / fdkconv_psnr)
    plt.figure()
    plt.plot(pred_ssim / fdkconv_ssim)
    plt.show()
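# nmse, psnr and ssim are called with two arguments in the functions below
# but with a trailing identity function (lambda _: _) above, which suggests
# an optional reduction over per-slice values: the default collapses the
# z-profile to a scalar, while the identity keeps the slice-wise curve that
# gets plotted. A minimal sketch of such a signature, using NMSE as the
# example; the default reduction and the per-slice loop are assumptions.
import numpy as np


def nmse_sketch(prediction, gt, reduce_fn=np.mean):
    # normalised mean squared error per slice along the leading (z) axis
    per_slice = np.array([
        np.sum((p - g) ** 2) / np.sum(g ** 2)
        for p, g in zip(prediction, gt)
    ])
    return reduce_fn(per_slice)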
def calculate_sparse_fdk_metrics():
    with open('train_valid.json', 'r') as json_file:
        json_dict = json.load(json_file)
    test_files = json_dict['test_files']

    gt = load_nib(pjoin(DATA_DIRS['datasets'], test_files[0]))
    gt = reduce_zdim(gt).transpose()

    sparse_fdk = load_nib('sparse_fdk.nii.gz')
    sparse_fdk = reduce_zdim(sparse_fdk).transpose()

    return {
        'nmse': nmse(sparse_fdk, gt),
        'psnr': psnr(sparse_fdk, gt),
        'ssim': ssim(sparse_fdk, gt),
    }
def calculate_metrics(method: str, plane: HilbertPlane):
    with open('train_valid.json', 'r') as json_file:
        json_dict = json.load(json_file)
    test_files = json_dict['test_files']

    gt = load_nib(pjoin(DATA_DIRS['datasets'], test_files[0]))
    gt = reduce_zdim(gt).transpose()

    prediction = load_nib(f'testing/{method}_{plane.name.lower()}.nii.gz')
    prediction = prediction.transpose()

    return {
        'nmse': nmse(prediction, gt),
        'psnr': psnr(prediction, gt),
        'ssim': ssim(prediction, gt),
    }
def blend_sweep_blur():
    with open('train_valid.json', 'r') as json_file:
        json_dict = json.load(json_file)
    test_files = json_dict['test_files']

    gt = load_nib(pjoin(DATA_DIRS['datasets'], test_files[0]))
    gt = reduce_zdim(gt).transpose()

    reco_coronal = load_nib('testing/inv_sp3_coronal.nii.gz')
    reco_sagittal = load_nib('testing/inv_sp3_sagittal.nii.gz')

    # non-negativity constraint
    reco_coronal[reco_coronal < 0] = 0
    reco_sagittal[reco_sagittal < 0] = 0

    # [x, y, z] -> [z, y, x]
    reco_coronal = reco_coronal.transpose()
    reco_sagittal = reco_sagittal.transpose()

    # the spectra do not depend on the blur, so compute them once
    reco_coronal_fft = np.fft.fftshift(np.fft.fft2(reco_coronal))
    reco_sagittal_fft = np.fft.fftshift(np.fft.fft2(reco_sagittal))

    all_blurs = np.linspace(1, 90, 10)
    all_metrics = {'nmse': [], 'psnr': [], 'ssim': []}
    for blur in all_blurs:
        wedge = create_wedge((512, 512), blur)[None, ...]

        # spectral blending
        blended_fft = wedge * reco_sagittal_fft + (1 - wedge) * reco_coronal_fft
        blended = np.real(np.fft.ifft2(np.fft.fftshift(blended_fft)))

        nmse_value = nmse(blended, gt)
        psnr_value = psnr(blended, gt)
        ssim_value = ssim(blended, gt)
        print(f'{blur} {nmse_value} {psnr_value} {ssim_value}')
        all_metrics['nmse'].append(nmse_value)
        all_metrics['psnr'].append(psnr_value)
        all_metrics['ssim'].append(ssim_value)

    with open('spectral_blur_sweep.csv', 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        # convert the numpy array to a list before concatenating
        writer.writerow(['blur'] + all_blurs.tolist())
        writer.writerow(['nmse'] + all_metrics['nmse'])
        writer.writerow(['psnr'] + all_metrics['psnr'])
        writer.writerow(['ssim'] + all_metrics['ssim'])
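# A small usage sketch for the sweep above: read spectral_blur_sweep.csv and
# plot SSIM against the wedge blur to pick a working point (blend_method
# defaults to blur=90). The function name and the choice to plot only SSIM
# are illustrative.
import csv

import numpy as np
from matplotlib import pyplot as plt


def plot_blur_sweep_sketch():
    with open('spectral_blur_sweep.csv', newline='') as csv_file:
        rows = {
            row[0]: np.array(row[1:], dtype=float)
            for row in csv.reader(csv_file)
        }
    plt.plot(rows['blur'], rows['ssim'], marker='o')
    plt.xlabel('wedge blur')
    plt.ylabel('SSIM')
    plt.show()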
def _load_ds_if_none(self):
    sparse_volume = self.sparse_volume
    if sparse_volume is None:
        sparse_volume = load_nib(
            pjoin(
                DATA_DIRS['fdk_36'],
                self.filename,
            ))[..., None]
        sparse_volume = MyDataset._reduce_zdim(sparse_volume)

    image_volume = self.image_volume
    if image_volume is None:
        image_volume = load_nib(
            pjoin(DATA_DIRS['datasets'], self.filename),
        )[..., None]
        image_volume = MyDataset._reduce_zdim(image_volume)

    return sparse_volume, image_volume
def _load_ds_if_none(self):
    sparse_hilbert = self.sparse_hilbert
    if sparse_hilbert is None:
        sparse_hilbert = load_nib(
            pjoin(
                DATA_DIRS['views_36'],
                f'{self.filename[:-7]}'
                f'_{self.plane.name.lower()}.nii.gz'))
        sparse_hilbert = MyDataset._reduce_zdim(sparse_hilbert)

    full_hilbert = self.full_hilbert
    if full_hilbert is None:
        full_hilbert = load_nib(
            pjoin(
                DATA_DIRS['views_360'],
                f'{self.filename[:-7]}'
                f'_{self.plane.name.lower()}.nii.gz'))
        full_hilbert = MyDataset._reduce_zdim(full_hilbert)

    return sparse_hilbert, full_hilbert