def test_affine(self, input_param, input_data, expected_val):
    """Apply Affine(**input_param) to input_data and compare to expected_val."""
    transform = Affine(**input_param)
    output = transform(**input_data)
    # Affine may return (image, affine_matrix); keep only the image.
    if isinstance(output, tuple):
        output = output[0]
    # Result and expectation must agree on being torch tensors (or not).
    self.assertEqual(
        isinstance(output, torch.Tensor), isinstance(expected_val, torch.Tensor)
    )
    np.testing.assert_allclose(output, expected_val, rtol=1e-4, atol=1e-4)
def test_affine(self, input_param, input_data, expected_val):
    """Apply the Affine transform and verify the output values."""
    xform = Affine(**input_param)
    out = xform(**input_data)
    # Both sides must agree on tensor-ness before comparing values.
    self.assertEqual(torch.is_tensor(out), torch.is_tensor(expected_val))
    if torch.is_tensor(out):
        out = out.cpu().numpy()
        expected = expected_val.cpu().numpy()
    else:
        expected = expected_val
    np.testing.assert_allclose(out, expected, rtol=1e-4, atol=1e-4)
def _translate(x, range_x: uniform = 0., range_y: uniform = 0., range_z: uniform = 0.):
    """Translate `x` by (range_x, range_y, range_z) voxels with zero padding.

    The spatial size of the output matches x.shape[1:] (channel-first input
    assumed — TODO confirm against callers).
    """
    xform = Affine(
        rotate_params=None,
        shear_params=None,
        translate_params=(range_x, range_y, range_z),
        scale_params=None,
        spatial_size=list(x.shape[1:]),
        padding_mode='zeros',
    )
    return torch.Tensor(xform(x))
def __call__(self, image, label, params=None):
    '''
    Affine transformation on image and label

    Args:
        image: np array or PIL, [img_rows, img_cols]
        label: np array or PIL, [img_rows, img_cols]
    '''
    # Draw fresh random parameters unless the caller pinned them.
    self.params = self._random() if params is None else params

    if self.params['p']:
        def _apply(data, mode):
            # Image and label share geometry; only interpolation differs.
            return Affine(
                rotate_params=self.params['angle'],
                translate_params=self.params['translate'],
                scale_params=self.params['scale'],
                shear_params=self.params['shear'],
                mode=mode,
                padding_mode='zeros',
            )(data)

        image = _apply(image, 'bilinear')
        label = _apply(label, self.params['label_mode'])
    return image, label
def test_affine(self, input_param, input_data, expected_val):
    """Verify the Affine transform output matches expected_val."""
    out = Affine(**input_param)(**input_data)
    expected = expected_val
    # Result and expectation must both be tensors, or both not.
    self.assertEqual(isinstance(out, torch.Tensor), isinstance(expected, torch.Tensor))
    if isinstance(out, torch.Tensor):
        # Compare on CPU as plain numpy arrays.
        out = out.cpu().numpy()
        expected = expected.cpu().numpy()
    np.testing.assert_allclose(out, expected, rtol=1e-4, atol=1e-4)
def test_affine(self, input_param, input_data, expected_val):
    """Check Affine output values, local invertibility, and metadata tracking."""
    original = deepcopy(input_data["img"])
    xform = Affine(**input_param)

    def _image_of(res):
        # Affine may return (image, affine_matrix); keep only the image.
        return res[0] if isinstance(res, tuple) else res

    out = _image_of(xform(**input_data))
    test_local_inversion(xform, out, original)
    assert_allclose(out, expected_val, rtol=1e-4, atol=1e-4, type_test=False)

    # With metadata tracking disabled the result must be a plain tensor,
    # not a MetaTensor.
    set_track_meta(False)
    out = _image_of(xform(**input_data))
    self.assertNotIsInstance(out, MetaTensor)
    self.assertIsInstance(out, torch.Tensor)
    set_track_meta(True)
def __init__(self, data: str, split: str, extension: str, classes: int,
             column: str, variable_type: str, augmentation: bool = False,
             debug: bool = False):
    """Dataset over one split of a WSL-style image collection.

    Args:
        data: dataset name; subdirectory under wsl_data_dir / wsl_csv_dir.
        split: split name; ``{split}.csv`` lists the Ids to keep.
        extension: image file extension, used when ``data`` has no entry
            in ``known_extensions``.
        classes: number of label classes (1 = single scalar label per item).
        column: column of info.csv holding the labels.
        variable_type: label type; non-'categorical' labels are converted
            to per-class value lists.
        augmentation: if True, build a fixed affine augmentation transform.
        debug: if True, truncate to the first 100 samples.
    """
    if classes != 1:
        print('Note: Ensure all labels are of a single type.')

    self.datapath = wsl_data_dir / data
    self.data = data
    self.classes = classes
    # Fall back to the caller-supplied extension for unknown datasets
    # (was `if data in known_extensions.keys()` — .get is the idiom).
    self.extension = known_extensions.get(data, extension)

    df = pd.read_csv(
        wsl_csv_dir / data / 'info.csv',
        converters={column: literal_eval, 'box': literal_eval},
    )
    self.df = df
    # Keep one row per Id, restricted to the requested split.
    df = df.drop_duplicates(subset='Id', keep='first', ignore_index=True)
    split_ids = pd.read_csv(wsl_csv_dir / data / f'{split}.csv').Id.tolist()
    df = df[df.Id.isin(split_ids)]

    self.names = df.Id.to_list()
    self.labels = df[column].tolist()
    self.variable_type = variable_type
    if debug:
        self.names = self.names[:100]
        self.labels = self.labels[:100]

    self.new_size = (224, 224)
    self.image_transforms = Compose([
        Resize(self.new_size),
        RepeatChannel(repeats=3),
        CastToType(dtype=np.float32),
        ToTensor(),
    ])
    # NOTE: the original assigned the bool `augmentation` here and then
    # immediately overwrote it (dead store). self.augmentation is either
    # a transform or None.
    if augmentation:
        self.augmentation = Affine(
            rotate_params=np.pi / 6,
            scale_params=(1.2, 1.2),
            translate_params=(50, 50),
            padding_mode='zeros',
        )
    else:
        self.augmentation = None

    if self.variable_type != 'categorical':
        if classes == 1:
            self.labels = [[x] for x in self.labels]
        else:
            # Multi-class labels arrive as dicts; split names and values.
            self.class_names = self.labels[0].keys()
            print('\nClass List: ', self.class_names)
            self.labels = [list(x.values()) for x in self.labels]

        # only matters for balanced case for binary variable type
        self.pos_weight = [
            round((len(col) - sum(col)) / sum(col), 2)
            for col in zip(*self.labels)
        ]
def test_affine(self, input_param, input_data, expected_val):
    """Apply Affine and compare against the expected output."""
    output = Affine(**input_param)(**input_data)
    if isinstance(output, tuple):
        # Drop the returned affine matrix; compare only the image.
        output = output[0]
    assert_allclose(output, expected_val, rtol=1e-4, atol=1e-4)
#%% aa = sample_syn.get('image_1')[0].numpy() print(aa.shape) bb = crop_and_pad(aa[0,...,10], 100, 100) print(bb.shape) cc = crop_and_pad_multiple_x1(aa[0,...,10:26], 100, 100) print(cc.shape) #%% from monai.transforms import Affine #%% affine = Affine( rotate_params=np.pi / 4, scale_params=(1.2, 1.2), translate_params=(200, 40), padding_mode="zeros", ) a0 = aa[...,8] new_img, _ = affine(a0, (192, 192), mode="bilinear") print(new_img.shape) plt.imshow(new_img[0]) #%% aa = sample_syn.get('image').numpy() print(aa.shape) print(np.min(aa), np.max(aa)) aa = np.clip(aa,0,1) print(np.min(aa), np.max(aa)) #%%