def test_load_fake_dataset(self):
    """Check that the fake dataset produces images of the requested size."""
    C, H, W = 3, 32, 32
    fake_set = data_utils.load_fake_dataset(
        root=self.dataset_dir, image_size=(C, H, W))
    # dataset[0] is a (image, label) pair; inspect only the image.
    sample = fake_set[0][0]
    assert sample.shape == (C, H, W)
def test_load_fake_dataset(self):
    """Check image shapes both with and without the transform pipeline."""
    C, H, W = 3, 32, 32
    for should_transform in (True, False):
        fake_set = data_utils.load_fake_dataset(
            root=self.dataset_dir,
            image_size=(C, H, W),
            transform_data=should_transform)
        sample = fake_set[0][0]
        if should_transform:
            # Transformed samples come back channel-first at the target size.
            assert sample.shape == (C, H, W)
        else:
            # No resizing done, default 32x32.
            assert np.asarray(sample).shape == (32, 32, 3)
def get_fake_data_images(num_samples, root='./datasets', size=32, **kwargs):
    """Loads fake images, especially for testing.

    Args:
        num_samples (int): The number of images to randomly sample.
        root (str): The root directory where all datasets are stored.
        size (int): Size of image to resize to.

    Returns:
        Tensor: Batch of num_samples images in np array form.
    """
    fake_set = data_utils.load_fake_dataset(
        root=root,
        image_size=(3, size, size),
        transform_data=True,
        convert_tensor=False,  # Prevents normalization.
        **kwargs)
    return get_random_images(fake_set, num_samples)