def test_can_save_and_load_with_meta_file(self):
    """Round-trip a dataset with save_dataset_meta=True and check that
    dataset_meta.json is written and the data survives re-import."""
    expected = Dataset.from_iterable([
        DatasetItem(id='image_2', subset='test',
            image=np.ones((32, 32, 3)), annotations=[Label(0)]),
        DatasetItem(id='image_3', subset='test',
            image=np.ones((32, 32, 3))),
        DatasetItem(id='image_4', subset='test',
            image=np.ones((32, 32, 3)), annotations=[Label(1)]),
    ], categories=['label_0', 'label_1'])

    with TestDir() as tmp_dir:
        CifarConverter.convert(expected, tmp_dir,
            save_images=True, save_dataset_meta=True)
        actual = Dataset.import_from(tmp_dir, 'cifar')

        # The meta file must exist on disk before comparing contents.
        self.assertTrue(osp.isfile(osp.join(tmp_dir, 'dataset_meta.json')))
        compare_datasets(self, expected, actual, require_images=True)
def test_can_save_and_load_cifar100(self):
    """Round-trip a CIFAR-100-style dataset whose categories carry
    (class, superclass) pairs rather than flat label names."""
    expected = Dataset.from_iterable([
        DatasetItem(id='image_2', subset='test',
            image=np.ones((32, 32, 3)), annotations=[Label(0)]),
        DatasetItem(id='image_3', subset='test',
            image=np.ones((32, 32, 3))),
        DatasetItem(id='image_4', subset='test',
            image=np.ones((32, 32, 3)), annotations=[Label(1)]),
    ], categories=[
        ['class_0', 'superclass_0'],
        ['class_1', 'superclass_0'],
    ])

    with TestDir() as tmp_dir:
        CifarConverter.convert(expected, tmp_dir, save_images=True)
        actual = Dataset.import_from(tmp_dir, 'cifar')

        compare_datasets(self, expected, actual, require_images=True)
def test_can_save_and_load_empty_image(self):
    """Items without image data must survive a save/load round-trip."""
    expected = Dataset.from_iterable([
        DatasetItem(id='a', annotations=[Label(0)]),
        DatasetItem(id='b'),
    ], categories=['label_0'])

    with TestDir() as tmp_dir:
        CifarConverter.convert(expected, tmp_dir, save_images=True)
        actual = Dataset.import_from(tmp_dir, 'cifar')

        compare_datasets(self, expected, actual, require_images=True)
def test_can_save_and_load_without_saving_images(self):
    """Annotations alone must round-trip when save_images=False."""
    expected = Dataset.from_iterable([
        DatasetItem(id='a', subset='train_1', annotations=[Label(0)]),
        DatasetItem(id='b', subset='train_first', annotations=[Label(1)]),
    ], categories=['x', 'y'])

    with TestDir() as tmp_dir:
        CifarConverter.convert(expected, tmp_dir, save_images=False)
        actual = Dataset.import_from(tmp_dir, 'cifar')

        compare_datasets(self, expected, actual, require_images=True)
def test_can_save_dataset_with_cyrillic_and_spaces_in_filename(self):
    """Non-ASCII item ids with spaces must round-trip unchanged."""
    expected = Dataset.from_iterable([
        DatasetItem(id="кириллица с пробелом",
            image=np.ones((32, 32, 3)), annotations=[Label(0)]),
    ], categories=['label_0'])

    with TestDir() as tmp_dir:
        CifarConverter.convert(expected, tmp_dir, save_images=True)
        actual = Dataset.import_from(tmp_dir, 'cifar')

        compare_datasets(self, expected, actual, require_images=True)
def test_can_save_and_load_with_different_image_size(self):
    """Images of non-uniform sizes must round-trip through the format."""
    expected = Dataset.from_iterable([
        DatasetItem(id='image_1',
            image=np.ones((10, 8, 3)), annotations=[Label(0)]),
        DatasetItem(id='image_2',
            image=np.ones((32, 32, 3)), annotations=[Label(1)]),
    ], categories=['dog', 'cat'])

    with TestDir() as tmp_dir:
        CifarConverter.convert(expected, tmp_dir, save_images=True)
        actual = Dataset.import_from(tmp_dir, 'cifar')

        compare_datasets(self, expected, actual, require_images=True)
def test_can_save_and_load_image_with_arbitrary_extension(self):
    """Source images with non-default extensions (and nested ids) must
    survive a save/load round-trip."""
    expected = Dataset.from_iterable([
        DatasetItem(id='q/1',
            image=Image(path='q/1.JPEG', data=np.zeros((32, 32, 3)))),
        DatasetItem(id='a/b/c/2',
            image=Image(path='a/b/c/2.bmp', data=np.zeros((32, 32, 3)))),
    ], categories=[])

    with TestDir() as tmp_dir:
        CifarConverter.convert(expected, tmp_dir, save_images=True)
        actual = Dataset.import_from(tmp_dir, 'cifar')

        compare_datasets(self, expected, actual, require_images=True)