def load(file_object, annotations):
    """Import TF Detection API annotations from an uploaded archive.

    Unpacks the archive into a temporary directory, parses it with the
    datumaro TF Detection API importer, and feeds the resulting dataset
    into the CVAT annotation bindings.

    Args:
        file_object: either a filesystem path (str) or a file-like object
            exposing a ``name`` attribute pointing at the archive on disk.
        annotations: CVAT annotation sink passed through to
            ``import_dm_annotations``.
    """
    # Local imports keep heavyweight dependencies out of module load time.
    from pyunpack import Archive
    from tempfile import TemporaryDirectory
    from datumaro.plugins.tf_detection_api_format.importer import TfDetectionApiImporter
    from cvat.apps.dataset_manager.bindings import import_dm_annotations

    if isinstance(file_object, str):
        archive_path = file_object
    else:
        # File-like objects carry the on-disk path in their "name" attribute.
        archive_path = getattr(file_object, "name")

    with TemporaryDirectory() as extract_dir:
        Archive(archive_path).extractall(extract_dir)

        project = TfDetectionApiImporter()(extract_dir)
        dataset = project.make_dataset()
        import_dm_annotations(dataset, annotations)
def test_can_detect(self):
    """Detection succeeds on a directory holding a freshly generated tfrecord."""
    class SourceExtractor(Extractor):
        # Minimal one-item dataset: a single bbox on a blank 16x16 image.
        def __iter__(self):
            items = [
                DatasetItem(id=1, subset='train',
                    image=np.ones((16, 16, 3)),
                    annotations=[
                        Bbox(0, 4, 4, 8, label=2),
                    ]),
            ]
            return iter(items)

        def categories(self):
            labels = LabelCategories()
            for idx in range(10):
                labels.add('label_' + str(idx))
            return {
                AnnotationType.label: labels,
            }

    with TestDir() as test_dir:
        # Write a dummy tfrecord, then check the importer recognizes it.
        TfDetectionApiConverter()(SourceExtractor(), save_dir=test_dir)
        self.assertTrue(TfDetectionApiImporter.detect(test_dir))
def _test_save_and_load(self, source_dataset, converter, test_dir,
        target_dataset=None, importer_args=None):
    """Round-trip *source_dataset* through save and load, then compare.

    Converts the dataset into ``test_dir``, reads it back with the
    TF Detection API importer (forwarding ``importer_args``), and asserts
    the parsed dataset equals ``target_dataset`` (or the source when no
    explicit target is given).
    """
    converter(source_dataset, test_dir)

    kwargs = {} if importer_args is None else importer_args
    project = TfDetectionApiImporter()(test_dir, **kwargs)
    parsed_dataset = project.make_dataset()

    expected = source_dataset if target_dataset is None else target_dataset
    compare_datasets(self, expected=expected, actual=parsed_dataset)
def test_can_detect(self):
    """The importer recognizes the bundled dummy dataset directory."""
    is_recognized = TfDetectionApiImporter.detect(DUMMY_DATASET_DIR)
    self.assertTrue(is_recognized)