Example 1
def test_convert_to_coco(self):
    # Register a throwaway dataset with a single category, run the COCO
    # conversion on it, then unregister it again.
    DatasetCatalog.register("test_dataset", lambda: TestConvertCOCO.generate_data())
    MetadataCatalog.get("test_dataset").set(thing_classes=["test_label"])
    convert_to_coco_dict("test_dataset")
    DatasetCatalog.pop("test_dataset")
    MetadataCatalog.pop("test_dataset")
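The example assumes a TestConvertCOCO.generate_data() helper that returns dataset dicts in Detectron2's standard format. A minimal sketch of what such a helper could return (the file name, image size, and box values are placeholders, not taken from the original):

from detectron2.structures import BoxMode

@staticmethod
def generate_data():
    # A single image record with one instance annotation, following the
    # standard Detectron2 dataset-dict layout expected by convert_to_coco_dict.
    record = {
        "file_name": "image.png",  # placeholder path, never opened here
        "image_id": 0,
        "height": 100,
        "width": 100,
        "annotations": [
            {
                "bbox": [10, 10, 40, 40],  # x, y, width, height
                "bbox_mode": BoxMode.XYWH_ABS,
                "category_id": 0,  # index into thing_classes
                "segmentation": [[10, 10, 50, 10, 50, 50, 10, 50]],
                "iscrowd": 0,
            }
        ],
    }
    return [record]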
Example 2
def test(self):
    # Make a dummy dataset.
    mask = make_mask()
    DatasetCatalog.register("test_dataset", lambda: make_dataset_dicts(mask))
    MetadataCatalog.get("test_dataset").set(thing_classes=["test_label"])

    # Dump to json.
    json_dict = convert_to_coco_dict("test_dataset")
    with tempfile.TemporaryDirectory() as tmpdir:
        json_file_name = os.path.join(tmpdir, "test.json")
        with open(json_file_name, "w") as f:
            json.dump(json_dict, f)
        # Load from json.
        dicts = load_coco_json(json_file_name, "")

    # Check the loaded mask matches the original.
    anno = dicts[0]["annotations"][0]
    loaded_mask = mask_util.decode(anno["segmentation"])
    self.assertTrue(np.array_equal(loaded_mask, mask))
    DatasetCatalog.pop("test_dataset")
    MetadataCatalog.pop("test_dataset")
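This test relies on make_mask() and make_dataset_dicts() helpers that are not shown. A minimal sketch of what they might look like, assuming the mask is stored as a COCO RLE produced with pycocotools (the mask shape and file name are placeholders):

import numpy as np
import pycocotools.mask as mask_util
from detectron2.structures import BoxMode

def make_mask():
    # A small binary mask with a filled square in the middle.
    mask = np.zeros((10, 10), dtype=np.uint8)
    mask[2:7, 2:7] = 1
    return mask

def make_dataset_dicts(mask):
    # Wrap the mask in a single dataset dict; mask_util.encode() needs a
    # Fortran-contiguous uint8 array.
    rle = mask_util.encode(np.asfortranarray(mask))
    ys, xs = np.nonzero(mask)
    record = {
        "file_name": "image.png",  # placeholder, never read by the test
        "image_id": 0,
        "height": mask.shape[0],
        "width": mask.shape[1],
        "annotations": [
            {
                "bbox": [int(xs.min()), int(ys.min()),
                         int(xs.max() - xs.min()), int(ys.max() - ys.min())],
                "bbox_mode": BoxMode.XYWH_ABS,
                "category_id": 0,
                "segmentation": rle,
                "iscrowd": 0,
            }
        ],
    }
    return [record]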
Example 3
def cleanup(self):
    # remove temporarily registered dataset and json file
    DatasetCatalog.pop(self.new_ds_name, None)
    MetadataCatalog.pop(self.new_ds_name, None)
    if hasattr(self, "tmp_dir"):
        shutil.rmtree(self.tmp_dir)
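The cleanup above implies a matching setup step that registers a dataset under self.new_ds_name and creates self.tmp_dir. A hedged sketch of what that counterpart might look like (the dataset name and contents are placeholders, not taken from the original):

import tempfile
from detectron2.data import DatasetCatalog, MetadataCatalog

def setup(self):
    # Counterpart to cleanup(): register a placeholder dataset and create
    # the temporary directory that cleanup() later removes.
    self.new_ds_name = "coco_conversion_test"  # hypothetical name
    self.tmp_dir = tempfile.mkdtemp()
    DatasetCatalog.register(self.new_ds_name, lambda: [])
    MetadataCatalog.get(self.new_ds_name).set(thing_classes=["test_label"])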