Example 1
    def test_convert_to_coco(self):
        # Register a dummy dataset, run the COCO conversion, then unregister it.
        DatasetCatalog.register("test_dataset",
                                lambda: TestConvertCOCO.generate_data())
        MetadataCatalog.get("test_dataset").set(thing_classes=["test_label"])
        convert_to_coco_dict("test_dataset")
        DatasetCatalog.pop("test_dataset")
        MetadataCatalog.pop("test_dataset")
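
These fragments assume the detectron2 catalogs and COCO helpers are already imported; a minimal preamble for running them might look like the following (detectron2 and pycocotools are assumed to be installed, while CodeTimer, get_dataset_name_and_json, filter_dataset and register_dataset are project-specific helpers that are not part of detectron2):

import json
import os
import tempfile

import numpy as np
import pycocotools.mask

from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.data.datasets.coco import convert_to_coco_dict, load_coco_json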
def write_coco_format_json(cfg, split):
    timer = CodeTimer("writing to coco")
    dataset_name, standard_format_json_file = get_dataset_name_and_json(cfg, split)

    # Keep only the records that carry the fields required by the COCO format.
    dataset_dicts = DatasetCatalog.get(dataset_name)
    _, filtered_dicts = filter_dataset(dataset_dicts, required_fields=["bbox", "bbox_mode", "segmentation"])
    register_dataset(cfg, split, getter=lambda: filtered_dicts, name=dataset_name + "_for_coco")

    coco_dict = convert_to_coco_dict(dataset_name + "_for_coco")

    json_format_file = standard_format_json_file.replace(".json", "_coco_format.json")
    with open(json_format_file, "w") as f:
        json.dump(coco_dict, f)
    timer.done()
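
A call site for write_coco_format_json might look like the sketch below; the cfg object and the split names are placeholders here, since the function only requires that get_dataset_name_and_json can map them to a registered dataset and an output JSON path:

# Hypothetical usage: cfg is whatever configuration object the surrounding
# project passes around; "train" and "val" are assumed split names.
for split in ["train", "val"]:
    write_coco_format_json(cfg, split)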
    def test(self):
        # Make a dummy dataset.
        mask = make_mask()
        DatasetCatalog.register("test_dataset",
                                lambda: make_dataset_dicts(mask))
        MetadataCatalog.get("test_dataset").set(thing_classes=["test_label"])

        # Dump to json.
        json_dict = convert_to_coco_dict("test_dataset")
        with tempfile.TemporaryDirectory() as tmpdir:
            json_file_name = os.path.join(tmpdir, "test.json")
            with open(json_file_name, "w") as f:
                json.dump(json_dict, f)
            # Load from json.
            dicts = load_coco_json(json_file_name, "")

        # Check the loaded mask matches the original.
        anno = dicts[0]["annotations"][0]
        loaded_mask = pycocotools.mask.decode(anno["segmentation"])
        self.assertTrue(np.array_equal(loaded_mask, mask))
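
The round-trip test above relies on two small helpers, make_mask and make_dataset_dicts, that are not shown in this example. A minimal sketch of what they could look like is given below; the mask shape, file name, and annotation fields are illustrative assumptions, chosen so that the record carries the bbox, bbox_mode, and segmentation fields that detectron2's standard dataset format expects:

from detectron2.structures import BoxMode


def make_mask():
    # A small binary mask (uint8) with a filled square in the middle.
    mask = np.zeros((30, 30), dtype=np.uint8)
    mask[5:20, 5:20] = 1
    return mask


def make_dataset_dicts(mask):
    # Wrap the mask in a one-image dataset in detectron2's standard dict format,
    # storing the segmentation as an RLE produced by pycocotools.
    ys, xs = np.nonzero(mask)
    segmentation = pycocotools.mask.encode(np.asarray(mask, order="F"))
    annotation = {
        "bbox": [int(xs.min()), int(ys.min()), int(xs.max()), int(ys.max())],
        "bbox_mode": BoxMode.XYXY_ABS,
        "category_id": 0,
        "iscrowd": 1,
        "segmentation": segmentation,
    }
    return [{
        "image_id": 0,
        "file_name": "test_image.png",
        "height": mask.shape[0],
        "width": mask.shape[1],
        "annotations": [annotation],
    }]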