def test_upload_dataset_with_label(self, accesskey, url, tmp_path):
    """Upload a labeled dataset, commit it, then verify catalog, label and statistics."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    # When uploading label, upload catalog first.
    dataset._catalog = Catalog.loads(CATALOG_CONTENTS)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    file_path = workdir / "hello.txt"
    file_path.write_text("CONTENT")
    data = Data(local_path=str(file_path))
    data.label = Label.loads(LABEL)
    segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("upload dataset with label")

    # Re-read the dataset from the remote side and compare with what was uploaded.
    dataset = Dataset(dataset_name, gas_client)
    assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
    assert dataset[0][0].label == Label.loads(LABEL)

    statistics1 = dataset_client.get_label_statistics()
    assert statistics1 == Statistics(STATISTICS)

    total_size = dataset_client.get_total_size()
    assert total_size == TOTALSIZE

    gas_client.delete_dataset(dataset_name)
def test_catalog(self, accesskey, url):
    """Uploading a catalog requires an open draft; verify the round trip."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)

    # Without an open draft the upload must be rejected.
    with pytest.raises(StatusError):
        dataset_client.upload_catalog(Catalog.loads(CATALOG))

    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))
    catalog = dataset_client.get_catalog()
    # todo: match the input and output catalog

    gas_client.delete_dataset(dataset_name)
def test_upload_dataset_with_mask(self, accesskey, url, tmp_path, mask_file):
    """Upload a dataset carrying semantic/instance/panoptic masks and verify them remotely."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    # When uploading label, upload catalog first.
    dataset._catalog = Catalog.loads(CATALOG_CONTENTS)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    file_path = workdir / "hello.txt"
    file_path.write_text("CONTENT")
    data = Data(local_path=str(file_path))

    # Attach one mask of each kind before uploading.
    remote_semantic_mask = SemanticMask(str(mask_file))
    remote_semantic_mask.all_attributes = {0: {"occluded": True}, 1: {"occluded": False}}
    data.label.semantic_mask = remote_semantic_mask

    instance_mask = InstanceMask(str(mask_file))
    instance_mask.all_attributes = {0: {"occluded": True}, 1: {"occluded": False}}
    data.label.instance_mask = instance_mask

    panoptic_mask = PanopticMask(str(mask_file))
    panoptic_mask.all_category_ids = {100: 0, 101: 1}
    data.label.panoptic_mask = panoptic_mask

    segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("upload dataset with label")

    # Re-read from the remote side and compare each mask with the expected response body.
    dataset = Dataset(dataset_name, gas_client)

    remote_semantic_mask = dataset[0][0].label.semantic_mask
    semantic_mask = RemoteSemanticMask.from_response_body(SEMANTIC_MASK_LABEL)
    assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
    assert remote_semantic_mask.path == semantic_mask.path
    assert remote_semantic_mask.all_attributes == semantic_mask.all_attributes

    remote_instance_mask = dataset[0][0].label.instance_mask
    instance_mask = RemoteInstanceMask.from_response_body(INSTANCE_MASK_LABEL)
    assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
    assert remote_instance_mask.path == instance_mask.path
    assert remote_instance_mask.all_attributes == instance_mask.all_attributes

    remote_panoptic_mask = dataset[0][0].label.panoptic_mask
    panoptic_mask = RemotePanopticMask.from_response_body(PANOPTIC_MASK_LABEL)
    assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
    assert remote_panoptic_mask.path == panoptic_mask.path
    assert remote_panoptic_mask.all_category_ids == panoptic_mask.all_category_ids

    gas_client.delete_dataset(dataset_name)
def test_copy_data_between_datasets(self, accesskey, url, tmp_path):
    """Copy a single data file from a committed dataset into a second dataset's draft."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name_1 = get_dataset_name()
    gas_client.create_dataset(dataset_name_1)

    dataset_1 = Dataset(name=dataset_name_1)
    segment_1 = dataset_1.create_segment("Segment1")
    dataset_1._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment_1.append(data)

    dataset_client_1 = gas_client.upload_dataset(dataset_1)
    dataset_client_1.commit("upload data")
    segment_client_1 = dataset_client_1.get_segment("Segment1")

    # Second dataset: same segment name, draft open, then copy one file across.
    dataset_name_2 = dataset_name_1 + "_2"
    dataset_client_2 = gas_client.create_dataset(dataset_name_2)
    dataset_client_2.create_draft("draft_2")
    dataset_client_2.create_segment("Segment1")
    segment_client_2 = dataset_client_2.get_segment("Segment1")
    segment_client_2.copy_data("hello0.txt", "hello0.txt", source_client=segment_client_1)

    segment2 = Segment("Segment1", client=dataset_client_2)
    assert segment2[0].path == "hello0.txt"
    assert segment2[0].label

    gas_client.delete_dataset(dataset_name_1)
    gas_client.delete_dataset(dataset_name_2)
def test_copy_data(self, accesskey, url, tmp_path):
    """Copy data within one segment; an unknown copy strategy must be rejected."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    segment_client = dataset_client.get_segment("Segment1")
    segment_client.copy_data("hello0.txt", "goodbye0.txt")
    segment_client.copy_data("hello1.txt", "hello10.txt")

    # "push" is not a valid copy strategy.
    with pytest.raises(InvalidParamsError):
        segment_client.copy_data("hello2.txt", "see_you.txt", strategy="push")

    segment2 = Segment("Segment1", client=dataset_client)
    assert segment2[0].path == "goodbye0.txt"
    assert segment2[3].path == "hello10.txt"
    assert segment2[1].label

    gas_client.delete_dataset(dataset_name)
def test_copy_segment_skip(self, accesskey, url, tmp_path):
    """Copy Segment1 onto an existing Segment2 with strategy="skip".

    The assertions check that Segment2 keeps its own original content.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment1 = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment1.append(data)

    # Segment2 holds a disjoint set of file names (hello10..hello19).
    segment2 = dataset.create_segment("Segment2")
    for index in range(10, 20):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment2.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.copy_segment("Segment1", "Segment2", strategy="skip")

    segment_copied = Segment("Segment2", client=dataset_client)
    assert segment_copied[0].path == "hello10.txt"
    assert segment_copied[0].path == segment2[0].target_remote_path
    assert segment_copied[0].label

    gas_client.delete_dataset(dataset_name)
def test_copy_segment_abort(self, accesskey, url, tmp_path):
    """Copying onto a segment with conflicting file names aborts with a server error."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment1 = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment1.append(data)

    # Segment2 reuses the same file names, so the default copy strategy conflicts.
    segment2 = dataset.create_segment("Segment2")
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment2.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    with pytest.raises(InternalServerError):
        dataset_client.copy_segment("Segment1", "Segment2")

    gas_client.delete_dataset(dataset_name)
def test_copy_fusion_segment(self, accesskey, url, tmp_path):
    """Copy a fusion segment; an unknown copy strategy must be rejected."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name, is_fusion=True)

    dataset = FusionDataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    segment.sensors.add(Sensor.loads(LIDAR_DATA))
    dataset._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        frame = Frame()
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        frame[LIDAR_DATA["name"]] = data
        segment.append(frame)

    dataset_client = gas_client.upload_dataset(dataset)
    segment_client = dataset_client.copy_segment("Segment1", "Segment2")
    assert segment_client.name == "Segment2"

    # "push" is not a valid copy strategy.
    with pytest.raises(InvalidParamsError):
        dataset_client.copy_segment("Segment1", "Segment3", strategy="push")

    segment2 = FusionSegment("Segment2", client=dataset_client)
    assert segment2[0][LIDAR_DATA["name"]].path == "hello0.txt"
    assert (
        segment2[0][LIDAR_DATA["name"]].path
        == segment[0][LIDAR_DATA["name"]].target_remote_path
    )
    assert segment2[0][LIDAR_DATA["name"]].label

    gas_client.delete_dataset(dataset_name)
def test_move_data_skip(self, accesskey, url, tmp_path):
    """Move data onto an existing target with strategy="skip".

    The assertions check the pre-existing target file keeps its original content.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    # Each file gets distinct content so the assertions can detect overwrites.
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text(f"CONTENT_{index}")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    segment_client = dataset_client.get_segment("Segment1")
    segment_client.move_data("hello0.txt", "hello1.txt", strategy="skip")

    segment_moved = Segment("Segment1", client=dataset_client)
    assert segment_moved[0].path == "hello1.txt"
    assert segment_moved[0].open().read() == b"CONTENT_1"

    gas_client.delete_dataset(dataset_name)
def test_move_segment(self, accesskey, url, tmp_path):
    """Rename a segment via move_segment; an unknown strategy must be rejected."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    segment_client = dataset_client.move_segment("Segment1", "Segment2")
    assert segment_client.name == "Segment2"

    # "push" is not a valid move strategy.
    with pytest.raises(InvalidParamsError):
        dataset_client.move_segment("Segment1", "Segment3", strategy="push")

    segment2 = Segment("Segment2", client=dataset_client)
    assert segment2[0].path == "hello0.txt"
    assert segment2[0].path == segment[0].target_remote_path
    assert segment2[0].label

    gas_client.delete_dataset(dataset_name)
def test_overwrite_label(self, accesskey, url, tmp_path):
    """Uploading a label twice for the same data replaces the previous label."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))
    segment_client = dataset_client.get_or_create_segment("segment1")

    workdir = tmp_path / "sub"
    workdir.mkdir()
    file_path = workdir / "hello0.txt"
    file_path.write_text("CONTENT")
    data = Data(local_path=str(file_path))
    segment_client.upload_file(data.path, data.target_remote_path)

    data.label = Label.loads(LABEL)
    segment_client.upload_label(data)

    # Replace labels
    data.label = Label.loads(NEW_LABEL)
    segment_client.upload_label(data)

    data = segment_client.list_data()
    assert data[0].path == "hello0.txt"
    assert data[0].label
    # todo: match the input and output label

    gas_client.delete_dataset(dataset_name)
def test_upload_label(self, accesskey, url, tmp_path):
    """Uploading a label for a file that was never uploaded must fail; after the
    file upload it must succeed."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))
    segment_client = dataset_client.get_or_create_segment("segment1")

    workdir = tmp_path / "sub"
    workdir.mkdir()
    file_path = workdir / "hello0.txt"
    file_path.write_text("CONTENT")
    data = Data(local_path=str(file_path))
    data.label = Label.loads(LABEL)

    # If not uploading file, uploading label is not allowed
    with pytest.raises(ResourceNotExistError):
        segment_client.upload_label(data)

    # Uploading files
    segment_client.upload_file(data.path, data.target_remote_path)
    data.label = Label.loads(LABEL)
    segment_client.upload_label(data)

    data = segment_client.list_data()
    assert data[0].path == "hello0.txt"
    assert data[0].label
    # todo: match the input and output label

    gas_client.delete_dataset(dataset_name)
def test_upload_dataset_with_label(self, accesskey, url, tmp_path):
    """Upload a ten-file labeled dataset and verify catalog and segment contents."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    # When uploading label, upload catalog first.
    dataset._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    assert dataset_client.get_catalog()

    segment1 = Segment("Segment1", client=dataset_client)
    assert len(segment1) == 10
    assert segment1[0].path == "hello0.txt"
    assert segment1[0].label

    gas_client.delete_dataset(dataset_name)
def test_delete_frame(self, accesskey, url, tmp_path):
    """Delete one frame of a fusion segment and verify it is gone."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(BOX2D_CATALOG))
    segment_client = dataset_client.get_or_create_segment("segment1")
    segment_client.upload_sensor(Sensor.loads(LIDAR_DATA))

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(5):
        frame = Frame()
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        frame[LIDAR_DATA["name"]] = data
        segment_client.upload_frame(frame, timestamp=index)

    frame_1_id = segment_client.list_frames()[0].frame_id
    segment_client.delete_frame(frame_1_id)
    frame_ids = [frame.frame_id for frame in segment_client.list_frames()]
    assert frame_1_id not in frame_ids

    gas_client.delete_dataset(dataset_name)
def test_upload_segment_with_label(self, accesskey, url, tmp_path):
    """Upload a standalone labeled segment into a draft and verify its contents."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))

    segment = Segment("segment1")
    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment.append(data)

    dataset_client.upload_segment(segment)

    segment1 = Segment(name="segment1", client=dataset_client)
    assert len(segment1) == 10
    assert segment1[0].path == "hello0.txt"
    assert segment1[0].path == segment[0].target_remote_path
    assert segment1[0].label
    # todo: match the input and output label

    gas_client.delete_dataset(dataset_name)
def test_upload_fusion_segment_with_label(self, accesskey, url, tmp_path):
    """Upload a standalone labeled fusion segment and verify its frames."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))

    segment = FusionSegment("segment1")
    segment.sensors.add(Sensor.loads(LIDAR_DATA))

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        frame = Frame()
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        frame[LIDAR_DATA["name"]] = data
        segment.append(frame)

    dataset_client.upload_segment(segment)

    segment1 = FusionSegment(name="segment1", client=dataset_client)
    assert len(segment1) == 10
    assert segment1[0][LIDAR_DATA["name"]].path == "hello0.txt"
    assert (
        segment1[0][LIDAR_DATA["name"]].path
        == segment[0][LIDAR_DATA["name"]].target_remote_path
    )
    assert segment1[0][LIDAR_DATA["name"]].label
    # todo: match the input and output label

    gas_client.delete_dataset(dataset_name)
def init_dataset_client(accesskey, url, tmp_path_factory):
    """Fixture: build and commit a fusion dataset with one camera frame per segment.

    For every name in SEGMENTS_NAME a segment is created holding a single frame;
    each entry of LABEL contributes one calibrated camera plus one labeled data
    file to that frame. Yields the committed dataset client and deletes the
    dataset on teardown.

    Fixes over the previous version:
    - removed an exact duplicate `camera.set_extrinsics(translation=translation)`
      call (the second call was pure redundancy);
    - removed `os.makedirs(path, exist_ok=True)`: `tmp_path_factory.mktemp`
      already creates and returns a fresh directory.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name, is_fusion=True)

    dataset = FusionDataset(name=dataset_name)
    dataset._catalog = Catalog.loads(CATALOG)
    path = tmp_path_factory.mktemp("sub")  # mktemp creates the directory itself

    for segment_name in SEGMENTS_NAME:
        segment = dataset.create_segment(segment_name)
        frame = Frame()
        for camera_name, label in LABEL.items():
            # One fully calibrated camera per labeled entry.
            camera = Camera(camera_name)
            camera.set_extrinsics(translation=Vector3D(1, 2, 3))
            camera.set_camera_matrix(fx=1.1, fy=1.1, cx=1.1, cy=1.1)
            camera.set_distortion_coefficients(p1=1.2, p2=1.2, k1=1.2, k2=1.2)
            segment.sensors.add(camera)

            local_path = path / f"{segment_name}_{camera_name}.txt"
            local_path.write_text(f"CONTENT_{segment_name}_{camera_name}")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(label)
            frame[camera_name] = data
        segment.append(frame)

    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("commit-1")
    yield dataset_client

    gas_client.delete_dataset(dataset_name)
def test_delete_data(self, accesskey, url, tmp_path):
    """Delete a single data file, then delete the whole remaining list."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))
    segment_client = dataset_client.get_or_create_segment("segment1")

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment_client.upload_data(data)

    # Single deletion.
    segment_client.delete_data("hello0.txt")
    data_paths = segment_client.list_data_paths()
    assert "hello0.txt" not in data_paths

    # Bulk deletion of everything that is left.
    segment_client.delete_data(segment_client.list_data_paths())
    data = segment_client.list_data()
    assert len(data) == 0

    gas_client.delete_dataset(dataset_name)
def test_upload_frame_with_label(self, accesskey, url, tmp_path):
    """Upload five labeled frames one by one and verify the listed frames."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))
    segment_client = dataset_client.get_or_create_segment("segment1")
    segment_client.upload_sensor(Sensor.loads(LIDAR_DATA))

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(5):
        frame = Frame()
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        frame[LIDAR_DATA["name"]] = data
        segment_client.upload_frame(frame, timestamp=index)

    frames = segment_client.list_frames()
    assert len(frames) == 5
    assert frames[0][LIDAR_DATA["name"]].path == "hello0.txt"
    assert frames[0][LIDAR_DATA["name"]].label
    # todo: match the input and output label

    gas_client.delete_dataset(dataset_name)
def test_create_and_upload_dataset_with_config(self, accesskey, url, tmp_path):
    """Create a dataset backed by an auth-storage config and upload labeled data.

    Skipped when the expected local config is absent on the server.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()

    try:
        gas_client.get_auth_storage_config(name=_LOCAL_CONFIG_NAME)
    except ResourceNotExistError:
        pytest.skip(f"skip this case because there's no {_LOCAL_CONFIG_NAME} config")

    gas_client.create_dataset(dataset_name, config_name=_LOCAL_CONFIG_NAME)
    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    # When uploading label, upload catalog first.
    dataset._catalog = Catalog.loads(CATALOG)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(5):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    assert dataset_client.get_catalog()

    segment1 = Segment("Segment1", client=dataset_client)
    assert len(segment1) == 5
    for index in range(5):
        assert segment1[index].path == f"hello{index}.txt"
        assert segment1[index].label

    gas_client.delete_dataset(dataset_name)
def test_upload_dataset_after_commit(self, accesskey, url, tmp_path):
    """After committing an upload, the remote dataset must mirror the local one."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    dataset._catalog = Catalog.loads(CATALOG)
    dataset.notes.is_continuous = True
    segment = dataset.create_segment("Segment1")

    workdir = tmp_path / "sub"
    workdir.mkdir()
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("test")

    # Compare notes, catalog and every data/label pair against the local copy.
    dataset_remote = Dataset(name=dataset_name, gas=gas_client)
    assert dataset_remote.notes.is_continuous == dataset.notes.is_continuous
    assert dataset_remote.catalog == dataset.catalog

    segment_remote = dataset_remote[0]
    assert len(segment_remote) == len(segment)
    for remote_data, data in zip(segment_remote, segment):
        assert remote_data.path == data.target_remote_path
        assert remote_data.label == data.label

    gas_client.delete_dataset(dataset_name)
def test_modify_catalog(self, accesskey, url):
    """Upload an initial catalog, overwrite it with a different one, and verify.

    Fix: the first upload previously used CATALOG2 — the same content as the
    "modified" upload — so the test never actually exercised modification.
    It now seeds the dataset with CATALOG1 (as test_first_upload_catalog does)
    before overwriting with CATALOG2.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")

    # Seed with one catalog, then replace it with a different one.
    dataset_client.upload_catalog(Catalog.loads(CATALOG1))
    catalog_to_upload = Catalog.loads(CATALOG2)
    dataset_client.upload_catalog(catalog_to_upload)
    catalog_get = dataset_client.get_catalog()
    assert catalog_get == catalog_to_upload

    # Uploading empty catalog is not allowed
    catalog_to_upload = Catalog.loads({})
    with pytest.raises(InvalidParamsError):
        dataset_client.upload_catalog(catalog_to_upload)

    gas_client.delete_dataset(dataset_name)
def test_cache_dataset(self, accesskey, url, tmp_path):
    """Enable the local cache and verify data and mask files land in the cache tree."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    # When uploading label, upload catalog first.
    dataset._catalog = Catalog.loads(_CATALOG)

    workdir = tmp_path / "sub"
    semantic_dir = tmp_path / "semantic_mask"
    instance_dir = tmp_path / "instance_mask"
    workdir.mkdir()
    semantic_dir.mkdir()
    instance_dir.mkdir()

    for index in range(_SEGMENT_LENGTH):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(_LABEL)

        semantic_mask = semantic_dir / f"semantic_mask{index}.png"
        semantic_mask.write_text("SEMANTIC_MASK")
        data.label.semantic_mask = SemanticMask(str(semantic_mask))

        instance_mask = instance_dir / f"instance_mask{index}.png"
        instance_mask.write_text("INSTANCE_MASK")
        data.label.instance_mask = InstanceMask(str(instance_mask))

        segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("commit-1")

    # Open every file and mask so each one gets pulled into the cache.
    cache_path = tmp_path / "cache_test"
    dataset_client.enable_cache(str(cache_path))
    segment1 = Segment("Segment1", client=dataset_client)
    for data in segment1:
        data.open()
        data.label.semantic_mask.open()
        data.label.instance_mask.open()

    # The cache tree is keyed by dataset id, commit id and segment name.
    segment_cache_path = (
        cache_path / dataset_client.dataset_id / dataset_client.status.commit_id / "Segment1"
    )
    semantic_mask_cache_path = segment_cache_path / "semantic_mask"
    instance_mask_cache_path = segment_cache_path / "instance_mask"

    for cache_dir, extension in (
        (segment_cache_path, "txt"),
        (semantic_mask_cache_path, "png"),
        (instance_mask_cache_path, "png"),
    ):
        assert set(cache_dir.glob(f"*.{extension}")) == {
            cache_dir / f"hello{index}.{extension}" for index in range(_SEGMENT_LENGTH)
        }

    gas_client.delete_dataset(dataset_name)
def load_catalog(self, filepath: str) -> None:
    """Load catalog from a json file.

    Arguments:
        filepath: The path of the json file which contains the catalog information.

    """
    with open(filepath, "r", encoding="utf-8") as fp:
        self._catalog = Catalog.loads(json.load(fp))
def test_get_catelog(self, mocker):
    """get_catalog issues a GET on labels/catalogs and parses the catalog payload.

    NOTE(review): "catelog" in the test name is a typo for "catalog"; kept
    because the test name is its public identifier.
    """
    params = self.dataset_client._status.get_status_info()
    response_data = {"catalog": {"CLASSIFICATION": {"categories": [{"name": "cat"}]}}}
    # Stub out the HTTP layer so get_catalog only exercises request building/parsing.
    open_api_do = mocker.patch(
        f"{gas.__name__}.Client.open_api_do",
        return_value=mock_response(data=response_data),
    )

    assert self.dataset_client.get_catalog() == Catalog.loads(response_data["catalog"])
    open_api_do.assert_called_once_with(
        "GET", "labels/catalogs", self.dataset_client._dataset_id, params=params
    )
def init_dataset_client(accesskey, url, tmp_path_factory):
    """Fixture: build a dataset with diverging content on two branches.

    Commits an empty "commit-1", then uploads ten files labeled with LABEL_2 to
    branch "dev" ("commit-2"), and four files labeled with LABEL_1 to the
    default branch ("commit-3"). Yields the resulting dataset client and
    deletes the dataset on teardown.

    Fix: removed both `os.makedirs(path, exist_ok=True)` calls —
    `tmp_path_factory.mktemp` already creates and returns a fresh directory.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.commit("commit-1")
    dataset_client.create_branch("dev")

    # Branch "dev": ten files labeled with LABEL_2.
    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    path = tmp_path_factory.mktemp("sub")  # mktemp creates the directory itself
    for i in range(10):
        local_path = path / f"hello{i}.txt"
        local_path.write_text(f"CONTENT_{i}")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL_2)
        segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset, branch_name="dev")
    dataset_client.commit("commit-2")
    dataset_client.checkout(DEFAULT_BRANCH)

    # Default branch: four files labeled with LABEL_1.
    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    path = tmp_path_factory.mktemp("sub")
    for i in range(4):
        local_path = path / f"hello{i}.txt"
        local_path.write_text(f"CONTENT_{i}")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL_1)
        segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset, branch_name=DEFAULT_BRANCH)
    dataset_client.commit("commit-3")

    yield dataset_client

    gas_client.delete_dataset(dataset_name)
def test_get_data(self, accesskey, url, tmp_path, mask_file):
    """Upload labeled data with all three mask kinds and verify get_data per file."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))
    segment_client = dataset_client.get_or_create_segment("segment1")

    workdir = tmp_path / "sub"
    workdir.mkdir()

    # Upload data with label
    for index in range(10):
        file_path = workdir / f"hello{index}.txt"
        file_path.write_text(f"CONTENT{index}")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)

        semantic_mask = SemanticMask(str(mask_file))
        semantic_mask.all_attributes = {0: {"occluded": True}, 1: {"occluded": False}}
        data.label.semantic_mask = semantic_mask

        instance_mask = InstanceMask(str(mask_file))
        instance_mask.all_attributes = {0: {"occluded": True}, 1: {"occluded": False}}
        data.label.instance_mask = instance_mask

        panoptic_mask = PanopticMask(str(mask_file))
        panoptic_mask.all_category_ids = {100: 0, 101: 1}
        data.label.panoptic_mask = panoptic_mask

        segment_client.upload_data(data)

    # Fetch each file back and compare its label and masks.
    for index in range(10):
        data = segment_client.get_data(f"hello{index}.txt")
        assert data.path == f"hello{index}.txt"
        assert data.label.box2d == Label.loads(LABEL).box2d

        # Remote mask paths share the data file's stem with a .png suffix.
        stem = os.path.splitext(data.path)[0]

        remote_semantic_mask = data.label.semantic_mask
        semantic_mask = RemoteSemanticMask.from_response_body(SEMANTIC_MASK_LABEL)
        assert remote_semantic_mask.path == f"{stem}.png"
        assert remote_semantic_mask.all_attributes == semantic_mask.all_attributes

        remote_instance_mask = data.label.instance_mask
        instance_mask = RemoteInstanceMask.from_response_body(INSTANCE_MASK_LABEL)
        assert remote_instance_mask.path == f"{stem}.png"
        assert remote_instance_mask.all_attributes == instance_mask.all_attributes

        remote_panoptic_mask = data.label.panoptic_mask
        panoptic_mask = RemotePanopticMask.from_response_body(PANOPTIC_MASK_LABEL)
        assert remote_panoptic_mask.path == f"{stem}.png"
        assert remote_panoptic_mask.all_category_ids == panoptic_mask.all_category_ids

    gas_client.delete_dataset(dataset_name)
def test_first_upload_catalog(self, accesskey, url):
    """The first catalog uploaded into a fresh draft must round-trip unchanged."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)

    catalog_to_upload = Catalog.loads(CATALOG1)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(catalog_to_upload)

    catalog_get = dataset_client.get_catalog()
    assert catalog_get == catalog_to_upload

    gas_client.delete_dataset(dataset_name)
def test_upload_catalog(self, mocker):
    """upload_catalog issues a PUT on labels/catalogs with the dumped catalog payload."""
    self.dataset_client._status.checkout(draft_number=1)
    # Stub out the HTTP layer so only the request construction is exercised.
    open_api_do = mocker.patch(
        f"{gas.__name__}.Client.open_api_do",
        return_value=mock_response(),
    )

    catalog = Catalog.loads({"CLASSIFICATION": {"categories": [{"name": "cat"}]}})
    self.dataset_client.upload_catalog(catalog)

    # The request body carries the catalog plus the current status info.
    put_data = {"catalog": catalog.dumps()}
    put_data.update(self.dataset_client._status.get_status_info())
    open_api_do.assert_called_once_with(
        "PUT", "labels/catalogs", self.dataset_client._dataset_id, json=put_data
    )
def test_upload_label(self, accesskey, url, tmp_path):
    """Commit a labeled dataset, then overwrite its labels in a new draft."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    # When uploading label, upload catalog first.
    dataset._catalog = Catalog.loads(CATALOG_CONTENTS)

    workdir = tmp_path / "sub"
    workdir.mkdir()
    file_path = workdir / "hello.txt"
    file_path.write_text("CONTENT")
    data = Data(local_path=str(file_path))
    data.label = Label.loads(LABEL)
    segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("upload dataset with label")
    dataset = Dataset(dataset_name, gas_client)
    assert dataset[0][0].label == Label.loads(LABEL)

    # Open a new draft and bulk-replace the labels with an edited copy.
    dataset_client.create_draft("update label")
    segment_client = dataset_client.get_segment(segment.name)
    new_label = Label.loads(LABEL)
    new_label.multi_polygon[0].category = "dog"
    upload_data = []
    for data in segment:
        data.label = new_label
        upload_data.append(data)
    segment_client.upload_label(upload_data)
    dataset_client.commit("update label")

    dataset = Dataset(dataset_name, gas_client)
    assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
    assert dataset[0][0].label == new_label

    gas_client.delete_dataset(dataset_name)