def test_move_data(self, accesskey, url, tmp_path):
    """Move two remote files to new paths and verify listing order and labels.

    Arguments:
        accesskey: Access key of the GAS service.
        url: URL of the GAS service.
        tmp_path: pytest-provided temporary directory.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    path = tmp_path / "sub"
    path.mkdir()
    for i in range(10):
        local_path = path / f"hello{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL)
        segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset)

    segment_client = dataset_client.get_segment("Segment1")
    segment_client.move_data("hello0.txt", "goodbye0.txt")
    segment_client.move_data("hello9.txt", "goodbye1.txt")

    segment2 = Segment("Segment1", client=dataset_client)
    # Remote data are listed lexicographically:
    # goodbye0, goodbye1, hello1 ... hello8.
    assert segment2[0].path == "goodbye0.txt"
    assert segment2[1].path == "goodbye1.txt"
    assert segment2[9].path == "hello8.txt"
    assert segment2[0].label

    gas_client.delete_dataset(dataset_name)
def test_copy_segment_override(self, accesskey, url, tmp_path):
    """Copy a segment onto an existing one using the "override" strategy."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment1 = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    src_dir = tmp_path / "sub"
    src_dir.mkdir()
    for index in range(10):
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment1.append(data)

    segment2 = dataset.create_segment("Segment2")
    for index in range(10, 20):
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment2.append(data)

    dataset_client = gas_client.upload_dataset(dataset)

    # "override" replaces the target segment's contents with the source's.
    dataset_client.copy_segment("Segment1", "Segment2", strategy="override")

    segment_copied = Segment("Segment2", client=dataset_client)
    assert segment_copied[0].path == "hello0.txt"
    assert segment_copied[0].path == segment1[0].target_remote_path
    assert segment_copied[0].label

    gas_client.delete_dataset(dataset_name)
def test_copy_segment_abort(self, accesskey, url, tmp_path):
    """Copying onto an existing segment with the default strategy must fail."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment1 = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    src_dir = tmp_path / "sub"
    src_dir.mkdir()
    for index in range(10):
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment1.append(data)

    # "Segment2" carries the same remote paths as "Segment1".
    segment2 = dataset.create_segment("Segment2")
    for index in range(10):
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment2.append(data)

    dataset_client = gas_client.upload_dataset(dataset)

    # Without an explicit strategy the copy aborts on conflict.
    with pytest.raises(InternalServerError):
        dataset_client.copy_segment("Segment1", "Segment2")

    gas_client.delete_dataset(dataset_name)
def test_copy_fusion_segment(self, accesskey, url, tmp_path):
    """Copy a fusion segment and verify its name, data paths and labels."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name, is_fusion=True)

    dataset = FusionDataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    segment.sensors.add(Sensor.loads(LIDAR_DATA))
    dataset._catalog = Catalog.loads(CATALOG)
    src_dir = tmp_path / "sub"
    src_dir.mkdir()
    sensor_name = LIDAR_DATA["name"]
    for index in range(10):
        frame = Frame()
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        frame[sensor_name] = data
        segment.append(frame)
    dataset_client = gas_client.upload_dataset(dataset)

    segment_client = dataset_client.copy_segment("Segment1", "Segment2")
    assert segment_client.name == "Segment2"

    # An unknown strategy name is rejected.
    with pytest.raises(InvalidParamsError):
        dataset_client.copy_segment("Segment1", "Segment3", strategy="push")

    segment2 = FusionSegment("Segment2", client=dataset_client)
    assert segment2[0][sensor_name].path == "hello0.txt"
    assert segment2[0][sensor_name].path == segment[0][sensor_name].target_remote_path
    assert segment2[0][sensor_name].label

    gas_client.delete_dataset(dataset_name)
def test_move_data_skip(self, accesskey, url, tmp_path):
    """Move a file onto an existing remote path using the "skip" strategy."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    dataset._catalog = Catalog.loads(CATALOG)
    src_dir = tmp_path / "sub"
    src_dir.mkdir()
    for index in range(10):
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text(f"CONTENT_{index}")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)
        segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset)

    segment_client = dataset_client.get_segment("Segment1")
    segment_client.move_data("hello0.txt", "hello1.txt", strategy="skip")

    segment_moved = Segment("Segment1", client=dataset_client)
    # NOTE(review): "skip" presumably leaves the existing "hello1.txt"
    # untouched and abandons the move, in which case "hello0.txt" would
    # survive and sort first — expecting "hello1.txt" at index 0 looks
    # suspicious; confirm against the server semantics of "skip".
    assert segment_moved[0].path == "hello1.txt"
    assert segment_moved[0].open().read() == b"CONTENT_1"

    gas_client.delete_dataset(dataset_name)
def test_get_branch(self, accesskey, url):
    """Fetch the default branch and a newly created branch by name."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)

    dataset_client.create_draft("draft-1")
    dataset_client.commit("commit-1")
    commit_1_id = dataset_client.status.commit_id
    dataset_client.create_draft("draft-2")
    dataset_client.commit("commit-2", "test")
    commit_2_id = dataset_client.status.commit_id

    def verify(branch, expected_name):
        # Both branches point at the second commit with the same metadata.
        assert branch.name == expected_name
        assert branch.commit_id == commit_2_id
        assert branch.parent_commit_id == commit_1_id
        assert branch.title == "commit-2"
        assert branch.description == "test"
        assert branch.committer.name
        assert branch.committer.date

    verify(dataset_client.get_branch(DEFAULT_BRANCH), DEFAULT_BRANCH)

    # An unknown branch name raises.
    with pytest.raises(ResourceNotExistError):
        dataset_client.get_branch("main1")

    dataset_client.create_branch("T123")
    verify(dataset_client.get_branch("T123"), "T123")

    gas_client.delete_dataset(dataset_name)
def test_upload_frame_with_order(self, accesskey, url, tmp_path):
    """Upload frames ordered either by timestamp or by explicit frame id."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
    dataset_client.create_draft("draft-1")
    segment_client = dataset_client.get_or_create_segment("segment1")
    segment_client.upload_sensor(Sensor.loads(LIDAR_DATA))
    src_dir = tmp_path / "sub"
    src_dir.mkdir()
    sensor_name = LIDAR_DATA["name"]

    # No frame id in the frame: order comes from the ``timestamp`` argument.
    for index in reversed(range(5)):
        frame = Frame()
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        frame[sensor_name] = Data(local_path=str(file_path))
        segment_client.upload_frame(frame, timestamp=index)

    # Frame id set in the frame: no timestamp is needed.
    for index in range(5, 10):
        frame = Frame(frame_id=ulid.from_timestamp(index))
        file_path = src_dir / f"goodbye{index}.txt"
        file_path.write_text("CONTENT")
        frame[sensor_name] = Data(local_path=str(file_path))
        segment_client.upload_frame(frame)

    # Setting both a frame id and a timestamp is not allowed.
    index = 10
    frame = Frame(frame_id=ulid.from_timestamp(index))
    file_path = src_dir / f"goodbye{index}.txt"
    file_path.write_text("CONTENT")
    frame[sensor_name] = Data(local_path=str(file_path))
    with pytest.raises(FrameError):
        segment_client.upload_frame(frame, timestamp=index)

    # Setting neither a frame id nor a timestamp is not allowed either.
    frame = Frame()
    file_path = src_dir / f"goodbye{index}.txt"
    file_path.write_text("CONTENT")
    frame[sensor_name] = Data(local_path=str(file_path))
    with pytest.raises(FrameError):
        segment_client.upload_frame(frame)

    frames = segment_client.list_frames()
    assert len(frames) == 10
    assert frames[0][sensor_name].path == "hello0.txt"
    assert frames[5][sensor_name].path == "goodbye5.txt"
    assert not frames[0][sensor_name].label
    # todo: match the input and output label

    gas_client.delete_dataset(dataset_name)
def test_create_dataset(self, accesskey, url):
    """A newly created dataset gets an id and can be fetched back by name."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()

    dataset_client = gas_client.create_dataset(dataset_name)
    assert dataset_client.dataset_id is not None
    gas_client.get_dataset(dataset_name)

    gas_client.delete_dataset(dataset_name)
def test_list_dataset_names(self, accesskey, url):
    """The created dataset shows up in the dataset name listing."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    assert dataset_name in gas_client.list_dataset_names()

    gas_client.delete_dataset(dataset_name)
def test_cache_dataset(self, accesskey, url, tmp_path):
    """Open cached data and masks, then verify the cache directory layout."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    # When uploading labels, the catalog must be uploaded first.
    dataset._catalog = Catalog.loads(_CATALOG)

    src_dir = tmp_path / "sub"
    semantic_dir = tmp_path / "semantic_mask"
    instance_dir = tmp_path / "instance_mask"
    for directory in (src_dir, semantic_dir, instance_dir):
        directory.mkdir()

    for index in range(_SEGMENT_LENGTH):
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text("CONTENT")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(_LABEL)

        semantic_mask = semantic_dir / f"semantic_mask{index}.png"
        semantic_mask.write_text("SEMANTIC_MASK")
        data.label.semantic_mask = SemanticMask(str(semantic_mask))

        instance_mask = instance_dir / f"instance_mask{index}.png"
        instance_mask.write_text("INSTANCE_MASK")
        data.label.instance_mask = InstanceMask(str(instance_mask))

        segment.append(data)

    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("commit-1")

    cache_path = tmp_path / "cache_test"
    dataset_client.enable_cache(str(cache_path))
    segment1 = Segment("Segment1", client=dataset_client)
    # Opening data and masks populates the local cache.
    for data in segment1:
        data.open()
        data.label.semantic_mask.open()
        data.label.instance_mask.open()

    segment_cache_path = (
        cache_path / dataset_client.dataset_id / dataset_client.status.commit_id / "Segment1"
    )
    semantic_mask_cache_path = segment_cache_path / "semantic_mask"
    instance_mask_cache_path = segment_cache_path / "instance_mask"

    for cache_dir, extension in (
        (segment_cache_path, "txt"),
        (semantic_mask_cache_path, "png"),
        (instance_mask_cache_path, "png"),
    ):
        assert set(cache_dir.glob(f"*.{extension}")) == {
            cache_dir / f"hello{i}.{extension}" for i in range(_SEGMENT_LENGTH)
        }

    gas_client.delete_dataset(dataset_name)
def test_create_public_dataset(self, accesskey, url):
    """Create a public dataset and verify the ``is_public`` flag is set."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name, is_public=True)
    assert dataset_client.dataset_id is not None

    dataset_client_get = gas_client.get_dataset(dataset_name)
    # ``is True`` instead of ``== True``: PEP 8 comparison to singletons.
    assert dataset_client_get.is_public is True

    gas_client.delete_dataset(dataset_name)
def test_update_dataset(self, accesskey, url):
    """Update a dataset's alias and visibility, then read them back."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    new_dataset_alias = f"{get_dataset_name()}alias"
    gas_client.update_dataset(name=dataset_name, alias=new_dataset_alias, is_public=True)

    fetched_client = gas_client.get_dataset(dataset_name)
    assert fetched_client.alias == new_dataset_alias
    assert fetched_client.is_public is True

    gas_client.delete_dataset(dataset_name)
def test_create_dataset_with_alias(self, accesskey, url):
    """Create a dataset with an alias and verify the alias round-trips."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    alias = f"{dataset_name}_alias"

    dataset_client = gas_client.create_dataset(dataset_name, alias=alias)
    assert dataset_client.dataset_id is not None
    assert gas_client.get_dataset(dataset_name).alias == alias

    gas_client.delete_dataset(dataset_name)
def test_first_upload_catalog(self, accesskey, url):
    """The first uploaded catalog is returned unchanged by ``get_catalog``."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)

    catalog_to_upload = Catalog.loads(CATALOG1)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(catalog_to_upload)
    assert dataset_client.get_catalog() == catalog_to_upload

    gas_client.delete_dataset(dataset_name)
def test_get_new_dataset(self, accesskey, url):
    """A freshly fetched dataset sits at the root commit on the default branch."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)

    dataset_client_get = gas_client.get_dataset(dataset_name)
    assert dataset_client_get.status.commit_id == ROOT_COMMIT_ID
    assert dataset_client_get.status.branch_name == DEFAULT_BRANCH
    assert dataset_client_get.dataset_id == dataset_client.dataset_id
    # ``is False`` instead of ``== False``: PEP 8 comparison to singletons.
    assert dataset_client_get.is_public is False

    gas_client.delete_dataset(dataset_name)
def test_create_dataset_with_config(self, accesskey, url, config_name):
    """Create a dataset bound to a cloud-storage config, skipping if absent."""
    gas_client = GAS(access_key=accesskey, url=url)
    try:
        gas_client.get_cloud_client(config_name)
    except ResourceNotExistError:
        pytest.skip(f"skip this case because there's no {config_name} config")

    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name, config_name=config_name)
    gas_client.get_dataset(dataset_name)
    gas_client.delete_dataset(dataset_name)
def test_get_data(self, accesskey, url, tmp_path, mask_file):
    """Upload labeled data with three mask kinds and read them back."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))
    segment_client = dataset_client.get_or_create_segment("segment1")
    src_dir = tmp_path / "sub"
    src_dir.mkdir()

    # Upload ten files, each carrying box labels plus three kinds of masks.
    for index in range(10):
        file_path = src_dir / f"hello{index}.txt"
        file_path.write_text(f"CONTENT{index}")
        data = Data(local_path=str(file_path))
        data.label = Label.loads(LABEL)

        semantic_mask = SemanticMask(str(mask_file))
        semantic_mask.all_attributes = {0: {"occluded": True}, 1: {"occluded": False}}
        data.label.semantic_mask = semantic_mask

        instance_mask = InstanceMask(str(mask_file))
        instance_mask.all_attributes = {0: {"occluded": True}, 1: {"occluded": False}}
        data.label.instance_mask = instance_mask

        panoptic_mask = PanopticMask(str(mask_file))
        panoptic_mask.all_category_ids = {100: 0, 101: 1}
        data.label.panoptic_mask = panoptic_mask

        segment_client.upload_data(data)

    for index in range(10):
        data = segment_client.get_data(f"hello{index}.txt")
        assert data.path == f"hello{index}.txt"
        assert data.label.box2d == Label.loads(LABEL).box2d

        stem = os.path.splitext(data.path)[0]

        remote_semantic_mask = data.label.semantic_mask
        semantic_mask = RemoteSemanticMask.from_response_body(SEMANTIC_MASK_LABEL)
        assert remote_semantic_mask.path == f"{stem}.png"
        assert remote_semantic_mask.all_attributes == semantic_mask.all_attributes

        remote_instance_mask = data.label.instance_mask
        instance_mask = RemoteInstanceMask.from_response_body(INSTANCE_MASK_LABEL)
        assert remote_instance_mask.path == f"{stem}.png"
        assert remote_instance_mask.all_attributes == instance_mask.all_attributes

        remote_panoptic_mask = data.label.panoptic_mask
        panoptic_mask = RemotePanopticMask.from_response_body(PANOPTIC_MASK_LABEL)
        assert remote_panoptic_mask.path == f"{stem}.png"
        assert remote_panoptic_mask.all_category_ids == panoptic_mask.all_category_ids

    gas_client.delete_dataset(dataset_name)
def test_delete_segment(self, accesskey, url):
    """A deleted segment can no longer be fetched."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.get_or_create_segment("segment1")

    dataset_client.delete_segment("segment1")
    with pytest.raises(ResourceNotExistError):
        dataset_client.get_segment("segment1")

    gas_client.delete_dataset(dataset_name)
def test_upload_segment_without_file(self, accesskey, url):
    """Uploading an empty segment produces an empty remote segment."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("test")

    dataset_client.upload_segment(Segment("segment1"))

    uploaded_segment = Segment(name="segment1", client=dataset_client)
    assert len(uploaded_segment) == 0

    gas_client.delete_dataset(dataset_name)
def test_catalog(self, accesskey, url):
    """A catalog can only be uploaded inside an open draft."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)

    # Not in a draft yet: the upload is rejected.
    with pytest.raises(StatusError):
        dataset_client.upload_catalog(Catalog.loads(CATALOG))

    dataset_client.create_draft("draft-1")
    dataset_client.upload_catalog(Catalog.loads(CATALOG))
    catalog = dataset_client.get_catalog()
    # todo: match the input and output catalog

    gas_client.delete_dataset(dataset_name)
def test_delete_tag(self, accesskey, url):
    """A deleted tag can no longer be fetched."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.commit("commit-1", tag="V1")

    dataset_client.delete_tag("V1")
    with pytest.raises(ResourceNotExistError):
        dataset_client.get_tag("V1")

    gas_client.delete_dataset(dataset_name)
def test_create_dataset(self, accesskey, url):
    """A fresh dataset starts on the default branch at the root commit."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)

    status = dataset_client.status
    assert status.commit_id == ROOT_COMMIT_ID
    assert status.draft_number is None
    assert not status.is_draft
    assert status.branch_name == DEFAULT_BRANCH
    assert dataset_client.name == dataset_name
    assert dataset_client.dataset_id is not None
    gas_client.get_dataset(dataset_name)

    gas_client.delete_dataset(dataset_name)
def test_rename_dataset(self, accesskey, url):
    """After renaming, the old name is gone and the new name resolves."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("v_test")
    dataset_client.commit(message="Test", tag="V1")

    new_dataset_name = f"{get_dataset_name()}new"
    gas_client.rename_dataset(name=dataset_name, new_name=new_dataset_name)

    with pytest.raises(ResourceNotExistError):
        gas_client.get_dataset(dataset_name)
    gas_client.get_dataset(new_dataset_name)

    gas_client.delete_dataset(new_dataset_name)
def test_upload_dataset_with_mask(self, accesskey, url, tmp_path, mask_file):
    """Upload a dataset carrying three mask kinds and read them back."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name)

    dataset = Dataset(name=dataset_name)
    segment = dataset.create_segment("Segment1")
    # When uploading labels, the catalog must be uploaded first.
    dataset._catalog = Catalog.loads(CATALOG_CONTENTS)
    src_dir = tmp_path / "sub"
    src_dir.mkdir()
    file_path = src_dir / "hello.txt"
    file_path.write_text("CONTENT")
    data = Data(local_path=str(file_path))

    semantic_mask = SemanticMask(str(mask_file))
    semantic_mask.all_attributes = {0: {"occluded": True}, 1: {"occluded": False}}
    data.label.semantic_mask = semantic_mask

    instance_mask = InstanceMask(str(mask_file))
    instance_mask.all_attributes = {0: {"occluded": True}, 1: {"occluded": False}}
    data.label.instance_mask = instance_mask

    panoptic_mask = PanopticMask(str(mask_file))
    panoptic_mask.all_category_ids = {100: 0, 101: 1}
    data.label.panoptic_mask = panoptic_mask

    segment.append(data)
    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("upload dataset with label")

    dataset = Dataset(dataset_name, gas_client)

    remote_semantic_mask = dataset[0][0].label.semantic_mask
    expected_semantic = RemoteSemanticMask.from_response_body(SEMANTIC_MASK_LABEL)
    assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
    assert remote_semantic_mask.path == expected_semantic.path
    assert remote_semantic_mask.all_attributes == expected_semantic.all_attributes

    remote_instance_mask = dataset[0][0].label.instance_mask
    expected_instance = RemoteInstanceMask.from_response_body(INSTANCE_MASK_LABEL)
    assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
    assert remote_instance_mask.path == expected_instance.path
    assert remote_instance_mask.all_attributes == expected_instance.all_attributes

    remote_panoptic_mask = dataset[0][0].label.panoptic_mask
    expected_panoptic = RemotePanopticMask.from_response_body(PANOPTIC_MASK_LABEL)
    assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
    assert remote_panoptic_mask.path == expected_panoptic.path
    assert remote_panoptic_mask.all_category_ids == expected_panoptic.all_category_ids

    gas_client.delete_dataset(dataset_name)
def test_get_or_create_segment(self, accesskey, url):
    """Segment creation requires an open draft and round-trips by name."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)

    # Not in a draft yet: segment creation is rejected.
    with pytest.raises(StatusError):
        dataset_client.get_or_create_segment("segment")

    dataset_client.create_draft("draft-1")
    segment_client = dataset_client.get_or_create_segment("segment")
    assert segment_client.status.is_draft
    assert segment_client.name == "segment"
    dataset_client.get_segment("segment")

    gas_client.delete_dataset(dataset_name)
def test_list_segment_names(self, accesskey, url):
    """Only created segments appear in the segment name listing."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.get_or_create_segment("segment1")
    dataset_client.get_or_create_segment("segment2")

    segment_names = dataset_client.list_segment_names()
    assert "segment1" in segment_names
    assert "segment2" in segment_names
    assert "segment3" not in segment_names

    gas_client.delete_dataset(dataset_name)
def test_upload_label_without_catalog(self, accesskey, url, tmp_path):
    """A bare file upload succeeds without any catalog in place."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    segment_client = dataset_client.get_or_create_segment("segment1")

    src_dir = tmp_path / "sub"
    src_dir.mkdir()
    file_path = src_dir / "hello0.txt"
    file_path.write_text("CONTENT")

    data = Data(local_path=str(file_path))
    segment_client.upload_file(data.path, data.target_remote_path)

    gas_client.delete_dataset(dataset_name)
def test_upload_fusion_segment_without_file(self, accesskey, url):
    """Uploading an empty fusion segment yields no data and no sensors."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
    dataset_client.create_draft("draft-1")

    dataset_client.upload_segment(FusionSegment("segment1"))

    uploaded_segment = FusionSegment(name="segment1", client=dataset_client)
    assert len(uploaded_segment) == 0
    assert not uploaded_segment.sensors

    gas_client.delete_dataset(dataset_name)
def test_list_branches(self, accesskey, url):
    """After one commit, exactly one branch exists pointing at it."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name)
    dataset_client.create_draft("draft-1")
    dataset_client.commit("commit-1")
    commit_1_id = dataset_client.status.commit_id

    branches = dataset_client.list_branches()
    assert len(branches) == 1
    assert branches[0].name == DEFAULT_BRANCH
    assert branches[0].commit_id == commit_1_id

    gas_client.delete_dataset(dataset_name)
def test_create_dataset_with_region(self, accesskey, url):
    """Supported regions succeed; an unsupported region is rejected."""
    gas_client = GAS(access_key=accesskey, url=url)
    for region in ("beijing", "hangzhou", "shanghai"):
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name, region=region)
        gas_client.get_dataset(dataset_name)
        gas_client.delete_dataset(dataset_name)

    dataset_name = get_dataset_name()
    with pytest.raises(ResponseError):
        gas_client.create_dataset(dataset_name, region="guangzhou")