    def test_create_dataset_with_config(self, accesskey, url, config_name):
        gas_client = GAS(access_key=accesskey, url=url)
        try:
            gas_client.get_cloud_client(config_name)
        except ResourceNotExistError:
            pytest.skip(f"skip this case because there's no {config_name} config")

        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name, config_name=config_name)
        gas_client.get_dataset(dataset_name)

        gas_client.delete_dataset(dataset_name)
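    # get_dataset_name is a helper defined elsewhere in the test module; a
    # hypothetical minimal version would simply return a unique name, e.g.:
    #
    #     from uuid import uuid4
    #
    #     def get_dataset_name():
    #         return "dataset_" + uuid4().hex[:12]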
Example #2
    def test_import_cloud_files(self, accesskey, url, config_name):
        gas_client = GAS(access_key=accesskey, url=url)
        try:
            cloud_client = gas_client.get_cloud_client(config_name)
        except ResourceNotExistError:
            pytest.skip(f"skip this case because there's no {config_name} config")

        # List the data already hosted under the "tests" prefix of the cloud config.
        auth_data = cloud_client.list_auth_data("tests")
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name, config_name=config_name)

        dataset = Dataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        for data in auth_data:
            segment.append(data)

        dataset_client = gas_client.upload_dataset(dataset, jobs=5)
        dataset_client.commit("import data")

        segment1 = Segment("Segment1", client=dataset_client)
        assert len(segment1) == len(segment)
        assert segment1[0].path == segment[0].path.split("/")[-1]
        assert not segment1[0].label

        assert len(auth_data) == len(segment)

        gas_client.delete_dataset(dataset_name)
Example #3
    def test_import_cloud_files_to_fusiondataset(self, accesskey, url, config_name):
        gas_client = GAS(access_key=accesskey, url=url)
        try:
            cloud_client = gas_client.get_cloud_client(config_name)
        except ResourceNotExistError:
            pytest.skip(f"skip this case because there's no {config_name} config")

        auth_data = cloud_client.list_auth_data("tests")[:5]
        dataset_name = get_dataset_name()
        # The second argument (True) creates a fusion dataset.
        gas_client.create_dataset(dataset_name, True, config_name=config_name)

        dataset = FusionDataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        lidar = Lidar("LIDAR")
        segment.sensors.add(lidar)

        # Each frame maps a sensor name to the data captured by that sensor.
        for data in auth_data:
            data.label.classification = Classification("cat", attributes={"color": "red"})
            frame = Frame()
            frame["LIDAR"] = data
            segment.append(frame)

        dataset_client = gas_client.upload_dataset(dataset, jobs=5)
        dataset_client.commit("import data")

        segment1 = FusionSegment("Segment1", client=dataset_client)
        assert len(segment1) == len(segment)
        assert segment1[0]["LIDAR"].path == segment[0]["LIDAR"].path.split("/")[-1]
        assert segment1[0]["LIDAR"].label.classification.category == "cat"
        assert segment1[0]["LIDAR"].label.classification.attributes["color"] == "red"
        assert len(auth_data) == len(segment)

        gas_client.delete_dataset(dataset_name)
Example #4
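    # LIDAR_DATA is a constant defined elsewhere in the test module; a minimal,
    # hypothetical contents dict accepted by Sensor.loads might look like:
    #
    #     LIDAR_DATA = {"name": "LIDAR", "type": "LIDAR"}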
    def test_upload_frame_with_auth_data(self, accesskey, url, config_name):
        gas_client = GAS(access_key=accesskey, url=url)
        try:
            cloud_client = gas_client.get_cloud_client(config_name)
        except ResourceNotExistError:
            pytest.skip(f"skip this case because there's no {config_name} config")

        auth_data = cloud_client.list_auth_data("tests")[:5]
        dataset_name = get_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name, True, config_name=config_name)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        segment_client.upload_sensor(Sensor.loads(LIDAR_DATA))
        # Upload one frame per cloud file, keyed by the sensor name.
        for index, data in enumerate(auth_data):
            frame = Frame()
            frame[LIDAR_DATA["name"]] = data
            segment_client.upload_frame(frame, timestamp=index)

        frames = segment_client.list_frames()
        assert len(frames) == len(auth_data)
        assert frames[0][LIDAR_DATA["name"]].path == auth_data[0].path.split("/")[-1]

        gas_client.delete_dataset(dataset_name)
Example #5
#!/usr/bin/env python3
#
# Copyright 2021 Graviti. Licensed under MIT License.
#

# pylint: disable=wrong-import-position
# pylint: disable=wrong-import-order
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
"""This file includes the python code of auth cloud storage import."""
"""Get cloud client"""
from tensorbay import GAS

# Please visit `https://gas.graviti.com/tensorbay/developer` to get the AccessKey.
gas = GAS("<YOUR_ACCESSKEY>")
cloud_client = gas.get_cloud_client("<CONFIG_NAME>")
""""""
"""Create storage config"""
gas.create_oss_storage_config(
    "<OSS_CONFIG_NAME>",
    "<path/to/dataset>",
    endpoint="<YOUR_ENDPOINT>",  # like oss-cn-qingdao.aliyuncs.com
    accesskey_id="<YOUR_ACCESSKEYID>",
    accesskey_secret="<YOUR_ACCESSKEYSECRET>",
    bucket_name="<YOUR_BUCKETNAME>",
)
""""""
"""Import dataset from cloud platform to the authorized storage dataset"""
import json

from tensorbay.dataset import Dataset
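
# A minimal sketch of the import flow, following the test_import_cloud_files
# example above; "<DATASET_NAME>" and the "tests" prefix are placeholders.
auth_data = cloud_client.list_auth_data("tests")

dataset = Dataset("<DATASET_NAME>")
segment = dataset.create_segment("Segment1")
for data in auth_data:
    segment.append(data)

# Create a dataset bound to the storage config, then upload the cloud files
# into it and commit.
gas.create_dataset("<DATASET_NAME>", config_name="<CONFIG_NAME>")
dataset_client = gas.upload_dataset(dataset, jobs=5)
dataset_client.commit("import data")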