Example #1
#!/usr/bin/env python3
#
# Copyright 2021 Graviti. Licensed under MIT License.
#

# pylint: disable=wrong-import-position
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
"""This file includes the python code of tag.rst."""
"""Authorize a Dataset Client Instance"""
from tensorbay import GAS

# Please visit `https://gas.graviti.cn/tensorbay/developer` to get the AccessKey.
gas = GAS("<YOUR_ACCESSKEY>")
dataset_client = gas.create_dataset("<DATASET_NAME>")
dataset_client.create_draft("draft-1")
# Add some data to the dataset.
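# Committing closes the current draft; the optional "tag" argument pins a human-readable name ("V1") to the new commit.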
dataset_client.commit("commit-1", tag="V1")
commit_id_1 = dataset_client.status.commit_id

dataset_client.create_draft("draft-2")
# Do some modifications to the dataset.
dataset_client.commit("commit-2", tag="V2")
commit_id_2 = dataset_client.status.commit_id
""""""
"""Create Branch"""
dataset_client.create_branch("T123")
""""""
"""Branch Name Will Be Stored"""
branch_name = dataset_client.status.branch_name
# branch_name = "T123"
Example #2
# pylint: disable=wrong-import-position
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
# pylint: disable=unsubscriptable-object
# flake8: noqa: F401
"""This file includes the python code of BSTLD.rst and read_dataset_class.rst."""
"""Authorize a Client Instance"""
from tensorbay import GAS

ACCESS_KEY = "Accesskey-*****"
gas = GAS(ACCESS_KEY)
""""""
"""Create Dataset"""
gas.create_dataset("BSTLD")
""""""
"""Organize Dataset / regular import"""
from tensorbay.dataset import Dataset
""""""
"""Organize dataset / import dataloader"""
from tensorbay.opendataset import BSTLD

dataset = BSTLD("path/to/dataset/directory")
""""""
"""Upload Dataset"""
dataset_client = gas.upload_dataset(dataset, jobs=8, skip_uploaded_files=True)
dataset_client.commit("initial commit")
""""""
"""Read Dataset / get dataset"""
dataset = Dataset("BSTLD", gas)
Example #3
# pylint: disable=wrong-import-position
# pylint: disable=wrong-import-order
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
# pylint: disable=unsubscriptable-object
# flake8: noqa: F401
"""This file includes the python code of LeedsSportsPose.rst."""
"""Authorize a Client Instance"""
from tensorbay import GAS

# Please visit `https://gas.graviti.cn/tensorbay/developer` to get the AccessKey.
gas = GAS("<YOUR_ACCESSKEY>")
""""""
"""Create Dataset"""
gas.create_dataset("LeedsSportsPose")
""""""
"""Organize Dataset / regular import"""
from tensorbay.dataset import Dataset
""""""
"""Organize dataset / import dataloader"""
from tensorbay.opendataset import LeedsSportsPose

dataset = LeedsSportsPose("<path/to/dataset>")
""""""
"""Upload Dataset"""
dataset_client = gas.upload_dataset(dataset, jobs=8)
dataset_client.commit("initial commit")
""""""
"""Read Dataset / get dataset"""
dataset = Dataset("LeedsSportsPose", gas)
Example #4
# pylint: disable=wrong-import-order
# pylint: disable=not-callable
# pylint: disable=ungrouped-imports
# pylint: disable=import-error
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
"""This file includes the python code of CADC.rst."""
"""Authorize a Client Instance"""
from tensorbay import GAS
from tensorbay.dataset import FusionDataset

ACCESS_KEY = "Accesskey-*****"
gas = GAS(ACCESS_KEY)
""""""
"""Create Fusion Dataset"""
gas.create_dataset("CADC", is_fusion=True)
""""""
"""List Dataset Names"""
gas.list_dataset_names()
""""""

from tensorbay.opendataset import CADC

fusion_dataset = CADC("path/to/dataset/directory")
"""Upload Fusion Dataset"""
# fusion_dataset is the one initialized in the "Organize Fusion Dataset" section.
fusion_dataset_client = gas.upload_dataset(fusion_dataset, jobs=8)
fusion_dataset_client.commit("initial commit")
""""""
"""Read Fusion Dataset / get fusion dataset"""
fusion_dataset = FusionDataset("CADC", gas)
Example #5
# pylint: disable=wrong-import-position
# pylint: disable=wrong-import-order
# pylint: disable=not-callable
# pylint: disable=ungrouped-imports
# pylint: disable=import-error
# pylint: disable=pointless-string-statement
"""This file includes the python code of getting_started_with_tensorbay.rst."""
"""Authorize a Client Instance"""
from tensorbay import GAS

gas = GAS("<YOUR_ACCESSKEY>")
""""""
"""Create a Dataset"""
gas.create_dataset("DatasetName")
""""""
"""List Dataset Names"""
dataset_names = gas.list_dataset_names()
""""""
"""Upload Images to the Dataset"""
from tensorbay.dataset import Data, Dataset

# Organize the local dataset by the "Dataset" class before uploading.
dataset = Dataset("DatasetName")

# TensorBay uses "segment" to separate different parts in a dataset.
segment = dataset.create_segment()

segment.append(Data("0000001.jpg"))
segment.append(Data("0000002.jpg"))
Example #6
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
# pylint: disable=unsubscriptable-object
# flake8: noqa: F401

"""This file includes the python code of THCHS.rst and read_dataset_class.rst."""

"""Authorize a Client Instance"""
from tensorbay import GAS

# Please visit `https://gas.graviti.cn/tensorbay/developer` to get the AccessKey.
gas = GAS("<YOUR_ACCESSKEY>")
""""""

"""Create Dataset"""
gas.create_dataset("THCHS-30")
""""""

"""Organize Dataset / regular import"""
from tensorbay.dataset import Dataset

""""""

"""Organize dataset / import dataloader"""
from tensorbay.opendataset import THCHS30

dataset = THCHS30("<path/to/dataset>")
""""""

"""Upload Dataset"""
dataset_client = gas.upload_dataset(dataset, jobs=8)
Example #7
#!/usr/bin/env python3
#
# Copyright 2021 Graviti. Licensed under MIT License.
#

# pylint: disable=wrong-import-position
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
"""This file includes the python code of merged_dataset.rst."""
"""Create Target Dataset"""
from tensorbay import GAS

gas = GAS("<YOUR_ACCESSKEY>")
dataset_client = gas.create_dataset("mergedDataset")
dataset_client.create_draft("merge dataset")
""""""
"""Copy Segment From Pet"""
pet_dataset_client = gas.get_dataset("OxfordIIITPet")
dataset_client.copy_segment("train",
                            target_name="trainval",
                            source_client=pet_dataset_client)
dataset_client.copy_segment("test", source_client=pet_dataset_client)
""""""
"""Upload Catalog"""
dataset_client.upload_catalog(pet_dataset_client.get_catalog())
""""""
"""Unify Category"""
from tensorbay.dataset import Data

segment_client = dataset_client.get_segment("train")
for remote_data in segment_client.list_data():
Example #8
# pylint: disable=not-callable
# pylint: disable=ungrouped-imports
# pylint: disable=import-error
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
# pylint: disable=unused-import
# flake8: noqa: F401
"""This file includes the python code of NeolixOD.rst."""
"""Authorize a Client Instance"""
from tensorbay import GAS

ACCESS_KEY = "Accesskey-*****"
gas = GAS(ACCESS_KEY)
""""""
"""Create Dataset"""
gas.create_dataset("NeolixOD")
""""""
"""Organize Dataset / regular import"""
from tensorbay.dataset import Data, Dataset
from tensorbay.label import LabeledBox3D
""""""
"""Organize dataset / import dataloader"""
from tensorbay.opendataset import NeolixOD

dataset = NeolixOD("path/to/dataset/directory")
""""""
"""Upload Dataset"""
dataset_client = gas.upload_dataset(dataset, jobs=8)
dataset_client.commit("initial commit")
""""""
"""Read Dataset / get dataset"""
Example #9
# pylint: disable=wrong-import-position
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
# pylint: disable=unsubscriptable-object
# flake8: noqa: F401
"""This files includes the python code example in dogsvscats.rst."""
"""Authorize a Client Instance"""
from tensorbay import GAS

# Please visit `https://gas.graviti.com/tensorbay/developer` to get the AccessKey.
gas = GAS("<YOUR_ACCESSKEY>")
""""""
"""Create Dataset"""
gas.create_dataset("DogsVsCats")
""""""
"""Organize Dataset / regular import"""
from tensorbay.dataset import Dataset
""""""
"""Organize dataset / import dataloader"""
from tensorbay.opendataset import DogsVsCats

dataset = DogsVsCats("<path/to/dataset>")
""""""
"""Upload Dataset"""
dataset_client = gas.upload_dataset(dataset, jobs=8)
dataset_client.commit("initial commit")
""""""
"""Read Dataset / get dataset"""
dataset = Dataset("DogsVsCats", gas)
Example #10
import json

from tensorbay.dataset import Dataset
from tensorbay.label import Classification

# "gas" (an authorized GAS client) and "cloud_client" (a cloud storage client) are
# assumed to have been created earlier in the cloud storage example; they are not shown here.

# Use AuthData to organize a dataset by the "Dataset" class before importing.
dataset = Dataset("<DATASET_NAME>")

# TensorBay uses "segment" to separate different parts in a dataset.
segment = dataset.create_segment()

images = cloud_client.list_auth_data("<data/images/>")
labels = cloud_client.list_auth_data("<data/labels/>")

# Pair each image with its label file, parse the label JSON and attach it as a Classification.
for auth_data, label in zip(images, labels):
    with label.open() as fp:
        auth_data.label.classification = Classification.loads(json.load(fp))
    segment.append(auth_data)

dataset_client = gas.upload_dataset(dataset, jobs=8)
""""""
"""Create local storage config"""
gas.create_local_storage_config(
    name="<LOCAL_STORAGE_CONFIG>",
    file_path="<path/to/dataset>",
    endpoint="<external IP address of the local storage service>",
)
""""""
"""Create authorized local storage dataset"""
dataset_client = gas.create_dataset("<DATASET_NAME>",
                                    config_name="<LOCAL_STORAGE_CONFIG>")
""""""
Example #11
# pylint: disable=wrong-import-position
# pylint: disable=wrong-import-order
# pylint: disable=not-callable
# pylint: disable=ungrouped-imports
# pylint: disable=import-error
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
"""This file includes the python code of diff.rst."""
"""Authorize a Dataset Client Instance"""
from tensorbay import GAS

ACCESS_KEY = "Accesskey-*****"
gas = GAS(ACCESS_KEY)
dataset_client = gas.create_dataset("DatasetName")
dataset_client.create_draft("draft-1")
# Add some data to the dataset.
dataset_client.commit("commit-1", tag="V1")
commit_id_1 = dataset_client.status.commit_id

dataset_client.create_draft("draft-2")
# Do some modifications to the dataset.
dataset_client.commit("commit-2", tag="V2")
commit_id_2 = dataset_client.status.commit_id

dataset_client.create_draft("draft-3")
draft_number_3 = dataset_client.status.draft_number
head = ""
""""""
"""Get Diff"""
Example #12
# pylint: disable=invalid-name
# pylint: disable=unsubscriptable-object
# flake8: noqa: F401
# type: ignore[attr-defined]

"""This file includes the python code of VOC2012Segmentation.rst."""

"""Authorize a Client Instance"""
from tensorbay import GAS

# Please visit `https://gas.graviti.com/tensorbay/developer` to get the AccessKey.
gas = GAS("<YOUR_ACCESSKEY>")
""""""

"""Create Dataset"""
gas.create_dataset("VOC2012Segmentation")
""""""

"""Organize Dataset / regular import"""
from tensorbay.dataset import Dataset

""""""

"""Organize dataset / import dataloader"""
from tensorbay.opendataset import VOC2012Segmentation

dataset = VOC2012Segmentation("<path/to/dataset>")
""""""

"""Upload Dataset"""
dataset_client = gas.upload_dataset(dataset, jobs=8)
Example #13
# pylint: disable=pointless-string-statement
# pylint: disable=invalid-name
# pylint: disable=unused-import
# flake8: noqa: F401

"""This file includes the python code of NewsGroups.rst and read_dataset_class.rst."""

"""Authorize a Client Instance"""
from tensorbay import GAS

ACCESS_KEY = "Accesskey-*****"
gas = GAS(ACCESS_KEY)
""""""

"""Create Dataset"""
gas.create_dataset("Newsgroups20")
""""""

"""Organize Dataset / regular import"""
from tensorbay.dataset import Data, Dataset
from tensorbay.label import LabeledBox2D

""""""

"""Organize dataset / import dataloader"""
from tensorbay.opendataset import Newsgroups20

dataset = Newsgroups20("path/to/dataset/directory")
""""""

"""Upload Dataset"""
Example #14
# Copyright 2021 Graviti. Licensed under MIT License.
#

# pylint: disable=wrong-import-position
# pylint: disable=wrong-import-order
# pylint: disable=pointless-string-statement
# pylint: disable=not-an-iterable
# pylint: disable=invalid-name
"""This file includes the python code of getting_started_with_tensorbay.rst."""
"""Authorize a Client Instance"""
from tensorbay import GAS

gas = GAS("<YOUR_ACCESSKEY>")
""""""
"""Create a Dataset"""
gas.create_dataset("<DATASET_NAME>")
""""""
"""List Dataset Names"""
dataset_names = gas.list_dataset_names()
""""""
"""Upload Images to the Dataset"""
from tensorbay.dataset import Data, Dataset

# Organize the local dataset by the "Dataset" class before uploading.
dataset = Dataset("<DATASET_NAME>")

# TensorBay uses "segment" to separate different parts in a dataset.
segment = dataset.create_segment()

segment.append(Data("0000001.jpg"))
segment.append(Data("0000002.jpg"))
Example #15
    def test_upload_dataset_with_mask(self, accesskey, url, tmp_path,
                                      mask_file):
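        # End-to-end check: build a dataset with semantic, instance and panoptic masks,
        # upload it, then read it back and verify the masks against the expected fixtures.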
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name)

        dataset = Dataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        # When uploading labels, attach the catalog to the dataset first.
        dataset._catalog = Catalog.loads(CATALOG_CONTENTS)

        path = tmp_path / "sub"
        path.mkdir()
        local_path = path / "hello.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        remote_semantic_mask = SemanticMask(str(mask_file))
        remote_semantic_mask.all_attributes = {
            0: {
                "occluded": True
            },
            1: {
                "occluded": False
            }
        }
        data.label.semantic_mask = remote_semantic_mask

        instance_mask = InstanceMask(str(mask_file))
        instance_mask.all_attributes = {
            0: {
                "occluded": True
            },
            1: {
                "occluded": False
            }
        }
        data.label.instance_mask = instance_mask

        panoptic_mask = PanopticMask(str(mask_file))
        # all_category_ids maps each instance id in the panoptic mask to its category id.
        panoptic_mask.all_category_ids = {100: 0, 101: 1}
        data.label.panoptic_mask = panoptic_mask
        segment.append(data)

        dataset_client = gas_client.upload_dataset(dataset)
        dataset_client.commit("upload dataset with label")
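        # Re-read the dataset from the server and check that every mask round-trips intact.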
        dataset = Dataset(dataset_name, gas_client)
        remote_semantic_mask = dataset[0][0].label.semantic_mask
        semantic_mask = RemoteSemanticMask.from_response_body(
            SEMANTIC_MASK_LABEL)
        assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
        assert remote_semantic_mask.path == semantic_mask.path
        assert remote_semantic_mask.all_attributes == semantic_mask.all_attributes

        remote_instance_mask = dataset[0][0].label.instance_mask
        instance_mask = RemoteInstanceMask.from_response_body(
            INSTANCE_MASK_LABEL)
        assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
        assert remote_instance_mask.path == instance_mask.path
        assert remote_instance_mask.all_attributes == instance_mask.all_attributes

        remote_panoptic_mask = dataset[0][0].label.panoptic_mask
        panoptic_mask = RemotePanopticMask.from_response_body(
            PANOPTIC_MASK_LABEL)
        assert dataset.catalog == Catalog.loads(CATALOG_CONTENTS)
        assert remote_panoptic_mask.path == panoptic_mask.path
        assert remote_panoptic_mask.all_category_ids == panoptic_mask.all_category_ids

        gas_client.delete_dataset(dataset_name)