# Example #1
def init_dataset_client(accesskey, url, tmp_path_factory):
    """Create and populate a fusion dataset on TensorBay, yielding its client.

    Builds one frame per segment in ``SEGMENTS_NAME``, each frame holding one
    labeled camera file per entry in ``LABEL``, uploads and commits the
    dataset, then deletes it on teardown.

    Arguments:
        accesskey: TensorBay access key.
        url: TensorBay gateway URL.
        tmp_path_factory: pytest fixture used to create the local files.

    Yields:
        The dataset client of the committed fusion dataset.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name, is_fusion=True)

    dataset = FusionDataset(name=dataset_name)
    dataset._catalog = Catalog.loads(CATALOG)
    path = tmp_path_factory.mktemp("sub")
    os.makedirs(path, exist_ok=True)
    for segment_name in SEGMENTS_NAME:
        segment = dataset.create_segment(segment_name)
        frame = Frame()
        for camera_name, label in LABEL.items():
            camera = Camera(camera_name)
            translation = Vector3D(1, 2, 3)
            # FIX: the original called set_extrinsics twice with the same
            # translation; the redundant duplicate call was removed.
            camera.set_extrinsics(translation=translation)
            camera.set_camera_matrix(fx=1.1, fy=1.1, cx=1.1, cy=1.1)
            camera.set_distortion_coefficients(p1=1.2, p2=1.2, k1=1.2, k2=1.2)
            segment.sensors.add(camera)
            local_path = path / f"{segment_name}_{camera_name}.txt"
            local_path.write_text(f"CONTENT_{segment_name}_{camera_name}")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(label)
            frame[camera_name] = data
        segment.append(frame)
    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("commit-1")

    yield dataset_client

    # Teardown: remove the dataset created for this fixture.
    gas_client.delete_dataset(dataset_name)
    def test_import_cloud_files_to_fusiondataset(self, accesskey, url, config_name):
        """Import cloud-hosted files into a fusion dataset and verify the upload."""
        client = GAS(access_key=accesskey, url=url)
        try:
            storage = client.get_cloud_client(config_name)
        except ResourceNotExistError:
            pytest.skip(f"skip this case because there's no {config_name} config")

        # Cap the test at five cloud files.
        cloud_files = storage.list_auth_data("tests")[:5]
        dataset_name = get_dataset_name()
        dataset_client = client.create_dataset(dataset_name, True, config_name=config_name)

        dataset = FusionDataset(name=dataset_name)
        segment = dataset.create_segment("Segment1")
        segment.sensors.add(Lidar("LIDAR"))

        # Wrap every cloud file in its own single-sensor frame.
        for item in cloud_files:
            item.label.classification = Classification("cat", attributes={"color": "red"})
            frame = Frame()
            frame["LIDAR"] = item
            segment.append(frame)

        dataset_client = client.upload_dataset(dataset, jobs=5)
        dataset_client.commit("import data")

        uploaded = FusionSegment("Segment1", client=dataset_client)
        assert len(uploaded) == len(segment)
        first = uploaded[0]["LIDAR"]
        assert first.path == segment[0]["LIDAR"].path.split("/")[-1]
        assert first.label.classification.category == "cat"
        assert first.label.classification.attributes["color"] == "red"
        assert len(cloud_files) == len(segment)

        client.delete_dataset(dataset_name)
# Example #3
    def test_copy_fusion_segment(self, accesskey, url, tmp_path):
        """Copy a fusion segment within one dataset and verify the duplicate."""
        client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        client.create_dataset(dataset_name, is_fusion=True)
        dataset = FusionDataset(name=dataset_name)
        source = dataset.create_segment("Segment1")
        source.sensors.add(Sensor.loads(LIDAR_DATA))
        dataset._catalog = Catalog.loads(CATALOG)
        workdir = tmp_path / "sub"
        workdir.mkdir()
        sensor_name = LIDAR_DATA["name"]
        # Build ten single-sensor frames backed by small local files.
        for index in range(10):
            file_path = workdir / f"hello{index}.txt"
            file_path.write_text("CONTENT")
            data = Data(local_path=str(file_path))
            data.label = Label.loads(LABEL)
            frame = Frame()
            frame[sensor_name] = data
            source.append(frame)

        dataset_client = client.upload_dataset(dataset)
        segment_client = dataset_client.copy_segment("Segment1", "Segment2")
        assert segment_client.name == "Segment2"

        # "push" is not a valid copy strategy and must be rejected.
        with pytest.raises(InvalidParamsError):
            dataset_client.copy_segment("Segment1", "Segment3", strategy="push")

        copied = FusionSegment("Segment2", client=dataset_client)
        first = copied[0][sensor_name]
        assert first.path == "hello0.txt"
        assert first.path == source[0][sensor_name].target_remote_path
        assert first.label

        client.delete_dataset(dataset_name)
    def test_upload_fusion_dataset_after_commit(self, accesskey, url, tmp_path):
        """Upload and commit a fusion dataset, then verify the remote copy.

        Compares notes, catalog, sensors, and every frame's remote path and
        label against the locally-built dataset.
        """
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_dataset_name()
        gas_client.create_dataset(dataset_name, is_fusion=True)

        dataset = FusionDataset(name=dataset_name)
        dataset._catalog = Catalog.loads(CATALOG)
        dataset.notes.is_continuous = True
        segment = dataset.create_segment("Segment1")
        segment.sensors = Sensors.loads([LIDAR_DATA])

        path = tmp_path / "sub"
        path.mkdir()
        # Build ten frames, each holding one labeled local file for the lidar.
        for i in range(10):
            remote_frame = Frame()
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            remote_frame[LIDAR_NAME] = data
            segment.append(remote_frame)

        dataset_client = gas_client.upload_dataset(dataset)
        dataset_client.commit("test")
        dataset_remote = FusionDataset(name=dataset_name, gas=gas_client)
        assert dataset_remote.notes.is_continuous == dataset.notes.is_continuous
        assert dataset_remote.catalog == dataset.catalog

        segment_remote = dataset_remote[0]
        assert len(segment_remote) == len(segment)
        assert segment_remote.sensors == segment.sensors
        for remote_frame, frame in zip(segment_remote, segment):
            assert remote_frame[LIDAR_NAME].path == frame[LIDAR_NAME].target_remote_path
            # FIX: index with LIDAR_NAME like every other access in this test,
            # instead of the original's inconsistent LIDAR_DATA["name"] lookup.
            assert remote_frame[LIDAR_NAME].label == frame[LIDAR_NAME].label

        gas_client.delete_dataset(dataset_name)
# Example #5
def CADC(path: str) -> FusionDataset:
    """Load the `CADC <http://cadcd.uwaterloo.ca/index.html>`_ dataset.

    The expected directory layout is::

        <path>
            2018_03_06/
                0001/
                    3d_ann.json
                    labeled/
                        image_00/
                            data/
                                0000000000.png
                                0000000001.png
                                ...
                            timestamps.txt
                        ...
                        image_07/
                            data/
                            timestamps.txt
                        lidar_points/
                            data/
                            timestamps.txt
                        novatel/
                            data/
                            dataformat.txt
                            timestamps.txt
                ...
                0018/
                calib/
                    00.yaml
                    01.yaml
                    02.yaml
                    03.yaml
                    04.yaml
                    05.yaml
                    06.yaml
                    07.yaml
                    extrinsics.yaml
                    README.txt
            2018_03_07/
            2019_02_27/

    Arguments:
        path: The root directory of the dataset.

    Returns:
        Loaded `~tensorbay.dataset.dataset.FusionDataset` instance.

    """
    root = os.path.abspath(os.path.expanduser(path))

    dataset = FusionDataset(DATASET_NAME)
    dataset.notes.is_continuous = True
    dataset.load_catalog(os.path.join(os.path.dirname(__file__), "catalog.json"))

    for date in os.listdir(root):
        date_dir = os.path.join(root, date)
        # One calibration set is shared by every drive recorded on this date.
        sensors = _load_sensors(os.path.join(date_dir, "calib"))
        for drive in os.listdir(date_dir):
            # "calib" holds calibration files, not a drive segment.
            if drive == "calib":
                continue

            segment = dataset.create_segment(f"{date}-{drive}")
            segment.sensors = sensors
            drive_dir = os.path.join(root, date, drive)
            labeled_dir = os.path.join(drive_dir, "labeled")

            with open(os.path.join(drive_dir, "3d_ann.json"), encoding="utf-8") as fp:
                # Only the first line of the file holds the JSON body.
                annotations = json.loads(fp.readline())
            timestamps = _load_timestamps(sensors, labeled_dir)
            for frame_index, annotation in enumerate(annotations):
                frame = _load_frame(sensors, labeled_dir, frame_index, annotation, timestamps)
                segment.append(frame)

    return dataset
 def test_create_fusion_segment(self):
     """Creating a segment on a fusion dataset yields a named FusionSegment."""
     fusion_dataset = FusionDataset("test_name")
     created = fusion_dataset.create_segment("train")
     assert created.name == "train"
     assert isinstance(created, FusionSegment)