def test_dumps(self):
    """A Sensors collection built from one of each sensor type dumps to the fixture."""
    sensors = Sensors()
    for sensor in (
        Lidar.loads(_LIDAR_DATA),
        Radar.loads(_RADAR_DATA),
        Camera.loads(_CAMERA_DATA),
        FisheyeCamera.loads(_FISHEYE_CAMERA_DATA),
    ):
        sensors.add(sensor)
    assert sensors.dumps() == _SENSORS_DATA
def test_loads(self):
    """Loading the fixture yields sensors retrievable by name and equal to individual loads."""
    sensors = Sensors.loads(_SENSORS_DATA)
    # Each entry in the loaded collection must match the sensor built
    # directly from its own fixture dict, keyed by the fixture's "name".
    expectations = (
        (Lidar, _LIDAR_DATA),
        (Radar, _RADAR_DATA),
        (Camera, _CAMERA_DATA),
        (FisheyeCamera, _FISHEYE_CAMERA_DATA),
    )
    for sensor_type, fixture in expectations:
        assert sensors[fixture["name"]] == sensor_type.loads(fixture)
def _load_timestamps(sensors: Sensors, data_path: str) -> Dict[str, List[str]]:
    """Read the per-sensor timestamp files under *data_path*.

    Arguments:
        sensors: The sensor collection whose names select the data folders.
        data_path: Root directory containing the per-sensor data folders.

    Returns:
        A dict mapping each sensor name to the raw lines of its
        ``timestamps.txt`` file (newlines preserved by ``readlines``).
    """
    result: Dict[str, List[str]] = {}
    for sensor_name in sensors.keys():
        # The lidar's timestamps live under "lidar_points"; a camera named
        # e.g. "CAM00" reads from the matching "image_00" folder.
        folder = "lidar_points" if sensor_name == "LIDAR" else f"image_{sensor_name[-2:]}"
        timestamp_file = os.path.join(data_path, folder, "timestamps.txt")
        with open(timestamp_file, encoding="utf-8") as fp:
            result[sensor_name] = fp.readlines()
    return result
def test_sensor(self, accesskey, url):
    """Upload, list and delete sensors on a fusion segment via the service client."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
    dataset_client.create_draft("draft-1")
    segment_client = dataset_client.get_or_create_segment("segment1")

    # Upload every fixture sensor, then read them all back.
    for sensor_data in SENSORS_DATA:
        segment_client.upload_sensor(Sensor.loads(sensor_data))
    assert segment_client.get_sensors() == Sensors.loads(SENSORS_DATA)

    # Deleting the first sensor leaves exactly the remaining four.
    segment_client.delete_sensor(SENSORS_DATA[0]["name"])
    remaining = segment_client.get_sensors()
    assert len(remaining) == 4
    assert remaining == Sensors.loads(SENSORS_DATA[1:])

    gas_client.delete_dataset(dataset_name)
def __init__(
    self, name: str = "default", client: Optional["FusionDatasetClient"] = None
) -> None:
    """Initialize the segment, either empty or backed by a remote client.

    Arguments:
        name: The segment name.
        client: When given, frames are listed from the remote segment of
            this client; otherwise the segment starts with no frames.
    """
    super().__init__(name)
    self._data: MutableSequence[Frame]
    if not client:
        # Local, in-memory segment.
        self._data = []
    else:
        # Remote-backed segment: fetch the frame listing eagerly.
        self._client = client.get_segment(name)
        self._data = self._client.list_frames()
        self._repr_non_empty = True
    self._sensors = Sensors()
def _load_frame(
    sensors: Sensors,
    data_path: str,
    frame_index: int,
    annotation: Dict[str, Any],
    timestamps: Dict[str, List[str]],
) -> Frame:
    """Assemble one Frame with a Data entry per sensor.

    Arguments:
        sensors: The sensors whose names key the frame.
        data_path: Root directory of the raw data.
        frame_index: Zero-based index of the frame within the segment.
        annotation: Annotation dict; its "cuboids" entry provides the 3D boxes.
        timestamps: Per-sensor timestamp lines, as returned by the
            timestamp loader.

    Returns:
        The populated frame.
    """
    frame = Frame()
    # Data files are named by the zero-padded 10-digit frame index,
    # e.g. "0000000001.bin" — the stem is the same for every sensor.
    stem = f"{frame_index:010}"
    for sensor_name in sensors.keys():
        # Timestamp lines look like "2018-03-06 15:02:33.000000000";
        # the first 23 characters keep microsecond precision for strptime.
        timestamp = datetime.strptime(
            timestamps[sensor_name][frame_index][:23], "%Y-%m-%d %H:%M:%S.%f"
        ).timestamp()
        if sensor_name == "LIDAR":
            data = Data(
                os.path.join(data_path, "lidar_points", "data", f"{stem}.bin"),
                timestamp=timestamp,
            )
        else:
            # A camera named "CAM00" reads images from folder "image_00".
            camera_folder = f"image_{sensor_name[-2:]}"
            image_file = f"{stem}.png"
            data = Data(
                os.path.join(data_path, camera_folder, "data", image_file),
                target_remote_path=f"{camera_folder}-{image_file}",
                timestamp=timestamp,
            )
        data.label.box3d = _load_labels(annotation["cuboids"])
        frame[sensor_name] = data
    return frame
def test_cache_fusion_dataset(self, accesskey, url, tmp_path):
    """Opening cached fusion data writes exactly one cache file per uploaded data file."""
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
    dataset_client.create_draft("draft-1")

    # Build a local fusion segment with one text file per sensor per frame.
    segment = FusionSegment("Segment1")
    segment.sensors = Sensors.loads(_SENSORS_DATA)
    sensor_dirs = {"Lidar1": tmp_path / "lidar", "Camera1": tmp_path / "camera"}
    for directory in sensor_dirs.values():
        directory.mkdir()
    for index in range(_SEGMENT_LENGTH):
        frame = Frame()
        for sensor_data in _SENSORS_DATA:
            sensor_name = sensor_data["name"]
            file_path = sensor_dirs[sensor_name] / f"{sensor_name}{index}.txt"
            file_path.write_text("CONTENT")
            frame[sensor_name] = Data(local_path=str(file_path))
        segment.append(frame)
    dataset_client.upload_segment(segment)
    dataset_client.commit("commit-1")

    # Enable caching, then touch every data object so it gets cached.
    cache_path = tmp_path / "cache_test"
    dataset_client.enable_cache(str(cache_path))
    cached_segment = FusionSegment(name="Segment1", client=dataset_client)
    for frame in cached_segment:
        for data in frame.values():
            data.open()

    # The cache layout is <cache>/<dataset id>/<commit id>/<segment name>/.
    segment_cache_path = (
        cache_path / dataset_client.dataset_id / dataset_client.status.commit_id / "Segment1"
    )
    expected_files = {
        segment_cache_path / f'{sensor_data["name"]}{index}.txt'
        for index in range(_SEGMENT_LENGTH)
        for sensor_data in _SENSORS_DATA
    }
    assert set(segment_cache_path.glob("*.txt")) == expected_files

    gas_client.delete_dataset(dataset_name)
def test_upload_fusion_dataset_after_commit(self, accesskey, url, tmp_path):
    """Upload a fusion dataset, commit, and verify the remote copy matches the local one.

    Checks notes, catalog, segment length, sensors, and the per-frame data
    path and label after the commit.
    """
    gas_client = GAS(access_key=accesskey, url=url)
    dataset_name = get_dataset_name()
    gas_client.create_dataset(dataset_name, is_fusion=True)

    dataset = FusionDataset(name=dataset_name)
    dataset._catalog = Catalog.loads(CATALOG)
    dataset.notes.is_continuous = True
    segment = dataset.create_segment("Segment1")
    segment.sensors = Sensors.loads([LIDAR_DATA])

    path = tmp_path / "sub"
    path.mkdir()
    for i in range(10):
        remote_frame = Frame()
        local_path = path / f"hello{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL)
        remote_frame[LIDAR_NAME] = data
        segment.append(remote_frame)

    dataset_client = gas_client.upload_dataset(dataset)
    dataset_client.commit("test")

    dataset_remote = FusionDataset(name=dataset_name, gas=gas_client)
    assert dataset_remote.notes.is_continuous == dataset.notes.is_continuous
    assert dataset_remote.catalog == dataset.catalog
    segment_remote = dataset_remote[0]
    assert len(segment_remote) == len(segment)
    assert segment_remote.sensors == segment.sensors
    for remote_frame, frame in zip(segment_remote, segment):
        assert remote_frame[LIDAR_NAME].path == frame[LIDAR_NAME].target_remote_path
        # Consistency fix: index by LIDAR_NAME like every other frame access
        # in this test (the frame was populated via remote_frame[LIDAR_NAME]),
        # instead of the equivalent-but-inconsistent LIDAR_DATA["name"].
        assert remote_frame[LIDAR_NAME].label == frame[LIDAR_NAME].label

    gas_client.delete_dataset(dataset_name)
def _load_sensors(calib_path: str) -> Sensors:
    """Build the sensor collection (one lidar plus all cameras) from calibration files.

    Arguments:
        calib_path: Directory containing ``extrinsics.yaml`` and one
            ``<NN>.yaml`` calibration file per camera.

    Returns:
        The loaded sensor collection.

    Raises:
        ModuleImportError: When the optional ``pyyaml`` dependency is missing.
    """
    try:
        import yaml  # pylint: disable=import-outside-toplevel
    except ModuleNotFoundError as error:
        raise ModuleImportError(module_name=error.name, package_name="pyyaml") from error

    sensors = Sensors()

    # The lidar is the reference sensor: identity extrinsics.
    lidar = Lidar("LIDAR")
    lidar.set_extrinsics()
    sensors.add(lidar)

    with open(os.path.join(calib_path, "extrinsics.yaml"), "r", encoding="utf-8") as fp:
        extrinsics = yaml.load(fp, Loader=yaml.FullLoader)

    for calibration_file in glob(os.path.join(calib_path, "[0-9]*.yaml")):
        with open(calibration_file, "r", encoding="utf-8") as fp:
            calibration = yaml.load(fp, Loader=yaml.FullLoader)

        # Calibration files are named like ".../calib/00.yaml"; the stem
        # "00" identifies camera "CAM00".
        camera_name = f"CAM{os.path.splitext(os.path.basename(calibration_file))[0]}"
        camera = Camera(camera_name)
        camera.description = calibration["camera_name"]
        camera.set_extrinsics(matrix=extrinsics[f"T_LIDAR_{camera_name}"])

        # The intrinsic matrix arrives as a flat 9-element row-major list.
        flat_matrix = calibration["camera_matrix"]["data"]
        camera.set_camera_matrix(
            matrix=[flat_matrix[0:3], flat_matrix[3:6], flat_matrix[6:9]]
        )
        distortion_values = calibration["distortion_coefficients"]["data"]
        camera.set_distortion_coefficients(
            **dict(zip(("k1", "k2", "p1", "p2", "k3"), distortion_values))
        )
        sensors.add(camera)

    return sensors