Example no. 1
    def undistort_points(self,
                         pts_Local_batches: list,
                         timestamps: np.ndarray,
                         reference_ts: int = -1,
                         to_world: bool = False,
                         dtype=np.float64):
        """Transform 3D points that have not been sampled simultaneously to their 'correct' place
        referential.

        Args:
            pts_Local_batches: a list of arrays of [N, 3] points to be transformed
            timestamps: the N timestamps (common for all point batches)
            reference_ts:   only used if to_world == False; lets the user choose at what time the
                            undistorted points are projected back to the local referential
                            (useful to compare points from different sensors in a common local referential)
            to_world:   If 'True', leave the undistorted points in the 'world' referential, otherwise
                        project them back to the local referential
            dtype: the output numpy data type

        Returns:
            None; the arrays in pts_Local_batches are transformed in place.
        """

        warn_if_less_than_64bit(dtype)

        provider = self.datasource.sensor.platform.egomotion_provider
        tf_Ego_from_Local = self.compute_transform(provider.referential_name,
                                                   False,
                                                   dtype=dtype)
        traj_EgoZero_from_Ego = provider.compute_trajectory(
            timestamps, provider.tf_Global_from_EgoZero, dtype=dtype)

        for pts_Local in pts_Local_batches:

            pts_Ego = linalg.map_points(tf_Ego_from_Local, pts_Local)

            pts_EgoZero = provider.apply_trajectory(traj_EgoZero_from_Ego,
                                                    pts_Ego)

            if to_world:
                pts_Local[:] = pts_EgoZero
            else:
                if reference_ts < 0:
                    reference_ts = self.timestamp

                tf_Global_from_Ego = provider.get_Global_from_Ego_at(
                    reference_ts, dtype=dtype)
                tf_Local_from_EgoZero = linalg.tf_inv(
                    tf_Ego_from_Local) @ linalg.tf_inv(
                        tf_Global_from_Ego) @ provider.tf_Global_from_EgoZero
                pts_Local[:] = linalg.map_points(tf_Local_from_EgoZero,
                                                 pts_EgoZero)
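
A minimal sketch of the transform chain above, with stand-ins for linalg.tf_inv and linalg.map_points (assumed to invert a rigid 4x4 transform and to apply one to [N, 3] points); the egomotion provider and its trajectory are replaced by a toy pose function, so every name below is illustrative:

import numpy as np

def tf_inv(tf):
    """Rigid 4x4 inverse: [R t]^-1 = [R^T, -R^T t] (stand-in for linalg.tf_inv)."""
    inv = np.eye(4)
    inv[:3, :3] = tf[:3, :3].T
    inv[:3, 3] = -tf[:3, :3].T @ tf[:3, 3]
    return inv

def map_points(tf, pts):
    """Apply a 4x4 transform to [N, 3] points (stand-in for linalg.map_points)."""
    return pts @ tf[:3, :3].T + tf[:3, 3]

# Toy setup: a sensor mounted 1 m forward on the ego, ego drifting along +x.
tf_Ego_from_Local = np.eye(4)
tf_Ego_from_Local[0, 3] = 1.0

def tf_EgoZero_from_Ego_at(ts):  # hypothetical per-timestamp ego pose
    tf = np.eye(4)
    tf[0, 3] = 1e-3 * ts         # 1 mm of ego motion per time unit
    return tf

pts_Local = np.array([[0., 0., 0.], [0., 1., 0.]])
timestamps = np.array([0, 500])  # each point sampled at a different time

# Local -> Ego -> EgoZero with one pose per point: the undistortion step
pts_EgoZero = np.stack([
    map_points(tf_EgoZero_from_Ego_at(ts) @ tf_Ego_from_Local, p[None])[0]
    for p, ts in zip(pts_Local, timestamps)])

# to_world=False branch: project back to Local at a common reference time
reference_ts = 500
tf_Local_from_EgoZero = tf_inv(tf_Ego_from_Local) @ tf_inv(
    tf_EgoZero_from_Ego_at(reference_ts))
print(map_points(tf_Local_from_EgoZero, pts_EgoZero))
# the point sampled at reference_ts is unchanged; the other is motion-corrected
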
Example no. 2
    def transform(self,
                  pts: np.ndarray,
                  referential_or_ds: str,
                  ignore_orientation: bool = False,
                  reference_ts: int = -1,
                  reverse: bool = False,
                  dtype=np.float64) -> np.ndarray:
        """Transform 3D points from this Sample sensor to another sensor
        referential.

        Arguments:
            pts: The [N, 3] points to be transformed
            referential_or_ds: The target sensor referential or full datasource name
            ignore_orientation: Ignore the source sensor orientation (default: {False})
            reference_ts: refer to compute_transform()'s doc (only used if referential_or_ds == 'world')
            reverse: apply the reverse transformation
            dtype: the output numpy data type

        Returns:
            The transformed points
        """
        r = self.compute_transform(referential_or_ds,
                                   ignore_orientation=ignore_orientation,
                                   reference_ts=reference_ts,
                                   dtype=dtype)

        if reverse:
            r = linalg.tf_inv(r)

        if r is not None:
            return Sample.transform_pts(r, pts)

        return pts
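
Since reverse only inverts the same 4x4, a round trip with r and linalg.tf_inv(r) recovers the input; a hedged sketch with the same tf_inv/map_points stand-ins:

import numpy as np

def tf_inv(tf):
    inv = np.eye(4)
    inv[:3, :3] = tf[:3, :3].T
    inv[:3, 3] = -tf[:3, :3].T @ tf[:3, 3]
    return inv

def map_points(tf, pts):
    return pts @ tf[:3, :3].T + tf[:3, 3]

# Hypothetical extrinsic: 90-degree yaw plus a translation.
r = np.eye(4)
r[:3, :3] = [[0, -1, 0], [1, 0, 0], [0, 0, 1]]
r[:3, 3] = [2.0, 0.5, -0.1]

pts = np.random.default_rng(0).normal(size=(5, 3))
there = map_points(r, pts)           # reverse=False
back = map_points(tf_inv(r), there)  # reverse=True
assert np.allclose(back, pts)        # the round trip recovers the input
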
Example no. 3
    def apply_new_extrinsic(self, tr):
        ''' tr : 4x4 transformation matrix
        '''
        if not self.does_path_exist(False):
            return

        calibmode_ = self.window.calibmodeComboBox.currentIndex()
        if calibmode_ == CalibMode.RELATIVE.value:
            orig = self.src_sensor.orig_extrinsics[self.dst_sensor.name]
            adjusted = orig @ tr
            self.src_sensor.extrinsics[self.dst_sensor.name][...] = adjusted
            self.dst_sensor.extrinsics[self.src_sensor.name][
                ...] = linalg.tf_inv(adjusted)
        elif calibmode_ == CalibMode.ABSOLUTE.value:
            self.src_sensor.extrinsics[self.dst_sensor.name][...] = tr
            self.dst_sensor.extrinsics[self.src_sensor.name][
                ...] = linalg.tf_inv(tr)
        self.src_sensor.extrinsics_dirty()
        self.dst_sensor.extrinsics_dirty()
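
In RELATIVE mode the adjustment tr is composed onto the original extrinsic, and the opposite direction is kept consistent by storing the inverse; a small sketch (all matrices hypothetical) checking that the two stored directions compose to the identity:

import numpy as np

def tf_inv(tf):
    inv = np.eye(4)
    inv[:3, :3] = tf[:3, :3].T
    inv[:3, 3] = -tf[:3, :3].T @ tf[:3, 3]
    return inv

# Hypothetical original src -> dst extrinsic and a small RELATIVE adjustment.
orig = np.eye(4)
orig[:3, 3] = [0.0, 1.2, 0.3]
tr = np.eye(4)
tr[0, 3] = 0.05  # a 5 cm nudge

adjusted = orig @ tr        # new src -> dst extrinsic
reverse = tf_inv(adjusted)  # dst -> src, kept consistent with the forward one
assert np.allclose(reverse @ adjusted, np.eye(4))
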
Example no. 4

    def compute_trajectory(self, sorted_ts: np.ndarray, tf_Global_from_EgoZero: np.ndarray, dtype=np.float64) -> np.ndarray:
        """Trajectory of tf_EgoZero_from_Ego transforms """
        end = int(np.ceil(sorted_ts.shape[0]/self.subsampling))
        # use double precision for trajectory computation, due to utm big number
        trajectory_EgoZero_from_Ego = np.empty((end,4,4), dtype = dtype)

        tf_EgoZero_from_Global = linalg.tf_inv(tf_Global_from_EgoZero)

        for i in range(end):
            tf_Global_from_Ego = self.get_Global_from_Ego_at(sorted_ts[i * self.subsampling])
            trajectory_EgoZero_from_Ego[i] = tf_EgoZero_from_Global @ tf_Global_from_Ego

        return trajectory_EgoZero_from_Ego
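
Each trajectory entry is tf_EgoZero_from_Ego = tf_inv(tf_Global_from_EgoZero) @ tf_Global_from_Ego; a self-contained sketch with a toy UTM-like pose function, showing why double precision matters: the absolute coordinates are huge while the relative motion is small:

import numpy as np

def tf_inv(tf):
    inv = np.eye(4)
    inv[:3, :3] = tf[:3, :3].T
    inv[:3, 3] = -tf[:3, :3].T @ tf[:3, 3]
    return inv

def tf_Global_from_Ego_at(ts):  # toy ego pose in a UTM-like frame
    tf = np.eye(4)
    tf[:3, 3] = [5.0e5 + 1e-3 * ts, 4.0e6, 0.0]  # large absolute coordinates
    return tf

sorted_ts = np.arange(0, 1000, 100)
subsampling = 2
tf_EgoZero_from_Global = tf_inv(tf_Global_from_Ego_at(sorted_ts[0]))

end = int(np.ceil(sorted_ts.shape[0] / subsampling))
trajectory_EgoZero_from_Ego = np.empty((end, 4, 4))
for i in range(end):
    trajectory_EgoZero_from_Ego[i] = (
        tf_EgoZero_from_Global @ tf_Global_from_Ego_at(sorted_ts[i * subsampling]))

print(trajectory_EgoZero_from_Ego[:, 0, 3])  # small relative x: [0. 0.2 0.4 0.6 0.8]
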
Example no. 5
    def num_pts_in(self, pt_cloud, margin=0):
        """ Returns, for each box, the mask of those points from pt_cloud that are inside the box.
            Args:
                pt_cloud - (M,3)
                margin (optional) - positive float- increases the size of box

            Returns:
                mask - boolean (n_boxe,M)
        """
        bbox = self.raw['data']
        nbpts = np.zeros((len(bbox), len(pt_cloud)), dtype=bool)
        for i in range(len(bbox)):
            tf_Localds_from_Box = np.eye(4)
            tf_Localds_from_Box[:3, :3] = euler.euler2mat(
                bbox['r'][i, 0], bbox['r'][i, 1], bbox['r'][i, 2])
            tf_Localds_from_Box[:3, 3] = bbox['c'][i, :]
            aabb = np.vstack([
                -(bbox['d'][i, :] + margin) / 2.0,
                (bbox['d'][i, :] + margin) / 2.0
            ])
            nbpts[i, :] = linalg.points_inside_box_mask(
                pt_cloud, aabb, linalg.tf_inv(tf_Localds_from_Box))
        return nbpts
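
The test maps the cloud into each box frame with the inverse pose and checks an axis-aligned box; a sketch of the same idea, with a stand-in for linalg.points_inside_box_mask whose behaviour is assumed:

import numpy as np

def tf_inv(tf):
    inv = np.eye(4)
    inv[:3, :3] = tf[:3, :3].T
    inv[:3, 3] = -tf[:3, :3].T @ tf[:3, 3]
    return inv

def points_inside_box_mask(pts, aabb, tf_Box_from_Cloud):
    """Assumed behaviour: map pts into the box frame, then test the AABB."""
    local = pts @ tf_Box_from_Cloud[:3, :3].T + tf_Box_from_Cloud[:3, 3]
    return np.all((local >= aabb[0]) & (local <= aabb[1]), axis=1)

# A box of dimensions d centred at c, axis-aligned for simplicity.
c = np.array([2.0, 0.0, 0.0])
d = np.array([2.0, 2.0, 2.0])
tf_Cloud_from_Box = np.eye(4)
tf_Cloud_from_Box[:3, 3] = c
aabb = np.vstack([-d / 2.0, d / 2.0])

pt_cloud = np.array([[2.0, 0.0, 0.0],   # at the centre -> inside
                     [3.5, 0.0, 0.0]])  # past the +x face -> outside
print(points_inside_box_mask(pt_cloud, aabb, tf_inv(tf_Cloud_from_Box)))
# [ True False]
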
Example no. 6
    def load_extrinsics(self, extrinsics_folder: str):
        """Looks for a pickle file containing extrinsics information for this sensor, named 'From-To' e.g. 'flir_tfl-eagle_tfc.pkl'

        Args:
            intrinsics_config: path to folder containing this sensor's extrinsics pickle file 
            (absolute or relative to dataset path), e.g. '/nas/extrinsics' or 'extrinsics'
        """
        targets = {}

        extrinsics_folder_path = self.pf.try_absolute_or_relative(
            extrinsics_folder)

        for target in self.pf.yml.keys():
            if self.name == target:
                targets[target] = np.eye(4, dtype='f8')
                continue

            # try to find the self.name -> target mapping

            paths = glob.glob(
                os.path.join(extrinsics_folder_path,
                             f"{self.name}-{target}.pkl"))

            if paths:
                with open(paths[0], 'rb') as f:
                    targets[target] = pickle.load(f).astype('f8')
            else:
                # try to find target -> self.name mapping instead
                paths = glob.glob(
                    os.path.join(extrinsics_folder_path,
                                 f"{target}-{self.name}.pkl"))
                if paths:
                    with open(paths[0], 'rb') as f:
                        targets[target] = linalg.tf_inv(
                            pickle.load(f)).astype('f8')

        self.extrinsics = targets
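
A sketch of the 'From-To' naming convention: only one direction is on disk, and the other is recovered by inverting the loaded matrix (the sensor and file names below are hypothetical):

import os
import pickle
import tempfile
import numpy as np

def tf_inv(tf):
    inv = np.eye(4)
    inv[:3, :3] = tf[:3, :3].T
    inv[:3, 3] = -tf[:3, :3].T @ tf[:3, 3]
    return inv

folder = tempfile.mkdtemp()
tf_a_from_b = np.eye(4)
tf_a_from_b[:3, 3] = [1.0, 0.0, 0.5]

# Only the 'sensor_a-sensor_b' direction exists on disk.
with open(os.path.join(folder, 'sensor_a-sensor_b.pkl'), 'wb') as f:
    pickle.dump(tf_a_from_b, f)

# Loading for sensor_b: the direct 'sensor_b-sensor_a' file is missing,
# so fall back to the reverse file and invert it.
direct = os.path.join(folder, 'sensor_b-sensor_a.pkl')
reverse = os.path.join(folder, 'sensor_a-sensor_b.pkl')
if os.path.exists(direct):
    with open(direct, 'rb') as f:
        tf_b_from_a = pickle.load(f).astype('f8')
else:
    with open(reverse, 'rb') as f:
        tf_b_from_a = tf_inv(pickle.load(f)).astype('f8')
print(tf_b_from_a[:3, 3])  # [-1.  0. -0.5]
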
Example no. 7
    def __compute_lookup_table_cameras_to_cylinders(self, image_center):
        # u, v, scale in the camera; used to check whether points are in front of or behind the camera
        pt_in_cam_3 = self.new_matrices[
            Pos.CENTER] @ self.cylinder_points[:3, :]
        # project 3D cylinder points in 2D image
        pt_in_cam = (cv2.projectPoints(
            self.cylinder_points[:3, :], np.zeros((3, 1)), np.zeros(
                (3, 1)), self.new_matrices[Pos.CENTER],
            self.distortion_coefficients[Pos.CENTER] * 0.0))[0].reshape(-1,
                                                                        2).T
        # keep points that fall inside the image and lie in front of the camera
        keep = np.logical_and(
            np.logical_and(
                np.logical_and(
                    np.logical_and(pt_in_cam[0, :] >= 0,
                                   pt_in_cam[0, :] < image_center.shape[1]),
                    pt_in_cam[1, :] >= 0),
                pt_in_cam[1, :] < image_center.shape[0]),
            pt_in_cam_3[2, :] > 0)
        self.keeped_in_cam_points[Pos.CENTER] = pt_in_cam[:, keep].astype(int)
        self.keeped_cylinder_points_2d[
            Pos.CENTER] = self.cylinder_points_2d[:, keep].astype(int)
        # compute the left and right image limits in the cylinder, used to create the correct merging masks
        self.images_min_x[Pos.CENTER] = self.keeped_cylinder_points_2d[
            Pos.CENTER][0, self.keeped_cylinder_points_2d[Pos.CENTER].
                        reshape(2, -1)[1, :] == self.image_height // 2].min()
        self.images_max_x[Pos.CENTER] = self.keeped_cylinder_points_2d[
            Pos.CENTER][0, self.keeped_cylinder_points_2d[Pos.CENTER].
                        reshape(2, -1)[1, :] == self.image_height // 2].max()

        # left camera
        calib_extrinsic_l_c_inv = linalg.tf_inv(
            self.extrinsic_calibrations[Pos.LEFT])
        pt_in_cam_3 = self.new_matrices[Pos.LEFT] @ (
            calib_extrinsic_l_c_inv @ self.cylinder_points)[:3, :]
        pt_in_cam_3d = (calib_extrinsic_l_c_inv @ self.cylinder_points)[:3, :]
        pt_in_cam = (cv2.projectPoints(
            pt_in_cam_3d, np.zeros((3, 1)), np.zeros(
                (3, 1)), self.new_matrices[Pos.LEFT],
            self.distortion_coefficients[Pos.LEFT] * 0.0))[0].reshape(-1, 2).T
        keep = np.logical_and(
            np.logical_and(
                np.logical_and(
                    np.logical_and(pt_in_cam[0, :] >= 0,
                                   pt_in_cam[0, :] < image_center.shape[1]),
                    pt_in_cam[1, :] >= 0),
                pt_in_cam[1, :] < image_center.shape[0]),
            pt_in_cam_3[2, :] > 0)
        self.keeped_in_cam_points[Pos.LEFT] = pt_in_cam[:, keep].astype(int)
        self.keeped_cylinder_points_2d[
            Pos.LEFT] = self.cylinder_points_2d[:, keep].astype(int)
        self.images_min_x[Pos.LEFT] = self.keeped_cylinder_points_2d[Pos.LEFT][
            0, self.keeped_cylinder_points_2d[Pos.LEFT].reshape(2, -1)[
                1, :] == self.image_height // 2].min()
        self.images_max_x[Pos.LEFT] = self.keeped_cylinder_points_2d[Pos.LEFT][
            0, self.keeped_cylinder_points_2d[Pos.LEFT].reshape(2, -1)[
                1, :] == self.image_height // 2].max()

        # right camera
        calib_extrinsic_r_c_inv = linalg.tf_inv(
            self.extrinsic_calibrations[Pos.RIGHT])
        pt_in_cam_3 = self.new_matrices[Pos.RIGHT] @ (
            calib_extrinsic_r_c_inv @ self.cylinder_points)[:3, :]
        pt_in_cam_3d = (calib_extrinsic_r_c_inv @ self.cylinder_points)[:3, :]
        pt_in_cam = (cv2.projectPoints(
            pt_in_cam_3d, np.zeros((3, 1)), np.zeros(
                (3, 1)), self.new_matrices[Pos.RIGHT],
            self.distortion_coefficients[Pos.RIGHT] * 0.0))[0].reshape(-1, 2).T
        keep = np.logical_and(
            np.logical_and(
                np.logical_and(
                    np.logical_and(pt_in_cam[0, :] >= 0,
                                   pt_in_cam[0, :] < image_center.shape[1]),
                    pt_in_cam[1, :] >= 0),
                pt_in_cam[1, :] < image_center.shape[0]),
            pt_in_cam_3[2, :] > 0)
        self.keeped_in_cam_points[Pos.RIGHT] = pt_in_cam[:, keep].astype(int)
        self.keeped_cylinder_points_2d[
            Pos.RIGHT] = self.cylinder_points_2d[:, keep].astype(int)
        self.images_min_x[Pos.RIGHT] = self.keeped_cylinder_points_2d[
            Pos.RIGHT][0, self.keeped_cylinder_points_2d[Pos.RIGHT].
                       reshape(2, -1)[1, :] == self.image_height // 2].min()
        self.images_max_x[Pos.RIGHT] = self.keeped_cylinder_points_2d[
            Pos.RIGHT][0, self.keeped_cylinder_points_2d[Pos.RIGHT].
                       reshape(2, -1)[1, :] == self.image_height // 2].max()
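
The nested np.logical_and chains above can be written more compactly with the elementwise & operator; a small sketch of the same visibility test on toy projected points:

import numpy as np

# Toy projected points (2, N) in pixels and camera-frame depths (N,).
pt_in_cam = np.array([[-1.0, 10.0, 640.0, 100.0],
                      [ 5.0, 20.0,  10.0,  30.0]])
depth = np.array([1.0, 2.0, 3.0, -1.0])
h, w = 480, 640  # image_center.shape[:2]

# Same test as the nested logical_and chains: inside the image, depth > 0.
keep = ((pt_in_cam[0] >= 0) & (pt_in_cam[0] < w) &
        (pt_in_cam[1] >= 0) & (pt_in_cam[1] < h) &
        (depth > 0))
print(keep)  # [False  True False False]
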
Example no. 8
    def __getitem__(self, key: Any):

        #TODO: if multiple point cloud datasources in dependencies, we could merge them.

        min_key = key - self.memory

        if not self._is_live:
            min_key = max([0, min_key])
        else:
            min_key = -min([
                -min_key,
                len(self.datasources[self.original_point_cloud_datasource])
            ])

        samples = self.datasources[
            self.original_point_cloud_datasource][min_key:key + 1]

        nb_features = 1 if not self.has_rgb else 4
        pc_map = np.empty((0, 5 + nb_features))

        cached_indices = []
        if self.local_cache is not None:
            pc_map = self.local_cache

            if not self._is_live:
                keep = np.where(
                    (pc_map[:, 5] >= min_key) &
                    (pc_map[:, 5] <= key) &
                    (pc_map[:, 5] % self.skip == 0)
                )
            else:
                keep = np.where(
                    (pc_map[:, 5] >= samples[0].raw['absolute_index']) &
                    (pc_map[:, 5] <= samples[-1].raw['absolute_index']) &
                    (pc_map[:, 5] % self.skip == 0)
                )
            pc_map = pc_map[keep]
            cached_indices = np.unique(pc_map[:, 5])

        for sample in samples:

            if not self._is_live:
                index = sample.index
                if index % self.skip and index != key:
                    continue
            else:
                index = sample.raw['absolute_index']
                if index % self.skip and index != samples[-1].raw[
                        'absolute_index']:
                    continue

            if index in cached_indices:
                continue  # don't re-add what is already cached

            pc = np.empty((sample.amplitudes.size, 5 + nb_features))
            pc[:, [0, 1, 2]] = sample.point_cloud(referential='world',
                                                  undistort=False)
            pc[:, 3] = sample.amplitudes
            pc[:, 4] = sample.timestamp
            pc[:, 5] = index

            if self.has_rgb:
                pc[:, 6] = sample.raw['r']
                pc[:, 7] = sample.raw['g']
                pc[:, 8] = sample.raw['b']

            pc_map = self.stack_point_cloud(pc_map, pc)

        self.local_cache = copy.deepcopy(pc_map)

        if self.voxel_size > 0 and OPEN3D_AVAILABLE:
            pc_map = self.voxelize(pc_map)

        to_world = samples[-1].compute_transform('world')
        to_sensor = tf_inv(to_world)
        pc_map[:, [0, 1, 2]] = map_points(to_sensor, pc_map[:, [0, 1, 2]])

        # package in das format
        dtype = datasource_xyzit() if not self.has_rgb else sample.raw.dtype
        raw = np.empty((pc_map.shape[0]), dtype=dtype)
        raw['x'] = pc_map[:, 0]
        raw['y'] = pc_map[:, 1]
        raw['z'] = pc_map[:, 2]
        raw['i'] = pc_map[:, 3]
        raw['t'] = pc_map[:, 4]

        if self.has_rgb:
            raw['r'] = pc_map[:, 6]
            raw['g'] = pc_map[:, 7]
            raw['b'] = pc_map[:, 8]

        sample_object = self.sensor.factories['xyzit'][0]

        return sample_object(index=key,
                             datasource=self,
                             virtual_raw=raw,
                             virtual_ts=samples[-1].timestamp)
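
The final packaging step copies columns of the float map into a structured array; a sketch with an explicit xyzit-like dtype (the real datasource_xyzit() field layout may differ):

import numpy as np

# Hypothetical stand-in for datasource_xyzit(); the field types are assumptions.
xyzit = np.dtype([('x', 'f4'), ('y', 'f4'), ('z', 'f4'),
                  ('i', 'f4'), ('t', 'u8')])

pc_map = np.array([[1.0, 2.0, 3.0, 0.5, 1000.0],
                   [4.0, 5.0, 6.0, 0.9, 1001.0]])

raw = np.empty(pc_map.shape[0], dtype=xyzit)
raw['x'], raw['y'], raw['z'] = pc_map[:, 0], pc_map[:, 1], pc_map[:, 2]
raw['i'], raw['t'] = pc_map[:, 3], pc_map[:, 4]
print(raw[0])  # (1., 2., 3., 0.5, 1000)
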
    def compute_tf_EgoZero_from_Sensor(self, tf_Ego_from_Sensor: np.ndarray, reference_ts: int) -> np.ndarray:
        """Compose tf_EgoZero_from_Sensor from the ego pose at reference_ts."""
        tf_Global_from_Ego = self.get_Global_from_Ego_at(reference_ts)
        return linalg.tf_inv(self.tf_Global_from_EgoZero) @ tf_Global_from_Ego @ tf_Ego_from_Sensor