Example #1
    def get_pixel_location(self, pixel, camera_setup):
        """ Gets the 3D world location from pixel coordinates.

        Args:
            pixel: A pylot.utils.Vector2D denoting pixel coordinates.
            camera_setup: The setup of the camera.
        Returns:
            A pylot.utils.Location of the 3D world location, or None if all
            the point cloud points are behind the camera.
        """
        if len(self.points) == 0:
            return None
        intrinsic_mat = camera_setup.get_intrinsic_matrix()
        # Project our 2D pixel location into 3D space, onto the z=1 plane.
        p3d = np.dot(inv(intrinsic_mat), np.array([[pixel.x], [pixel.y],
                                                   [1.0]]))
        location = self._get_closest_point_in_point_cloud(
            Vector2D(p3d[0], p3d[1]))
        # Normalize our point to have the same depth as our closest point.
        p3d *= np.array([location.z])
        p3d_locations = [
            Location(px, py, pz) for px, py, pz in np.asarray(p3d.transpose())
        ]
        # Convert from camera to unreal coordinates.
        to_world_transform = camera_setup.get_unreal_transform()
        camera_point_cloud = to_world_transform.transform_points(p3d_locations)
        return camera_point_cloud[0]
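
A minimal usage sketch for the method above, assuming a populated point cloud and the setup of the camera the pixel came from (the variable names point_cloud and camera_setup are illustrative, not part of the snippet):

    from pylot.utils import Vector2D

    # Hypothetical objects: point_cloud wraps a lidar scan, camera_setup
    # describes the camera whose image the pixel was picked from.
    pixel = Vector2D(400, 300)
    location = point_cloud.get_pixel_location(pixel, camera_setup)
    if location is not None:
        print('3D world location: ({}, {}, {})'.format(
            location.x, location.y, location.z))
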
Example #2
    def get_pixel_location(self, pixel, camera_setup):
        """ Gets the 3D world location from pixel coordinates.

        Args:
            pixel (:py:class:`~pylot.utils.Vector2D`): Pixel coordinates.
            camera_setup (:py:class:`~pylot.drivers.sensor_setup.CameraSetup`):
                The setup of the camera.
        Returns:
            :py:class:`~pylot.utils.Location`: The 3D world location, or None
            if all the point cloud points are behind the camera.
        """
        # Select only points that are in front of the camera.
        fwd_points = self.points[np.where(self.points[:, 2] > 0.0)]
        if len(fwd_points) == 0:
            return None
        intrinsic_mat = camera_setup.get_intrinsic_matrix()
        # Project our 2D pixel location into 3D space, onto the z=1 plane.
        p3d = np.dot(inv(intrinsic_mat), np.array([[pixel.x], [pixel.y],
                                                   [1.0]]))
        location = PointCloud.get_closest_point_in_point_cloud(
            fwd_points, Vector2D(p3d[0], p3d[1]))
        # Normalize our point to have the same depth as our closest point.
        p3d *= np.array([location.z])
        p3d = p3d.transpose()
        # Convert from camera to unreal coordinates.
        to_world_transform = camera_setup.get_unreal_transform()
        camera_point_cloud = to_world_transform.transform_points(p3d)[0]
        pixel_location = Location(camera_point_cloud[0], camera_point_cloud[1],
                                  camera_point_cloud[2])
        return pixel_location
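
The inv(intrinsic_mat) step in both variants back-projects the pixel onto the z=1 plane of the camera frame; a standalone sketch with a toy pinhole matrix (the numbers are illustrative: an 800x600 image with a 90 degree fov gives fx = fy = 400 and a principal point at the image center):

    import numpy as np
    from numpy.linalg import inv

    intrinsic_mat = np.array([[400.0, 0.0, 400.0],
                              [0.0, 400.0, 300.0],
                              [0.0, 0.0, 1.0]])
    # Homogeneous pixel coordinates (u, v, 1).
    pixel = np.array([[500.0], [350.0], [1.0]])
    # Ray through the pixel, intersected with the z=1 plane.
    p3d = np.dot(inv(intrinsic_mat), pixel)
    # Scaling by a depth gives the camera-frame point at that depth,
    # which is what multiplying by location.z does above.
    print(p3d.ravel())           # [0.25, 0.125, 1.0]
    print((p3d * 10.0).ravel())  # the same ray at depth 10
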
Example #3
    def get_pixel_location(self, pixel, camera_setup: CameraSetup):
        """ Gets the 3D world location from pixel coordinates.

        Args:
            pixel (:py:class:`~pylot.utils.Vector2D`): Pixel coordinates.
            camera_setup (:py:class:`~pylot.drivers.sensor_setup.CameraSetup`):
                The setup of the camera with its transform in the world frame
                of reference.

        Returns:
            :py:class:`~pylot.utils.Location`: The 3D world location, or None
            if all the point cloud points are behind the camera.
        """
        # Select only points that are in front of the camera.
        # Setting the threshold to 0.1 because super close points cause
        # floating point errors.
        fwd_points = self.points[np.where(self.points[:, 2] > 0.1)]
        if len(fwd_points) == 0:
            return None
        intrinsic_mat = camera_setup.get_intrinsic_matrix()
        # Project our 2D pixel location into 3D space, onto the z=1 plane.
        p3d = np.dot(inv(intrinsic_mat), np.array([[pixel.x], [pixel.y],
                                                   [1.0]]))

        if self._lidar_setup.lidar_type == 'sensor.lidar.ray_cast':
            location = PointCloud.get_closest_point_in_point_cloud(
                fwd_points, Vector2D(p3d[0], p3d[1]), normalized=True)
            # Use the depth from the retrieved location.
            p3d *= np.array([location.z])
            p3d = p3d.transpose()
            # Convert from camera coordinates to unreal coordinates.
            to_world_transform = camera_setup.get_unreal_transform()
            camera_point_cloud = to_world_transform.transform_points(p3d)[0]
            pixel_location = Location(camera_point_cloud[0],
                                      camera_point_cloud[1],
                                      camera_point_cloud[2])
        elif self._lidar_setup.lidar_type == 'velodyne':
            location = PointCloud.get_closest_point_in_point_cloud(
                fwd_points, Vector2D(p3d[0], p3d[1]), normalized=False)
            # Use the depth from the retrieved location.
            p3d[2] = location.z
            p3d = p3d.transpose()
            pixel_location = Location(p3d[0, 0], p3d[0, 1], p3d[0, 2])
        else:
            raise ValueError('Unexpected lidar type {}'.format(
                self._lidar_setup.lidar_type))
        return pixel_location
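
The camera-to-world step above boils down to an axis remap plus the camera's pose. A minimal sketch of the remap alone, assuming CARLA-style conventions (camera frame: x right, y down, z forward; unreal frame: x forward, y right, z up); the matrix below is illustrative and omits the camera pose that get_unreal_transform folds in:

    import numpy as np

    # Assumed remap: (x, y, z)_camera -> (z, x, -y)_unreal.
    cam_to_unreal = np.array([[0.0, 0.0, 1.0],
                              [1.0, 0.0, 0.0],
                              [0.0, -1.0, 0.0]])
    p_cam = np.array([2.5, 1.25, 10.0])  # camera-frame point at depth 10
    p_unreal = cam_to_unreal.dot(p_cam)
    print(p_unreal)  # [10.0, 2.5, -1.25]
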
Example #4
    def estimate_obstacle_orientation(self):
        """Uses the obstacle's past trajectory to estimate its angle from the
        positive x-axis (assumes trajectory points are in the ego-vehicle's
        coordinate frame)."""
        other_idx = len(self.trajectory) - 2
        # TODO: Setting a default yaw is dangerous. Find some way to estimate
        # the orientation of a stationary object (e.g. 3D object detection).
        yaw = 0.0  # Default orientation for stationary objects.
        current_loc = self.trajectory[-1].location.as_vector_2D()
        while other_idx >= 0:
            past_ref_loc = self.trajectory[other_idx].location.as_vector_2D()
            vec = current_loc - past_ref_loc
            displacement = current_loc.l2_distance(past_ref_loc)
            if displacement > 0.001:
                # Angle between displacement vector and the x-axis, i.e.
                # the (1, 0) vector.
                yaw = vec.get_angle(Vector2D(1, 0))
                break
            else:
                other_idx -= 1
        return math.degrees(yaw)
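
The angle computation reduces to atan2 on the displacement vector; the same idea as a standalone sketch, independent of pylot.utils.Vector2D (the function name and threshold below are illustrative):

    import math

    def yaw_from_positions(past, current, min_displacement=0.001):
        """Heading of the displacement from past to current, in degrees,
        measured from the positive x-axis; 0.0 if the object barely moved."""
        dx = current[0] - past[0]
        dy = current[1] - past[1]
        if math.hypot(dx, dy) <= min_displacement:
            return 0.0  # Default yaw for stationary objects.
        return math.degrees(math.atan2(dy, dx))

    print(yaw_from_positions((0.0, 0.0), (1.0, 1.0)))  # 45.0
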
Example #5
    def _calculate_metrics(self, timestamp, ground_trajectories, predictions):
        """ Calculates and logs MSD (mean squared distance), ADE (average
            displacement error), and FDE (final displacement error).

            Args:
                ground_trajectories: A dict of perfect past trajectories.
                predictions: A list of obstacle predictions.
        """
        # Vehicle metrics.
        vehicle_cnt = 0
        vehicle_msd = 0.0
        vehicle_ade = 0.0
        vehicle_fde = 0.0

        # Person metrics.
        person_cnt = 0
        person_msd = 0.0
        person_ade = 0.0
        person_fde = 0.0

        for obstacle in predictions:
            # We remove altitude from the accuracy calculation because the
            # prediction operators do not currently predict altitude.
            predicted_trajectory = [
                Vector2D(transform.location.x, transform.location.y)
                for transform in obstacle.trajectory
            ]
            ground_trajectory = [
                Vector2D(transform.location.x, transform.location.y)
                for transform in ground_trajectories[obstacle.id].trajectory
            ]
            if obstacle.label in VEHICLE_LABELS:
                vehicle_cnt += 1
            elif obstacle.label == 'person':
                person_cnt += 1
            else:
                raise ValueError('Unexpected obstacle label {}'.format(
                    obstacle.label))
            l2_distance = 0.0
            l1_distance = 0.0
            for idx in range(1, len(predicted_trajectory) + 1):
                # Calculate MSD
                l2_distance += predicted_trajectory[-idx].l2_distance(
                    ground_trajectory[-idx])
                # Calculate ADE
                l1_distance += predicted_trajectory[-idx].l1_distance(
                    ground_trajectory[-idx])
            l2_distance /= len(predicted_trajectory)
            l1_distance /= len(predicted_trajectory)
            fde = predicted_trajectory[-1].l1_distance(ground_trajectory[-1])
            if obstacle.label in VEHICLE_LABELS:
                vehicle_msd += l2_distance
                vehicle_ade += l1_distance
                vehicle_fde += fde
            elif obstacle.label == 'person':
                person_msd += l2_distance
                person_ade += l1_distance
                person_fde += fde
            else:
                raise ValueError('Unexpected obstacle label {}'.format(
                    obstacle.label))

        # Log metrics.
        sim_time = timestamp.coordinates[0]
        actor_cnt = person_cnt + vehicle_cnt
        if actor_cnt > 0:
            msd = (person_msd + vehicle_msd) / actor_cnt
            ade = (person_ade + vehicle_ade) / actor_cnt
            fde = (person_fde + vehicle_fde) / actor_cnt
            self._csv_logger.info('{},{},prediction,MSD,{:.4f}'.format(
                time_epoch_ms(), sim_time, msd))
            self._csv_logger.info('{},{},prediction,ADE,{:.4f}'.format(
                time_epoch_ms(), sim_time, ade))
            self._csv_logger.info('{},{},prediction,FDE,{:.4f}'.format(
                time_epoch_ms(), sim_time, fde))
        if person_cnt > 0:
            person_msd /= person_cnt
            person_ade /= person_cnt
            person_fde /= person_cnt
            self._logger.info('Person MSD is: {:.4f}'.format(person_msd))
            self._logger.info('Person ADE is: {:.4f}'.format(person_ade))
            self._logger.info('Person FDE is: {:.4f}'.format(person_fde))
            self._csv_logger.info('{},{},prediction,person-MSD,{:.4f}'.format(
                time_epoch_ms(), sim_time, person_msd))
            self._csv_logger.info('{},{},prediction,person-ADE,{:.4f}'.format(
                time_epoch_ms(), sim_time, person_ade))
            self._csv_logger.info('{},{},prediction,person-FDE,{:.4f}'.format(
                time_epoch_ms(), sim_time, person_fde))
        if vehicle_cnt > 0:
            vehicle_msd /= vehicle_cnt
            vehicle_ade /= vehicle_cnt
            vehicle_fde /= vehicle_cnt
            self._logger.info('Vehicle MSD is: {:.4f}'.format(vehicle_msd))
            self._logger.info('Vehicle ADE is: {:.4f}'.format(vehicle_ade))
            self._logger.info('Vehicle FDE is: {:.4f}'.format(vehicle_fde))
            self._csv_logger.info('{},{},prediction,vehicle-MSD,{:.4f}'.format(
                time_epoch_ms(), sim_time, vehicle_msd))
            self._csv_logger.info('{},{},prediction,vehicle-ADE,{:.4f}'.format(
                time_epoch_ms(), sim_time, vehicle_ade))
            self._csv_logger.info('{},{},prediction,vehicle-FDE,{:.4f}'.format(
                time_epoch_ms(), sim_time, vehicle_fde))
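
On toy trajectories, the per-obstacle terms above work out as follows (a sketch mirroring the code: Euclidean distances for the MSD term, Manhattan distances for ADE and FDE; math.dist requires Python 3.8+):

    import math

    predicted = [(0.0, 0.0), (1.0, 0.0), (2.0, 0.0)]
    ground = [(0.0, 0.0), (1.0, 1.0), (2.0, 2.0)]

    # Sum of L2 distances over the horizon, then averaged.
    l2 = sum(math.dist(p, g) for p, g in zip(predicted, ground))
    # Sum of L1 distances over the horizon, then averaged.
    l1 = sum(abs(p[0] - g[0]) + abs(p[1] - g[1])
             for p, g in zip(predicted, ground))
    msd = l2 / len(predicted)
    ade = l1 / len(predicted)
    # Final displacement error: L1 distance between the last points.
    fde = (abs(predicted[-1][0] - ground[-1][0]) +
           abs(predicted[-1][1] - ground[-1][1]))
    print(msd, ade, fde)  # 1.0 1.0 2.0
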
Example #6
    depth_frame = DepthFrame(depth_frame, camera_setup)
    # Resulting unreal coordinates.
    point_cloud = depth_frame.as_point_cloud()
    print(point_cloud)
    for i in range(width * height):
        assert np.isclose(point_cloud[i].x, expected[i].x), \
            'Returned x value is not the same as expected'
        assert np.isclose(point_cloud[i].y, expected[i].y), \
            'Returned y value is not the same as expected'
        assert np.isclose(point_cloud[i].z, expected[i].z), \
            'Returned z value is not the same as expected'


@pytest.mark.parametrize("depth_frame, pixels, expected", [
    (np.array([[0.4, 0.3], [0.2, 0.1]]),
     [Vector2D(0, 1), Vector2D(1, 0)],
     [Location(200, -200, -200), Location(300, 300, 300)])
])
def test_get_pixel_locations(depth_frame, pixels, expected):
    height, width = depth_frame.shape
    camera_setup = CameraSetup('test_setup',
                               'test_type',
                               width,
                               height,
                               Transform(location=Location(0, 0, 0),
                                         rotation=Rotation(0, 0, 0)),
                               fov=90)
    depth_frame = DepthFrame(depth_frame, camera_setup)
    locations = depth_frame.get_pixel_locations(pixels)
    for i in range(len(pixels)):
        assert np.isclose(locations[i].x, expected[i].x), \
            'Returned x value is not the same as expected'
        assert np.isclose(locations[i].y, expected[i].y), \
            'Returned y value is not the same as expected'
        assert np.isclose(locations[i].z, expected[i].z), \
            'Returned z value is not the same as expected'
Example #7
    depth_frame = DepthFrame(depth_frame, camera_setup)
    # Resulting unreal coordinates.
    point_cloud = depth_frame.as_point_cloud()
    print(point_cloud)
    for i in range(width * height):
        assert np.isclose(point_cloud[i][0], expected[i][0]), \
            'Returned x value is not the same as expected'
        assert np.isclose(point_cloud[i][1], expected[i][1]), \
            'Returned y value is not the same as expected'
        assert np.isclose(point_cloud[i][2], expected[i][2]), \
            'Returned z value is not the same as expected'


@pytest.mark.parametrize("depth_frame, pixels, expected", [
    (np.array([[0.4, 0.3], [0.2, 0.1]]),
     [Vector2D(0, 1), Vector2D(1, 0)],
     [Location(200, -200, -200), Location(300, 300, 300)])
])
def test_get_pixel_locations(depth_frame, pixels, expected):
    height, width = depth_frame.shape
    camera_setup = CameraSetup('test_setup',
                               'sensor.camera.depth',
                               width,
                               height,
                               Transform(location=Location(0, 0, 0),
                                         rotation=Rotation(0, 0, 0)),
                               fov=90)
    depth_frame = DepthFrame(depth_frame, camera_setup)
    locations = depth_frame.get_pixel_locations(pixels)
    for i in range(len(pixels)):
        assert np.isclose(locations[i].x, expected[i].x), \
            'Returned x value is not the same as expected'
        assert np.isclose(locations[i].y, expected[i].y), \
            'Returned y value is not the same as expected'
        assert np.isclose(locations[i].z, expected[i].z), \
            'Returned z value is not the same as expected'