Example #1
import numpy as np
from numbers import Number
from typing import Iterable, Tuple

from argoverse.map_representation.map_api import ArgoverseMap
from argoverse.utils.calibration import CameraConfig
from argoverse.utils.se3 import SE3

# LANE_COLOR_NOISE, clip_point_cloud_to_visible_region and
# draw_clipped_line_segment are defined elsewhere in the enclosing module.


def plot_lane_centerlines_in_img(
    lidar_pts: np.ndarray,
    city_to_egovehicle_se3: SE3,
    img: np.ndarray,
    city_name: str,
    avm: ArgoverseMap,
    camera_config: CameraConfig,
    planes: Iterable[Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]],
    color: Tuple[int, int, int] = (0, 255, 255),
    linewidth: Number = 10,
) -> np.ndarray:
    """
    Args:
        city_to_egovehicle_se3: SE3 transformation representing egovehicle to city transformation
        img: Array of shape (M,N,3) representing updated image
        city_name: str, string representing city name, i.e. 'PIT' or 'MIA'
        avm: instance of ArgoverseMap
        camera_config: instance of CameraConfig
        planes: five frustum clipping planes
        color: RGB-tuple representing color
        linewidth: Number = 10) -> np.ndarray

    Returns:
        img: Array of shape (M,N,3) representing updated image
    """
    # The extrinsic matrix gives the egovehicle -> camera transform.
    R = camera_config.extrinsic[:3, :3]
    t = camera_config.extrinsic[:3, 3]
    cam_SE3_egovehicle = SE3(rotation=R, translation=t)

    query_x, query_y, _ = city_to_egovehicle_se3.translation
    local_centerlines = avm.find_local_lane_centerlines(
        query_x, query_y, city_name)

    for centerline_city_fr in local_centerlines:
        # Jitter the base color slightly so adjacent centerlines are distinguishable.
        jittered_color = [
            intensity + np.random.randint(0, LANE_COLOR_NOISE) - LANE_COLOR_NOISE // 2
            for intensity in color
        ]

        ground_heights = avm.get_ground_height_at_xy(centerline_city_fr, city_name)

        # Drop waypoints for which no ground-height estimate is available.
        nan_idx = np.isnan(ground_heights)
        centerline_city_fr = centerline_city_fr[~nan_idx]

        # Transform waypoints: city frame -> egovehicle frame -> camera frame.
        centerline_egovehicle_fr = city_to_egovehicle_se3.inverse().transform_point_cloud(
            centerline_city_fr)
        centerline_uv_cam = cam_SE3_egovehicle.transform_point_cloud(
            centerline_egovehicle_fr)

        # can also clip point cloud to nearest LiDAR point depth
        centerline_uv_cam = clip_point_cloud_to_visible_region(
            centerline_uv_cam, lidar_pts)
        # Draw each consecutive pair of waypoints as a clipped line segment.
        for i in range(centerline_uv_cam.shape[0] - 1):
            draw_clipped_line_segment(img, centerline_uv_cam[i],
                                      centerline_uv_cam[i + 1], camera_config,
                                      linewidth, planes, jittered_color)
    return img
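
The rendering above hinges on chaining two rigid-body transforms: city -> egovehicle (the inverse of city_to_egovehicle_se3) followed by egovehicle -> camera (cam_SE3_egovehicle). The sketch below exercises that chain on a few dummy waypoints; the pose and extrinsic values are placeholders, and only the SE3 calls mirror the snippet.

import numpy as np
from argoverse.utils.se3 import SE3

# Placeholder egovehicle pose in the city frame (identity rotation, fixed offset).
city_SE3_egovehicle = SE3(rotation=np.eye(3), translation=np.array([10.0, 5.0, 0.0]))
# Placeholder camera extrinsics (camera at the egovehicle origin, axes aligned).
cam_SE3_egovehicle = SE3(rotation=np.eye(3), translation=np.zeros(3))

# A toy "centerline": three waypoints expressed in the city frame.
centerline_city_fr = np.array([[12.0, 5.0, 0.0],
                               [14.0, 5.0, 0.0],
                               [16.0, 5.0, 0.0]])

# city -> egovehicle, then egovehicle -> camera, exactly as in the function above.
centerline_egovehicle_fr = city_SE3_egovehicle.inverse().transform_point_cloud(centerline_city_fr)
centerline_uv_cam = cam_SE3_egovehicle.transform_point_cloud(centerline_egovehicle_fr)
print(centerline_uv_cam)  # [[2. 0. 0.], [4. 0. 0.], [6. 0. 0.]]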
Example #2
    def get_candidate_centerlines_for_raster(
            self,
            xy: np.ndarray,
            city_name: str,
            avm: ArgoverseMap,
            yaw_deg: int,
            viz: bool = False,
            max_search_radius: float = 100.0,
            world_to_image_space=None
    ) -> Tuple[Optional[np.ndarray], List[np.ndarray]]:
        """Rasterize the candidate lane centerlines around the last observed position.

        Returns a (raster image, normalized centerlines) tuple; both are only
        populated when `viz` is True. Relies on transform_points, cv2_subpixel,
        CV2_SHIFT and crop_tensor provided by the enclosing module.
        """

        # Get all lane ids within a 100 m bubble around the last observed
        # position (not used further in this excerpt).
        curr_lane_candidates = avm.get_lane_ids_in_xy_bbox(
            xy[-1, 0], xy[-1, 1], city_name, 100)

        # Candidate centerlines within max_search_radius of the last observed position.
        candidate_centerlines = avm.find_local_lane_centerlines(
            xy[-1, 0], xy[-1, 1], city_name, max_search_radius)

        # Sort the centerlines by distance from their first waypoint to the
        # last observed position (index 19 of the assumed 20-step history).
        lane_distance = []
        centroid = xy[19]
        for lane in candidate_centerlines:
            lane_distance.append(
                [lane, np.linalg.norm(centroid - lane[0][:2])])
        lane_distance.sort(key=operator.itemgetter(1))

        raster_size = (224, 224)
        candidate_centerlines_normalized = []
        img = None  # only populated when viz is True

        if viz:
            # Start from a white canvas and draw the centerlines in black.
            img = 255 * np.ones(shape=(raster_size[0], raster_size[1], 3),
                                dtype=np.uint8)

            for centerline_coords, _distance in lane_distance:
                # Project the centerline from world coordinates into raster space.
                cnt_line = transform_points(centerline_coords,
                                            world_to_image_space)
                # Draw the polyline with sub-pixel precision (fixed-point coords via CV2_SHIFT).
                cv2.polylines(img, [cv2_subpixel(cnt_line)],
                              False, (0, 0, 0),
                              lineType=cv2.LINE_AA,
                              shift=CV2_SHIFT)
                # Keep only the portion of the centerline that falls inside the raster.
                cropped_vector = crop_tensor(cnt_line, raster_size)
                if len(cropped_vector) > 1:
                    candidate_centerlines_normalized.append(cropped_vector)

            # Normalize the raster to float32 in [0, 1].
            img = img.astype(np.float32) / 255

        return img, candidate_centerlines_normalized
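
The drawing step above boils down to mapping world-frame polylines through a world -> raster affine transform and passing cv2.polylines fixed-point coordinates via its shift argument. The sketch below reproduces that step standalone; build_world_to_image and the explicit 2**CV2_SHIFT scaling are hypothetical stand-ins for the helpers used in the snippet (transform_points, cv2_subpixel), not the originals.

import cv2
import numpy as np

CV2_SHIFT = 8  # assumed fixed-point precision for sub-pixel drawing


def build_world_to_image(center_xy, raster_size, pixels_per_meter=2.0):
    # Hypothetical helper: 2x3 affine matrix mapping world (x, y) to raster
    # pixels, with center_xy landing at the raster center.
    cx, cy = raster_size[0] / 2.0, raster_size[1] / 2.0
    return np.array([
        [pixels_per_meter, 0.0, cx - pixels_per_meter * center_xy[0]],
        [0.0, pixels_per_meter, cy - pixels_per_meter * center_xy[1]],
    ])


def draw_centerline(img, centerline_world, world_to_image):
    # Apply the affine transform to each (x, y) waypoint.
    pts_h = np.hstack([centerline_world, np.ones((len(centerline_world), 1))])
    pts_img = pts_h @ world_to_image.T
    # Convert to fixed-point integer coordinates for sub-pixel accurate drawing.
    pts_fp = (pts_img * (2 ** CV2_SHIFT)).astype(np.int32)
    cv2.polylines(img, [pts_fp], False, (0, 0, 0),
                  lineType=cv2.LINE_AA, shift=CV2_SHIFT)


raster_size = (224, 224)
img = 255 * np.ones((raster_size[0], raster_size[1], 3), dtype=np.uint8)
centerline = np.array([[0.0, 0.0], [5.0, 2.0], [10.0, 6.0]])  # toy world-frame waypoints
draw_centerline(img, centerline, build_world_to_image(centerline[0], raster_size))
img = img.astype(np.float32) / 255  # same [0, 1] normalization as above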