def test_find_all_polygon_bboxes_overlapping_query_bbox(
        polygons_and_gt_bboxes: Tuple[List[np.ndarray], List[np.ndarray]]) -> None:
    """Test for correctness of finding polygons which overlap with the query bbox."""
    poly_bboxes = np.array(
        [compute_point_cloud_bbox(poly) for poly in polygons_and_gt_bboxes[0]])
    query_bbox = np.array([-1.5, 0.5, 1.5, 1.5])
    overlap_indxs = find_all_polygon_bboxes_overlapping_query_bbox(
        poly_bboxes, query_bbox)

    gt_overlap_bool = np.array([True, True, False, True, True])
    gt_overlap_indxs = np.where(gt_overlap_bool)[0]
    assert np.allclose(overlap_indxs, gt_overlap_indxs)
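# Illustrative sketch only (not the library implementation): the kind of
# vectorized axis-aligned overlap test that
# find_all_polygon_bboxes_overlapping_query_bbox is expected to perform.
# `bboxes` is an (N, 4) array of [x_min, y_min, x_max, y_max] rows and
# `query_bbox` is a single (4,) box in the same layout.
def _overlapping_bbox_indices_sketch(bboxes: np.ndarray,
                                     query_bbox: np.ndarray) -> np.ndarray:
    overlaps_x = (bboxes[:, 0] <= query_bbox[2]) & (bboxes[:, 2] >= query_bbox[0])
    overlaps_y = (bboxes[:, 1] <= query_bbox[3]) & (bboxes[:, 3] >= query_bbox[1])
    return np.where(overlaps_x & overlaps_y)[0]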
def cuboid_to_2d_frustum_bbox(corners: np.ndarray, planes: List[np.ndarray],
                              K: np.ndarray) -> Optional[np.ndarray]:
    """Convert a 3D cuboid to a 2D frustum bounding box.

    We bring the 3D points into each camera, and do the clipping there.

    Args:
        corners: The corners to use as the corners of the frustum bounding box
        planes: List of 4-tuples for ax + by + cz = d representing planes in Hessian Normal Form
        K: 3x3 camera intrinsic matrix

    Returns:
        bbox_2d: Numpy array of shape (4,) with entries [x_min, y_min, x_max, y_max]
    """

    def clip_line_segment(pt_a: np.ndarray, pt_b: np.ndarray,
                          K: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """Clip a line segment based on two points and the camera intrinsic matrix.

        Args:
            pt_a: One 3D point vector constraining a line segment
            pt_b: One 3D point vector constraining a line segment
            K: A 3x3 array representing a camera intrinsic matrix

        Returns:
            a, b: A tuple of the clipped line segment 3D point vectors
        """
        pt_a = K.dot(pt_a)
        pt_a /= pt_a[2]

        pt_b = K.dot(pt_b)
        pt_b /= pt_b[2]

        return np.round(pt_a).astype(np.int32), np.round(pt_b).astype(np.int32)

    def clip_rect(selected_corners: np.ndarray,
                  clipped_uv_verts: np.ndarray) -> np.ndarray:
        """Clip a rectangle based on the selected corners and clipped vertex coordinates.

        Args:
            selected_corners: A list of selected corners
            clipped_uv_verts: A list of clipped vertices

        Returns:
            A new list of clipped vertices based on the selected corners
        """
        prev = selected_corners[-1]
        for corner in selected_corners:
            # interpolate line segments to the image border
            clip_prev, clip_corner = clip_segment_v3_plane_n(
                copy.deepcopy(prev), copy.deepcopy(corner),
                copy.deepcopy(planes))
            prev = corner

            if clip_prev is None or clip_corner is None:
                continue

            a, b = clip_line_segment(clip_prev, clip_corner, K)

            clipped_uv_verts = np.vstack(
                [clipped_uv_verts, a[:2].reshape(-1, 2)])
            clipped_uv_verts = np.vstack(
                [clipped_uv_verts, b[:2].reshape(-1, 2)])

        return clipped_uv_verts

    clipped_uv_verts = np.zeros((0, 2))
    # Draw the sides
    for i in range(4):
        corner_f = corners[i]  # front corner
        corner_b = corners[i + 4]  # back corner

        clip_c_f, clip_c_b = clip_segment_v3_plane_n(corner_f, corner_b,
                                                     planes)
        if clip_c_f is None or clip_c_b is None:
            continue

        a, b = clip_line_segment(clip_c_f, clip_c_b, K)

        clipped_uv_verts = np.vstack([clipped_uv_verts, a[:2].reshape(-1, 2)])
        clipped_uv_verts = np.vstack([clipped_uv_verts, b[:2].reshape(-1, 2)])

    # Draw front (first 4 corners) and rear (last 4 corners) rectangles(3d)/lines(2d)
    front_verts = clip_rect(corners[:4], clipped_uv_verts)
    back_verts = clip_rect(corners[4:], clipped_uv_verts)

    clipped_uv_verts = np.vstack(
        [clipped_uv_verts, front_verts.reshape(-1, 2)])
    clipped_uv_verts = np.vstack([clipped_uv_verts, back_verts.reshape(-1, 2)])

    if clipped_uv_verts.shape[0] == 0:
        return None

    bbox_2d = compute_point_cloud_bbox(clipped_uv_verts)
    return bbox_2d
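# Illustrative sketch only: cuboid_to_2d_frustum_bbox indexes corners[i] and
# corners[i + 4] as front/back pairs, so the 8 corners are assumed to be
# ordered with the front face first and the back face second, with matching
# vertex order on both faces. This hypothetical helper builds such an array
# for an axis-aligned cuboid centered at `center` with full extents `dims`.
def _axis_aligned_cuboid_corners_sketch(center: np.ndarray,
                                        dims: np.ndarray) -> np.ndarray:
    dx, dy, dz = dims / 2.0
    front = np.array(
        [[-dx, -dy, -dz], [dx, -dy, -dz], [dx, dy, -dz], [-dx, dy, -dz]])
    back = front + np.array([0.0, 0.0, 2.0 * dz])
    return np.vstack([front, back]) + center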
def test_compute_point_cloud_bbox_2d(point_cloud: np.ndarray,
                                     gt_bbox: np.ndarray) -> None:
    """Test for bounding box from point cloud functionality."""
    assert np.allclose(compute_point_cloud_bbox(point_cloud), gt_bbox)
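# Illustrative sketch only (not the library implementation): the axis-aligned
# bounding box contract that compute_point_cloud_bbox is tested against above,
# i.e. [x_min, y_min, x_max, y_max] over the first two columns of the points.
def _point_cloud_bbox_2d_sketch(point_cloud: np.ndarray) -> np.ndarray:
    x_min, y_min = point_cloud[:, :2].min(axis=0)
    x_max, y_max = point_cloud[:, :2].max(axis=0)
    return np.array([x_min, y_min, x_max, y_max])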
def create_lanes_xml(
    nusc_map: NuScenesMap,
    root: ET.Element,
    data: Dict[str, Iterable[Any]],
    filename: str,
    argo_dir: str,
    lane_dict: Dict[str, Iterable[int]],
    poly_dict: Dict[str, np.ndarray],
) -> None:
    """Fill the xml map file with lane centerlines.

    Also create the supporting files halluc_bbox_table.npy and tableidx_to_laneid_map.json.
    """
    # Id to assign to lanes in the new xml map file. We arbitrarily start with 8000000.
    # We make up new lane_ids since the original ones are non-numerical.
    global_way_id = 8000000
    # Map that links new lane_id in the xml to its original token in the json.
    way_to_lane_id = {}

    # map lane segment IDs to their index in the table
    tableidx_to_laneid_map = {}
    # array that holds xmin,ymin,xmax,ymax for each coord
    halluc_bbox_table = []
    table_idx_counter = 0

    # Iterate over the lanes to create the required xml and supporting files
    for way in data["lane"] + data["lane_connector"]:
        node = ET.SubElement(root, "way")
        if way["token"] not in way_to_lane_id:
            way_to_lane_id[way["token"]] = global_way_id
            global_way_id += 1
        curr_id = way_to_lane_id[way["token"]]
        node.set("lane_id", str(curr_id))

        traffic = ET.SubElement(node, "tag")
        traffic.set("k", "has_traffic_control")
        traffic.set("v", DEFAULT_TRAFFIC_CONTROL)

        turn = ET.SubElement(node, "tag")
        turn.set("k", "turn_direction")
        turn.set("v", DEFAULT_TURN_DIRECTION)

        intersection = ET.SubElement(node, "tag")
        intersection.set("k", "is_intersection")
        intersection.set("v", DEFAULT_IS_INTERSECTION)

        ln = ET.SubElement(node, "tag")
        ln.set("k", "l_neighbor_id")
        ln.set("v", DEFAULT_L_NEIGHBOR)

        rn = ET.SubElement(node, "tag")
        rn.set("k", "r_neighbor_id")
        rn.set("v", DEFAULT_R_NEIGHBOR)

        for waypoint in lane_dict[way["token"]]:
            nd = ET.SubElement(node, "nd")
            nd.set("ref", str(waypoint))

        predecessors = nusc_map.get_incoming_lane_ids(way["token"])
        successors = nusc_map.get_outgoing_lane_ids(way["token"])

        for pred_id in predecessors:
            pre = ET.SubElement(node, "tag")
            pre.set("k", "predecessor")
            if pred_id not in way_to_lane_id:
                way_to_lane_id[pred_id] = global_way_id
                global_way_id += 1
            int_pred_id = way_to_lane_id[pred_id]
            pre.set("v", str(int_pred_id))

        for succ_id in successors:
            succ = ET.SubElement(node, "tag")
            succ.set("k", "successor")
            if succ_id not in way_to_lane_id:
                way_to_lane_id[succ_id] = global_way_id
                global_way_id += 1
            int_succ_id = way_to_lane_id[succ_id]
            succ.set("v", str(int_succ_id))

        lane_id = way_to_lane_id[way["token"]]
        tableidx_to_laneid_map[table_idx_counter] = lane_id
        table_idx_counter += 1
        xmin, ymin, xmax, ymax = compute_point_cloud_bbox(
            poly_dict[way["polygon_token"]])
        halluc_bbox_table += [(xmin, ymin, xmax, ymax)]

    halluc_bbox_table = np.array(halluc_bbox_table)
    halluc_bbox_dict = {
        "tableidx_to_laneid_map": tableidx_to_laneid_map,
        "halluc_bbox_table": halluc_bbox_table,
    }
    np.save(
        f"{argo_dir}/{filename_to_id[filename]}_halluc_bbox_table.npy",
        halluc_bbox_table,
    )
    with open(
            f"{argo_dir}/{filename_to_id[filename]}_tableidx_to_laneid_map.json",
            "w") as outfile:
        json.dump(tableidx_to_laneid_map, outfile)

    tree = ET.ElementTree(root)
    with open(
            f"{argo_dir}/pruned_nuscenes_{filename_to_id[filename]}_vector_map.xml",
            "wb") as files:
        tree.write(files)
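# Illustrative sketch only: reading back the supporting files written by
# create_lanes_xml. `map_id` stands for whatever filename_to_id maps the
# source filename to; the exact ids are defined elsewhere in this module.
def _load_halluc_bbox_files_sketch(argo_dir, map_id):
    halluc_bbox_table = np.load(f"{argo_dir}/{map_id}_halluc_bbox_table.npy")
    with open(f"{argo_dir}/{map_id}_tableidx_to_laneid_map.json") as f:
        # Note: JSON round-trips the integer table indices as string keys.
        tableidx_to_laneid_map = json.load(f)
    return halluc_bbox_table, tableidx_to_laneid_map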
def main(data_dir):
    """For each trajectory csv in data_dir, vote on the lane segments containing each
    waypoint, enumerate candidate lane paths through the city lane graph, pick the
    path(s) with the most votes, and plot the result."""
    fnames = glob.glob(f"{data_dir}/*.csv")
    fnames = [Path(fname).name for fname in fnames]

    am = ArgoverseMap()
    city_graph_dict = build_city_lane_graphs(am)

    for fname in fnames:
        # # very hard cases
        # if int(Path(fname).stem) not in [
        #         166633, 150381, 11905, 136010, 49854, 27155]:
        #     continue
        # # hard cases
        # [174545, 119781, 210709, 139445, 11381, 175883, 122703, 166633]:
        # # 23333, 124414]:

        csv_fpath = f"{data_dir}/{fname}"
        traj, city_name = get_traj_and_city_name_from_csv(csv_fpath)

        plausible_start_ids = set()
        lane_vote_dict = defaultdict(int)
        for j, pt in enumerate(traj):
            contained_ids = am.get_lane_segments_containing_xy(
                pt[0], pt[1], city_name)
            for id in contained_ids:
                lane_vote_dict[id] += 1
                plausible_start_ids.add(id)

        plausible_start_ids = list(plausible_start_ids)
        plausible_start_ids.sort()
        paths = []
        # Start the path search from every plausible lane id.
        for start_id in plausible_start_ids:
            paths.extend(
                find_all_paths_from_src(city_graph_dict[city_name],
                                        str(start_id),
                                        max_depth=DFS_MAX_DEPTH))

        paths = convert_str_lists_to_int_lists(paths)
        paths = trim_paths_with_no_inliers(paths, lane_vote_dict)
        paths = remove_repeated_paths(paths)

        path_votes_dict = defaultdict(int)
        for path_id, path in enumerate(paths):
            for id in path:
                path_votes_dict[path_id] += lane_vote_dict[id]

        # find which path has the most inliers
        best_path_ids = get_dict_key_with_max_value(path_votes_dict)

        # if they are all tied, take the shortest
        best_path_lengths = [len(paths[id]) for id in best_path_ids]
        min_best_path_length = min(best_path_lengths)
        best_path_ids = [
            id for id in best_path_ids
            if len(paths[id]) == min_best_path_length
        ]

        fig = plt.figure(figsize=(15, 15))
        plt.axis("off")
        ax = fig.add_subplot(111)

        plot_all_nearby_lanes(am, ax, city_name, np.mean(traj[:, 0]),
                              np.mean(traj[:, 1]))

        colors = ["g", "b", "r", "m"]
        # then plot this path
        for best_path_id in best_path_ids:
            color = colors[best_path_id % len(colors)]
            print(
                "Candidate: ",
                paths[best_path_id],
                " with ",
                path_votes_dict[best_path_id],
            )
            for lane_id in paths[best_path_id]:
                polygon_3d = am.get_lane_segment_polygon(lane_id, city_name)
                plot_lane_segment_patch(polygon_3d[:, :2], ax, color=color)
                ax.text(np.mean(polygon_3d[:, 0]), np.mean(polygon_3d[:, 1]),
                        f"{lane_id}")
            # just use one for now
            break

        # draw_lane_ids(plausible_start_ids, am, ax, city_name)
        all_nearby_lane_ids = []
        for path in paths:
            all_nearby_lane_ids.extend(path)
        draw_lane_ids(set(all_nearby_lane_ids), am, ax, city_name)

        draw_traj(traj, ax)

        xmin, ymin, xmax, ymax = compute_point_cloud_bbox(traj)
        WINDOW_RADIUS_MARGIN = 10
        xmin -= WINDOW_RADIUS_MARGIN
        xmax += WINDOW_RADIUS_MARGIN
        ymin -= WINDOW_RADIUS_MARGIN
        ymax += WINDOW_RADIUS_MARGIN
        ax.set_xlim([xmin, xmax])
        ax.set_ylim([ymin, ymax])
        plt.savefig(
            f"/Users/johnlamb/Documents/argoverse-api/temp_files_oracle/{Path(fname).stem}.png"
        )
        plt.close("all")
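# Illustrative sketch only (assumed behaviour, not the actual helper): main()
# treats get_dict_key_with_max_value as returning every key that attains the
# maximum value, which is why it still breaks ties by path length afterwards.
def _keys_with_max_value_sketch(votes):
    max_votes = max(votes.values())
    return [path_id for path_id, num_votes in votes.items() if num_votes == max_votes]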