def create_gt_parts(self, root=None):
    """Extract the lidar points belonging to each labeled object and save them
    as per-object ``.bin`` files under ``<root>/<subset>/<log>/gt_parts/``.

    For every sample yielded by ``iter(self)`` a 7-DoF box is built for each
    label, the points inside each box are gathered, and clusters with at
    least 10 points are mean-centered and written to
    ``<idx>_<class>_<track_id>.bin``.

    :param root: dataset root directory; defaults to ``Path(self.root_path)``.
    """
    if root is None:
        root = Path(self.root_path)
    for idx, subset, lidar, label, log in iter(self):
        save_path = root / subset / log.current_log / 'gt_parts'
        save_path.mkdir(parents=True, exist_ok=True)
        # One 7-DoF box per object: [x, y, z, w, l, h, yaw]
        # NOTE(review): dims are ordered (width, length, height) here — other
        # variants in this file use (l, w, h); confirm against points_in_boxes_cpu.
        gt_boxes = np.zeros((len(label), 7))
        for i, obj in enumerate(label):
            loc = obj.translation
            quat = obj.quaternion
            dim = (obj.width, obj.length, obj.height)
            # 'zyx' Euler order puts the rotation about z (yaw) in rot[0]
            rot = R.from_quat(quat).as_euler('zyx')
            gt_boxes[i] = np.hstack((loc, dim, rot[0]))
        # point_indices[i] is a mask over all lidar points for box i
        point_indices = roiaware_pool3d_utils.points_in_boxes_cpu(
            torch.from_numpy(lidar[:, :3]),
            torch.from_numpy(gt_boxes)).numpy()
        for i, obj in enumerate(label):
            filename = save_path / '{}_{}_{}.bin'.format(
                idx, obj.label_class, obj.track_id)
            gt_points = lidar[point_indices[i] > 0]
            # Skip sparse clusters; center the kept ones at their centroid.
            if len(gt_points) >= 10:
                gt_points -= gt_points.mean(axis=0)
                with open(filename, 'wb') as f:
                    gt_points.tofile(f)
def create_groundtruth_database(self, info_path=None, used_classes=None, split='train'):
    """Build a ground-truth database for data augmentation.

    Reads the dataset info pickle, extracts the lidar points inside each
    annotated box into ``gt_database*/<sample>_<class>_<i>.bin`` (points are
    shifted to the box center), and writes per-class metadata to
    ``kitti_dbinfos_<split>.pkl``.

    :param info_path: path to the ``kitti_infos_<split>.pkl`` file to read.
    :param used_classes: iterable of class names to keep in the db info;
        ``None`` keeps all classes.
    :param split: dataset split; 'train' uses ``gt_database``, anything else
        uses ``gt_database_<split>``.
    """
    import torch
    database_save_path = Path(self.root_path) / (
        'gt_database' if split == 'train' else ('gt_database_%s' % split))
    db_info_save_path = Path(self.root_path) / ('kitti_dbinfos_%s.pkl' % split)
    database_save_path.mkdir(parents=True, exist_ok=True)

    all_db_infos = {}
    with open(info_path, 'rb') as f:
        infos = pickle.load(f)

    for k in range(len(infos)):
        print('gt_database sample: %d/%d' % (k + 1, len(infos)))
        info = infos[k]
        sample_idx = info['point_cloud']['lidar_idx']
        points = self.get_lidar(sample_idx)
        annos = info['annos']
        names = annos['name']
        difficulty = annos['difficulty']
        bbox = annos['bbox']
        gt_boxes = annos['gt_boxes_lidar']

        num_obj = gt_boxes.shape[0]
        # point_indices[i] marks which lidar points fall inside box i
        point_indices = roiaware_pool3d_utils.points_in_boxes_cpu(
            torch.from_numpy(points[:, 0:3]), torch.from_numpy(gt_boxes)
        ).numpy()  # (nboxes, npoints)

        for i in range(num_obj):
            filename = '%s_%s_%d.bin' % (sample_idx, names[i], i)
            filepath = database_save_path / filename
            gt_points = points[point_indices[i] > 0]
            # Shift points into the box-centered frame.
            gt_points[:, :3] -= gt_boxes[i, :3]
            # FIX: ndarray.tofile writes raw bytes, so the file must be opened
            # in binary mode; text mode ('w') corrupts the output on Windows.
            with open(filepath, 'wb') as f:
                gt_points.tofile(f)

            if (used_classes is None) or names[i] in used_classes:
                db_path = str(filepath.relative_to(self.root_path))  # gt_database/xxxxx.bin
                db_info = {'name': names[i], 'path': db_path,
                           'image_idx': sample_idx, 'gt_idx': i,
                           'box3d_lidar': gt_boxes[i],
                           'num_points_in_gt': gt_points.shape[0],
                           'difficulty': difficulty[i], 'bbox': bbox[i],
                           'score': annos['score'][i]}
                if names[i] in all_db_infos:
                    all_db_infos[names[i]].append(db_info)
                else:
                    all_db_infos[names[i]] = [db_info]

    for k, v in all_db_infos.items():
        print('Database %s: %d' % (k, len(v)))

    with open(db_info_save_path, 'wb') as f:
        pickle.dump(all_db_infos, f)
def create_groundtruth_database(self, info_path=None, used_classes=None, split='train'):
    """Construct a database of ground truths including their points.

    This is useful later for data augmentation. The result is a
    ``kitti_dbinfos_train.pkl`` that contains info about those gts, as well
    as a dir ``gt_database`` containing ``xxxxx.bin`` files, where each bin
    file contains the points of one gt object. The dbinfo is of the format::

        Car        [car0, car1, ...]
        Cyclist    [cyclist0, cyclist1, ...]
        Pedestrian [p0, p1, ...]

    where each object is a dict containing its metadata and the path to its
    lidar points.

    :param info_path: path to the info pickle to read.
    :param used_classes: the classes of objects that will be written to the
        dbinfo; ``None`` keeps all classes.
    :param split: dataset split name.
    :return: None (results are written to disk).
    """
    # the directory to save each gt's points
    database_save_path = Path(
        self.root_path) / ('gt_database' if split == 'train' else ('gt_database_%s' % split))
    # the pkl db info containing metadata about each gt
    db_info_save_path = Path(
        self.root_path) / ('kitti_dbinfos_%s.pkl' % split)
    database_save_path.mkdir(parents=True, exist_ok=True)

    all_db_infos = {}
    with open(info_path, 'rb') as f:
        infos = pickle.load(f)

    for k in range(len(infos)):
        print('gt_database sample: %d/%d' % (k + 1, len(infos)))
        info = infos[k]
        sample_idx = info['point_cloud']['lidar_idx']
        points = self.get_lidar(sample_idx)
        annos = info['annos']
        names = annos['name']
        difficulty = annos['difficulty']
        bbox = annos['bbox']
        gt_boxes = annos['gt_boxes_lidar']

        num_obj = gt_boxes.shape[0]
        # point_indices[i] is a mask that finds all the points that belong to object i
        point_indices = roiaware_pool3d_utils.points_in_boxes_cpu(
            torch.from_numpy(points[:, 0:3]),
            torch.from_numpy(gt_boxes)).numpy()  # (nboxes, npoints)

        for i in range(num_obj):
            filename = '%s_%s_%d.bin' % (sample_idx, names[i], i)
            filepath = database_save_path / filename
            gt_points = points[point_indices[i] > 0]
            # move the points to the center origin
            gt_points[:, :3] -= gt_boxes[i, :3]
            # FIX: open in binary mode — ndarray.tofile writes raw bytes and
            # text mode ('w') is incorrect (breaks on Windows).
            with open(filepath, 'wb') as f:
                gt_points.tofile(f)

            if (used_classes is None) or names[i] in used_classes:
                db_path = str(filepath.relative_to(
                    self.root_path))  # gt_database/xxxxx.bin
                db_info = {
                    'name': names[i],
                    'path': db_path,
                    'image_idx': sample_idx,
                    'gt_idx': i,
                    'box3d_lidar': gt_boxes[i],
                    'num_points_in_gt': gt_points.shape[0],
                    'difficulty': difficulty[i],
                    'bbox': bbox[i],
                    'score': annos['score'][i]
                }
                if names[i] in all_db_infos:
                    all_db_infos[names[i]].append(db_info)
                else:
                    all_db_infos[names[i]] = [db_info]

    # report the number of collected objects per class
    for k, v in all_db_infos.items():
        print('Database %s: %d' % (k, len(v)))

    # write the db to disk
    with open(db_info_save_path, 'wb') as f:
        pickle.dump(all_db_infos, f)
def lidar_callback(msg):
    """ROS callback: run 3D detection on an incoming point cloud and publish
    the results as RViz markers, a republished cloud, and an ObjectArray.

    :param msg: sensor_msgs/PointCloud2 input cloud.
    """
    t1 = time.time()
    msg_cloud = ros_numpy.point_cloud2.pointcloud2_to_array(msg)
    frame_id = msg.header.frame_id
    np_p = get_xyz_points(msg_cloud, True)
    print(msg.header.stamp.to_sec())
    print(rospy.Time.now().to_sec())
    print(np_p.size)
    scores, dt_box_lidar, types = proc_1.run(np_p)
    bbox_corners3d = boxes_to_corners_3d(dt_box_lidar)

    # Clear all markers from the previous frame before publishing new ones.
    empty_markers = MarkerArray()
    clear_marker = Marker()
    clear_marker.header.stamp = rospy.Time.now()
    clear_marker.header.frame_id = frame_id
    clear_marker.ns = "objects"
    clear_marker.id = 0
    clear_marker.action = clear_marker.DELETEALL
    clear_marker.lifetime = rospy.Duration()
    empty_markers.markers.append(clear_marker)
    pub_bbox_array.publish(empty_markers)

    # The 12 edges of a 3D box as pairs of corner indices; each pair becomes
    # one segment of a LINE_LIST marker. This replaces the 72 hand-written
    # per-coordinate assignments with the identical point sequence.
    box_edges = ((0, 1), (1, 2), (2, 3), (3, 0),
                 (4, 5), (5, 6), (6, 7), (7, 4),
                 (0, 4), (1, 5), (2, 6), (3, 7))

    bbox_arry = MarkerArray()
    if scores.size != 0:
        for i in range(scores.size):
            bbox = Marker()
            bbox.type = Marker.LINE_LIST
            bbox.ns = "objects"
            bbox.id = i
            box = bbox_corners3d[i]
            for a, b in box_edges:
                for corner in (a, b):
                    p = Point()
                    p.x = float(box[corner, 0])
                    p.y = float(box[corner, 1])
                    p.z = float(box[corner, 2])
                    bbox.points.append(p)
            bbox.scale.x = 0.1
            bbox.color.a = 1.0
            bbox.color.r = 1.0
            bbox.color.g = 0.0
            bbox.color.b = 0.0
            # stamp with the input cloud's time so markers align with the data
            bbox.header.stamp = msg.header.stamp
            bbox.header.frame_id = frame_id
            bbox_arry.markers.append(bbox)

            # add a text label (class name + score) floating above the box
            text_show = Marker()
            text_show.type = Marker.TEXT_VIEW_FACING
            text_show.ns = "objects"
            text_show.header.stamp = msg.header.stamp
            text_show.header.frame_id = frame_id
            text_show.id = i + scores.size
            text_show.pose = Pose(position=Point(
                float(dt_box_lidar[i][0]),
                float(dt_box_lidar[i][1]),
                float(dt_box_lidar[i][2]) + 2.0))
            text_show.text = str(
                proc_1.net.class_names[types[i] - 1]) + ' ' + str(
                    round(scores[i], 2))
            text_show.action = Marker.ADD
            text_show.color.a = 1.0
            text_show.color.r = 1.0
            text_show.color.g = 1.0
            text_show.color.b = 0.0
            text_show.scale.z = 1.5
            bbox_arry.markers.append(text_show)

    print("total callback time: ", time.time() - t1)
    pub_bbox_array.publish(bbox_arry)
    cloud_array = xyz_array_to_pointcloud2(np_p[:, :3], rospy.Time.now(), frame_id)
    pub_point2_.publish(cloud_array)

    ## publish to fusion
    msg_lidar_objects = ObjectArray()
    msg_lidar_objects.header.stamp = msg.header.stamp
    msg_lidar_objects.header.frame_id = frame_id

    # get points in each box
    t3 = time.time()
    num_obj = dt_box_lidar.shape[0]
    point_indices = roiaware_pool3d_utils.points_in_boxes_cpu(
        torch.from_numpy(np_p[:, 0:3]),
        torch.from_numpy(dt_box_lidar)).numpy()  # (nboxes, npoints)
    t4 = time.time()
    print(f"get points in each box cost time: {t4 - t3}")
    print('')

    if scores.size != 0:
        for i in range(scores.size):
            lidar_object = Object()
            lidar_object.header.stamp = msg.header.stamp
            lidar_object.header.frame_id = frame_id
            lidar_object.id = i
            lidar_object.pose.position.x = float(dt_box_lidar[i][0])
            lidar_object.pose.position.y = float(dt_box_lidar[i][1])
            lidar_object.pose.position.z = float(dt_box_lidar[i][2])
            lidar_object.pose.orientation = yaw2quaternion(
                float(dt_box_lidar[i][6]))
            lidar_object.shape.dimensions.append(float(dt_box_lidar[i][3]))
            lidar_object.shape.dimensions.append(float(dt_box_lidar[i][4]))
            lidar_object.shape.dimensions.append(float(dt_box_lidar[i][5]))
            lidar_object.classification = types[i]
            # attach the raw points inside this detection as a polygon
            gt_points = np_p[point_indices[i] > 0]
            for pp in range(gt_points.shape[0]):
                temp_point = Point32()
                temp_point.x = gt_points[pp][0]
                temp_point.y = gt_points[pp][1]
                temp_point.z = gt_points[pp][2]
                lidar_object.polygon.points.append(temp_point)
            msg_lidar_objects.objects.append(lidar_object)

    pub_object_array.publish(msg_lidar_objects)
def create_groundtruth_database(root_path=None, used_classes=None):
    """Build the PlusAI ground-truth database for augmentation.

    Walks every sequence listed in ``<root>/all.txt``, converts each frame's
    3D annotations into car-coordinate boxes [x, y, z, l, w, h, rz], crops
    the lidar points inside each box into ``gt_database/<frame>_<class>_<i>.bin``
    (shifted to the box center), and saves per-class metadata to
    ``plusai_gt_dbinfos.pkl``.

    :param root_path: dataset root directory.
    :param used_classes: iterable of class names to keep in the db info;
        ``None`` keeps all classes.
    """
    database_save_path = Path(root_path) / 'gt_database'
    db_info_save_path = Path(root_path) / 'plusai_gt_dbinfos.pkl'
    database_save_path.mkdir(parents=True, exist_ok=True)
    all_db_infos = {}

    index_file = Path(root_path) / 'all.txt'
    with open(index_file, 'r') as f:
        dir_names = f.read().splitlines()

    frame_idx = 0
    for dir_name in tqdm(dir_names):
        root_json_file = Path(root_path) / dir_name / (dir_name + '.json')
        with open(root_json_file, 'r') as f:
            root_json = json.load(f)
        num_frames = len(root_json['labeling'])
        # num_frames = 1  # for DEBUG
        for k in tqdm(range(num_frames)):
            # Load object json file
            object_json_filename = root_json['labeling'][k]['filename']
            object_json_file = Path(root_path) / object_json_filename
            with open(object_json_file) as f:
                object_json = json.load(f)

            # Get calibration
            calib_info = {
                'car_heading': object_json['car_heading'],
                'car_position': object_json['car_position'],
                'device_heading': object_json['device_heading'],
                'device_position': object_json['device_position'],
                'pointcloud_info': object_json['pointcloud_info'],
                'images': object_json['images']
            }
            calib = Calibration(calib_info)

            # Get lidar point cloud and move it into the car frame
            lidar_path = Path(
                root_path) / object_json['pointcloud_info']['pointcloud_url']
            lidar_pts = get_lidar(lidar_path)
            lidar_pts_car_coord = calib.lidar2car(lidar_pts)

            # Get ground truth annotations
            gt_names = []
            gt_boxes = []
            for annos in root_json['labeling'][k]['annotations_3d']:
                obj = Object3D(annos)
                # Distinguish the categories of different sizes of vehicles
                # by length
                if obj.l < 5.0:
                    obj.type = 'Car'
                elif obj.l < 8.0:
                    obj.type = 'Truck'
                else:
                    obj.type = 'Tram'
                gt_names.append(obj.type)
                # Calculate location in vehicle coordinate.
                # FIX: np.float was removed in NumPy 1.24; it was always an
                # alias of the builtin float, so use float directly.
                location_world = np.array([obj.t], dtype=float)
                location_car_coord = np.squeeze(
                    calib.world2car(location_world))
                dimensions = np.array([obj.l, obj.w, obj.h], dtype=float)
                rotz_car_coord = calib.get_object_rotz(obj)
                rotz_car_coord = np.array([rotz_car_coord], dtype=float)
                # Ground truth box: [x, y, z, l, w, h, rz]
                gt_box_car_coord = np.concatenate(
                    [location_car_coord, dimensions, rotz_car_coord], axis=0)
                gt_boxes.append(gt_box_car_coord)
            gt_boxes = np.array(gt_boxes, dtype=float)

            num_obj = gt_boxes.shape[0]
            # point_indices[i] marks the points inside box i
            point_indices = roiaware_pool3d_utils.points_in_boxes_cpu(
                torch.from_numpy(lidar_pts_car_coord),
                torch.from_numpy(gt_boxes)).numpy()  # (nboxes, npoints)

            for i in range(num_obj):
                filename = '%06d_%s_%d.bin' % (frame_idx, gt_names[i], i)
                filepath = database_save_path / filename
                gt_points = lidar_pts_car_coord[point_indices[i] > 0]
                gt_points[:, :3] -= gt_boxes[i, :3]
                # FIX: binary mode — ndarray.tofile writes raw bytes and
                # text mode ('w') is incorrect (breaks on Windows).
                with open(filepath, 'wb') as f:
                    gt_points.tofile(f)

                if (used_classes is None) or gt_names[i] in used_classes:
                    db_path = str(filepath.relative_to(
                        root_path))  # gt_database/xxxxx.bin
                    db_info = {
                        'name': gt_names[i],
                        'path': db_path,
                        'image_idx': frame_idx,
                        'gt_idx': i,
                        'box3d_lidar': gt_boxes[i],
                        'num_points_in_gt': gt_points.shape[0]
                    }
                    if gt_names[i] in all_db_infos:
                        all_db_infos[gt_names[i]].append(db_info)
                    else:
                        all_db_infos[gt_names[i]] = [db_info]
            frame_idx += 1

    for k, v in all_db_infos.items():
        print('Database %s: %d' % (k, len(v)))

    with open(db_info_save_path, 'wb') as f:
        pickle.dump(all_db_infos, f)
    print("Ground truth database info saved in %s." % db_info_save_path)