def test_save_pickle_to_disk() -> None:
    """Save a dictionary to a pickle file.

    The file should contain the same Python dictionary as above:

    {'a': 1,
     'b': '2',
     'c': [9, 8, 7, 6, 5, 'd', 'c', 'b', 'a'],
     'd': np.array([True, False, True])}
    """
    pkl_fpath = _TEST_DIR / "test_data/pkl_test_file.pkl"
    intended_dict = {
        "a": 1,
        "b": "2",
        "c": [9, 8, 7, 6, 5, "d", "c", "b", "a"],
        "d": np.array([True, False, True]),
    }
    save_pkl_dictionary(pkl_fpath, intended_dict)

    with open(pkl_fpath, "rb") as f:
        loaded_pkl_dict = pkl.load(f)
    dictionaries_are_equal(intended_dict, loaded_pkl_dict)
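
# A minimal sketch of a comparison helper like dictionaries_are_equal, assuming
# it must handle numpy-array values, where `==` is elementwise and so cannot be
# asserted on directly. This is an illustrative assumption, not necessarily the
# repo's actual implementation.
from typing import Any, Dict

import numpy as np


def dictionaries_are_equal(dict_a: Dict[str, Any], dict_b: Dict[str, Any]) -> None:
    """Assert that two dictionaries are equal, comparing array values elementwise."""
    assert dict_a.keys() == dict_b.keys()
    for key, value_a in dict_a.items():
        value_b = dict_b[key]
        if isinstance(value_a, np.ndarray):
            assert np.array_equal(value_a, value_b)
        else:
            assert value_a == value_b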
def __init__(
    self,
    dataset_dir: str,
    labels_dir: str,
    experiment_prefix: str,
    bboxes_3d: bool = False,
    save: bool = True,
) -> None:
    """Initialize PerFrameLabelAccumulator object for use with tracking benchmark data.

    Args:
        dataset_dir (str): Dataset directory.
        labels_dir (str): Labels directory.
        experiment_prefix (str): Prefix for the experiment to use.
        bboxes_3d (bool, optional): Whether to use 3d bounding boxes (True) or 2d bounding boxes (False).
        save (bool, optional): Whether to accumulate per-log data and save the dictionaries to disk.
    """
    self.bboxes_3d = bboxes_3d

    self.dataset_dir = dataset_dir
    self.labels_dir = labels_dir
    tmp_dir = tempfile.gettempdir()
    per_city_traj_dict_fpath = f"{tmp_dir}/per_city_traj_dict_{experiment_prefix}.pkl"
    log_egopose_dict_fpath = f"{tmp_dir}/log_egopose_dict_{experiment_prefix}.pkl"
    log_timestamp_dict_fpath = f"{tmp_dir}/log_timestamp_dict_{experiment_prefix}.pkl"

    # coordinate system is the map world frame
    self.per_city_traj_dict: Dict[str, List[Tuple[np.ndarray, str]]] = {
        "MIA": [],
        "PIT": [],
    }  # all the trajectories for these 2 cities
    self.log_egopose_dict: Dict[str, Dict[int, Dict[str, np.ndarray]]] = {}
    self.log_timestamp_dict: Dict[str, Dict[int, List[FrameRecord]]] = {}
    self.sdb = SynchronizationDB(self.dataset_dir)

    if save:
        self.accumulate_per_log_data()
        save_pkl_dictionary(per_city_traj_dict_fpath, self.per_city_traj_dict)
        save_pkl_dictionary(log_egopose_dict_fpath, self.log_egopose_dict)
        save_pkl_dictionary(log_timestamp_dict_fpath, self.log_timestamp_dict)
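
# A hypothetical usage sketch (the dataset paths and prefix are illustrative
# assumptions, not taken from the repo): constructing the accumulator with
# save=True triggers accumulate_per_log_data() and writes the three pickled
# dictionaries into tempfile.gettempdir().
accumulator = PerFrameLabelAccumulator(
    dataset_dir="argoverse-tracking/sample",
    labels_dir="argoverse-tracking/sample",
    experiment_prefix="demo",
    bboxes_3d=True,
)
# Map-frame trajectories accumulated for Miami, keyed by city abbreviation.
mia_trajectories = accumulator.per_city_traj_dict["MIA"]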
def make_att_files(root_dir: str) -> None:
    """Write a .pkl file with difficulty attributes per track.

    Args:
        root_dir (str): Root directory containing the per-split log folders.
    """
    path_output_vis = "vis_output"
    filename_output = "att_file.pkl"

    if not os.path.exists(path_output_vis):
        os.mkdir(path_output_vis)

    list_folders = ["test"]
    list_name_class = ["VEHICLE", "PEDESTRIAN"]
    count_track = 0
    dict_att_all: Dict[str, Any] = {}

    for name_folder in list_folders:
        dict_att_all[name_folder] = {}
        list_log_folders = glob.glob(os.path.join(root_dir, name_folder, "*"))

        for ind_log, path_log in enumerate(list_log_folders):
            id_log = Path(path_log).name
            print(f"{name_folder} {id_log} {ind_log}/{len(list_log_folders)}")

            if check_track_label_folder:
                list_path_label_persweep = glob.glob(
                    os.path.join(path_log, "per_sweep_annotations_amodal", "*")
                )
                list_path_label_persweep.sort()

                # Group per-sweep annotations by track UUID.
                dict_track_labels: Dict[str, Any] = {}
                for path_label_persweep in list_path_label_persweep:
                    data = read_json_file(path_label_persweep)
                    for data_obj in data:
                        id_obj = data_obj["track_label_uuid"]
                        if id_obj not in dict_track_labels.keys():
                            dict_track_labels[id_obj] = []
                        dict_track_labels[id_obj].append(data_obj)

                data_amodal: Dict[str, Any] = {}
                for key in dict_track_labels.keys():
                    dict_amodal: Dict[str, Any] = {}
                    data_amodal[key] = dict_amodal
                    data_amodal[key]["label_class"] = dict_track_labels[key][0]["label_class"]
                    data_amodal[key]["uuid"] = dict_track_labels[key][0]["track_label_uuid"]
                    data_amodal[key]["log_id"] = id_log
                    data_amodal[key]["track_label_frames"] = dict_track_labels[key]

            argoverse_loader = ArgoverseTrackingLoader(os.path.join(root_dir, name_folder))
            data_log = argoverse_loader.get(id_log)
            list_lidar_timestamp = data_log.lidar_timestamp_list

            dict_tracks: Dict[str, Any] = {}
            for id_track in data_amodal.keys():
                data = data_amodal[id_track]
                if data["label_class"] not in list_name_class:
                    continue

                data_per_frame = data["track_label_frames"]

                dict_per_track: Dict[str, Any] = {}
                dict_tracks[id_track] = dict_per_track
                dict_tracks[id_track]["ind_lidar_min"] = -1
                dict_tracks[id_track]["ind_lidar_max"] = -1
                length_log = len(list_lidar_timestamp)
                dict_tracks[id_track]["list_city_se3"] = [None] * length_log
                dict_tracks[id_track]["list_bbox"] = [None] * length_log
                count_track += 1

                dict_tracks[id_track]["list_center"] = np.full([length_log, 3], np.nan)
                dict_tracks[id_track]["list_center_w"] = np.full([length_log, 3], np.nan)
                dict_tracks[id_track]["list_dist"] = np.full([length_log], np.nan)
                dict_tracks[id_track]["exists"] = np.full([length_log], False)

                for box in data_per_frame:
                    if box["timestamp"] in list_lidar_timestamp:
                        ind_lidar = list_lidar_timestamp.index(box["timestamp"])
                    else:
                        continue

                    if dict_tracks[id_track]["ind_lidar_min"] == -1:
                        dict_tracks[id_track]["ind_lidar_min"] = ind_lidar

                    dict_tracks[id_track]["ind_lidar_max"] = max(
                        ind_lidar, dict_tracks[id_track]["ind_lidar_max"]
                    )

                    center = np.array(
                        [box["center"]["x"], box["center"]["y"], box["center"]["z"]]
                    )
                    city_SE3_egovehicle = argoverse_loader.get_pose(ind_lidar, id_log)
                    if city_SE3_egovehicle is None:
                        print("Pose not found!")
                        continue

                    # Transform the box center from the egovehicle frame to the city frame.
                    center_w = city_SE3_egovehicle.transform_point_cloud(center[np.newaxis, :])[0]

                    dict_tracks[id_track]["list_center"][ind_lidar] = center
                    dict_tracks[id_track]["list_center_w"][ind_lidar] = center_w
                    dict_tracks[id_track]["list_dist"][ind_lidar] = np.linalg.norm(center[0:2])
                    dict_tracks[id_track]["exists"][ind_lidar] = True
                    dict_tracks[id_track]["list_city_se3"][ind_lidar] = city_SE3_egovehicle
                    dict_tracks[id_track]["list_bbox"][ind_lidar] = box

                length_track = (
                    dict_tracks[id_track]["ind_lidar_max"]
                    - dict_tracks[id_track]["ind_lidar_min"]
                    + 1
                )

                assert not (
                    dict_tracks[id_track]["ind_lidar_max"] == -1
                    and dict_tracks[id_track]["ind_lidar_min"] == -1
                ), "zero-length track"
                dict_tracks[id_track]["length_track"] = length_track

                (
                    dict_tracks[id_track]["list_vel"],
                    dict_tracks[id_track]["list_acc"],
                ) = compute_v_a(dict_tracks[id_track]["list_center_w"])
                dict_tracks[id_track]["num_missing"] = (
                    dict_tracks[id_track]["length_track"]
                    - dict_tracks[id_track]["exists"].sum()
                )
                dict_tracks[id_track]["difficult_att"] = []

                # get scalar velocity and acceleration per timestamp as 2-norm of (x, y) components
                vel_abs = np.linalg.norm(dict_tracks[id_track]["list_vel"][:, 0:2], axis=1)
                acc_abs = np.linalg.norm(dict_tracks[id_track]["list_acc"][:, 0:2], axis=1)

                ind_valid = np.nonzero(1 - np.isnan(dict_tracks[id_track]["list_dist"]))[0]
                ind_close = np.nonzero(
                    dict_tracks[id_track]["list_dist"][ind_valid] < NEAR_DISTANCE_THRESH
                )[0]

                if len(ind_close) > 0:
                    ind_close_max = ind_close.max() + 1
                    ind_close_min = ind_close.min()

                # Only compute "fast" and "occluded" tags for near objects.
                # The thresholds are not very meaningful for faraway objects,
                # since their tracks are usually pretty short.
                if dict_tracks[id_track]["list_dist"][ind_valid].min() > NEAR_DISTANCE_THRESH:
                    dict_tracks[id_track]["difficult_att"].append("far")
                else:
                    is_short_len_track1 = (
                        dict_tracks[id_track]["length_track"] < SHORT_TRACK_LENGTH_THRESH
                    )
                    is_short_len_track2 = (
                        dict_tracks[id_track]["exists"].sum() < SHORT_TRACK_COUNT_THRESH
                    )
                    if is_short_len_track1 or is_short_len_track2:
                        dict_tracks[id_track]["difficult_att"].append("short")
                    else:
                        if (
                            (ind_close_max - ind_close_min)
                            - dict_tracks[id_track]["exists"][ind_close_min:ind_close_max].sum()
                        ) > MAX_OCCLUSION_PCT:
                            dict_tracks[id_track]["difficult_att"].append("occ")

                        if np.quantile(vel_abs[ind_valid][ind_close], 0.9) > FAST_TRACK_THRESH:
                            dict_tracks[id_track]["difficult_att"].append("fast")

                if len(dict_tracks[id_track]["difficult_att"]) == 0:
                    dict_tracks[id_track]["difficult_att"].append("easy")

            if visualize:
                for ind_lidar, timestamp_lidar in enumerate(list_lidar_timestamp):
                    list_bboxes = []
                    list_difficulty_att = []

                    for id_track in dict_tracks.keys():
                        if dict_tracks[id_track]["exists"][ind_lidar]:
                            list_bboxes.append(dict_tracks[id_track]["list_bbox"][ind_lidar])
                            list_difficulty_att.append(dict_tracks[id_track]["difficult_att"])

                    path_lidar = os.path.join(path_log, "lidar", f"PC_{timestamp_lidar}.ply")
                    pc = np.asarray(o3d.io.read_point_cloud(path_lidar).points)
                    save_bev_img(
                        path_output_vis,
                        list_bboxes,
                        list_difficulty_att,
                        f"argoverse_{name_folder}",
                        id_log,
                        timestamp_lidar,
                        pc,
                    )

            # Keep only the difficulty attributes; drop all intermediate per-track data.
            for id_track in dict_tracks.keys():
                list_key = list(dict_tracks[id_track].keys()).copy()
                for key in list_key:
                    if key != "difficult_att":
                        del dict_tracks[id_track][key]

            dict_att_all[name_folder][id_log] = dict_tracks

    save_pkl_dictionary(filename_output, dict_att_all)