def __init__(self, dataset_root_directory : str, context_indices : int = 5, context_time : float = 1.75, prediction_time : float = 1.75, fake_length=3000):
    """Build a pose/velocity dataset from recorded F1 UDP telemetry.

    Loads per-car positions, velocities, and orientation quaternions from the
    motion packets under ``<dataset_root_directory>/udp_data``, plus per-car
    result statuses from the lap packets. The extracted arrays are cached as
    ``.npy`` files under ``__pose_vel_cache__`` so subsequent constructions
    skip the (slow) packet parsing.

    Args:
        dataset_root_directory: Root folder containing ``udp_data`` and
            ``f1_dataset_config.yaml``.
        context_indices: Stored on the instance; used elsewhere in the class.
        context_time: Seconds of history clipped off the start of the usable
            time range (plus a 0.5s margin).
        prediction_time: Seconds clipped off the end of the usable time range
            (plus a 0.5s margin).
        fake_length: Stored as the dataset's nominal length
            (presumably returned by ``__len__`` — TODO confirm).
    """
    super(PoseVelocityDataset, self).__init__()
    self.dataset_root_directory = dataset_root_directory
    self.udp_data_dir = os.path.join(self.dataset_root_directory, "udp_data")
    self.motion_data_dir = os.path.join(self.udp_data_dir, "motion_packets")
    self.lap_data_dir = os.path.join(self.udp_data_dir, "lap_packets")
    self.cache_dir = os.path.join(dataset_root_directory, "__pose_vel_cache__")
    self.context_indices = context_indices
    self.context_time = context_time
    self.prediction_time = prediction_time
    with open(os.path.join(self.dataset_root_directory, "f1_dataset_config.yaml"), "r") as f:
        self.dataset_config = yaml.load(f, Loader=yaml.SafeLoader)
    if os.path.isdir(self.cache_dir):
        # Fast path: previously extracted arrays are cached on disk.
        motion_packet_times = np.load(os.path.join(self.cache_dir, "motion_packet_times.npy"))
        all_positions = np.load(os.path.join(self.cache_dir, "positions.npy"))
        all_velocities = np.load(os.path.join(self.cache_dir, "velocities.npy"))
        all_quaternions = np.load(os.path.join(self.cache_dir, "quaternions.npy"))
        lap_packet_times = np.load(os.path.join(self.cache_dir, "lap_packet_times.npy"))
        result_statuses = np.load(os.path.join(self.cache_dir, "result_statuses.npy"))
        with open(os.path.join(self.cache_dir, "metadata.yaml"), "r") as f:
            metadatadict = yaml.load(f, Loader=yaml.SafeLoader)
        player_car_idx = metadatadict["player_car_index"]
        # x == x is elementwise False exactly where x is NaN, so each of these
        # asserts that the cached array contains no NaNs.
        assert(np.all(motion_packet_times==motion_packet_times))
        assert(np.all(all_positions==all_positions))
        assert(np.all(all_velocities==all_velocities))
        assert(np.all(all_quaternions==all_quaternions))
        assert(np.all(lap_packet_times==lap_packet_times))
        assert(np.all(result_statuses==result_statuses))
    else:
        # Slow path: parse every motion packet, sorted by packet time.
        all_motion_packets = sorted(getAllMotionPackets(self.motion_data_dir, self.dataset_config["use_json"]), key=packetKey)
        # Pre-fill with NaN so any car slot a packet fails to populate is
        # caught by the NaN asserts below. 20 = cars per F1 session.
        all_positions = np.nan*np.zeros((len(all_motion_packets), 20, 3), dtype=np.float64)
        all_velocities = np.nan*np.zeros((len(all_motion_packets), 20, 3), dtype=np.float64)
        all_quaternions = np.nan*np.zeros((len(all_motion_packets), 20, 4), dtype=np.float64)
        motion_packet_times = np.empty(len(all_motion_packets), dtype=np.float64)
        for i in tqdm(range(len(all_motion_packets)), desc="Extracting positions, velocities, and quaternions"):
            packet = all_motion_packets[i]
            all_positions[i] = np.stack([extractPosition(packet.udp_packet, car_index=j) for j in range(20)])
            all_velocities[i] = np.stack([extractVelocity(packet.udp_packet, car_index=j) for j in range(20)])
            all_quaternions[i] = np.stack([extractRotation(packet.udp_packet, car_index=j) for j in range(20)])
            motion_packet_times[i] = packetKey(packet)
        # NaN checks (x == x is False only at NaN entries).
        assert(np.all(all_positions==all_positions))
        assert(np.all(all_velocities==all_velocities))
        assert(np.all(all_quaternions==all_quaternions))
        # Lap packets give per-car result status over time.
        all_lap_packets = sorted(getAllLapDataPackets(self.lap_data_dir, self.dataset_config["use_json"]), key=packetKey)
        lap_packet_times = np.asarray([packetKey(all_lap_packets[i]) for i in range(len(all_lap_packets))], dtype=np.float64)
        result_statuses = np.zeros((len(all_lap_packets), 20), dtype=np.int32)
        for i in tqdm(range(len(all_lap_packets)), desc="Extracting lap information"):
            lap_packet = all_lap_packets[i]
            result_statuses[i] = np.asarray([lap_packet.udp_packet.m_lapData[j].m_resultStatus for j in range(20)], dtype=np.int32)
        assert(np.all(lap_packet_times==lap_packet_times))
        assert(np.all(result_statuses==result_statuses))
        # Persist everything so the next construction takes the fast path.
        os.makedirs(self.cache_dir)
        player_car_idx = all_motion_packets[0].udp_packet.m_header.m_playerCarIndex
        with open(os.path.join(self.cache_dir, "metadata.yaml"), "w") as f:
            yaml.dump({"player_car_index" : player_car_idx}, f, Dumper=yaml.SafeDumper)
        np.save(os.path.join(self.cache_dir, "motion_packet_times.npy"), motion_packet_times)
        np.save(os.path.join(self.cache_dir, "positions.npy"), all_positions)
        np.save(os.path.join(self.cache_dir, "velocities.npy"), all_velocities)
        np.save(os.path.join(self.cache_dir, "quaternions.npy"), all_quaternions)
        np.save(os.path.join(self.cache_dir, "lap_packet_times.npy"), lap_packet_times)
        np.save(os.path.join(self.cache_dir, "result_statuses.npy"), result_statuses)
    # One interpolating spline per car (second axis of the arrays), over the
    # full (unclipped) packet-time range so queries near the clip edges are
    # still interpolation rather than extrapolation.
    self.position_splines : List[BSpline] = [make_interp_spline(motion_packet_times, all_positions[:,i]) for i in range(all_positions.shape[1])]
    self.velocity_splines : List[BSpline] = [make_interp_spline(motion_packet_times, all_velocities[:,i]) for i in range(all_velocities.shape[1])]
    self.quaternion_splines : List[RotSpline] = [RotSpline(motion_packet_times, Rot.from_quat(all_quaternions[:,i])) for i in range(all_quaternions.shape[1])]
    # Keep only samples with a full context window behind them and a full
    # prediction window ahead, each padded by a 0.5s safety margin, relative
    # to the span of the lap packets.
    Iclip = (motion_packet_times>(lap_packet_times[0] + context_time + 0.5))*(motion_packet_times<(lap_packet_times[-1] - prediction_time - 0.5))
    self.all_positions=all_positions[Iclip]
    self.all_velocities=all_velocities[Iclip]
    self.all_quaternions=all_quaternions[Iclip]
    self.motion_packet_times=motion_packet_times[Iclip]
    assert(self.motion_packet_times.shape[0] == self.all_positions.shape[0] == self.all_velocities.shape[0] == self.all_quaternions.shape[0])
    # Uniform sampling distribution over the usable time range
    # (presumably used to draw random query times in __getitem__ — TODO confirm).
    self.time_dist = dist.Uniform(self.motion_packet_times[0], self.motion_packet_times[-1])
    self.fake_length = fake_length
    self.lap_packet_times=lap_packet_times
    self.result_statuses=result_statuses
    self.player_car_idx=player_car_idx
    # Index for looking up per-car result status at an arbitrary time.
    self.lap_index : TimeIndex = TimeIndex(self.lap_packet_times, self.result_statuses)
# NOTE(review): this span is the interior of a longer routine — spectating_flags,
# session_packets, image_folder, motion_data_folder, use_json, and the helper
# functions are defined before this point, outside the visible chunk.
# Determine whether this dataset was recorded while spectating; if so, require
# that a single car was spectated for the whole session.
spectating = any(spectating_flags)
car_indices = [int(packet.udp_packet.m_spectatorCarIndex) for packet in session_packets]
car_indices_set = set(car_indices)
print(car_indices_set)
print(car_indices)
if spectating:
    if len(car_indices_set)>1:
        raise ValueError("Spectated datasets are only supported if you only spectate 1 car the entire time.")
    else:
        car_index = car_indices[0]
else:
    # Not spectating: presumably the player car is used downstream — TODO confirm.
    car_index = None
# Load image tags and motion packets, sorted into temporal order.
image_tags = getAllImageFilePackets(image_folder, use_json)
motion_packets = getAllMotionPackets(motion_data_folder, use_json)
motion_packets = sorted(motion_packets, key=deepracing.timestampedUdpPacketKey)
session_times = np.array([packet.udp_packet.m_header.m_sessionTime for packet in motion_packets])
# packet.timestamp appears to be in milliseconds; converted to seconds here.
system_times = np.array([packet.timestamp/1000.0 for packet in motion_packets])
print(system_times)
print(session_times)
# Drop images captured after the last UDP packet — no telemetry exists for them.
maxudptime = system_times[-1]
image_tags = [ tag for tag in image_tags if tag.timestamp/1000.0<(maxudptime) ]
image_tags = sorted(image_tags, key = imageDataKey)
image_timestamps = np.array([data.timestamp/1000.0 for data in image_tags])
first_image_time = image_timestamps[0]
print(first_image_time)
# First motion packet at least 1 second after the first image; np.argmax on the
# boolean mask returns the index of the first True entry.
Imin = system_times>(first_image_time + 1.0)
firstIndex = np.argmax(Imin)
# NOTE(review): this span is the interior of a longer routine — db_path,
# udp_path, proto_utils, motionPacketKey, etc. are defined before this point,
# outside the visible chunk.
# Resolve dataset paths and load the dataset configuration.
results_dir = os.path.join(db_path, "results")
bezier_curve_path = os.path.join(udp_path, "bezier_curves")
motion_packet_path = os.path.join(udp_path, "motion_packets")
dset_config_file = os.path.join(db_path, "f1_dataset_config.yaml")
with open(dset_config_file, "r") as f:
    dset_config = yaml.load(f, Loader=yaml.SafeLoader)
use_json = dset_config["use_json"]
# Start from a clean results directory; the short sleep presumably lets the
# filesystem settle after rmtree before recreating — TODO confirm necessity.
if os.path.isdir(results_dir):
    shutil.rmtree(results_dir)
    time.sleep(0.25)
os.makedirs(results_dir)
motion_packets = sorted([ packet.udp_packet for packet in proto_utils.getAllMotionPackets(motion_packet_path, use_json) ], key=motionPacketKey)
# Trim 5 seconds off each end of the session so only curves with full
# surrounding telemetry are kept.
tmin = motion_packets[0].m_header.m_sessionTime + 5.0
tmax = motion_packets[-1].m_header.m_sessionTime - 5.0
bcurves = [ bc for bc in proto_utils.getAllBezierCurves(bezier_curve_path, use_json) if (bc.m_sessionTime < tmax and bc.m_sessionTime > tmin) ]
poses = [proto_utils.extractPose(packet) for packet in motion_packets]
positions = np.array([pose[0] for pose in poses])
quaternions = np.array([pose[1] for pose in poses])
# Canonicalize quaternion sign (q and -q encode the same rotation): force a
# non-negative scalar (w) component so the sequence is continuous for from_quat.
wnegative = quaternions[:, 3] < 0
quaternions[wnegative] *= -1.0
rotations = Rot.from_quat(quaternions)