def calculate(self, bag_file):
    """Accumulate per-driving-mode mileage and disengagements from a record file.

    Reads chassis and localization messages from `bag_file`. Distances between
    consecutive localization poses are accumulated under the driving mode that
    was active at the time, then added to self.auto_mileage and
    self.manual_mileage. A direct COMPLETE_AUTO_DRIVE -> EMERGENCY_MODE
    transition increments self.disengagements.
    """
    last_pos = None
    last_mode = 'Unknown'  # sentinel that never equals a real driving mode
    mileage = collections.defaultdict(float)
    chassis = chassis_pb2.Chassis()
    localization = localization_pb2.LocalizationEstimate()
    reader = RecordReader(bag_file)
    for msg in reader.read_messages():
        if msg.topic == kChassisTopic:
            chassis.ParseFromString(msg.message)
            # Mode changed
            if last_mode != chassis.driving_mode:
                if (last_mode == Chassis.COMPLETE_AUTO_DRIVE and
                        chassis.driving_mode == Chassis.EMERGENCY_MODE):
                    self.disengagements += 1
                last_mode = chassis.driving_mode
                # Reset start position so distance spanning a mode switch is
                # not attributed to either mode.
                last_pos = None
        elif msg.topic == kLocalizationTopic:
            localization.ParseFromString(msg.message)
            cur_pos = localization.pose.position
            if last_pos is not None:
                # Accumulate mileage, from xyz-distance (meters) to miles.
                mileage[last_mode] += 0.000621371 * math.sqrt(
                    (cur_pos.x - last_pos[0]) ** 2 +
                    (cur_pos.y - last_pos[1]) ** 2 +
                    (cur_pos.z - last_pos[2]) ** 2)
            # Copy the coordinates instead of keeping a reference to the
            # proto submessage: `localization` is reused, and the next
            # ParseFromString would otherwise invalidate/alias the position.
            last_pos = (cur_pos.x, cur_pos.y, cur_pos.z)
    self.auto_mileage += mileage[Chassis.COMPLETE_AUTO_DRIVE]
    self.manual_mileage += (mileage[Chassis.COMPLETE_MANUAL] +
                            mileage[Chassis.EMERGENCY_MODE])
def process_file(self, record_file):
    """Extract vehicle VIN and map name from a record file.

    Returns True as soon as all needed information has been collected,
    False if reading fails or the information is still incomplete.
    """
    try:
        reader = RecordReader(record_file)
        print("Begin to process record file {}".format(record_file))
        for msg in reader.read_messages():
            print(msg.topic)
            if msg.topic == kChassisInfoTopic and self.vehicle_vin is None:
                chassis = chassis_pb2.Chassis()
                chassis.ParseFromString(msg.message)
                if chassis.license.vin:
                    self.vehicle_vin = chassis.license.vin
            elif msg.topic == kHMIInfoTopic and self.vehicle_name is None:
                hmistatus = hmi_status_pb2.HMIStatus()
                hmistatus.ParseFromString(msg.message)
                if hmistatus.current_map:
                    self.vehicle_name = hmistatus.current_map
                    print(self.vehicle_name)
            if self.done():
                return True
    except Exception as e:
        # Narrowed from a bare `except:` that silently swallowed everything
        # (including KeyboardInterrupt/SystemExit); report the failure.
        print('Failed to process record file {}: {}'.format(record_file, e))
        return False
    print("Finished processing record file {}".format(record_file))
    return self.done()
def process_record(cls, input_record):
    """Print per-channel message count and average message size for a record.

    Tracks every channel in ChannelSizeStats.TOPICS, plus a synthetic
    "planning_no_debug" entry: the planning message re-serialized with its
    debug field cleared, showing how much of the size the debug info takes.
    """
    channel_size_stats = {}
    freader = RecordReader(input_record)
    print('----- Begin to process record -----')
    for channelname, msg, datatype, timestamp in freader.read_messages():
        if channelname in ChannelSizeStats.TOPICS:
            # setdefault replaces the duplicated first-time/update branches.
            entry = channel_size_stats.setdefault(
                channelname, {'total': 0, 'num': 0.0})
            entry['total'] += len(msg)
            entry['num'] += 1
        elif channelname == "/apollo/planning":
            adc_trajectory = planning_pb2.ADCTrajectory()
            adc_trajectory.ParseFromString(msg)
            # Measure the planning message without its (large) debug payload.
            adc_trajectory.ClearField("debug")
            planning_str = adc_trajectory.SerializeToString()
            entry = channel_size_stats.setdefault(
                "planning_no_debug", {'total': 0, 'num': 0.0})
            entry['total'] += len(planning_str)
            entry['num'] += 1
    # 'num' is kept as a float so the average below is true division
    # even on Python 2.
    for channelname, entry in channel_size_stats.items():
        print(channelname, " num:", entry['num'],
              " avg size:", entry['total'] / entry['num'])
    print('----- Finish processing record -----')
def missing_message_data(path, channels=CHANNELS):
    """Return True if any channel has zero messages in any record under `path`."""
    for record in list_records(path):
        glog.info("reading records %s" % record)
        reader = RecordReader(record)
        for channel in channels:
            # Query the count once instead of twice per channel.
            message_count = reader.get_messagenumber(channel)
            glog.info("has %d messages" % message_count)
            if message_count == 0:
                return True
    return False
def dump_bag(in_bag, out_dir):
    """Dump tracked channels of a record to per-sequence pb.txt files.

    Keeps the latest message of each tracked channel; every time a planning
    message arrives, writes the latest message of every tracked channel to
    out_dir as <seq>_<name>.pb.txt, restricted to the time window given by
    g_args.start_time / g_args.time_duration.
    """
    reader = RecordReader(in_bag)
    seq = 0
    global g_args
    # channel -> [output file stem, latest message seen on that channel]
    topic_name_map = {
        "/apollo/localization/pose": ["localization", None],
        "/apollo/canbus/chassis": ["chassis", None],
        "/apollo/routing_response": ["routing", None],
        # NOTE(review): "resquest" looks like a typo, but it must match the
        # channel name actually present in the record -- confirm before fixing.
        "/apollo/routing_resquest": ["routing_request", None],
        "/apollo/perception/obstacles": ["perception", None],
        "/apollo/prediction": ["prediction", None],
        "/apollo/planning": ["planning", None],
        "/apollo/control": ["control", None],
    }
    first_time = None
    record_num = 0
    for channel, message, _type, _timestamp in reader.read_messages():
        t = _timestamp
        msg = message
        record_num += 1
        if record_num % 1000 == 0:
            print('Processing record_num: %d' % record_num)
        if first_time is None:
            first_time = t
        if channel not in topic_name_map:
            continue
        # Timestamps are nanoseconds; relative time is whole seconds from
        # the first message, offset by the requested start time.
        dt1 = datetime.utcfromtimestamp(t / 1000000000)
        dt2 = datetime.utcfromtimestamp(first_time / 1000000000)
        relative_time = (dt1 - dt2).seconds - g_args.start_time
        # Fixed: this was a Python 2 print statement (SyntaxError on
        # Python 3, which the print() calls above already target).
        print("relative_time", relative_time)
        if ((g_args.time_duration > 0) and
                (relative_time < 0 or relative_time > g_args.time_duration)):
            continue
        if channel == '/apollo/planning':
            seq += 1
            topic_name_map[channel][1] = msg
            print('Generating seq: %d' % seq)
            # Loop variable renamed (was `t`, shadowing the timestamp above).
            for _topic, name_pb in topic_name_map.items():
                if name_pb[1] is None:
                    continue
                file_path = os.path.join(
                    out_dir, str(seq) + "_" + name_pb[0] + ".pb.txt")
                write_to_file(file_path, name_pb[1])
        topic_name_map[channel][1] = msg
def dump_bag(in_dir, out_file):
    """Concatenate the text form of all GNSS raw-data messages into out_file.

    Scans every *.record.* file under in_dir in sorted order.
    """
    print('Begin')
    gnss = gnss_pb2.RawData()
    global g_args
    bag_files = glob.glob(in_dir + "/*.record.*")
    with open(out_file, 'w') as fp:
        for bag_file in sorted(bag_files):
            print('Processing bag_file: %s' % bag_file)
            reader = RecordReader(bag_file)
            for msg in reader.read_messages():
                if msg.topic == kRawDataTopic:
                    gnss.ParseFromString(msg.message)
                    # Fixed NameError: the open file handle is `fp`, the
                    # original wrote to an undefined name `f`.
                    fp.write(str(gnss))
def process_record(cls, input_record, output_record):
    """Copy messages on SamplePNC.TOPICS from input_record to output_record."""
    print("filtering: {} -> {}".format(input_record, output_record))
    output_dir = os.path.dirname(output_record)
    if output_dir != "" and not os.path.exists(output_dir):
        os.makedirs(output_dir)
    freader = RecordReader(input_record)
    fwriter = RecordWriter()
    if not fwriter.open(output_record):
        print('writer open failed!')
        return
    print('----- Begin to process record -----')
    registered_channels = set()
    for channelname, msg, datatype, timestamp in freader.read_messages():
        if channelname in SamplePNC.TOPICS:
            # Register each channel's descriptor once, instead of
            # re-writing it for every single message on that channel.
            if channelname not in registered_channels:
                desc = freader.get_protodesc(channelname)
                fwriter.write_channel(channelname, datatype, desc)
                registered_channels.add(channelname)
            fwriter.write_message(channelname, msg, timestamp)
    # NOTE(review): fwriter is never closed here -- confirm whether
    # RecordWriter flushes on destruction or needs an explicit close().
    print('----- Finish processing record -----')
def tf_stats(in_bag):
    """Count transform pairs (frame_id => child_frame_id) on the /tf channel."""
    reader = RecordReader(in_bag)
    global g_args
    stats = {}
    for channel, message, _type, _timestamp in reader.read_messages():
        if channel != '/tf':
            continue
        tf_pb = transform_pb2.TransformStampeds()
        tf_pb.ParseFromString(message)
        for transform in tf_pb.transforms:
            key = transform.header.frame_id + "=>" + transform.child_frame_id
            # dict.get replaces the `key in stats.keys()` membership test
            # plus two-branch increment.
            stats[key] = stats.get(key, 0) + 1
    print('tf stats: {}'.format(stats))
def calculate(self, bag_file):
    """Append time-stamped drive events from bag_file to /apollo/test.txt."""
    try:
        drive_event = drive_event_pb2.DriveEvent()
        reader = RecordReader(bag_file)
    except Exception:
        print('Cannot open bag file %s' % bag_file)
    else:
        with open('/apollo/test.txt', 'a') as fp:
            for msg in reader.read_messages():
                if msg.topic == kEventTopic:
                    drive_event.ParseFromString(msg.message)
                    msg_time = time.localtime(
                        drive_event.header.timestamp_sec)
                    fp.write(time.strftime("%Y-%m-%d %H:%M:%S", msg_time))
                    fp.write(str(drive_event.type) + ':')
                    # Fixed: `event.encode('utf-8') + '\n'` concatenated
                    # bytes with str -- a TypeError on Python 3. The file is
                    # opened in text mode, so write the string directly.
                    fp.write(drive_event.event + '\n')
def read(self, topics):
    """Yield parsed messages from the record file, one dict per message.

    Only messages whose topic is in `topics` are considered; each yielded
    dict has a single key ("chassis", "pose" or "planning") mapping to the
    parsed protobuf message.
    """
    handlers = {
        "/apollo/canbus/chassis": ("chassis", chassis_pb2.Chassis),
        "/apollo/localization/pose":
            ("pose", localization_pb2.LocalizationEstimate),
        "/apollo/planning": ("planning", planning_pb2.ADCTrajectory),
    }
    for msg in RecordReader(self.record_file).read_messages():
        if msg.topic not in topics or msg.topic not in handlers:
            continue
        key, proto_cls = handlers[msg.topic]
        proto = proto_cls()
        proto.ParseFromString(msg.message)
        yield {key: proto}
def validate_record(record_file):
    """Validate the record file.

    Checks the cyber record header (completeness, size, version, time span,
    message/channel counts) and requires at least one sensor channel.
    Returns True if the record passes all checks.
    """
    record_reader = RecordReader(record_file)
    header_msg = record_reader.get_headerstring()
    header = record_pb2.Header()
    header.ParseFromString(header_msg)
    print("header is {}".format(header))
    if not header.is_complete:
        print('Record file: %s is not completed.' % record_file)
        return False
    if header.size == 0:
        print('Record file: %s. size is 0.' % record_file)
        return False
    # Fixed: the original used `and`, so any header where only one of the two
    # numbers was off (e.g. 2.0 or 1.5) slipped through. The supported
    # version is exactly 1.0, so reject when either component differs.
    if header.major_version != 1 or header.minor_version != 0:
        print('Record file: %s. version [%d:%d] is wrong.' %
              (record_file, header.major_version, header.minor_version))
        return False
    if header.begin_time >= header.end_time:
        print('Record file: %s. begin time [%s] is equal or larger than '
              'end time [%s].' %
              (record_file, header.begin_time, header.end_time))
        return False
    if header.message_number < 1 or header.channel_number < 1:
        print('Record file: %s. [message:channel] number [%d:%d] is invalid.' %
              (record_file, header.message_number, header.channel_number))
        return False
    # There should be at least one sensor channel
    sensor_channels = get_sensor_channel_list(record_file)
    if len(sensor_channels) < 1:
        print('Record file: %s. cannot find sensor channels.' % record_file)
        return False
    return True
def restore_record(input_record, output_record):
    """Restore a record whose camera channels were compressed into video.

    Pass 1 decodes the video channels of input_record into per-frame image
    files under a timestamped working dir. Pass 2 rewrites every message into
    output_record, substituting each video message with its retrieved image
    frame on the mapped image channel.
    """
    # Working dir that stores intermediate results (decoded frames).
    work_dir = 'restore_video_work_dir_{}'.format(
        datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d-%H-%M-%S'))

    # Pass 1: decode videos into images.
    converters = {}
    for topic in VIDEO_CHANNELS:
        converters[topic] = VideoConverter(work_dir, topic)
    reader = RecordReader(input_record)
    for message in reader.read_messages():
        if message.topic in VIDEO_CHANNELS:
            converters[message.topic].write_frame(message)
    image_dir = os.path.join(work_dir, 'images')
    makedirs(image_dir)
    for topic in VIDEO_CHANNELS:
        converters[topic].close_writer()
        converters[topic].decode()
        converters[topic].move_images(image_dir)

    # Pass 2: rewrite the target record file.
    writer = RecordWriter(0, 0)
    writer.open(output_record)
    topic_descs = {}
    counter = 0
    reader = RecordReader(input_record)
    for message in reader.read_messages():
        message_content = message.message
        message_topic = message.topic
        if message.topic in VIDEO_CHANNELS:
            message_content = retrieve_image(image_dir, message)
            message_topic = VIDEO_IMAGE_MAP[message.topic]
        if not message_content:
            continue
        counter += 1
        if counter % 1000 == 0:
            logging.info('rewriting {} th message to record {}'.format(
                counter, output_record))
        # Fixed ordering: the channel used to be registered only AFTER its
        # first message had already been written; register it before any
        # message lands on it so the writer knows the channel up front.
        if message_topic not in topic_descs:
            topic_descs[message_topic] = reader.get_protodesc(message_topic)
            writer.write_channel(message_topic, message.data_type,
                                 topic_descs[message_topic])
        writer.write_message(message_topic, message_content, message.timestamp)
    writer.close()
    logging.info('All Done, converted record: {}'.format(output_record))
# NOTE(review): `localization` and `chassis` proto messages are presumably
# instantiated earlier in this script -- not visible in this chunk; confirm.
args = parse_args()
# One output text file per signal family, each named after the first input bag.
loc_file = open("{}_loc_file.txt".format(args.in_bag[0].split('.')[0]), 'w')
gnss_file = open("{}_gnss_file.txt".format(args.in_bag[0].split('.')[0]), 'w')
chassis_file = open("{}_chassis_file.txt".format(args.in_bag[0].split('.')[0]), 'w')
imu_file = open("{}_imu_file.txt".format(args.in_bag[0].split('.')[0]), 'w')
# Column headers for each output file.
loc_file.write("timestamp x y z\n")
chassis_file.write("timestamp speed_mps steering_percentage \n")
gnss_file.write("timestamp latitude longitude lat_std_dev lon_std_dev\n")
imu_file.write("timestamp a_x a_y a_z w_x w_y w_z \n")
for bag_names in args.in_bag:
    print("Start reading {}...".format(bag_names))
    reader = RecordReader(bag_names)
    for msg in reader.read_messages():
        if msg.topic == '/apollo/localization/pose':
            # Dump pose timestamp and xyz position as one text row.
            localization.ParseFromString(msg.message)
            loc_string = ""
            loc_string += "{} {} {} {}\n".format(
                str(localization.header.timestamp_sec),
                str(localization.pose.position.x),
                str(localization.pose.position.y),
                str(localization.pose.position.z))
            loc_file.write(loc_string)
        if msg.topic == '/apollo/canbus/chassis':
            chassis.ParseFromString(msg.message)
            # (chassis row is assembled in code beyond this chunk)
            chassis_string = ""
        # First sample: initialize the "last" trackers, nothing to difference yet.
        if last_timestamp_sec is None:
            last_steering_percentage = steering_percentage
            last_speed_mps = speed_mps
            last_timestamp_sec = timestamp_sec
            continue
        # Only difference across gaps larger than 20 ms; presumably to avoid
        # noisy derivatives from near-duplicate timestamps -- confirm.
        if (timestamp_sec - last_timestamp_sec) > 0.02:
            # Steering rate: percentage change per second.
            d_steering = (steering_percentage - last_steering_percentage) \
                / (timestamp_sec - last_timestamp_sec)
            speed_data.append(speed_mps)
            d_steering_data.append(d_steering)
            last_steering_percentage = steering_percentage
            last_speed_mps = speed_mps
            last_timestamp_sec = timestamp_sec
    return speed_data, d_steering_data


if __name__ == "__main__":
    fns = sys.argv[1:]
    fig, ax = plt.subplots()
    for fn in fns:
        reader = RecordReader(fn)
        speed_data, d_steering_data = process(reader)
        # One scatter of speed vs steering-rate per input record.
        ax.scatter(speed_data, d_steering_data)
    ax.set_xlim(-5, 40)
    ax.set_ylim(-300, 300)
    plt.show()
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import sys
from datetime import datetime

from cyber_py3.record import RecordReader
from modules.localization.proto import localization_pb2

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print(
            "usage: python record_extractor.py record_file1 record_file2 ...")
        # Fixed: previously the script printed usage and then fell through
        # with an empty file list; exit with an error status instead.
        sys.exit(1)
    frecords = sys.argv[1:]
    # Write the (x, y) pose positions of every record into one text file
    # named after the first record. (Removed the unused `now` timestamp.)
    with open("path_" + frecords[0].split('/')[-1] + ".txt", 'w') as f:
        for frecord in frecords:
            print("processing " + frecord)
            reader = RecordReader(frecord)
            for msg in reader.read_messages():
                if msg.topic == "/apollo/localization/pose":
                    localization = localization_pb2.LocalizationEstimate()
                    localization.ParseFromString(msg.message)
                    x = localization.pose.position.x
                    y = localization.pose.position.y
                    f.write(str(x) + "," + str(y) + "\n")
def get_sensor_channel_list(record_file):
    """Return the set of channel names relevant to sensor calibration.

    A channel qualifies if its name contains 'sensor' or the localization
    pose path.
    """
    all_channels = RecordReader(record_file).get_channellist()
    return {name for name in all_channels
            if 'sensor' in name or '/localization/pose' in name}
parser = argparse.ArgumentParser( description='Process and analyze control and planning data') parser.add_argument('--bag', type=str, help='use Rosbag') args = parser.parse_args() fig, axarr = plt.subplots(2, 2) plt.tight_layout() axarr[0, 0].get_shared_x_axes().join(axarr[0, 0], axarr[1, 0]) axarr[1, 1].get_shared_x_axes().join(axarr[0, 0], axarr[1, 1]) controlinfo = ControlInfo(axarr) if args.bag: file_path = args.bag # bag = rosbag.Bag(file_path) reader = RecordReader(file_path) for msg in reader.read_messages(): print(msg.timestamp, msg.topic) if msg.topic == "/apollo/localization/pose": localization = localization_pb2.LocalizationEstimate() localization.ParseFromString(msg.message) controlinfo.callback_localization(localization) elif msg.topic == "/apollo/planning": adc_trajectory = planning_pb2.ADCTrajectory() adc_trajectory.ParseFromString(msg.message) controlinfo.callback_planning(adc_trajectory) elif msg.topic == "/apollo/control": control_cmd = control_cmd_pb2.ControlCommand() control_cmd.ParseFromString(msg.message) controlinfo.callback_control(control_cmd) elif msg.topic == "/apollo/canbus/chassis":
def extract_data(record_files, output_path, channels, start_timestamp,
                 end_timestamp, extraction_rates):
    """
    Extract the desired channel messages if channel_list is specified.
    Otherwise extract all sensor calibration messages according to
    extraction rate, 10% by default.

    Args:
        record_files: record file paths; assumed to share an identical set of
            sensor channels (only the first file is inspected for the list).
        output_path: root directory; one subdirectory is created per channel.
        channels: channel names to extract; empty means "all sensor channels".
        start_timestamp/end_timestamp: inclusive time window (seconds).
        extraction_rates: per-channel subsampling rate (keep every N-th msg).

    Returns:
        False if the requested channel list is invalid, True otherwise
        (even when some individual channels failed -- failures are printed).
    """
    # all records have identical sensor channels.
    sensor_channels = get_sensor_channel_list(record_files[0])
    if (len(channels) > 0
            and not validate_channel_list(channels, sensor_channels)):
        print('The input channel list is invalid.')
        return False
    # Extract all the sensor channels if channel_list is empty(no input arguments).
    print(sensor_channels)
    if len(channels) == 0:
        channels = sensor_channels
    # Declare logging variables
    process_channel_success_num = len(channels)
    process_channel_failure_num = 0
    process_msg_failure_num = 0
    channel_success = {}
    channel_occur_time = {}
    channel_output_path = {}
    #channel_messages = {}
    channel_parsers = {}
    # Per-channel setup: output dir and a dedicated parser.
    for channel in channels:
        channel_success[channel] = True
        channel_occur_time[channel] = -1
        topic_name = channel.replace('/', '_')
        channel_output_path[channel] = os.path.join(output_path, topic_name)
        process_dir(channel_output_path[channel], operation='create')
        channel_parsers[channel] =\
            build_parser(channel, channel_output_path[channel])
        # if channel in SMALL_TOPICS:
        #     channel_messages[channel] = list()
    for record_file in record_files:
        record_reader = RecordReader(record_file)
        for msg in record_reader.read_messages():
            if msg.topic in channels:
                # Only care about messages in certain time intervals
                msg_timestamp_sec = msg.timestamp / 1e9
                if not in_range(msg_timestamp_sec, start_timestamp,
                                end_timestamp):
                    continue
                channel_occur_time[msg.topic] += 1
                # Extract the topic according to extraction_rate
                if channel_occur_time[msg.topic] % extraction_rates[
                        msg.topic] != 0:
                    continue
                ret = channel_parsers[msg.topic].parse_sensor_message(msg)
                # Calculate parsing statistics; a channel is demoted to
                # "failed" the first time one of its messages fails to parse.
                if not ret:
                    process_msg_failure_num += 1
                    if channel_success[msg.topic]:
                        channel_success[msg.topic] = False
                        process_channel_failure_num += 1
                        process_channel_success_num -= 1
                        print(
                            'Failed to extract data from channel: %s in record %s'
                            % (msg.topic, record_file))
    # traverse the dict, if any channel topic stored as a list
    # then save the list as a summary file, mostly binary file
    for channel, parser in channel_parsers.items():
        save_combined_messages_info(parser, channel)
    # Logging statics about channel extraction
    print('Extracted sensor channel number [%d] from record files: %s' %
          (len(channels), ' '.join(record_files)))
    print('Successfully processed [%d] channels, and [%d] was failed.' %
          (process_channel_success_num, process_channel_failure_num))
    if process_msg_failure_num > 0:
        print('Channel extraction failure number is [%d].' %
              process_msg_failure_num)
    return True
extract localization message from bag files Usage: python path_extract.py file1 file2 ... """ import sys import datetime from cyber_py3.record import RecordReader from modules.localization.proto import localization_pb2 kLocalizationTopic = '/apollo/localization/pose' if __name__ == '__main__': bag_files = sys.argv[1:] bag_file = bag_files[0] now = datetime.datetime.now().strftime("%Y-%m-%d_%H.%M.%S") f = open("path_" + bag_file.split('/')[-1] + ".txt", 'w') for bag_file in bag_files: print("begin to extract path from file :", bag_file) reader = RecordReader(bag_file) localization = localization_pb2.LocalizationEstimate() for msg in reader.read_messages(): if msg.topic == kLocalizationTopic: localization.ParseFromString(msg.message) x = localization.pose.position.x y = localization.pose.position.y f.write(str(x) + "," + str(y) + "\n") print("Finished extracting path from file :", bag_file) f.close()
parser.add_argument( "-refpath", "--planningrefpath", action="store_const", const=True, help="plot planing reference paths in cartesian coordinate.") parser.add_argument( "-a", "--alldata", action="store_const", const=True, help="Analyze all data (both auto and manual), otherwise auto data only without this option.") parser.add_argument( "-acc", "--showacc", action="store_const", const=True, help="Analyze all data (both auto and manual), otherwise auto data only without this option.") args = parser.parse_args() record_file = args.file reader = RecordReader(record_file) control_analyzer = ControlAnalyzer() planning_analyzer = PlannigAnalyzer(args) lidar_endtoend_analyzer = LidarEndToEndAnalyzer() process(control_analyzer, planning_analyzer, lidar_endtoend_analyzer, args.simulation, args.planningpath, args.planningrefpath, args.alldata) if args.simulation: planning_analyzer.print_sim_results() elif args.planningpath or args.planningrefpath: plt.axis('equal') plt.show() else: