def calculate(self, bag_file):
    """Accumulate mileage and disengagement counts from one record file.

    Chassis messages track the current driving mode; localization messages
    are integrated into distance, bucketed per driving mode. Updates
    self.auto_mileage, self.manual_mileage and self.disengagements.
    """
    last_pos = None
    last_mode = 'Unknown'
    # Miles driven per driving mode; float is the idiomatic zero factory
    # (replaces `lambda: 0.0`).
    mileage = collections.defaultdict(float)
    chassis = chassis_pb2.Chassis()
    localization = localization_pb2.LocalizationEstimate()
    reader = RecordReader(bag_file)
    for msg in reader.read_messages():
        if msg.topic == kChassisTopic:
            chassis.ParseFromString(msg.message)
            # Mode changed
            if last_mode != chassis.driving_mode:
                # AUTO -> EMERGENCY transition counts as a disengagement.
                if (last_mode == Chassis.COMPLETE_AUTO_DRIVE and
                        chassis.driving_mode == Chassis.EMERGENCY_MODE):
                    self.disengagements += 1
                last_mode = chassis.driving_mode
                # Reset start position so the segment spanning the mode
                # switch is not attributed to either mode.
                last_pos = None
        elif msg.topic == kLocalizationTopic:
            localization.ParseFromString(msg.message)
            cur_pos = localization.pose.position
            if last_pos:
                # Accumulate mileage, from xyz-distance (meters) to miles.
                mileage[last_mode] += 0.000621371 * math.sqrt(
                    (cur_pos.x - last_pos.x) ** 2 +
                    (cur_pos.y - last_pos.y) ** 2 +
                    (cur_pos.z - last_pos.z) ** 2)
            last_pos = cur_pos
    self.auto_mileage += mileage[Chassis.COMPLETE_AUTO_DRIVE]
    self.manual_mileage += (mileage[Chassis.COMPLETE_MANUAL] +
                            mileage[Chassis.EMERGENCY_MODE])
def get_sensor_channel_list(record_file):
    """Return the set of channel names that look sensor-related.

    A channel qualifies when the substring 'sensor' appears anywhere
    in its name.
    """
    all_channels = RecordReader(record_file).get_channellist()
    return {name for name in all_channels if 'sensor' in name}
def calculate(self, bag_file):
    """Calculate mileage."""
    METERS_TO_MILES = 0.000621371
    prev_pos = None
    prev_mode = 'Unknown'
    miles_by_mode = collections.defaultdict(lambda: 0.0)
    chassis = chassis_pb2.Chassis()
    localization = localization_pb2.LocalizationEstimate()
    for msg in RecordReader(bag_file).read_messages():
        if msg.topic == kChassisTopic:
            chassis.ParseFromString(msg.message)
            if prev_mode != chassis.driving_mode:
                # A switch from full autonomy straight into emergency mode
                # counts as one disengagement.
                went_to_emergency = (
                    prev_mode == Chassis.COMPLETE_AUTO_DRIVE
                    and chassis.driving_mode == Chassis.EMERGENCY_MODE)
                if went_to_emergency:
                    self.disengagements += 1
                prev_mode = chassis.driving_mode
                # Drop the reference point so the next distance segment
                # starts fresh under the new mode.
                prev_pos = None
        elif msg.topic == kLocalizationTopic:
            localization.ParseFromString(msg.message)
            pos = localization.pose.position
            if prev_pos:
                squared_dist = ((pos.x - prev_pos.x) ** 2 +
                                (pos.y - prev_pos.y) ** 2 +
                                (pos.z - prev_pos.z) ** 2)
                miles_by_mode[prev_mode] += (
                    METERS_TO_MILES * math.sqrt(squared_dist))
            prev_pos = pos
    self.auto_mileage += miles_by_mode[Chassis.COMPLETE_AUTO_DRIVE]
    self.manual_mileage += (miles_by_mode[Chassis.COMPLETE_MANUAL] +
                            miles_by_mode[Chassis.EMERGENCY_MODE])
def process_file(self, record_file):
    """Extract vehicle VIN and vehicle name from a record file.

    Returns True as soon as all information has been collected, False on
    any read/parse failure, otherwise self.done() after a full pass.
    """
    try:
        reader = RecordReader(record_file)
        print("Begin to process record file {}".format(record_file))
        for msg in reader.read_messages():
            # BUG FIX: the Python-2 `print msg.topic` statements were a
            # syntax error next to the py3-style print() calls above.
            print(msg.topic)
            if msg.topic == kChassisInfoTopic and self.vehicle_vin is None:
                chassis = chassis_pb2.Chassis()
                chassis.ParseFromString(msg.message)
                if chassis.license.vin:
                    self.vehicle_vin = chassis.license.vin
            elif msg.topic == kHMIInfoTopic and self.vehicle_name is None:
                hmistatus = hmi_status_pb2.HMIStatus()
                hmistatus.ParseFromString(msg.message)
                if hmistatus.current_map:
                    self.vehicle_name = hmistatus.current_map
                    print(self.vehicle_name)
            if self.done():
                return True
    except Exception:  # Narrowed from bare except; still best-effort.
        return False
    print("Finished processing record file {}".format(record_file))
    return self.done()
def extract_data(record_file, output_path, channel_name, extraction_ratio):
    """
    Extract the desired channel messages if channel_name is specified.
    Otherwise extract all sensor calibration messages according to
    extraction ratio, 10% by default.
    """
    # NOTE(review): a single space appears to be the "no channel" sentinel
    # supplied by the caller — confirm before changing to an empty-string
    # or falsy check.
    if channel_name != ' ':
        ret = extract_channel_data(record_file, output_path, channel_name,
                                   extraction_ratio)
        return ret
    sensor_channels = get_sensor_channel_list(record_file)
    channel_num = len(sensor_channels)
    print('Sensor channel number: [%d] in record file: %s' %
          (channel_num, record_file))
    process_channel_success_num = 0
    process_channel_failure_num = 0
    record_reader = RecordReader(record_file)
    for msg in record_reader.read_messages():
        if msg.topic in sensor_channels:
            # BUG FIX: was passing the reader object; the sibling call above
            # passes the record file path, so match that signature.
            ret = extract_channel_data(record_file, output_path, msg.topic,
                                       extraction_ratio)
            if ret is False:
                print('Failed to extract data from channel: %s' % msg.topic)
                process_channel_failure_num += 1
                continue
            process_channel_success_num += 1
    # BUG FIX: '%' previously bound only to the first counter, making this
    # print raise TypeError; format both counters as a tuple.
    print('Processed [%d] channels, and [%d] was failed.' %
          (process_channel_success_num, process_channel_failure_num))
    return True
def process_file(self, bag_file):
    """Extract vehicle VIN and vehicle name from a bag file.

    Returns True as soon as all information has been collected, False on
    any read/parse failure, otherwise self.done() after a full pass.
    """
    try:
        reader = RecordReader(bag_file)
        # Python-2 print statements converted to print() calls.
        print("begin")
        for msg in reader.read_messages():
            # `is None` replaces the non-idiomatic `== None` comparisons.
            if msg.topic == kChassisInfoTopic and self.vehicle_vin is None:
                chassis = chassis_pb2.Chassis()
                chassis.ParseFromString(msg.message)
                if chassis.license.vin:
                    self.vehicle_vin = chassis.license.vin
            elif msg.topic == kHMIInfoTopic and self.vehicle_name is None:
                hmistatus = hmi_status_pb2.HMIStatus()
                hmistatus.ParseFromString(msg.message)
                print(hmistatus.MessageType)
                if hmistatus.current_vehicle:
                    self.vehicle_name = hmistatus.current_vehicle
                    print(self.vehicle_name)
            if self.done():
                return True
    except Exception:  # Narrowed from bare except; still best-effort.
        return False
    return self.done()
def process_record(cls, input_record):
    """Report average message sizes for selected channels of a record.

    Planning messages are additionally re-serialized with their `debug`
    field cleared, to show how much of the payload is debug data.
    """
    channel_size_stats = {}
    freader = RecordReader(input_record)
    print('----- Begin to process record -----')
    for channelname, msg, datatype, timestamp in freader.read_messages():
        if channelname in ChannelSizeStats.TOPICS:
            # BUG FIX: the first message of each channel used to be dropped
            # (the else-branch initialized totals to zero without adding the
            # message), which also caused ZeroDivisionError for channels
            # with exactly one message. Initialize-then-accumulate instead.
            stats = channel_size_stats.setdefault(
                channelname, {'total': 0.0, 'num': 0.0})
            stats['total'] += len(msg)
            stats['num'] += 1
            if channelname == "/apollo/planning":
                adc_trajectory = planning_pb2.ADCTrajectory()
                adc_trajectory.ParseFromString(msg)
                adc_trajectory.ClearField("debug")
                planning_str = adc_trajectory.SerializeToString()
                stripped = channel_size_stats.setdefault(
                    "planning_no_debug", {'total': 0.0, 'num': 0.0})
                stripped['total'] += len(planning_str)
                stripped['num'] += 1
    for channelname, stats in channel_size_stats.items():
        print(channelname, " num:", stats['num'],
              " avg size:", stats['total'] / stats['num'])
    print('----- Finish processing record -----')
def process_file(self, bag_file):
    """Extract vehicle VIN and vehicle name from a bag file.

    Returns True as soon as all information has been collected, False on
    any read/parse failure, otherwise self.done() after a full pass.
    """
    try:
        reader = RecordReader(bag_file)
        # Python-2 print statements converted to print() calls.
        print("begin to process bag file {}".format(bag_file))
        for msg in reader.read_messages():
            print(msg.topic)
            # `is None` replaces the non-idiomatic `== None` comparisons.
            if msg.topic == kChassisInfoTopic and self.vehicle_vin is None:
                chassis = chassis_pb2.Chassis()
                chassis.ParseFromString(msg.message)
                if chassis.license.vin:
                    self.vehicle_vin = chassis.license.vin
            elif msg.topic == kHMIInfoTopic and self.vehicle_name is None:
                hmistatus = hmi_status_pb2.HMIStatus()
                hmistatus.ParseFromString(msg.message)
                if hmistatus.current_map:
                    self.vehicle_name = hmistatus.current_map
                    print(self.vehicle_name)
            if self.done():
                return True
    except Exception:  # Narrowed from bare except; still best-effort.
        return False
    print("finished processing bag file {}".format(bag_file))
    return self.done()
def missing_message_data(path, channels=CHANNELS):
    """Return True if any channel of any record under `path` is empty.

    Scans every record produced by list_records(path) and checks the
    message count of each requested channel.
    """
    for record in list_records(path):
        glog.info("reading records %s" % record)
        reader = RecordReader(record)
        for channel in channels:
            # Query the reader once per channel instead of twice.
            message_number = reader.get_messagenumber(channel)
            glog.info("has %d messages" % message_number)
            if message_number == 0:
                return True
    return False
def __init__(self, record_file):
    """Init input reader and output record."""
    # Normalize to an absolute path so Record metadata and dirname agree.
    record_file = os.path.abspath(record_file)
    self.record = Record(path=record_file, dir=os.path.dirname(record_file))
    self._reader = RecordReader(record_file)
    # State during processing messages.
    self._current_driving_mode = None
    self._last_position = None
    # To sample driving path.
    self._last_position_sampled = None
    self._last_position_sampled_time = None
def dump_bag(in_dir, out_file):
    """Dump raw GNSS data from all records under in_dir into out_file.

    Scans *.record.* files in sorted order and appends the text form of
    every kRawDataTopic message to the output file.
    """
    print("begin")
    gnss = gnss_pb2.RawData()
    global g_args
    bag_files = glob.glob(in_dir + "/*.record.*")
    # Context manager replaces the removed Python-2 builtin file() and
    # guarantees the handle is closed even if parsing raises.
    with open(out_file, 'w') as fout:
        for bag_file in sorted(bag_files):
            print("Processing ", bag_file, " ...")
            reader = RecordReader(bag_file)
            for msg in reader.read_messages():
                if msg.topic == kRawDataTopic:
                    gnss.ParseFromString(msg.message)
                    fout.write(str(gnss))
def dump_bag(in_bag, out_dir):
    """
    out_bag = in_bag + routing_bag

    For every planning message (within the configured time window), dump
    the latest message of each tracked topic to <seq>_<name>.pb.txt.
    """
    reader = RecordReader(in_bag)
    seq = 0
    global g_args
    # topic -> [short file name, latest message seen on that topic]
    topic_name_map = {
        "/apollo/localization/pose": ["localization", None],
        "/apollo/canbus/chassis": ["chassis", None],
        "/apollo/routing_response": ["routing", None],
        "/apollo/routing_resquest": ["routing_request", None],
        "/apollo/perception/obstacles": ["perception", None],
        "/apollo/prediction": ["prediction", None],
        "/apollo/planning": ["planning", None],
        "/apollo/control": ["control", None]
    }
    first_time = None
    record_num = 0
    for channel, message, _type, _timestamp in reader.read_messages():
        t = _timestamp
        msg = message
        record_num += 1
        if record_num % 1000 == 0:
            print('Processing record_num: %d' % record_num)
        if first_time is None:
            first_time = t
        if channel not in topic_name_map:
            continue
        # Timestamps are nanoseconds since epoch.
        dt1 = datetime.utcfromtimestamp(t / 1000000000)
        dt2 = datetime.utcfromtimestamp(first_time / 1000000000)
        relative_time = (dt1 - dt2).seconds - g_args.start_time
        # BUG FIX: was a Python-2 print statement, a syntax error next to
        # the py3-style print() calls in this function.
        print("relative_time", relative_time)
        if ((g_args.time_duration > 0) and
                (relative_time < 0 or relative_time > g_args.time_duration)):
            continue
        if channel == '/apollo/planning':
            seq += 1
            topic_name_map[channel][1] = msg
            print('Generating seq: %d' % seq)
            # BUG FIX: the loop variable was named `t`, clobbering the
            # message timestamp bound above.
            for _topic, name_pb in topic_name_map.items():
                if name_pb[1] is None:
                    continue
                file_path = os.path.join(
                    out_dir, str(seq) + "_" + name_pb[0] + ".pb.txt")
                write_to_file(file_path, name_pb[1])
        topic_name_map[channel][1] = msg
def dump_bag(in_dir, out_file):
    """
    out_bag = in_bag

    Dump raw GNSS data from all records under in_dir into out_file.
    """
    print('Begin')
    gnss = gnss_pb2.RawData()
    global g_args
    bag_files = glob.glob(in_dir + "/*.record.*")
    with open(out_file, 'w') as fp:
        for bag_file in sorted(bag_files):
            print('Processing bag_file: %s' % bag_file)
            reader = RecordReader(bag_file)
            for msg in reader.read_messages():
                if msg.topic == kRawDataTopic:
                    gnss.ParseFromString(msg.message)
                    # BUG FIX: was f.write(...) — NameError; the handle
                    # opened by the `with` statement is named fp.
                    fp.write(str(gnss))
def read(self, topics):
    """Yield parsed messages from the record, restricted to `topics`.

    Chassis messages are yielded as {"chassis": Chassis}; localization
    messages as {"pose": LocalizationEstimate}.
    """
    reader = RecordReader(self.record_file)
    for msg in reader.read_messages():
        if msg.topic not in topics:
            continue
        if msg.topic == "/apollo/canbus/chassis":
            chassis = chassis_pb2.Chassis()
            chassis.ParseFromString(msg.message)
            yield {"chassis": chassis}
        if msg.topic == "/apollo/localization/pose":
            pose = localization_pb2.LocalizationEstimate()
            pose.ParseFromString(msg.message)
            yield {"pose": pose}
def calculate(self, bag_file):
    """Append timestamped drive events from bag_file to /apollo/test.txt."""
    try:
        drive_event = drive_event_pb2.DriveEvent()
        reader = RecordReader(bag_file)
    except Exception:  # Narrowed from bare except.
        print("can't open bag")
    else:
        # Context manager replaces the removed Python-2 builtin file() and
        # guarantees the handle is closed even if parsing raises.
        with open('/apollo/test.txt', 'a') as fout:
            for msg in reader.read_messages():
                if msg.topic == kEventTopic:
                    drive_event.ParseFromString(msg.message)
                    # msg.timestamp is nanoseconds; // keeps the original
                    # integer-division semantics (long() is Python-2 only).
                    msg_time = time.localtime(msg.timestamp // int(1e9))
                    fout.write(time.strftime("%Y-%m-%d %H:%M:%S", msg_time))
                    fout.write(str(drive_event.type) + ":")
                    # Text-mode file: write str directly; the old
                    # .encode('utf-8') produced bytes and raised TypeError
                    # on concatenation under Python 3.
                    fout.write(drive_event.event + '\n')
def process_record(cls, input_record, output_record):
    """Copy messages on whitelisted topics into a new record file.

    Creates the output directory if needed, then writes channel metadata
    and messages for every topic listed in SamplePNC.TOPICS.
    """
    print("filtering: {} -> {}".format(input_record, output_record))
    output_dir = os.path.dirname(output_record)
    if output_dir != "" and not os.path.exists(output_dir):
        os.makedirs(output_dir)
    freader = RecordReader(input_record)
    fwriter = RecordWriter()
    # BUG FIX: Python-2 print statements were a syntax error next to the
    # py3-style print() on the first line; converted to print() calls.
    if not fwriter.open(output_record):
        print("writer open failed!")
        return
    print("+++ begin to process...")
    for channelname, msg, datatype, timestamp in freader.read_messages():
        if channelname in SamplePNC.TOPICS:
            desc = freader.get_protodesc(channelname)
            fwriter.write_channel(channelname, datatype, desc)
            fwriter.write_message(channelname, msg, timestamp)
    print("+++ Finished processing...")
def process_record(cls, input_record, output_record):
    """Copy messages on whitelisted topics into a new record file.

    Creates the output directory if needed, then writes channel metadata
    and messages for every topic listed in SamplePNC.TOPICS.
    """
    print("filtering: {} -> {}".format(input_record, output_record))
    output_dir = os.path.dirname(output_record)
    if output_dir != "" and not os.path.exists(output_dir):
        os.makedirs(output_dir)
    reader = RecordReader(input_record)
    writer = RecordWriter()
    if not writer.open(output_record):
        print('writer open failed!')
        return
    print('----- Begin to process record -----')
    for channel, payload, datatype, stamp in reader.read_messages():
        if channel not in SamplePNC.TOPICS:
            continue
        writer.write_channel(channel, datatype, reader.get_protodesc(channel))
        writer.write_message(channel, payload, stamp)
    print('----- Finish processing record -----')
def calculate(self, bag_file):
    """Calculate mileage.

    Appends timestamped drive events from bag_file to /apollo/test.txt.
    """
    try:
        drive_event = drive_event_pb2.DriveEvent()
        reader = RecordReader(bag_file)
    except Exception:  # Narrowed from bare except.
        print("can't open bag")
    else:
        # Context manager replaces the removed Python-2 builtin file() and
        # guarantees the handle is closed even if parsing raises.
        with open('/apollo/test.txt', 'a') as fout:
            for msg in reader.read_messages():
                if msg.topic == kEventTopic:
                    drive_event.ParseFromString(msg.message)
                    msg_time = time.localtime(
                        drive_event.header.timestamp_sec)
                    fout.write(time.strftime("%Y-%m-%d %H:%M:%S", msg_time))
                    fout.write(str(drive_event.type) + ":")
                    # Text-mode file: write str directly; the old
                    # .encode('utf-8') produced bytes and raised TypeError
                    # on concatenation under Python 3.
                    fout.write(drive_event.event + '\n')
def dump_bag(in_bag, out_dir):
    """out_bag = in_bag + routing_bag

    For every planning message (within the configured time window), dump
    the latest message of each tracked topic to <seq>_<name>.pb.txt.
    """
    reader = RecordReader(in_bag)
    seq = 0
    global g_args
    # topic -> [short file name, latest message seen on that topic]
    topic_name_map = {
        "/apollo/localization/pose": ["localization", None],
        "/apollo/canbus/chassis": ["chassis", None],
        "/apollo/routing_response": ["routing", None],
        "/apollo/routing_resquest": ["routing_request", None],
        "/apollo/perception/obstacles": ["perception", None],
        "/apollo/prediction": ["prediction", None],
        "/apollo/planning": ["planning", None],
        "/apollo/control": ["control", None]
    }
    first_time = None
    record_num = 0
    for channel, message, _type, _timestamp in reader.read_messages():
        t = _timestamp
        msg = message
        record_num += 1
        if record_num % 1000 == 0:
            # Python-2 print statements converted to print() calls.
            print("Processing record_num:", record_num)
        if first_time is None:
            first_time = t
        if channel not in topic_name_map:
            continue
        # Timestamps are nanoseconds since epoch.
        dt1 = datetime.utcfromtimestamp(t / 1000000000)
        dt2 = datetime.utcfromtimestamp(first_time / 1000000000)
        relative_time = (dt1 - dt2).seconds - g_args.start_time
        print("relative_time", relative_time)
        if ((g_args.time_duration > 0) and
                (relative_time < 0 or relative_time > g_args.time_duration)):
            continue
        if channel == "/apollo/planning":
            seq += 1
            topic_name_map[channel][1] = msg
            print("Generating seq:", seq)
            # BUG FIX: the loop variable was named `t`, clobbering the
            # message timestamp; iteritems() is Python-2 only.
            for _topic, name_pb in topic_name_map.items():
                if name_pb[1] is None:
                    continue
                file_path = os.path.join(
                    out_dir, str(seq) + "_" + name_pb[0] + ".pb.txt")
                write_to_file(file_path, name_pb[1])
        topic_name_map[channel][1] = msg
def calculate(self, bag_file):
    """
    Calculate mileage
    """
    try:
        event_proto = drive_event_pb2.DriveEvent()
        record_reader = RecordReader(bag_file)
    except Exception:
        print('Cannot open bag file %s' % bag_file)
        return
    with open('/apollo/test.txt', 'a') as out:
        for msg in record_reader.read_messages():
            # Only drive-event messages are of interest here.
            if msg.topic != kEventTopic:
                continue
            event_proto.ParseFromString(msg.message)
            when = time.localtime(event_proto.header.timestamp_sec)
            out.write(time.strftime("%Y-%m-%d %H:%M:%S", when))
            out.write(str(event_proto.type) + ':')
            out.write(event_proto.event.encode('utf-8') + '\n')
def tf_stats(in_bag):
    """Count /tf transforms per "frame_id=>child_frame_id" pair."""
    reader = RecordReader(in_bag)
    global g_args
    stats = {}
    for channel, message, _type, _timestamp in reader.read_messages():
        if channel != '/tf':
            continue
        tf_pb = transform_pb2.TransformStampeds()
        tf_pb.ParseFromString(message)
        for transform in tf_pb.transforms:
            key = transform.header.frame_id + "=>" + transform.child_frame_id
            # dict.get() replaces the `key in stats.keys()` branch pair.
            stats[key] = stats.get(key, 0) + 1
    # BUG FIX: was a Python-2 print statement; converted to print().
    print("tf stats:", stats)
def validate_record(record_file):
    """
    Validate the record file

    Check the validity of a cyber record file according to header info.
    Returns True only when every header sanity check passes and the record
    contains at least one sensor channel.
    """
    record_reader = RecordReader(record_file)
    header_msg = record_reader.get_headerstring()
    header = record_pb2.Header()
    header.ParseFromString(header_msg)
    # BUG FIX: len() is not defined on a protobuf message object (raised
    # TypeError); the intended check is on the serialized header string.
    if len(header_msg) != CYBER_RECORD_HEADER_LENGTH:
        print('Record file: %s. header length should be %d.' % (
            record_file, CYBER_RECORD_HEADER_LENGTH))
        return False
    if header.size == 0:
        print('Record file: %s. size is 0.' % record_file)
        return False
    # BUG FIX: was 'and', which only failed when BOTH components mismatched;
    # the expected version is exactly 1.0.
    if header.major_version != 1 or header.minor_version != 0:
        print('Record file: %s. version [%d:%d] is wrong.' % (
            record_file, header.major_version, header.minor_version))
        return False
    if header.begin_time >= header.end_time:
        print('Record file: %s. begin time [%s] is equal or larger than '
              'end time [%s].' % (record_file, header.begin_time,
                                  header.end_time))
        return False
    if not header.is_complete:
        print('Record file: %s is not completed.' % record_file)
        return False
    if header.message_number < 1 or header.channel_number < 1:
        print('Record file: %s. message|channel number [%d|%d] is invalid.' %
              (record_file, header.message_number, header.channel_number))
        return False
    # There should be at least one sensor channel.
    sensor_channels = get_sensor_channel_list(record_file)
    if len(sensor_channels) < 1:
        print('Record file: %s. cannot found sensor channels.' % record_file)
        return False
    return True
def get_driving_mode(self, bag_file):
    """get driving mode, which is stored in a dict

    Each appended dict describes one contiguous mode segment with keys
    "status", "start_time" and "end_time" (seconds).
    """
    mode = {"status": 'UNKNOW', "start_time": 0.0, "end_time": 0.0}
    chassis = chassis_pb2.Chassis()
    reader = RecordReader(bag_file)
    for msg in reader.read_messages():
        if msg.topic == kChassisTopic:
            chassis.ParseFromString(msg.message)
            # msg.timestamp is nanoseconds; int() replaces the Python-2-only
            # long(str(...)) round-trip.
            t = int(msg.timestamp) * pow(10, -9)
            cur_status = chassis.driving_mode
            if mode["status"] != cur_status:
                if mode["status"] != 'UNKNOW':
                    self.driving_mode.append(mode)
                # BUG FIX: start a FRESH dict per segment. The old code kept
                # mutating the single dict it had already appended, so every
                # stored segment aliased the same object and ended up with
                # the final segment's values.
                mode = {"status": cur_status, "start_time": t, "end_time": t}
            else:
                mode["end_time"] = t
    self.driving_mode.append(mode)
def restore_record(input_record, output_record): """Entrance of processing.""" # Define working dirs that store intermediate results in the middle of processing work_dir = 'restore_video_work_dir_{}'.format( datetime.datetime.fromtimestamp( time.time()).strftime('%Y-%m-%d-%H-%M-%S')) # Decode videos converters = {} for topic in VIDEO_CHANNELS: converters[topic] = VideoConverter(work_dir, topic) reader = RecordReader(input_record) for message in reader.read_messages(): if message.topic in VIDEO_CHANNELS: converters[message.topic].write_frame(message) image_dir = os.path.join(work_dir, 'images') makedirs(image_dir) for topic in VIDEO_CHANNELS: converters[topic].close_writer() converters[topic].decode() converters[topic].move_images(image_dir) # Restore target record file writer = RecordWriter(0, 0) writer.open(output_record) topic_descs = {} counter = 0 reader = RecordReader(input_record) for message in reader.read_messages(): message_content = message.message message_topic = message.topic if message.topic in VIDEO_CHANNELS: message_content = retrieve_image(image_dir, message) message_topic = VIDEO_IMAGE_MAP[message.topic] if not message_content: continue counter += 1 if counter % 1000 == 0: logging.info('rewriting {} th message to record {}'.format( counter, output_record)) writer.write_message(message_topic, message_content, message.timestamp) if message_topic not in topic_descs: topic_descs[message_topic] = reader.get_protodesc(message_topic) writer.write_channel(message_topic, message.data_type, topic_descs[message_topic]) writer.close() logging.info('All Done, converted record: {}'.format(output_record))
def calculate(self, bag_file):
    """calculate body sensation, it should be after get driving mode

    Classifies passenger-comfort events (bumps, hard braking/acceleration
    at two severity levels, sharp turning) from localization accelerations,
    counting each event into auto/manual buckets via self._check_status.
    Flag/threshold state is kept on self across messages.
    """
    localization = localization_pb2.LocalizationEstimate()
    reader = RecordReader(bag_file)
    for msg in reader.read_messages():
        if msg.topic == kLocalizationTopic:
            localization.ParseFromString(msg.message)
            _t = msg.timestamp
            # Nanoseconds -> seconds. NOTE(review): long() is Python-2 only.
            t = long(str(_t)) * pow(10, -9)
            self.timestamp = t
            # Debounce: ignore everything within BUMP_TIME_THRESHOLD of the
            # last detected bump.
            diff_bump_time = t - self._last_bump_time
            if diff_bump_time <= BUMP_TIME_THRESHOLD:
                continue
            acc_x = localization.pose.linear_acceleration.x
            acc_y = localization.pose.linear_acceleration.y
            acc_z = localization.pose.linear_acceleration.z
            # Large vertical acceleration => bump; roll back counts that
            # the bump may have spuriously triggered.
            if abs(acc_z) >= SPEED_UP_THRESHOLD_2 \
                    and diff_bump_time >= BUMP_TIME_THRESHOLD:
                self._bumps_rollback(t)
                self._last_bump_time = t
                if self._check_status(t):
                    self.auto_counts["bumps"] += 1
                else:
                    self.manual_counts["bumps"] += 1
            else:
                # --- Deceleration (speed-down) detection ---
                if self._speed_down_2_flag:
                    # Still decelerating: escalate to level 4 or wait.
                    if acc_y <= SPEED_DOWN_THRESHOLD_4:
                        self._speed_down_4_flag = 1
                        continue
                    if acc_y <= SPEED_DOWN_THRESHOLD_2:
                        continue
                    # Deceleration ended: record the strongest level seen,
                    # rate-limited by ACCELERATE_TIME_THRESHOLD.
                    if self._speed_down_4_flag == 1 \
                            and t - self._last_speed_down_4_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_speed_down_4_time = t
                        if self._check_status(t):
                            self.auto_counts["speed_down_4"] += 1
                        else:
                            self.manual_counts["speed_down_4"] += 1
                    elif t - self._last_speed_down_2_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_speed_down_2_time = t
                        if self._check_status(t):
                            self.auto_counts["speed_down_2"] += 1
                        else:
                            self.manual_counts["speed_down_2"] += 1
                    self._speed_down_2_flag = 0
                    self._speed_down_4_flag = 0
                elif acc_y <= SPEED_DOWN_THRESHOLD_2:
                    # Deceleration episode starts.
                    self._speed_down_2_flag = 1
                # --- Acceleration (speed-up) detection, mirror logic ---
                if self._speed_up_2_flag:
                    if acc_y >= SPEED_UP_THRESHOLD_4:
                        self._speed_up_4_flag = 1
                        continue
                    if acc_y >= SPEED_UP_THRESHOLD_2:
                        continue
                    if self._speed_up_4_flag == 1 \
                            and t - self._last_speed_up_4_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_speed_up_4_time = t
                        if self._check_status(t):
                            self.auto_counts["speed_up_4"] += 1
                        else:
                            self.manual_counts["speed_up_4"] += 1
                    elif t - self._last_speed_up_2_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_speed_up_2_time = t
                        if self._check_status(t):
                            self.auto_counts["speed_up_2"] += 1
                        else:
                            self.manual_counts["speed_up_2"] += 1
                    self._speed_up_2_flag = 0
                    self._speed_up_4_flag = 0
                elif acc_y >= SPEED_UP_THRESHOLD_2:
                    # Acceleration episode starts.
                    self._speed_up_2_flag = 1
                # --- Sharp-turn detection on lateral acceleration ---
                if self._turning_flag:
                    if abs(acc_x) >= SPEED_UP_THRESHOLD_2:
                        continue
                    if t - self._last_turning_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_turning_time = t
                        if self._check_status(t):
                            self.auto_counts["turning"] += 1
                        else:
                            self.manual_counts["turning"] += 1
                    self._turning_flag = 0
                elif abs(acc_x) >= SPEED_UP_THRESHOLD_2:
                    self._turning_flag = 1
action="store_const", const=True, help="plot planing reference paths in cartesian coordinate.") parser.add_argument( "-a", "--alldata", action="store_const", const=True, help= "Analyze all data (both auto and manual), otherwise auto data only without this option." ) args = parser.parse_args() record_file = args.file reader = RecordReader(record_file) control_analyzer = ControlAnalyzer() planning_analyzer = PlannigAnalyzer(args.simulation, args.simulation2) lidar_endtoend_analyzer = LidarEndToEndAnalyzer() process(control_analyzer, planning_analyzer, lidar_endtoend_analyzer, args.simulation, args.planningpath, args.planningrefpath, args.alldata) if args.simulation: planning_analyzer.print_simulation_results() elif args.simulation2: planning_analyzer.print_sim_results() elif args.planningpath or args.planningrefpath: plt.axis('equal')
f_localization = open("localization_" + frecords[0].split('/')[-1] + ".csv", 'wb') csv_writer_localization = csv.writer(f_localization) csv_writer_localization.writerow(["timestamp", "x", "y"]) f_chassis = open("chassis_" + frecords[0].split('/')[-1] + ".csv", 'wb') csv_writer_chassis = csv.writer(f_chassis) csv_writer_chassis.writerow(["timestamp", "speed", "steering_percentage"]) f_routing = open("routng_" + frecords[0].split('/')[-1] + ".csv", 'wb') csv_writer_routing = csv.writer(f_routing) csv_writer_routing.writerow(["timestamp", "guidepost_x", "guidepost_y", "use_guidepost", "cur_id", "end_id"]) #with open("path_" + frecords[0].split('/')[-1] + ".txt", 'w') as f: for frecord in frecords: print("processing " + frecord) reader = RecordReader(frecord) for msg in reader.read_messages(): if msg.topic == "/apollo/localization/pose": localization = localization_pb2.LocalizationEstimate() localization.ParseFromString(msg.message) loc_time = localization.header.timestamp_sec x = localization.pose.position.x y = localization.pose.position.y #f.write(str(loc_time) + "," + str(x) + "," + str(y) + "\n") csv_writer_localization.writerow([loc_time, x, y]) if msg.topic == "/apollo/canbus/chassis": chassis = chassis_pb2.Chassis() chassis.ParseFromString(msg.message) chassis_time = chassis.header.timestamp_sec
def extract_data(record_file, output_path, channel_list, start_timestamp,
                 end_timestamp, extraction_ratio):
    """
    Extract the desired channel messages if channel_list is specified.
    Otherwise extract all sensor calibration messages according to
    extraction ratio, 10% by default.

    Only messages whose timestamp falls in [start_timestamp, end_timestamp]
    are considered; of those, every `extraction_ratio`-th message per
    channel is extracted. Returns False on an invalid channel list,
    True otherwise. Raises ValueError for extraction_ratio < 1.
    """
    # Validate extration_ratio, and set it as an integer.
    if extraction_ratio < 1.0:
        raise ValueError("Extraction rate must be a number greater than 1.")
    extraction_ratio = np.floor(extraction_ratio)
    sensor_channels = get_sensor_channel_list(record_file)
    if len(channel_list) > 0 and validate_channel_list(
            channel_list, sensor_channels) is False:
        print('Input channel list is invalid.')
        return False
    # Extract all the sensor channels if channel_list is empty(no input arguments).
    print(sensor_channels)
    if len(channel_list) == 0:
        channel_list = sensor_channels
    # Declare logging variables
    process_channel_success_num = len(channel_list)
    process_channel_failure_num = 0
    process_msg_failure_num = 0
    channel_success_dict = {}
    channel_occur_time = {}
    channel_output_path = {}
    # One output sub-directory per channel ('/' mapped to '_').
    for channel in channel_list:
        channel_success_dict[channel] = True
        channel_occur_time[channel] = -1
        topic_name = channel.replace('/', '_')
        channel_output_path[channel] = os.path.join(output_path, topic_name)
        process_dir(channel_output_path[channel], operation='create')
    record_reader = RecordReader(record_file)
    for msg in record_reader.read_messages():
        if msg.topic in channel_list:
            # Only care about messages in certain time intervals
            msg_timestamp_sec = msg.timestamp / 1e9
            if not in_range(msg_timestamp_sec, start_timestamp,
                            end_timestamp):
                continue
            channel_occur_time[msg.topic] += 1
            # Extract the topic according to extraction_ratio
            if channel_occur_time[msg.topic] % extraction_ratio != 0:
                continue
            ret = extract_channel_data(channel_output_path[msg.topic], msg)
            # Calculate parsing statistics
            if ret is False:
                process_msg_failure_num += 1
                # Demote the channel to "failed" only once.
                if channel_success_dict[msg.topic] is True:
                    channel_success_dict[msg.topic] = False
                    process_channel_failure_num += 1
                    process_channel_success_num -= 1
                    print('Failed to extract data from channel: %s'
                          % msg.topic)
    # Logging statics about channel extraction
    print('Extracted sensor channel number [%d] in record file: %s' %
          (len(channel_list), record_file))
    print('Successfully processed [%d] channels, and [%d] was failed.' %
          (process_channel_success_num, process_channel_failure_num))
    if process_msg_failure_num > 0:
        print('Channel extraction failure number is: %d' %
              process_msg_failure_num)
    return True
parser = argparse.ArgumentParser( description='Process and analyze control and planning data') parser.add_argument('--bag', type=str, help='use Rosbag') args = parser.parse_args() fig, axarr = plt.subplots(2, 2) plt.tight_layout() axarr[0, 0].get_shared_x_axes().join(axarr[0, 0], axarr[1, 0]) axarr[1, 1].get_shared_x_axes().join(axarr[0, 0], axarr[1, 1]) controlinfo = ControlInfo(axarr) if args.bag: file_path = args.bag # bag = rosbag.Bag(file_path) reader = RecordReader(file_path) for msg in reader.read_messages(): print msg.timestamp, msg.topic if msg.topic == "/apollo/localization/pose": localization = localization_pb2.LocalizationEstimate() localization.ParseFromString(msg.message) controlinfo.callback_localization(localization) elif msg.topic == "/apollo/planning": adc_trajectory = planning_pb2.ADCTrajectory() adc_trajectory.ParseFromString(msg.message) controlinfo.callback_planning(adc_trajectory) elif msg.topic == "/apollo/control": control_cmd = control_cmd_pb2.ControlCommand() control_cmd.ParseFromString(msg.message) controlinfo.callback_control(control_cmd) elif msg.topic == "/apollo/canbus/chassis":
def extract_data(record_file, output_path, channel_list, start_timestamp,
                 end_timestamp, extraction_rate_dict):
    """
    Extract the desired channel messages if channel_list is specified.
    Otherwise extract all sensor calibration messages according to
    extraction rate, 10% by default.

    Only messages whose timestamp falls in [start_timestamp, end_timestamp]
    are considered; each channel is sampled at its own rate taken from
    extraction_rate_dict. Channels listed in SMALL_TOPICS are buffered and
    saved as one combined summary file at the end. Returns False on an
    invalid channel list, True otherwise.
    """
    sensor_channels = get_sensor_channel_list(record_file)
    if len(channel_list) > 0 and validate_channel_list(
            channel_list, sensor_channels) is False:
        print('Input channel list is invalid.')
        return False
    # Extract all the sensor channels if channel_list is empty(no input arguments).
    print(sensor_channels)
    if len(channel_list) == 0:
        channel_list = sensor_channels
    # Declare logging variables
    process_channel_success_num = len(channel_list)
    process_channel_failure_num = 0
    process_msg_failure_num = 0
    channel_success_dict = {}
    channel_occur_time = {}
    channel_output_path = {}
    channel_msgs_dict = {}
    # One output sub-directory per channel ('/' mapped to '_').
    for channel in channel_list:
        channel_success_dict[channel] = True
        channel_occur_time[channel] = -1
        topic_name = channel.replace('/', '_')
        channel_output_path[channel] = os.path.join(output_path, topic_name)
        process_dir(channel_output_path[channel], operation='create')
        # Small topics accumulate into an in-memory list for one summary file.
        if channel in SMALL_TOPICS:
            channel_msgs_dict[channel] = list()
    record_reader = RecordReader(record_file)
    for msg in record_reader.read_messages():
        if msg.topic in channel_list:
            # Only care about messages in certain time intervals
            msg_timestamp_sec = msg.timestamp / 1e9
            if not in_range(msg_timestamp_sec, start_timestamp,
                            end_timestamp):
                continue
            channel_occur_time[msg.topic] += 1
            # Extract the topic according to extraction_rate
            if channel_occur_time[msg.topic] % extraction_rate_dict[
                    msg.topic] != 0:
                continue
            ret, channel_msgs_dict = extract_channel_data(
                channel_output_path[msg.topic], msg, channel_msgs_dict)
            # Calculate parsing statistics
            if ret is False:
                process_msg_failure_num += 1
                # Demote the channel to "failed" only once.
                if channel_success_dict[msg.topic] is True:
                    channel_success_dict[msg.topic] = False
                    process_channel_failure_num += 1
                    process_channel_success_num -= 1
                    print('Failed to extract data from channel: %s'
                          % msg.topic)
    # traverse the dict, if any channel topic stored as a list
    # then save the list as a summary file, mostly binary file
    for channel, msg_list in channel_msgs_dict.items():
        save_msg_list_to_file(channel_output_path[channel], channel, msg_list)
    # Logging statics about channel extraction
    print('Extracted sensor channel number [%d] in record file: %s' %
          (len(channel_list), record_file))
    print('Successfully processed [%d] channels, and [%d] was failed.' %
          (process_channel_success_num, process_channel_failure_num))
    if process_msg_failure_num > 0:
        print('Channel extraction failure number is: %d' %
              process_msg_failure_num)
    return True
if last_timestamp_sec is None: last_steering_percentage = steering_percentage last_speed_mps = speed_mps last_timestamp_sec = timestamp_sec continue if (timestamp_sec - last_timestamp_sec) > 0.02: d_steering = (steering_percentage - last_steering_percentage) \ / (timestamp_sec - last_timestamp_sec) speed_data.append(speed_mps) d_steering_data.append(d_steering) last_steering_percentage = steering_percentage last_speed_mps = speed_mps last_timestamp_sec = timestamp_sec return speed_data, d_steering_data if __name__ == "__main__": fns = sys.argv[1:] fig, ax = plt.subplots() for fn in fns: reader = RecordReader(fn) speed_data, d_steering_data = process(reader) ax.scatter(speed_data, d_steering_data) ax.set_xlim(-5, 40) ax.set_ylim(-300, 300) plt.show()
def extract_data(record_files, output_path, channels, start_timestamp,
                 end_timestamp, extraction_rates):
    """
    Extract the desired channel messages if channel_list is specified.
    Otherwise extract all sensor calibration messages according to
    extraction rate, 10% by default.

    Args:
        record_files: list of cyber record paths; all records are assumed
            to share an identical set of sensor channels.
        output_path: directory under which one sub-directory per channel
            is created.
        channels: channels to extract; empty means "all sensor channels".
        start_timestamp/end_timestamp: time window (seconds) of interest.
        extraction_rates: per-channel down-sampling rate (keep every N-th
            message).

    Returns:
        False if the requested channel list is invalid, True otherwise
        (per-channel failures are reported but do not abort extraction).
    """
    # all records have identical sensor channels.
    sensor_channels = get_sensor_channel_list(record_files[0])

    # An explicit channel list must be a subset of the record's sensor channels.
    if channels and not validate_channel_list(channels, sensor_channels):
        print('The input channel list is invalid.')
        return False

    # Extract all the sensor channels if channel_list is empty (no input arguments).
    print(sensor_channels)
    if not channels:
        channels = sensor_channels

    # Declare logging variables
    process_channel_success_num = len(channels)
    process_channel_failure_num = 0
    process_msg_failure_num = 0

    channel_success = {}
    channel_occur_time = {}
    channel_output_path = {}
    channel_parsers = {}
    for channel in channels:
        channel_success[channel] = True
        channel_occur_time[channel] = -1
        topic_name = channel.replace('/', '_')
        channel_output_path[channel] = os.path.join(output_path, topic_name)
        process_dir(channel_output_path[channel], operation='create')
        channel_parsers[channel] = build_parser(channel,
                                                channel_output_path[channel])

    for record_file in record_files:
        record_reader = RecordReader(record_file)
        for msg in record_reader.read_messages():
            if msg.topic not in channels:
                continue
            # Only care about messages in certain time intervals
            msg_timestamp_sec = msg.timestamp / 1e9
            if not in_range(msg_timestamp_sec, start_timestamp, end_timestamp):
                continue
            channel_occur_time[msg.topic] += 1
            # Extract the topic according to extraction_rate
            if channel_occur_time[msg.topic] % extraction_rates[msg.topic] != 0:
                continue
            ret = channel_parsers[msg.topic].parse_sensor_message(msg)
            # Calculate parsing statistics
            if not ret:
                process_msg_failure_num += 1
                # Count each failing channel only once.
                if channel_success[msg.topic]:
                    channel_success[msg.topic] = False
                    process_channel_failure_num += 1
                    process_channel_success_num -= 1
                    print('Failed to extract data from channel: %s in record %s'
                          % (msg.topic, record_file))

    # traverse the dict, if any channel topic stored as a list
    # then save the list as a summary file, mostly binary file
    for channel, parser in channel_parsers.items():
        save_combined_messages_info(parser, channel)

    # Logging statistics about channel extraction
    print('Extracted sensor channel number [%d] from record files: %s' %
          (len(channels), ' '.join(record_files)))
    print('Successfully processed [%d] channels, and [%d] was failed.' %
          (process_channel_success_num, process_channel_failure_num))
    if process_msg_failure_num > 0:
        print('Channel extraction failure number is [%d].' %
              process_msg_failure_num)
    return True
extract localization message from bag files Usage: python path_extract.py file1 file2 ... """ import sys import datetime from cyber_py.record import RecordReader from modules.localization.proto import localization_pb2 kLocalizationTopic = '/apollo/localization/pose' if __name__ == '__main__': bag_files = sys.argv[1:] bag_file = bag_files[0] now = datetime.datetime.now().strftime("%Y-%m-%d_%H.%M.%S") f = open("path_" + bag_file.split('/')[-1] + ".txt", 'w') for bag_file in bag_files: print("begin to extract path from file :", bag_file) reader = RecordReader(bag_file) localization = localization_pb2.LocalizationEstimate() for msg in reader.read_messages(): if msg.topic == kLocalizationTopic: localization.ParseFromString(msg.message) x = localization.pose.position.x y = localization.pose.position.y f.write(str(x) + "," + str(y) + "\n") print("Finished extracting path from file :", bag_file) f.close()
def calculate(self, bag_file):
    """calculate body sensation, it should be after get driving mode

    Scans every localization message in `bag_file` and classifies sharp
    accelerations into "bumps", speed-up / speed-down events (two severity
    levels) and turning events.  Each event is counted into
    self.auto_counts or self.manual_counts depending on
    self._check_status(t) (autonomous vs. manual driving at time t).
    """
    localization = localization_pb2.LocalizationEstimate()
    reader = RecordReader(bag_file)
    for msg in reader.read_messages():
        if msg.topic == kLocalizationTopic:
            localization.ParseFromString(msg.message)
            _t = msg.timestamp
            # Convert the integer nanosecond timestamp to float seconds.
            # NOTE(review): `long` exists only in Python 2 — confirm this
            # tool is still run under a Python 2 interpreter.
            t = long(str(_t)) * pow(10, -9)
            self.timestamp = t
            # Time elapsed since the last detected bump; everything inside
            # the bump cool-down window is skipped entirely.
            diff_bump_time = t - self._last_bump_time
            if diff_bump_time <= BUMP_TIME_THRESHOLD:
                continue
            acc_x = localization.pose.linear_acceleration.x
            acc_y = localization.pose.linear_acceleration.y
            acc_z = localization.pose.linear_acceleration.z
            # Large vertical acceleration => bump.  (The second condition is
            # already guaranteed true by the `continue` above.)
            if abs(acc_z) >= SPEED_UP_THRESHOLD_2 and diff_bump_time >= BUMP_TIME_THRESHOLD:
                # A bump invalidates accel events counted just before it.
                self._bumps_rollback(t)
                self._last_bump_time = t
                if self._check_status(t):
                    self.auto_counts["bumps"] += 1
                else:
                    self.manual_counts["bumps"] += 1
            else:
                # --- Deceleration events -------------------------------
                # _speed_down_2_flag is armed on the first sample past the
                # level-2 threshold; the event is committed once acc_y
                # recovers above both thresholds.
                if self._speed_down_2_flag:
                    if acc_y <= SPEED_DOWN_THRESHOLD_4:
                        # Escalate to the severe (level-4) event.
                        self._speed_down_4_flag = 1
                        continue
                    if acc_y <= SPEED_DOWN_THRESHOLD_2:
                        # Still decelerating hard; wait for recovery.
                        continue
                    if self._speed_down_4_flag == 1 \
                            and t - self._last_speed_down_4_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_speed_down_4_time = t
                        if self._check_status(t):
                            self.auto_counts["speed_down_4"] += 1
                        else:
                            self.manual_counts["speed_down_4"] += 1
                    elif t - self._last_speed_down_2_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_speed_down_2_time = t
                        if self._check_status(t):
                            self.auto_counts["speed_down_2"] += 1
                        else:
                            self.manual_counts["speed_down_2"] += 1
                    # Event committed (or debounced): disarm both flags.
                    self._speed_down_2_flag = 0
                    self._speed_down_4_flag = 0
                elif acc_y <= SPEED_DOWN_THRESHOLD_2:
                    self._speed_down_2_flag = 1
                # --- Acceleration events (mirror of the block above) ----
                if self._speed_up_2_flag:
                    if acc_y >= SPEED_UP_THRESHOLD_4:
                        self._speed_up_4_flag = 1
                        continue
                    if acc_y >= SPEED_UP_THRESHOLD_2:
                        continue
                    if self._speed_up_4_flag == 1 \
                            and t - self._last_speed_up_4_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_speed_up_4_time = t
                        if self._check_status(t):
                            self.auto_counts["speed_up_4"] += 1
                        else:
                            self.manual_counts["speed_up_4"] += 1
                    elif t - self._last_speed_up_2_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_speed_up_2_time = t
                        if self._check_status(t):
                            self.auto_counts["speed_up_2"] += 1
                        else:
                            self.manual_counts["speed_up_2"] += 1
                    self._speed_up_2_flag = 0
                    self._speed_up_4_flag = 0
                elif acc_y >= SPEED_UP_THRESHOLD_2:
                    self._speed_up_2_flag = 1
                # --- Turning events: lateral acceleration, single level -
                if self._turning_flag:
                    if abs(acc_x) >= SPEED_UP_THRESHOLD_2:
                        # Still turning hard; wait for recovery.
                        continue
                    if t - self._last_turning_time >= ACCELERATE_TIME_THRESHOLD:
                        self._last_turning_time = t
                        if self._check_status(t):
                            self.auto_counts["turning"] += 1
                        else:
                            self.manual_counts["turning"] += 1
                    self._turning_flag = 0
                elif abs(acc_x) >= SPEED_UP_THRESHOLD_2:
                    self._turning_flag = 1
parser = argparse.ArgumentParser( description='Process and analyze control and planning data') parser.add_argument('--bag', type=str, help='use Rosbag') args = parser.parse_args() fig, axarr = plt.subplots(2, 2) plt.tight_layout() axarr[0, 0].get_shared_x_axes().join(axarr[0, 0], axarr[1, 0]) axarr[1, 1].get_shared_x_axes().join(axarr[0, 0], axarr[1, 1]) controlinfo = ControlInfo(axarr) if args.bag: file_path = args.bag # bag = rosbag.Bag(file_path) reader = RecordReader(file_path) for msg in reader.read_messages(): print(msg.timestamp, msg.topic) if msg.topic == "/apollo/localization/pose": localization = localization_pb2.LocalizationEstimate() localization.ParseFromString(msg.message) controlinfo.callback_localization(localization) elif msg.topic == "/apollo/planning": adc_trajectory = planning_pb2.ADCTrajectory() adc_trajectory.ParseFromString(msg.message) controlinfo.callback_planning(adc_trajectory) elif msg.topic == "/apollo/control": control_cmd = control_cmd_pb2.ControlCommand() control_cmd.ParseFromString(msg.message) controlinfo.callback_control(control_cmd) elif msg.topic == "/apollo/canbus/chassis":
class RecordParser(object):
    """Wrapper of a Cyber record."""

    @staticmethod
    def Parse(record_file):
        """Simple interface to parse a cyber record.

        Returns the populated Record proto, or None when the record
        contains no readable messages.
        """
        parser = RecordParser(record_file)
        if not parser.ParseMeta():
            return None
        parser.ParseMessages()
        return parser.record

    def __init__(self, record_file):
        """Init input reader and output record."""
        record_file = os.path.abspath(record_file)
        self.record = Record(path=record_file,
                             dir=os.path.dirname(record_file))

        self._reader = RecordReader(record_file)
        # State during processing messages.
        self._current_driving_mode = None
        self._last_position = None
        # To sample driving path.
        self._last_position_sampled = None
        self._last_position_sampled_time = None

    def ParseMeta(self):
        """
        Parse meta info which doesn't need to scan the record.
        Currently we parse the record ID, header and channel list here.

        Returns False when the record has no channels/messages.
        """
        self.record.header.ParseFromString(self._reader.get_headerstring())
        for chan in self._reader.get_channellist():
            self.record.channels[chan] = self._reader.get_messagenumber(chan)
        if not self.record.channels:
            glog.error('No message found in record')
            return False
        return True

    def ParseMessages(self):
        """Process all messages, dispatching by channel name."""
        for channel, msg, _type, timestamp in self._reader.read_messages():
            if channel == kHMIStatusChannel:
                self.ProcessHMIStatus(msg)
            elif channel == kDriveEventChannel:
                self.ProcessDriveEvent(msg)
            elif channel == kChassisChannel:
                self.ProcessChassis(msg)
            elif channel == kLocalizationChannel:
                self.ProcessLocalization(msg)

    def ProcessHMIStatus(self, msg):
        """Save HMIStatus."""
        # Keep the first message and assume it doesn't change in one recording.
        if not self.record.HasField('hmi_status'):
            self.record.hmi_status.ParseFromString(msg)

    def ProcessDriveEvent(self, msg):
        """Save DriveEvents."""
        self.record.drive_events.add().ParseFromString(msg)

    def ProcessChassis(self, msg):
        """Process Chassis, save disengagements."""
        chassis = Chassis()
        chassis.ParseFromString(msg)
        timestamp = chassis.header.timestamp_sec
        if self._current_driving_mode == chassis.driving_mode:
            # DrivingMode doesn't change.
            return

        # Save disengagement: AUTO -> EMERGENCY transition.
        if (self._current_driving_mode == Chassis.COMPLETE_AUTO_DRIVE and
                chassis.driving_mode == Chassis.EMERGENCY_MODE):
            # BUGFIX: glog.info expects a single message (logging %-style);
            # the original passed timestamp as a second positional argument,
            # which triggers a string-formatting error at emit time instead
            # of logging the timestamp.
            glog.info('Disengagement found at %s' % timestamp)
            disengagement = self.record.disengagements.add(time=timestamp)
            if self._last_position is not None:
                # Convert the last known UTM position to lat/lon for storage.
                lat, lon = utm.to_latlon(self._last_position.x,
                                         self._last_position.y,
                                         gflags.FLAGS.utm_zone_id,
                                         gflags.FLAGS.utm_zone_letter)
                disengagement.location.lat = lat
                disengagement.location.lon = lon

        # Update DrivingMode.
        self._current_driving_mode = chassis.driving_mode

    def ProcessLocalization(self, msg):
        """Process Localization, stat mileages and save driving path."""
        localization = LocalizationEstimate()
        localization.ParseFromString(msg)
        timestamp = localization.header.timestamp_sec
        cur_pos = localization.pose.position

        # Stat mileages, bucketed by the driving mode in effect.
        if (self._last_position is not None and
                self._current_driving_mode is not None):
            driving_mode = Chassis.DrivingMode.Name(self._current_driving_mode)
            meters = utm_distance_meters(self._last_position, cur_pos)
            if driving_mode in self.record.stat.mileages:
                self.record.stat.mileages[driving_mode] += meters
            else:
                self.record.stat.mileages[driving_mode] = meters

        # Sample driving path: record a point only after both a minimum
        # duration AND a minimum distance since the last sampled point.
        G = gflags.FLAGS
        if (self._last_position_sampled is None or
                (timestamp - self._last_position_sampled_time >
                 G.pos_sample_min_duration and
                 utm_distance_meters(self._last_position_sampled, cur_pos) >
                 G.pos_sample_min_distance)):
            self._last_position_sampled = cur_pos
            self._last_position_sampled_time = timestamp
            lat, lon = utm.to_latlon(cur_pos.x, cur_pos.y,
                                     G.utm_zone_id, G.utm_zone_letter)
            self.record.stat.driving_path.add(lat=lat, lon=lon)

        # Update position.
        self._last_position = cur_pos