def restore_record(input_record, output_record):
    """Entrance of processing.

    Pass 1 decodes every video channel in input_record into image frames under
    a timestamped work dir; pass 2 rewrites the record, substituting each video
    message with its corresponding image message.
    """
    # Define working dirs that store intermediate results in the middle of
    # processing; timestamped to avoid collisions between runs.
    work_dir = 'restore_video_work_dir_{}'.format(
        datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d-%H-%M-%S'))

    # Decode videos.
    converters = {}
    for topic in VIDEO_CHANNELS:
        converters[topic] = VideoConverter(work_dir, topic)
    reader = RecordReader(input_record)
    for message in reader.read_messages():
        if message.topic in VIDEO_CHANNELS:
            converters[message.topic].write_frame(message)

    image_dir = os.path.join(work_dir, 'images')
    makedirs(image_dir)
    for topic in VIDEO_CHANNELS:
        converters[topic].close_writer()
        converters[topic].decode()
        converters[topic].move_images(image_dir)

    # Restore target record file.
    writer = RecordWriter(0, 0)
    writer.open(output_record)
    topic_descs = {}
    counter = 0
    reader = RecordReader(input_record)
    for message in reader.read_messages():
        message_content = message.message
        message_topic = message.topic
        if message.topic in VIDEO_CHANNELS:
            message_content = retrieve_image(image_dir, message)
            message_topic = VIDEO_IMAGE_MAP[message.topic]
        if not message_content:
            continue
        counter += 1
        if counter % 1000 == 0:
            logging.info('rewriting {} th message to record {}'.format(
                counter, output_record))
        # BUG FIX: register the channel (write_channel) BEFORE writing its
        # first message; the original called write_message first, which can
        # fail for a channel the writer has not seen yet.
        if message_topic not in topic_descs:
            topic_descs[message_topic] = reader.get_protodesc(message_topic)
            writer.write_channel(message_topic, message.data_type,
                                 topic_descs[message_topic])
        writer.write_message(message_topic, message_content, message.timestamp)
    writer.close()
    logging.info('All Done, converted record: {}'.format(output_record))
def process_file(self, record_file):
    """
    Extract information from record file.
    Return True if we are done collecting all information.
    """
    try:
        reader = RecordReader(record_file)
        print("Begin to process record file {}".format(record_file))
        for msg in reader.read_messages():
            print(msg.topic)
            if msg.topic == kChassisInfoTopic and self.vehicle_vin is None:
                chassis = chassis_pb2.Chassis()
                chassis.ParseFromString(msg.message)
                if chassis.license.vin:
                    self.vehicle_vin = chassis.license.vin
            elif msg.topic == kHMIInfoTopic and self.vehicle_name is None:
                hmistatus = hmi_status_pb2.HMIStatus()
                hmistatus.ParseFromString(msg.message)
                if hmistatus.current_map:
                    self.vehicle_name = hmistatus.current_map
                    print(self.vehicle_name)
            # Stop early once all needed fields have been collected.
            if self.done():
                return True
    # BUG FIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt; narrow to Exception.
    except Exception:
        return False
    print("Finished processing record file {}".format(record_file))
    return self.done()
def calculate(self, bag_file):
    """Accumulate mileage per driving mode and count disengagements."""
    prev_pos = None
    prev_mode = 'Unknown'
    mileage = collections.defaultdict(float)
    chassis = chassis_pb2.Chassis()
    localization = localization_pb2.LocalizationEstimate()
    reader = RecordReader(bag_file)
    for msg in reader.read_messages():
        if msg.topic == kChassisTopic:
            chassis.ParseFromString(msg.message)
            if prev_mode != chassis.driving_mode:
                # An auto-drive -> emergency transition is a disengagement.
                if (prev_mode == Chassis.COMPLETE_AUTO_DRIVE
                        and chassis.driving_mode == Chassis.EMERGENCY_MODE):
                    self.disengagements += 1
                prev_mode = chassis.driving_mode
                # Reset start position on every mode change.
                prev_pos = None
        elif msg.topic == kLocalizationTopic:
            localization.ParseFromString(msg.message)
            pos = localization.pose.position
            if prev_pos:
                dx = pos.x - prev_pos.x
                dy = pos.y - prev_pos.y
                dz = pos.z - prev_pos.z
                # xyz-distance (meters) converted to miles.
                mileage[prev_mode] += 0.000621371 * math.sqrt(
                    dx * dx + dy * dy + dz * dz)
            prev_pos = pos
    self.auto_mileage += mileage[Chassis.COMPLETE_AUTO_DRIVE]
    self.manual_mileage += (mileage[Chassis.COMPLETE_MANUAL]
                            + mileage[Chassis.EMERGENCY_MODE])
def process_record(cls, input_record):
    """Print message count and average size per channel of interest."""
    channel_size_stats = {}
    freader = RecordReader(input_record)
    print('----- Begin to process record -----')
    for channelname, msg, datatype, timestamp in freader.read_messages():
        if channelname in ChannelSizeStats.TOPICS:
            # 'num' starts as a float so the final division is non-integer.
            entry = channel_size_stats.setdefault(
                channelname, {'total': 0, 'num': 0.0})
            entry['total'] += len(msg)
            entry['num'] += 1
        elif channelname == "/apollo/planning":
            # Track planning size with the (large) debug field stripped.
            adc_trajectory = planning_pb2.ADCTrajectory()
            adc_trajectory.ParseFromString(msg)
            adc_trajectory.ClearField("debug")
            planning_str = adc_trajectory.SerializeToString()
            entry = channel_size_stats.setdefault(
                "planning_no_debug", {'total': 0, 'num': 0.0})
            entry['total'] += len(planning_str)
            entry['num'] += 1
    for channelname, entry in channel_size_stats.items():
        print(channelname, " num:", entry['num'],
              " avg size:", entry['total'] / entry['num'])
    print('----- Finish processing record -----')
def dump_bag(in_bag, out_dir):
    """
    out_bag = in_bag + routing_bag

    Replays the record; on every /apollo/planning message, dumps the latest
    message of each tracked channel to <out_dir>/<seq>_<name>.pb.txt.
    """
    reader = RecordReader(in_bag)
    seq = 0
    global g_args
    # channel -> [file-name stem, latest message seen]
    topic_name_map = {
        "/apollo/localization/pose": ["localization", None],
        "/apollo/canbus/chassis": ["chassis", None],
        "/apollo/routing_response": ["routing", None],
        # NOTE(review): 'resquest' looks like a typo for 'request'; kept as-is
        # since it must match the channel name actually present in records —
        # confirm against the producer side before changing.
        "/apollo/routing_resquest": ["routing_request", None],
        "/apollo/perception/obstacles": ["perception", None],
        "/apollo/prediction": ["prediction", None],
        "/apollo/planning": ["planning", None],
        "/apollo/control": ["control", None]
    }
    first_time = None
    record_num = 0
    for channel, message, _type, _timestamp in reader.read_messages():
        t = _timestamp
        msg = message
        record_num += 1
        if record_num % 1000 == 0:
            print('Processing record_num: %d' % record_num)
        if first_time is None:
            first_time = t
        if channel not in topic_name_map:
            continue
        # Timestamps are in nanoseconds since epoch.
        dt1 = datetime.utcfromtimestamp(t / 1000000000)
        dt2 = datetime.utcfromtimestamp(first_time / 1000000000)
        relative_time = (dt1 - dt2).seconds - g_args.start_time
        # BUG FIX: this was a Python 2 print statement
        # (`print "relative_time", relative_time`) — a SyntaxError on
        # Python 3 and inconsistent with the print() calls above.
        print("relative_time", relative_time)
        if ((g_args.time_duration > 0) and
                (relative_time < 0 or relative_time > g_args.time_duration)):
            continue
        if channel == '/apollo/planning':
            seq += 1
            topic_name_map[channel][1] = msg
            print('Generating seq: %d' % seq)
            for topic, name_pb in topic_name_map.items():
                if name_pb[1] is None:
                    continue
                file_path = os.path.join(
                    out_dir, str(seq) + "_" + name_pb[0] + ".pb.txt")
                write_to_file(file_path, name_pb[1])
        topic_name_map[channel][1] = msg
def dump_bag(in_dir, out_file):
    """
    out_bag = in_bag

    Dump every kRawDataTopic message from all records under in_dir into
    out_file as text.
    """
    print('Begin')
    gnss = gnss_pb2.RawData()
    global g_args
    bag_files = glob.glob(in_dir + "/*.record.*")
    with open(out_file, 'w') as fp:
        for bag_file in sorted(bag_files):
            print('Processing bag_file: %s' % bag_file)
            reader = RecordReader(bag_file)
            for msg in reader.read_messages():
                if msg.topic == kRawDataTopic:
                    gnss.ParseFromString(msg.message)
                    # BUG FIX: was `f.write(...)`, but the open file handle is
                    # bound to `fp` — `f` is undefined and raised NameError.
                    fp.write(str(gnss))
def process_record(cls, input_record, output_record):
    """Filter input_record, keeping only SamplePNC.TOPICS channels."""
    print("filtering: {} -> {}".format(input_record, output_record))
    output_dir = os.path.dirname(output_record)
    if output_dir != "" and not os.path.exists(output_dir):
        os.makedirs(output_dir)
    freader = RecordReader(input_record)
    fwriter = RecordWriter()
    if not fwriter.open(output_record):
        print('writer open failed!')
        return
    print('----- Begin to process record -----')
    # Channels already registered on the writer.
    registered_channels = set()
    for channelname, msg, datatype, timestamp in freader.read_messages():
        if channelname in SamplePNC.TOPICS:
            # IMPROVEMENT: register each channel exactly once, before its
            # first message; the original re-wrote the channel descriptor for
            # every single message.
            if channelname not in registered_channels:
                desc = freader.get_protodesc(channelname)
                fwriter.write_channel(channelname, datatype, desc)
                registered_channels.add(channelname)
            fwriter.write_message(channelname, msg, timestamp)
    print('----- Finish processing record -----')
def calculate(self, bag_file):
    """Append all drive events found in bag_file to /apollo/test.txt."""
    try:
        drive_event = drive_event_pb2.DriveEvent()
        reader = RecordReader(bag_file)
    except Exception:
        print('Cannot open bag file %s' % bag_file)
    else:
        # NOTE(review): output path is hard-coded; confirm this is intended.
        with open('/apollo/test.txt', 'a') as fp:
            for msg in reader.read_messages():
                if msg.topic == kEventTopic:
                    drive_event.ParseFromString(msg.message)
                    msg_time = time.localtime(
                        drive_event.header.timestamp_sec)
                    fp.write(time.strftime("%Y-%m-%d %H:%M:%S", msg_time))
                    fp.write(str(drive_event.type) + ':')
                    # BUG FIX: `.encode('utf-8')` produced bytes, so
                    # `bytes + '\n'` raises TypeError on Python 3, and the
                    # file is opened in text mode anyway — write str directly.
                    fp.write(drive_event.event + '\n')
def tf_stats(in_bag):
    """Count /tf transforms, keyed by 'frame_id=>child_frame_id'."""
    reader = RecordReader(in_bag)
    global g_args
    stats = {}
    for channel, message, _type, _timestamp in reader.read_messages():
        if channel != '/tf':
            continue
        tf_pb = transform_pb2.TransformStampeds()
        tf_pb.ParseFromString(message)
        for transform in tf_pb.transforms:
            key = transform.header.frame_id + "=>" + transform.child_frame_id
            stats[key] = stats.get(key, 0) + 1
    print('tf stats: {}'.format(stats))
def read(self, topics):
    """Yield {name: parsed_proto} dicts for requested topics in the record."""
    # topic -> (output key, proto class) dispatch table.
    parsers = {
        "/apollo/canbus/chassis": ("chassis", chassis_pb2.Chassis),
        "/apollo/localization/pose":
            ("pose", localization_pb2.LocalizationEstimate),
        "/apollo/planning": ("planning", planning_pb2.ADCTrajectory),
    }
    reader = RecordReader(self.record_file)
    for msg in reader.read_messages():
        if msg.topic not in topics:
            continue
        entry = parsers.get(msg.topic)
        if entry is None:
            continue
        key, proto_cls = entry
        proto = proto_cls()
        proto.ParseFromString(msg.message)
        yield {key: proto}
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import sys
from datetime import datetime

from cyber_py3.record import RecordReader
from modules.localization.proto import localization_pb2

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print(
            "usage: python record_extractor.py record_file1 record_file2 ...")
        # BUG FIX: without exiting here, frecords would be empty and
        # frecords[0] below would raise IndexError.
        sys.exit(1)
    frecords = sys.argv[1:]
    # Write "x,y" lines of the ego pose path, named after the first record.
    with open("path_" + frecords[0].split('/')[-1] + ".txt", 'w') as f:
        for frecord in frecords:
            print("processing " + frecord)
            reader = RecordReader(frecord)
            for msg in reader.read_messages():
                if msg.topic == "/apollo/localization/pose":
                    localization = localization_pb2.LocalizationEstimate()
                    localization.ParseFromString(msg.message)
                    x = localization.pose.position.x
                    y = localization.pose.position.y
                    f.write(str(x) + "," + str(y) + "\n")
def extract_data(record_files, output_path, channels, start_timestamp,
                 end_timestamp, extraction_rates):
    """
    Extract the desired channel messages if channel_list is specified.
    Otherwise extract all sensor calibration messages according to
    extraction rate, 10% by default.
    """
    # All records are assumed to share identical sensor channels.
    sensor_channels = get_sensor_channel_list(record_files[0])
    if (len(channels) > 0
            and not validate_channel_list(channels, sensor_channels)):
        print('The input channel list is invalid.')
        return False
    # Extract every sensor channel when no channel list was given.
    print(sensor_channels)
    if len(channels) == 0:
        channels = sensor_channels

    # Per-channel bookkeeping for logging statistics.
    process_channel_success_num = len(channels)
    process_channel_failure_num = 0
    process_msg_failure_num = 0
    channel_success = {}
    channel_occur_time = {}
    channel_output_path = {}
    channel_parsers = {}
    for channel in channels:
        channel_success[channel] = True
        channel_occur_time[channel] = -1
        topic_name = channel.replace('/', '_')
        channel_output_path[channel] = os.path.join(output_path, topic_name)
        process_dir(channel_output_path[channel], operation='create')
        channel_parsers[channel] = build_parser(
            channel, channel_output_path[channel])

    for record_file in record_files:
        record_reader = RecordReader(record_file)
        for msg in record_reader.read_messages():
            if msg.topic not in channels:
                continue
            # Only care about messages in certain time intervals.
            msg_timestamp_sec = msg.timestamp / 1e9
            if not in_range(msg_timestamp_sec, start_timestamp,
                            end_timestamp):
                continue
            channel_occur_time[msg.topic] += 1
            # Extract the topic according to extraction_rate.
            if channel_occur_time[msg.topic] % \
                    extraction_rates[msg.topic] != 0:
                continue
            ret = channel_parsers[msg.topic].parse_sensor_message(msg)
            # Track parsing statistics; flag a channel failed only once.
            if not ret:
                process_msg_failure_num += 1
                if channel_success[msg.topic]:
                    channel_success[msg.topic] = False
                    process_channel_failure_num += 1
                    process_channel_success_num -= 1
                    print(
                        'Failed to extract data from channel: %s in record %s'
                        % (msg.topic, record_file))

    # Traverse the parsers: if any channel topic is stored as a list,
    # save the list as a summary file (mostly a binary file).
    for channel, parser in channel_parsers.items():
        save_combined_messages_info(parser, channel)

    # Logging statistics about channel extraction.
    print('Extracted sensor channel number [%d] from record files: %s'
          % (len(channels), ' '.join(record_files)))
    print('Successfully processed [%d] channels, and [%d] was failed.'
          % (process_channel_success_num, process_channel_failure_num))
    if process_msg_failure_num > 0:
        print('Channel extraction failure number is [%d].'
              % process_msg_failure_num)
    return True