def test_basic_convert(self):
    # This test is just to test that the rosbag2_py wrapper parses input.
    # It is not a comprehensive test of bag_rewrite.
    bag_a_path = RESOURCES_PATH / 'convert_a'
    bag_b_path = RESOURCES_PATH / 'convert_b'
    output_uri_1 = self.tmp_path / 'converted_1'
    output_uri_2 = self.tmp_path / 'converted_2'
    input_options = [
        StorageOptions(uri=str(bag_a_path)),
        StorageOptions(uri=str(bag_b_path), storage_id='sqlite3'),
    ]
    output_options_path = self.tmp_path / 'simple_convert.yml'
    output_options_content = f"""
output_bags:
- uri: {output_uri_1}
  storage_id: sqlite3
  topics: [a_empty]
- uri: {output_uri_2}
  storage_id: sqlite3
  exclude: ".*empty.*"
"""
    with output_options_path.open('w') as f:
        f.write(output_options_content)
    bag_rewrite(input_options, str(output_options_path))

    self.assertTrue(output_uri_1.exists() and output_uri_1.is_dir())
    self.assertTrue((output_uri_1 / 'metadata.yaml').exists())
    self.assertTrue(output_uri_2.exists() and output_uri_2.is_dir())
    self.assertTrue((output_uri_2 / 'metadata.yaml').exists())
def main(self, *, args):
    input_options = []
    for input_bag in args.input:
        if len(input_bag) > 2:
            raise argparse.ArgumentTypeError(
                f'--input expects 1 or 2 arguments, {len(input_bag)} provided')
        storage_options = StorageOptions(uri=input_bag[0])
        if len(input_bag) > 1:
            storage_options.storage_id = input_bag[1]
        input_options.append(storage_options)
    bag_rewrite(input_options, args.output_options)
def open_reader(bag_file_path: str):
    reader = SequentialReader()
    storage_options = StorageOptions(uri=bag_file_path, storage_id='sqlite3')
    converter_options = ConverterOptions(
        input_serialization_format='cdr',
        output_serialization_format='cdr')
    reader.open(storage_options, converter_options)
    return reader
def main(self, *, args):
    if not os.path.isdir(args.bag_directory):
        return print_error('Must specify a bag directory')

    storage_options = StorageOptions(
        uri=args.bag_directory,
        storage_id=args.storage_id,
    )
    reindexer = Reindexer()
    reindexer.reindex(storage_options)
def count_messages(bag_path):
    """Count messages in a bag."""
    from rosbag2_py import StorageOptions, ConverterOptions, SequentialReader

    storage_options = StorageOptions(uri=bag_path, storage_id='sqlite3')
    converter_options = ConverterOptions(
        input_serialization_format='cdr',
        output_serialization_format='cdr')
    reader = SequentialReader()
    reader.open(storage_options, converter_options)
    count = 0
    while reader.has_next():
        reader.read_next()
        count += 1
    return count
def test_data_frame_range():
    reader = SequentialReader()
    storage_options = StorageOptions(uri='test/range.bag', storage_id='sqlite3')
    converter_options = ConverterOptions(
        input_serialization_format='cdr',
        output_serialization_format='cdr')
    reader.open(storage_options, converter_options)
    dfs = read_data_frames(BagView(reader), {'/range': ['range']})
    assert '/range' in dfs
    df = dfs['/range']
    stamp0 = pd.Timestamp(90, unit='ns')
    stamp1 = pd.Timestamp(190, unit='ns')
    assert df['header.stamp'][0] == stamp0
    assert df['header.stamp'][1] == stamp1
    assert df['range'][0] == 10.0
    assert df['range'][1] == 20.0
def main(self, *, args):  # noqa: D102
    qos_profile_overrides = {}  # Specify a valid default
    if args.qos_profile_overrides_path:
        qos_profile_dict = yaml.safe_load(args.qos_profile_overrides_path)
        try:
            qos_profile_overrides = convert_yaml_to_qos_profile(
                qos_profile_dict)
        except (InvalidQoSProfileException, ValueError) as e:
            return print_error(str(e))

    storage_config_file = ''
    if args.storage_config_file:
        storage_config_file = args.storage_config_file.name

    topic_remapping = ['--ros-args']
    for remap_rule in args.remap:
        topic_remapping.append('--remap')
        topic_remapping.append(remap_rule)

    storage_options = StorageOptions(
        uri=args.bag_file,
        storage_id=args.storage,
        storage_config_uri=storage_config_file,
    )
    play_options = PlayOptions()
    play_options.read_ahead_queue_size = args.read_ahead_queue_size
    play_options.node_prefix = NODE_NAME_PREFIX
    play_options.rate = args.rate
    play_options.topics_to_filter = args.topics
    play_options.topic_qos_profile_overrides = qos_profile_overrides
    play_options.loop = args.loop
    play_options.topic_remapping_options = topic_remapping
    play_options.clock_publish_frequency = args.clock
    play_options.delay = args.delay
    play_options.playback_duration = args.playback_duration
    play_options.playback_until_timestamp = self.get_playback_until_from_arg_group(
        args.playback_until_sec, args.playback_until_nsec)
    play_options.disable_keyboard_controls = args.disable_keyboard_controls
    play_options.start_paused = args.start_paused
    play_options.start_offset = args.start_offset
    play_options.wait_acked_timeout = args.wait_for_all_acked
    play_options.disable_loan_message = args.disable_loan_message

    player = Player()
    player.play(storage_options, play_options)
def read_all_messages_of_topic(bag_path, topic, type):
    """Read all messages of a given topic and type from a rosbag into a list."""
    from rosbag2_py import StorageOptions, ConverterOptions, SequentialReader
    from rclpy.serialization import deserialize_message

    storage_options = StorageOptions(uri=bag_path, storage_id='sqlite3')
    converter_options = ConverterOptions(
        input_serialization_format='cdr',
        output_serialization_format='cdr')
    reader = SequentialReader()
    reader.open(storage_options, converter_options)
    result = []
    while reader.has_next():
        (tpc, data, _) = reader.read_next()
        if tpc == topic:
            result.append(deserialize_message(data, type))
    return result
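# A minimal usage sketch for read_all_messages_of_topic(), assuming a bag that
# contains sensor_msgs/msg/Range messages on '/range' (the bag path and topic
# here are hypothetical; substitute your own).
def example_print_ranges():
    from sensor_msgs.msg import Range

    msgs = read_all_messages_of_topic('test/range.bag', '/range', Range)
    for msg in msgs:
        print(msg.header.stamp.nanosec, msg.range)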
def create_range_bag(path):
    writer = SequentialWriter()
    storage_options = StorageOptions(uri=path, storage_id='sqlite3')
    converter_options = ConverterOptions(
        input_serialization_format='cdr',
        output_serialization_format='cdr')
    writer.open(storage_options, converter_options)

    topic = TopicMetadata('/range', 'sensor_msgs/msg/Range', 'cdr')
    writer.create_topic(topic)

    msg = Range()
    msg.header.stamp.sec = 0
    msg.header.stamp.nanosec = 90
    msg.range = 10.0
    writer.write('/range', serialize_message(msg), 100)
    msg.header.stamp.sec = 0
    msg.header.stamp.nanosec = 190
    msg.range = 20.0
    writer.write('/range', serialize_message(msg), 200)
def main(self, *, args):  # noqa: D102
    qos_profile_overrides = {}  # Specify a valid default
    if args.qos_profile_overrides_path:
        qos_profile_dict = yaml.safe_load(args.qos_profile_overrides_path)
        try:
            qos_profile_overrides = convert_yaml_to_qos_profile(
                qos_profile_dict)
        except (InvalidQoSProfileException, ValueError) as e:
            return print_error(str(e))

    storage_config_file = ''
    if args.storage_config_file:
        storage_config_file = args.storage_config_file.name

    topic_remapping = ['--ros-args']
    for remap_rule in args.remap:
        topic_remapping.append('--remap')
        topic_remapping.append(remap_rule)

    storage_options = StorageOptions(
        uri=args.bag_file,
        storage_id=args.storage,
        storage_config_uri=storage_config_file,
    )
    play_options = PlayOptions()
    play_options.read_ahead_queue_size = args.read_ahead_queue_size
    play_options.node_prefix = NODE_NAME_PREFIX
    play_options.rate = 1.0
    play_options.topics_to_filter = args.topics
    play_options.topic_qos_profile_overrides = qos_profile_overrides
    play_options.loop = False
    play_options.topic_remapping_options = topic_remapping
    play_options.clock_publish_frequency = 0
    play_options.delay = 0.0
    play_options.disable_keyboard_controls = False  # Give the user control
    play_options.start_paused = True  # Important for allowing the burst
    play_options.start_offset = args.start_offset
    play_options.wait_acked_timeout = -1

    player = Player()
    player.burst(storage_options, play_options, args.num_messages)
def test_bag_view():
    reader = SequentialReader()
    storage_options = StorageOptions(uri='test/range.bag', storage_id='sqlite3')
    converter_options = ConverterOptions(
        input_serialization_format='cdr',
        output_serialization_format='cdr')
    reader.open(storage_options, converter_options)
    items = []
    for item in BagView(reader):
        items.append(item)
    assert len(items) == 2
    msgs = [msg for (_, msg, _) in items]
    assert isinstance(msgs[0], Range)
    assert isinstance(msgs[1], Range)
    assert msgs[0].header.stamp.sec == 0
    assert msgs[0].header.stamp.nanosec == 90
    assert msgs[1].header.stamp.sec == 0
    assert msgs[1].header.stamp.nanosec == 190
    assert msgs[0].range == 10.0
    assert msgs[1].range == 20.0
    assert items[0][0] == '/range'
    assert items[1][0] == '/range'
def test_data_frame_multi_topic():
    reader = SequentialReader()
    storage_options = StorageOptions(uri='test/multi_topic.bag', storage_id='sqlite3')
    converter_options = ConverterOptions(
        input_serialization_format='cdr',
        output_serialization_format='cdr')
    reader.open(storage_options, converter_options)
    fields = {'/range': ['range'], '/diagnostics': ['key', 'value']}
    dfs = read_data_frames(BagView(reader), fields)
    assert '/range' in dfs
    assert '/diagnostics' in dfs
    df = dfs['/range']
    stamp0 = pd.Timestamp(90, unit='ns')
    assert df['header.stamp'][0] == stamp0
    assert df['range'][0] == 10.0
    df = dfs['/diagnostics']
    assert df['key'][0] == 'cpu'
    assert df['value'][0] == 'warn'
def main(self, *, args):  # noqa: D102
    qos_profile_overrides = {}  # Specify a valid default
    if args.qos_profile_overrides_path:
        qos_profile_dict = yaml.safe_load(args.qos_profile_overrides_path)
        try:
            qos_profile_overrides = convert_yaml_to_qos_profile(
                qos_profile_dict)
        except (InvalidQoSProfileException, ValueError) as e:
            return print_error(str(e))

    storage_config_file = ''
    if args.storage_config_file:
        storage_config_file = args.storage_config_file.name

    topic_remapping = ['--ros-args']
    for remap_rule in args.remap:
        topic_remapping.append('--remap')
        topic_remapping.append(remap_rule)

    storage_options = StorageOptions(
        uri=args.bag_file,
        storage_id=args.storage,
        storage_config_uri=storage_config_file,
    )
    play_options = PlayOptions()
    play_options.read_ahead_queue_size = args.read_ahead_queue_size
    play_options.node_prefix = NODE_NAME_PREFIX
    play_options.rate = args.rate
    play_options.topics_to_filter = args.topics
    play_options.topic_qos_profile_overrides = qos_profile_overrides
    play_options.loop = args.loop
    play_options.topic_remapping_options = topic_remapping
    play_options.clock_publish_frequency = args.clock

    player = Player()
    player.play(storage_options, play_options)
def main(self, *, args):  # noqa: D102
    if not os.path.exists(args.bag_file):
        return print_error("bag file '{}' does not exist!".format(args.bag_file))

    if not args.topic:
        args.topic = []

    reader = SequentialReader()
    in_storage_options = StorageOptions(uri=args.bag_file, storage_id=args.storage)
    in_converter_options = ConverterOptions(
        input_serialization_format=args.serialization_format,
        output_serialization_format=args.serialization_format)
    reader.open(in_storage_options, in_converter_options)

    info = Info()
    metadata = info.read_metadata(args.bag_file, args.storage)
    message_counts = {}
    for entry in metadata.topics_with_message_count:
        message_counts[entry.topic_metadata.name] = entry.message_count
    bag_duration_s = metadata.duration.total_seconds()

    type_name_to_type_map = {}
    topic_to_type_map = {}
    summaries = {}
    for topic_metadata in reader.get_all_topics_and_types():
        if args.topic and topic_metadata.name not in args.topic:
            continue
        if topic_metadata.type not in type_name_to_type_map:
            try:
                type_name_to_type_map[topic_metadata.type] = get_message(
                    topic_metadata.type)
            except (AttributeError, ModuleNotFoundError, ValueError):
                raise RuntimeError(
                    f"Cannot load message type '{topic_metadata.type}'")
        topic_to_type_map[topic_metadata.name] = type_name_to_type_map[
            topic_metadata.type]
        summaries[topic_metadata.name] = {
            'frame_ids': set(),
            'write_delays_ns': [],
            'custom': default_summary_output(topic_metadata.type),
        }

    reader.set_filter(StorageFilter(topics=args.topic))

    progress = ProgressTracker()
    if args.progress:
        progress.add_estimated_work(metadata, 1.0)

    while reader.has_next():
        (topic, data, t) = reader.read_next()
        msg_type = topic_to_type_map[topic]
        msg = deserialize_message(data, msg_type)
        for custom in summaries[topic]['custom']:
            custom.update(msg)
        if hasattr(msg, 'header'):
            summaries[topic]['frame_ids'].add(msg.header.frame_id)
            delay = t - Time.from_msg(msg.header.stamp).nanoseconds
            summaries[topic]['write_delays_ns'].append(delay)
        if args.progress:
            progress.print_update(progress.update(topic), every=100)

    if args.progress:
        progress.print_finish()

    for topic, summary in summaries.items():
        print(topic)
        if not message_counts[topic]:
            print('\tNo messages')
            continue
        frame_id_str = ', '.join(summary['frame_ids'])
        print(f'\tframe_id: {frame_id_str}')
        freq = message_counts[topic] / bag_duration_s
        print(f'\tfrequency: {freq:.2f} hz')
        if summary['write_delays_ns']:  # only messages with header.stamp have delays
            write_delays = np.array(summary['write_delays_ns'])
            delay_ms_mean = np.mean(write_delays) / 1000 / 1000
            delay_ms_stddev = np.std(write_delays) / 1000 / 1000
            print(f'\twrite delay: {delay_ms_mean:.2f}ms (stddev {delay_ms_stddev:.2f})')
        for custom in summaries[topic]['custom']:
            custom.write()
def main(self, *, args):  # noqa: D102
    # both all and topics cannot be true
    if (args.all and (args.topics or args.regex)) or (args.topics and args.regex):
        return print_error('Must specify only one option out of topics, --regex or --all')
    # one out of "all", "topics" and "regex" must be true
    if not (args.all or (args.topics and len(args.topics) > 0) or args.regex):
        return print_error('Invalid choice: Must specify topic(s), --regex or --all')
    if args.topics and args.exclude:
        return print_error('--exclude argument cannot be used when specifying a list '
                           'of topics explicitly')
    if args.exclude and not (args.regex or args.all):
        return print_error('--exclude argument requires either --all or --regex')

    uri = args.output or datetime.datetime.now().strftime('rosbag2_%Y_%m_%d-%H_%M_%S')

    if os.path.isdir(uri):
        return print_error("Output folder '{}' already exists.".format(uri))

    if args.compression_format and args.compression_mode == 'none':
        return print_error('Invalid choice: Cannot specify compression format '
                           'without a compression mode.')
    if args.compression_queue_size < 1:
        return print_error('Compression queue size must be at least 1.')

    args.compression_mode = args.compression_mode.upper()

    qos_profile_overrides = {}  # Specify a valid default
    if args.qos_profile_overrides_path:
        qos_profile_dict = yaml.safe_load(args.qos_profile_overrides_path)
        try:
            qos_profile_overrides = convert_yaml_to_qos_profile(
                qos_profile_dict)
        except (InvalidQoSProfileException, ValueError) as e:
            return print_error(str(e))

    storage_config_file = ''
    if args.storage_config_file:
        storage_config_file = args.storage_config_file.name

    storage_options = StorageOptions(
        uri=uri,
        storage_id=args.storage,
        max_bagfile_size=args.max_bag_size,
        max_bagfile_duration=args.max_bag_duration,
        max_cache_size=args.max_cache_size,
        storage_preset_profile=args.storage_preset_profile,
        storage_config_uri=storage_config_file,
    )
    record_options = RecordOptions()
    record_options.all = args.all
    record_options.is_discovery_disabled = args.no_discovery
    record_options.topics = args.topics
    record_options.rmw_serialization_format = args.serialization_format
    record_options.topic_polling_interval = datetime.timedelta(
        milliseconds=args.polling_interval)
    record_options.regex = args.regex
    record_options.exclude = args.exclude
    record_options.node_prefix = NODE_NAME_PREFIX
    record_options.compression_mode = args.compression_mode
    record_options.compression_format = args.compression_format
    record_options.compression_queue_size = args.compression_queue_size
    record_options.compression_threads = args.compression_threads
    record_options.topic_qos_profile_overrides = qos_profile_overrides
    record_options.include_hidden_topics = args.include_hidden_topics

    recorder = Recorder()

    try:
        recorder.record(storage_options, record_options)
    except KeyboardInterrupt:
        pass

    if os.path.isdir(uri) and not os.listdir(uri):
        os.rmdir(uri)
def get_rosbag_options(path, serialization_format='cdr'):
    storage_options = StorageOptions(uri=path, storage_id='sqlite3')
    converter_options = ConverterOptions(
        input_serialization_format=serialization_format,
        output_serialization_format=serialization_format)
    return storage_options, converter_options
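# A minimal sketch of how get_rosbag_options() is typically consumed, assuming
# a rosbag2_py SequentialReader as used elsewhere in this section (the bag path
# is hypothetical; replace it with a real bag directory).
def example_open_with_options(bag_path='test/range.bag'):
    from rosbag2_py import SequentialReader

    storage_options, converter_options = get_rosbag_options(bag_path)
    reader = SequentialReader()
    reader.open(storage_options, converter_options)
    return reader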
def main(self, *, args):  # noqa: D102
    for bag_file in args.bag_files:
        if not os.path.exists(bag_file):
            return print_error("bag file '{}' does not exist!".format(bag_file))

    uri = args.output or datetime.now().strftime('rosbag2_%Y_%m_%d-%H_%M_%S')

    if os.path.isdir(uri):
        return print_error("Output folder '{}' already exists.".format(uri))

    info = Info()
    metadatas = [info.read_metadata(f, args.in_storage or '') for f in args.bag_files]
    try:
        self._filter.set_args(metadatas, args)
    except argparse.ArgumentError as e:
        return print_error(str(e))
    storage_filter = self._filter.get_storage_filter()

    progress = ProgressTracker()
    readers = []
    for bag_file, metadata in zip(args.bag_files, metadatas):
        reader = SequentialReader()
        in_storage_options, in_converter_options = get_rosbag_options(bag_file)
        if args.in_storage:
            in_storage_options.storage_id = args.in_storage
        reader.open(in_storage_options, in_converter_options)
        if storage_filter:
            reader.set_filter(storage_filter)
        if args.progress:
            progress.add_estimated_work(
                metadata, self._filter.output_size_factor(metadata))
        readers.append(reader)

    writer = SequentialWriter()
    out_storage_options = StorageOptions(
        uri=uri, storage_id=args.out_storage, max_bagfile_size=args.max_bag_size)
    out_converter_options = ConverterOptions(
        input_serialization_format=args.serialization_format,
        output_serialization_format=args.serialization_format)
    writer.open(out_storage_options, out_converter_options)

    for reader in readers:
        for topic_metadata in reader.get_all_topics_and_types():
            result = self._filter.filter_topic(topic_metadata)
            if result:
                if not isinstance(result, list):
                    result = [result]
                for item in result:
                    writer.create_topic(item)

    for reader in readers:
        while reader.has_next():
            msg = reader.read_next()
            result = self._filter.filter_msg(msg)
            if args.progress:
                prog_perc = progress.update(msg[0])
                progress.print_update(prog_perc)
            if result == FilterResult.STOP_CURRENT_BAG:
                break
            elif result == FilterResult.DROP_MESSAGE:
                continue
            elif isinstance(result, list):
                for item in result:
                    writer.write(item[0], item[1], item[2])
            elif isinstance(result, tuple):
                writer.write(result[0], result[1], result[2])
            else:
                return print_error(
                    "Filter returned invalid result: '{}'.".format(result))

    if args.progress:
        progress.print_finish()