def test_sequential_reader():
    """Read the 'talker' resource bag back, first filtered, then unfiltered."""
    bag_path = str(Path(__file__).parent.parent / 'resources' / 'talker')
    storage_options, converter_options = get_rosbag_options(bag_path)

    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    # Map topic name -> type string for quick lookup while replaying.
    type_map = {
        meta.name: meta.type for meta in reader.get_all_topics_and_types()
    }

    # Restrict playback to the string topic only.
    reader.set_filter(rosbag2_py.StorageFilter(topics=['/topic']))

    msg_counter = 0
    while reader.has_next():
        topic, data, t = reader.read_next()
        msg = deserialize_message(data, get_message(type_map[topic]))
        assert isinstance(msg, String)
        assert msg.data == f'Hello, world! {msg_counter}'
        msg_counter += 1

    # Clear the filter and replay the entire bag with a fresh reader.
    reader.reset_filter()
    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    msg_counter = 0
    while reader.has_next():
        topic, data, t = reader.read_next()
        msg = deserialize_message(data, get_message(type_map[topic]))
        assert isinstance(msg, (Log, String))
        if isinstance(msg, String):
            assert msg.data == f'Hello, world! {msg_counter}'
            msg_counter += 1
def __init__(self, node: Node, topic_name: str, message_type: str,
             expected_values: Dict[str, str],
             callback: Callable[[bool, Dict[str, str], Dict[str, str]],
                                None] = default_callback):
    """Subscribe to *topic_name* and check messages against expected values.

    :param node: node used to create the subscription and for logging
    :param topic_name: topic to subscribe to
    :param message_type: full message type name, e.g. 'std_msgs/msg/String'
    :param expected_values: mapping of field name to expected value
    :param callback: invoked with (equal, actual_msg, expected_msg)
    """
    self.__callback = callback
    self.__expected_values = expected_values
    self.__message_type = message_type
    self.__node = node
    # Transient-local durability so a message published before this
    # subscription existed (latched) is still delivered.
    latching_qos = QoSProfile(depth=1,
                              durability=QoSDurabilityPolicy.
                              RMW_QOS_POLICY_DURABILITY_TRANSIENT_LOCAL)
    node.get_logger().info("Subscribing to \"{}\" of type \"{}\"".format(
        topic_name, message_type))
    # Fixed typo in the log message: 'recieve' -> 'receive'.
    node.get_logger().info(
        "Expecting to receive message with value \"{}\"".format(
            str(expected_values)))
    self.__sub = node.create_subscription(get_message(message_type),
                                          topic_name,
                                          self.__sub_callback,
                                          qos_profile=latching_qos)
def _get_msg_class(node, topic, include_hidden_topics):
    """
    Get message module based on topic name.

    :param node: node used to query the graph for topic names and types
    :param topic: topic name, ``list`` of ``str``
    :param include_hidden_topics: whether hidden topics are considered
    :return: the message class, or None if the topic is not in the graph
    :raises RuntimeError: on invalid/ambiguous topic name or unloadable type
    """
    topic_names_and_types = get_topic_names_and_types(
        node=node, include_hidden_topics=include_hidden_topics)
    try:
        expanded_name = expand_topic_name(topic, node.get_name(),
                                          node.get_namespace())
    except ValueError as e:
        # Chain the original error so the root cause is kept in the traceback.
        raise RuntimeError(e) from e
    try:
        validate_full_topic_name(expanded_name)
    except rclpy.exceptions.InvalidTopicNameException as e:
        raise RuntimeError(e) from e
    for n, t in topic_names_and_types:
        if n == expanded_name:
            if len(t) > 1:
                raise RuntimeError(
                    "Cannot echo topic '%s', as it contains more than one type: [%s]"
                    % (topic, ', '.join(t)))
            message_type = t[0]
            break
    else:
        # Could not determine the type for the passed topic
        return None
    try:
        return get_message(message_type)
    except (AttributeError, ModuleNotFoundError, ValueError) as e:
        raise RuntimeError("The message type '%s' is invalid" % message_type) from e
def publisher(
    node: Node,
    message_type: MsgType,
    topic_name: str,
    values: dict,
    period: float,
    print_nth: int,
    times: int,
    wait_matching_subscriptions: int,
    qos_profile: QoSProfile,
    keep_alive: float,
) -> Optional[str]:
    """Initialize a node with a single publisher and run its publish loop (maybe only once)."""
    try:
        msg_module = get_message(message_type)
    except (AttributeError, ModuleNotFoundError, ValueError):
        raise RuntimeError('The passed message type is invalid')

    values_dictionary = yaml.safe_load(values)
    if not isinstance(values_dictionary, dict):
        return 'The passed value needs to be a dictionary in YAML format'

    pub = node.create_publisher(msg_module, topic_name, qos_profile)

    # Poll every 100 ms until enough subscriptions match; log once per second.
    ticks = 0
    while pub.get_subscription_count() < wait_matching_subscriptions:
        if not ticks:
            print(
                f'Waiting for at least {wait_matching_subscriptions} matching subscription(s)...'
            )
        ticks = (ticks + 1) % 10
        time.sleep(0.1)

    msg = msg_module()
    try:
        set_message_fields(msg, values_dictionary)
    except Exception as e:
        return 'Failed to populate field: {0}'.format(e)

    print('publisher: beginning loop')
    count = 0

    def timer_callback():
        nonlocal count
        count += 1
        if print_nth and count % print_nth == 0:
            print('publishing #%d: %r\n' % (count, msg))
        pub.publish(msg)

    # Publish immediately once; schedule a timer only if more are wanted.
    timer_callback()
    if times == 1:
        # give some time for the messages to reach the wire before exiting
        time.sleep(keep_alive)
    else:
        timer = node.create_timer(period, timer_callback)
        while times == 0 or count < times:
            rclpy.spin_once(node)
        # give some time for the messages to reach the wire before exiting
        time.sleep(keep_alive)
        node.destroy_timer(timer)
def main(args):
    """Subscribe to the requested topic and echo each message."""
    # Truncation applies unless full-length output was requested.
    truncate_length = args.truncate_length if not args.full_length else None
    if args.csv:
        callback = subscriber_cb_csv(truncate_length, args.no_arr, args.no_str)
    else:
        callback = subscriber_cb(truncate_length, args.no_arr, args.no_str)

    qos_profile = qos_profile_from_short_keys(
        args.qos_profile,
        reliability=args.qos_reliability,
        durability=args.qos_durability,
        depth=args.qos_depth,
        history=args.qos_history)

    with NodeStrategy(args) as node:
        if args.message_type is None:
            message_type = get_msg_class(node, args.topic_name,
                                         include_hidden_topics=True)
        else:
            message_type = get_message(args.message_type)

        if message_type is None:
            raise RuntimeError(
                'Could not determine the type for the passed topic')

        # With --once, a future is completed by the decorated callback
        # after the first message so the subscriber can stop.
        future = None
        if args.once:
            future = Future()
            callback = subscriber_cb_once_decorator(callback, future)

        subscriber(node, args.topic_name, message_type, callback, qos_profile,
                   args.lost_messages, future, args.timeout)
def main(args):
    """Subscribe to the requested topic and echo each message."""
    # Truncation applies unless full-length output was requested.
    truncate_length = args.truncate_length if not args.full_length else None
    if args.csv:
        callback = subscriber_cb_csv(truncate_length, args.no_arr, args.no_str)
    else:
        callback = subscriber_cb(truncate_length, args.no_arr, args.no_str)

    qos_profile = qos_profile_from_short_keys(
        args.qos_profile,
        reliability=args.qos_reliability,
        durability=args.qos_durability,
        depth=args.qos_depth,
        history=args.qos_history)

    with NodeStrategy(args) as node:
        if args.message_type is None:
            message_type = get_msg_class(node, args.topic_name,
                                         include_hidden_topics=True)
        else:
            try:
                message_type = get_message(args.message_type)
            except (AttributeError, ModuleNotFoundError, ValueError):
                raise RuntimeError('The passed message type is invalid')

        if message_type is None:
            raise RuntimeError(
                'Could not determine the type for the passed topic')

        subscriber(node, args.topic_name, message_type, callback, qos_profile,
                   args.lost_messages, args.raw)
def subscriber(node: Node, topic_name: str, message_type: MsgType,
               callback: Callable[[MsgType], Any],
               qos_profile: QoSProfile) -> Optional[str]:
    """Initialize a node with a single subscription and spin.

    :param node: node used to query the graph and hold the subscription
    :param topic_name: topic to subscribe to
    :param message_type: message type name; resolved from the graph if None
    :param callback: per-message callback
    :param qos_profile: QoS profile for the subscription
    :raises RuntimeError: on invalid/ambiguous topic name or unknown type
    """
    if message_type is None:
        # Resolve the type from the graph when not given explicitly.
        topic_names_and_types = get_topic_names_and_types(
            node=node, include_hidden_topics=True)
        try:
            expanded_name = expand_topic_name(topic_name, node.get_name(),
                                              node.get_namespace())
        except ValueError as e:
            # Chain so the original cause is kept in the traceback.
            raise RuntimeError(e) from e
        try:
            validate_full_topic_name(expanded_name)
        except rclpy.exceptions.InvalidTopicNameException as e:
            raise RuntimeError(e) from e
        for n, t in topic_names_and_types:
            if n == expanded_name:
                if len(t) > 1:
                    raise RuntimeError(
                        "Cannot echo topic '%s', as it contains more than one type: [%s]"
                        % (topic_name, ', '.join(t)))
                message_type = t[0]
                break
        else:
            raise RuntimeError(
                'Could not determine the type for the passed topic')

    msg_module = get_message(message_type)

    node.create_subscription(msg_module, topic_name, callback, qos_profile)

    rclpy.spin(node)
def __init__(self, reader: Union[str, SequentialReader], storage_filter=None):
    """Open bag view from reader or file path."""
    self._reader = open_reader(reader) if isinstance(reader, str) else reader
    self._topic_type_map = {}
    self._storage_filter = storage_filter
    if storage_filter is not None:
        self._reader.set_filter(storage_filter)

    # When a filter is active, only resolve classes for its topics.
    chosen_topics = set()
    if self._storage_filter:
        chosen_topics = set(self._storage_filter.topics)

    # Cache one resolved message class per type name.
    type_map = {}
    for topic_metadata in self._reader.get_all_topics_and_types():
        if chosen_topics and topic_metadata.name not in chosen_topics:
            continue
        topic_type = topic_metadata.type
        if topic_type not in type_map:
            try:
                type_map[topic_type] = get_message(topic_type)
            except (AttributeError, ModuleNotFoundError, ValueError):
                raise RuntimeError(
                    f"Cannot load message type '{topic_type}'")
        self._topic_type_map[topic_metadata.name] = type_map[topic_type]
def __init__(self, node_name):
    """Create the node and subscribe to every currently advertised topic."""
    self.node_name = node_name
    super().__init__(self.node_name)
    self.get_logger().info("init node with name: %s" % self.node_name)

    # Timestamp used to build per-run log file names (no zero padding,
    # preserving the original naming scheme).
    now = time.localtime(time.time())
    str_time_for_name_file = (
        f'{now.tm_mday}.{now.tm_mon}.{now.tm_year}'
        f'-{now.tm_hour}:{now.tm_min}:{now.tm_sec}')
    print("Time: " + str_time_for_name_file)
    time.sleep(0.2)  # brief pause before querying the graph

    # Get the list of existing topics and their types.
    self.list_topics = self.get_topic_names_and_types()
    print(self.list_topics)

    # Create one Topic_example per discovered topic.
    # utilities.get_message(name) imports the message class from its
    # string type name.
    topic_objects_list = [
        Topic_example(name, utilities.get_message(types[0]),
                      str_time_for_name_file)
        for name, types in self.list_topics
    ]

    # Fix: previously only the last subscription remained reachable via
    # self.sub (each loop iteration overwrote it) and a dangling no-op
    # `self.sub` expression followed the loop. Keep all subscriptions.
    self.subscription_list = []
    for topic in topic_objects_list:
        self.sub = self.create_subscription(topic.type, topic.topic_name,
                                            topic._topic_cb,
                                            qos_profile_sensor_data)
        self.subscription_list.append(self.sub)
def __init__(self, bag_file=''):
    """Open the bag's sqlite database and index its topics by name."""
    self.conn = sqlite3.connect(bag_file)
    self.cursor = self.conn.cursor()

    # One row per topic: (id, name, type).
    topics_data = self.cursor.execute(
        'SELECT id, name, type FROM topics').fetchall()

    self.topic_type = {}
    self.topic_id = {}
    self.topic_msg_message = {}
    for id_of, name_of, type_of in topics_data:
        self.topic_type[name_of] = type_of
        self.topic_id[name_of] = id_of
        # Message class resolved from its string type name.
        self.topic_msg_message[name_of] = get_message(type_of)
def filter_topic(self, topic_metadata):
    """Capture the message class when the metadata names the target topic."""
    if topic_metadata.name == self._topic:
        topic_type_name = topic_metadata.type
        try:
            self._message_type = get_message(topic_type_name)
        except (AttributeError, ModuleNotFoundError, ValueError):
            raise RuntimeError(
                f"Cannot load message type '{topic_type_name}'")
    # Metadata is passed through unchanged.
    return topic_metadata
def filter_topic(self, topic_metadata):
    """Record the message class for this topic, caching one class per type."""
    type_name = topic_metadata.type
    cached = self._type_map.get(type_name)
    if cached is None:
        try:
            cached = get_message(type_name)
        except (AttributeError, ModuleNotFoundError, ValueError):
            raise RuntimeError(f"Cannot load message type '{type_name}'")
        self._type_map[type_name] = cached
    self._topic_type_map[topic_metadata.name] = cached
    return topic_metadata
def initialize_subscribers(self):
    """Initialize the subscribers.

    Creates (at most once per topic) a subscriber for every entry in the
    test suite's value and received subscriber collections, wiring each
    to ``store_message``.
    """
    # NOTE(review): iteration unpacks (topic, msg_type) pairs, which
    # suggests msg_value_subscribers is a sequence of 2-tuples; the later
    # `topic not in ...` membership test and `[topic] = ...` assignment
    # instead treat it like a mapping — confirm the container's actual
    # type against its definition.
    for topic, msg_type in self.test_suite.msg_value_subscribers:
        if topic not in self.initialized_subscribers:
            # Resolve the message class from its string type name.
            msg_module = get_message(msg_type)
            self.initialized_subscribers[topic] = self.get_subscriber(
                topic, msg_module, self.test_suite.store_message)
        if topic not in self.test_suite.msg_value_subscribers:
            self.test_suite.msg_value_subscribers[topic] = \
                self.initialized_subscribers[topic]
    # Same pattern for subscribers that only track message reception.
    for topic, msg_type in self.test_suite.msg_received_subscribers:
        if topic not in self.initialized_subscribers:
            msg_module = get_message(msg_type)
            self.initialized_subscribers[topic] = self.get_subscriber(
                topic, msg_module, self.test_suite.store_message)
        if topic not in self.test_suite.msg_received_subscribers:
            self.test_suite.msg_received_subscribers[topic] = \
                self.initialized_subscribers[topic]
def __init__(self, node, topic, topic_type, field=None):
    """Initialize the message evaluator."""
    super().__init__(None)
    self.received = 0      # messages seen so far
    self.node = node
    self.topic = topic
    self.topic_type = topic_type
    self.field = field
    self.data = []         # collected values
    # Subscribe with queue depth 10; the message class is resolved from
    # the string type name.
    self.subscriber = self.node.create_subscription(
        get_message(topic_type), topic, self.callback, 10)
def _field_type(msg_type, attr):
    """Get ROS type of message field.

    :param msg_type: message class at which the dotted lookup starts
    :param attr: dotted field path, e.g. 'pose.position.x'
    :return: ROS type name (string) of the final field
    :raises RuntimeError: if an intermediate message type cannot be loaded
    """
    # All but the last component are nested message fields to descend into.
    *parents, last = attr.split('.')
    for parent in parents:
        parent_type_name = msg_type.get_fields_and_field_types()[parent]
        try:
            msg_type = get_message(parent_type_name)
        except (AttributeError, ModuleNotFoundError, ValueError) as e:
            raise RuntimeError(
                f"Cannot load message type '{parent_type_name}'") from e
    return msg_type.get_fields_and_field_types()[last]
def publisher(
    node: Node,
    message_type: MsgType,
    topic_name: str,
    values: dict,
    period: float,
    print_nth: int,
    once: bool,
    qos_profile: QoSProfile,
) -> Optional[str]:
    """Initialize a node with a single publisher and run its publish loop (maybe only once)."""
    message_class = get_message(message_type)
    parsed_values = yaml.safe_load(values)
    if not isinstance(parsed_values, dict):
        return 'The passed value needs to be a dictionary in YAML format'

    # Ask for incompatible-QoS event reporting; fall back to a plain
    # publisher when the RMW does not support that event type.
    event_callbacks = PublisherEventCallbacks(
        incompatible_qos=handle_incompatible_qos_event)
    try:
        pub = node.create_publisher(message_class, topic_name, qos_profile,
                                    event_callbacks=event_callbacks)
    except UnsupportedEventTypeError:
        pub = node.create_publisher(message_class, topic_name, qos_profile)

    msg = message_class()
    try:
        set_message_fields(msg, parsed_values)
    except Exception as e:
        return 'Failed to populate field: {0}'.format(e)

    print('publisher: beginning loop')
    count = 0

    def publish_cb():
        nonlocal count
        count += 1
        if print_nth and count % print_nth == 0:
            print('publishing #%d: %r\n' % (count, msg))
        pub.publish(msg)

    timer = node.create_timer(period, publish_cb)
    if once:
        rclpy.spin_once(node)
        # make sure the message reaches the wire before exiting
        time.sleep(0.1)
    else:
        rclpy.spin(node)
    node.destroy_timer(timer)
def main(self, *, args):
    """Configure echo options from *args*, then subscribe and spin."""
    if args.lost_messages:
        print(
            "WARNING: '--lost-messages' is deprecated; lost messages are reported by default",
            file=sys.stderr)

    self.csv = args.csv

    # Validate field selection: drop empty path components; an entirely
    # empty path is an error.
    self.field = args.field
    if self.field is not None:
        self.field = list(filter(None, self.field.split('.')))
        if not self.field:
            raise RuntimeError(f"Invalid field value '{args.field}'")

    self.truncate_length = args.truncate_length if not args.full_length else None
    self.no_arr = args.no_arr
    self.no_str = args.no_str
    self.flow_style = args.flow_style

    self.filter_fn = _expr_eval(args.filter_expr) if args.filter_expr else None
    # --once: the future is completed after the first message.
    self.future = Future() if args.once else None
    self.include_message_info = args.include_message_info

    with NodeStrategy(args) as node:
        qos_profile = self.choose_qos(node, args)

        if args.message_type is None:
            message_type = get_msg_class(node, args.topic_name,
                                         include_hidden_topics=True)
        else:
            try:
                message_type = get_message(args.message_type)
            except (AttributeError, ModuleNotFoundError, ValueError):
                raise RuntimeError('The passed message type is invalid')

        if message_type is None:
            raise RuntimeError(
                'Could not determine the type for the passed topic')

        self.subscribe_and_spin(node, args.topic_name, message_type,
                                qos_profile, args.no_lost_messages, args.raw)
def test_sequential_writer(tmp_path):
    """Write ten stamped String messages to a new bag, then read them back."""
    bag_path = str(tmp_path / 'tmp_write_test')
    storage_options, converter_options = get_rosbag_options(bag_path)

    writer = rosbag2_py.SequentialWriter()
    writer.open(storage_options, converter_options)

    # create topic
    topic_name = '/chatter'
    create_topic(writer, topic_name, 'std_msgs/msg/String')

    for i in range(10):
        msg = String()
        msg.data = f'Hello, world! {str(i)}'
        writer.write(topic_name, serialize_message(msg), i * 100)

    # Drop the writer to close the bag, then reopen it for reading.
    del writer

    storage_options, converter_options = get_rosbag_options(bag_path)
    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    # Map topic name -> type string for quick lookup.
    type_map = {
        meta.name: meta.type for meta in reader.get_all_topics_and_types()
    }

    expected_index = 0
    while reader.has_next():
        topic, data, t = reader.read_next()
        msg_deserialized = deserialize_message(data,
                                               get_message(type_map[topic]))
        assert isinstance(msg_deserialized, String)
        assert msg_deserialized.data == f'Hello, world! {expected_index}'
        assert t == expected_index * 100
        expected_index += 1
def get_invariant_subscriber(self, invariant):
    """Create a subscriber for monitoring the invariant."""

    def subscribe(data):
        """Process data received on the invariant."""
        value = get_field_or_message(data, invariant.evaluator.field)
        evaluation = invariant.evaluator.evaluate(invariant.condition, value)
        self.invariants_evaluations[invariant].append(evaluation)
        # Any non-nominal evaluation latches the failure flag.
        if not evaluation.nominal:
            self.invariant_failed = True

    msg_module = get_message(invariant.evaluator.topic_type)
    return self.subscription_manager.get_subscriber(
        invariant.evaluator.topic, msg_module, subscribe)
def main(args):
    """Subscribe to the requested topic and echo each message."""
    # Truncation applies unless full-length output was requested.
    truncate_length = args.truncate_length if not args.full_length else None
    if args.csv:
        callback = subscriber_cb_csv(truncate_length, args.no_arr, args.no_str)
    else:
        callback = subscriber_cb(truncate_length, args.no_arr, args.no_str)

    qos_profile = qos_profile_from_short_keys(
        args.qos_profile,
        reliability=args.qos_reliability,
        durability=args.qos_durability)

    with NodeStrategy(args) as node:
        if args.message_type is None:
            message_type = get_msg_class(node, args.topic_name,
                                         include_hidden_topics=True)
        else:
            message_type = get_message(args.message_type)

        subscriber(node, args.topic_name, message_type, callback, qos_profile)
def publisher(
    node: Node,
    message_type: MsgType,
    topic_name: str,
    values: dict,
    period: float,
    print_nth: int,
    times: int,
    qos_profile: QoSProfile,
    keep_alive: float,
) -> Optional[str]:
    """Initialize a node with a single publisher and run its publish loop (maybe only once)."""
    message_class = get_message(message_type)
    parsed_values = yaml.safe_load(values)
    if not isinstance(parsed_values, dict):
        return 'The passed value needs to be a dictionary in YAML format'

    pub = node.create_publisher(message_class, topic_name, qos_profile)

    msg = message_class()
    try:
        set_message_fields(msg, parsed_values)
    except Exception as e:
        return 'Failed to populate field: {0}'.format(e)

    print('publisher: beginning loop')
    count = 0

    def publish_cb():
        nonlocal count
        count += 1
        if print_nth and count % print_nth == 0:
            print('publishing #%d: %r\n' % (count, msg))
        pub.publish(msg)

    timer = node.create_timer(period, publish_cb)
    # times == 0 publishes forever; otherwise stop after `times` messages.
    while times == 0 or count < times:
        rclpy.spin_once(node)
    # give some time for the messages to reach the wire before exiting
    time.sleep(keep_alive)
    node.destroy_timer(timer)
def execute(self):
    """Publish a message to a topic.

    Publishes once if duration and rate are both 1, otherwise publishes
    repeatedly at ``self.rate`` Hz for ``self.duration`` seconds.
    Returns True when finished.
    """
    msg = ('Publishing message to topic {} '
           'for a duration of {} with a rate of {}')
    self.node.get_logger().info(
        msg.format(self.topic, self.duration, self.rate))
    # duration == 1 and rate == 1 is treated as "publish exactly once".
    once = self.duration == 1 and self.rate == 1
    qos_profile = \
        qos_profile_from_short_keys(self.qos_profile_str,
                                    reliability=self.qos_reliability_str,
                                    durability=self.qos_durability_str)
    # Resolve the message class from its string type name.
    msg_module = get_message(self.msg_type)
    pub = self.node.create_publisher(msg_module, self.topic, qos_profile)
    # `msg` is rebound from the log template string to the outgoing message.
    msg = msg_module()
    if self.use_substitution:
        # deep copy so we don't lose the variables for future executions
        parameters_copy = copy.deepcopy(self.parameters)
        substitude_values(parameters_copy, self.parameter_values)
        set_message_fields(msg, parameters_copy)
    else:
        set_message_fields(msg, self.parameters)
    if not once:
        clock = self.node.get_clock()
        sched_time = clock.now()
        end_time = sched_time + Duration(nanoseconds=self.duration * 10**9)
        # Fixed-schedule loop: sleep until the next slot, publish, then
        # advance the slot by 1/rate seconds. The inner check avoids
        # publishing past end_time after a long sleep.
        while clock.now() < end_time:
            while clock.now() < sched_time:
                time.sleep((sched_time - clock.now()).nanoseconds / 10**9)
            if clock.now() > end_time:
                break
            pub.publish(msg)
            sched_time = sched_time + Duration(
                nanoseconds=(1. / self.rate) * 10**9)
    else:
        pub.publish(msg)
    self.node.destroy_publisher(pub)
    return True
def __init__(self, args):
    """Set up the transform node: import helper modules, resolve topics."""
    super().__init__(f'transform_{os.getpid()}')

    # Import each requested helper module; failures are reported but
    # do not abort startup.
    self.modules = {}
    for module in args.modules:
        try:
            mod = importlib.import_module(module)
        except ImportError:
            print(f'Failed to import module: {module}', file=sys.stderr)
        else:
            self.modules[module] = mod

    self.expression = args.expression

    # Prefix relative input topics with this node's namespace.
    input_topic_in_ns = args.input
    if not input_topic_in_ns.startswith('/'):
        input_topic_in_ns = self.get_namespace() + args.input

    input_class = get_msg_class(self, input_topic_in_ns,
                                blocking=args.wait_for_start,
                                include_hidden_topics=True)
    if input_class is None:
        raise RuntimeError(
            f'ERROR: Wrong input topic: {input_topic_in_ns}')

    # Validate field selection: drop empty components; an empty path is
    # an error.
    self.field = args.field
    if self.field is not None:
        self.field = list(filter(None, self.field.split('.')))
        if not self.field:
            raise RuntimeError(f"Invalid field value '{args.field}'")

    self.output_class = get_message(args.output_type)

    qos_profile = self.choose_qos(args, input_topic_in_ns)

    self.pub = self.create_publisher(self.output_class, args.output_topic,
                                     qos_profile)
    self.sub = self.create_subscription(input_class, input_topic_in_ns,
                                        self.callback, qos_profile)
def main(self, *, args):
    """Configure output options from *args*, then subscribe and spin."""
    # Select print function
    self.print_func = _print_csv if args.csv else _print_yaml

    # Validate field selection: drop empty path components; an entirely
    # empty path is an error.
    self.field = args.field
    if self.field is not None:
        self.field = list(filter(None, self.field.split('.')))
        if not self.field:
            raise RuntimeError(f"Invalid field value '{args.field}'")

    self.truncate_length = args.truncate_length if not args.full_length else None
    self.no_arr = args.no_arr
    self.no_str = args.no_str

    qos_profile = qos_profile_from_short_keys(
        args.qos_profile,
        reliability=args.qos_reliability,
        durability=args.qos_durability,
        depth=args.qos_depth,
        history=args.qos_history)

    with NodeStrategy(args) as node:
        if args.message_type is None:
            message_type = get_msg_class(node, args.topic_name,
                                         include_hidden_topics=True)
        else:
            try:
                message_type = get_message(args.message_type)
            except (AttributeError, ModuleNotFoundError, ValueError):
                raise RuntimeError('The passed message type is invalid')

        if message_type is None:
            raise RuntimeError(
                'Could not determine the type for the passed topic')

        self.subscribe_and_spin(node, args.topic_name, message_type,
                                qos_profile, args.lost_messages, args.raw)
def __sub_callback(self, actual_msg):
    """Compare a received message to the expected values, field by field."""
    # Build a reference message populated with the expected values.
    expected_msg = get_message(self.__message_type)()
    try:
        set_message_fields(expected_msg, self.__expected_values)
    except Exception as e:
        self.__node.get_logger().error(
            'Failed to populate field: {0}'.format(e))
        raise e

    # String-compare every declared field; any mismatch marks inequality.
    equal = True
    for field in expected_msg.get_fields_and_field_types():
        expected = str(getattr(expected_msg, field))
        actual = str(getattr(actual_msg, field))
        if expected != actual:
            equal = False

    # Call the callback with the result
    self.__callback(equal, actual_msg, expected_msg)
def main(bag_path='/bag.db3'):
    """Print every message stored in a bag file.

    :param bag_path: path to the bag database; defaults to '/bag.db3',
        the location used by the recommended Dockerfile run path.
        (Generalized from a hard-coded constant; calling main() with no
        arguments behaves exactly as before.)
    """
    storage_options, converter_options = get_rosbag_options(bag_path)

    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    topic_types = reader.get_all_topics_and_types()

    # Create a map for quicker lookup
    type_map = {meta.name: meta.type for meta in topic_types}

    msg_counter = 0
    while reader.has_next():
        print("\n-----------------")
        print("msg id {}".format(msg_counter))
        (topic, data, t) = reader.read_next()
        print(topic)
        msg_type = get_message(type_map[topic])
        msg = deserialize_message(data, msg_type)
        print(msg)
        msg_counter += 1
def __call__(self, prefix, parsed_args, **kwargs):
    """Return a YAML prototype of the selected message type for completion."""
    type_name = getattr(parsed_args, self.topic_type_key)
    message_class = get_message(type_name)
    # A default-constructed instance rendered as YAML shows all fields.
    return [message_to_yaml(message_class())]
def main():
    """Plot trajectory and commanded velocities from recorded bag files.

    Loads every .db3 bag under ../bags/TEB, extracts pose/velocity series
    per topic, then writes trajectory and velocity plots plus summary
    statistics comparing the two runs.
    """
    path = '../bags/TEB'
    dirs = next(walk(path))[1]
    dirs.sort()
    print(dirs)
    # NOTE(review): `iter` shadows the builtin and is only incremented,
    # never read; avg_*_vel lists are never filled.
    iter = 0
    avg_x_vel = []
    avg_y_vel = []
    avg_th_vel = []
    list_x = list()
    list_y = list()
    plot_number = 0
    for dir in dirs:
        mypath = f'{path}/{dir}'
        bag_files = [f for f in listdir(mypath) if f.endswith(".db3")]
        for bag_file in bag_files:
            plot_number += 1
            conn = sqlite3.connect(f'{mypath}/{bag_file}')
            c = conn.cursor()
            c.execute('SELECT * FROM messages')
            message_data = c.fetchall()
            c.execute('SELECT * FROM topics')
            topic_data = c.fetchall()
            # Column 1 is the topic name, column 2 its type string.
            msg_types = list()
            topic_names = list()
            for row in topic_data:
                msg_types.append(row[2])
                topic_names.append(row[1])
            # Timestamp of the first message; times are plotted relative
            # to it, in seconds.
            time0 = message_data[0][2]
            dfs = {k: {} for k in topic_names}
            for df in dfs:
                dfs[df] = {k: [] for k in ['x', 'y', 'th', 'time']}
            for row in message_data:
                # row[1] is the 1-based topic id; deserialize with the
                # class resolved from the type string.
                id = row[1] - 1
                msg_type = get_message(msg_types[id])
                x = deserialize_message(row[3], msg_type)
                pos, th = get_pose(msg_types, topic_names, x, id)
                dfs[topic_names[id]]['x'].append(pos.x)
                dfs[topic_names[id]]['y'].append(pos.y)
                dfs[topic_names[id]]['th'].append(th)
                dfs[topic_names[id]]['time'].append((row[2] - time0) / 1e9)
            # Keep the filtered pose and the commanded velocity series;
            # index 0 / 1 are assumed to be the DWB / TEB runs below —
            # TODO confirm directory ordering guarantees this.
            list_x.append(dfs['/amcl/filtered'])
            list_y.append(dfs['/cmd_vel'])
            iter += 1
    # Trajectory comparison plot with a zoomed inset.
    fig, ax = plt.subplots()
    plt.title(f'Trajektoria')
    plt.xlabel('x [m]')
    plt.ylabel('y [m]')
    ax.plot(list_x[0]['x'], list_x[0]['y'], label='DWB')
    ax.plot(list_x[1]['x'], list_x[1]['y'], label='TEB')
    # fig.legend()
    ax.legend(loc=4)
    plt.grid()
    axins = zoomed_inset_axes(ax, 5, loc=2, borderpad=3.0)
    axins.plot(list_x[0]['x'], list_x[0]['y'], label='DWB')
    axins.plot(list_x[1]['x'], list_x[1]['y'], label='TEB')
    x1, x2, y1, y2 = 36, 40, 29, 33  # specify the limits
    axins.set_xlim(x1, x2)  # apply the x-limits
    axins.set_ylim(y1, y2)  # apply the y-limits
    axins.grid()
    plt.yticks(visible=True)
    plt.xticks(visible=True)
    mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
    # NOTE(review): mypath here is whatever the last loop iteration set.
    plt.savefig(f'{mypath}/trajectory.pdf', bbox_inches='tight')
    # Commanded linear velocity x over time.
    plt.figure(2)
    plt.grid()
    plt.plot(list_y[0]['time'], list_y[0]['x'], label='DWB')
    plt.plot(list_y[1]['time'], list_y[1]['x'], label='TEB')
    plt.xlabel('czas [s]')
    plt.ylabel('v_x [m/s]')
    plt.legend()
    plt.title(f'Zadana prędkość liniowa x')
    plt.savefig(f'{mypath}/x.pdf', bbox_inches='tight')
    # Commanded linear velocity y over time.
    plt.figure(3)
    plt.grid()
    plt.plot(list_y[0]['time'], list_y[0]['y'], label='DWB')
    plt.plot(list_y[1]['time'], list_y[1]['y'], label='TEB')
    plt.xlabel('czas [s]')
    plt.ylabel('v_y [m/s]')
    plt.legend()
    plt.title(f'Zadana prędkość liniowa y')
    plt.savefig(f'{mypath}/y.pdf', bbox_inches='tight')
    # Commanded angular velocity over time.
    plt.figure(4)
    plt.grid()
    plt.plot(list_y[0]['time'], list_y[0]['th'], label='DWB')
    plt.plot(list_y[1]['time'], list_y[1]['th'], label='TEB')
    plt.xlabel('czas [s]')
    plt.ylabel('omega [rad/s]')
    plt.legend()
    plt.title(f'Zadana prędkość kątowa')
    plt.savefig(f'{mypath}/theta.pdf', bbox_inches='tight')
    plt.close('all')
    # Mean absolute commanded velocities and run durations per planner.
    avg_x_TEB = statistics.mean(np.absolute(np.asarray(list_y[1]['x'])))
    avg_y_TEB = statistics.mean(np.absolute(np.asarray(list_y[1]['y'])))
    avg_th_TEB = statistics.mean(np.absolute(np.asarray(list_y[1]['th'])))
    time_TEB = (list_y[1]['time'][-1] - list_y[1]['time'][0])
    avg_x_DWB = statistics.mean(np.absolute(np.asarray(list_y[0]['x'])))
    avg_y_DWB = statistics.mean(np.absolute(np.asarray(list_y[0]['y'])))
    avg_th_DWB = statistics.mean(np.absolute(np.asarray(list_y[0]['th'])))
    time_DWB = (list_y[0]['time'][-1] - list_y[0]['time'][0])
    print(f"time TEB: {time_TEB} time DWB: {time_DWB}")
    print(f"x TEB: {avg_x_TEB} x DWB: {avg_x_DWB}")
    print(f"y TEB: {avg_y_TEB} y DWB: {avg_y_DWB}")
    print(f"th TEB: {avg_th_TEB} th DWB: {avg_th_DWB}")
def main():
    """Plot global-plan trajectories and print path lengths / pose counts.

    Loads every .db3 bag under ../bags/global_plan/path1, reads the first
    nav_msgs/msg/Path message of each, accumulates the planned path length
    and pose coordinates, and saves a trajectory comparison plot.
    """
    path = '../bags/global_plan/path1'
    dirs = next(walk(path))[1]
    dirs.sort()
    print(dirs)
    list_x = list()
    list_y = list()
    distances = []
    n_poses = []
    # Fixes: dropped the unused enumerate() index and renamed the loop
    # variable so it no longer shadows the builtin `dir`.
    for dir_name in dirs:
        mypath = f'{path}/{dir_name}'
        bag_files = [f for f in listdir(mypath) if f.endswith(".db3")]
        print(bag_files)
        for bag_file in bag_files:
            Dist = 0
            y_poses = list()
            x_poses = list()
            conn = sqlite3.connect(f'{mypath}/{bag_file}')
            c = conn.cursor()
            c.execute('SELECT * FROM messages')
            message_data = c.fetchall()
            # Only the first recorded Path message is used.
            message = message_data[0][3]
            msg_type = get_message('nav_msgs/msg/Path')
            x = deserialize_message(message, msg_type)
            # Sum euclidean segment lengths between consecutive poses.
            for i, msg in enumerate(x.poses[:-1]):
                dx = x.poses[i + 1].pose.position.x - msg.pose.position.x
                dy = x.poses[i + 1].pose.position.y - msg.pose.position.y
                Dist = Dist + math.sqrt(dx * dx + dy * dy)
                x_poses.append(msg.pose.position.x)
                y_poses.append(msg.pose.position.y)
            # The loop above stops one short; append the final pose too.
            x_poses.append(x.poses[-1].pose.position.x)
            y_poses.append(x.poses[-1].pose.position.y)
            list_x.append(x_poses)
            list_y.append(y_poses)
            n_poses.append(len(x.poses))
            distances.append(Dist)
    print(n_poses)
    print(distances)
    # Trajectory comparison plot; indices 0/1 follow directory sort order.
    fig, ax = plt.subplots()
    plt.title(f'trajektoria')
    plt.xlabel('x [m]')
    plt.ylabel('y [m]')
    ax.plot(list_x[0], list_y[0], label='Djikstra')
    ax.plot(list_x[1], list_y[1], label='A*')
    # fig.legend()
    ax.legend(loc=0)
    plt.grid()
    plt.savefig(f'{mypath}/trajectory.pdf')
    plt.close('all')
def main():
    """Plot odometry-vs-ground-truth trajectories and error statistics.

    For every .db3 bag under ../bags/test, extracts per-topic pose series,
    saves trajectory/position/orientation/error plots next to the bag, and
    finally prints per-run and mean squared errors as LaTeX table rows.
    """
    path = '../bags/test'
    dirs = next(walk(path))[1]
    dirs.sort()
    print(dirs)
    # Per-bag mean-squared-error accumulators (position and orientation,
    # raw vs filtered odometry).
    avg_MSE_odom = []
    avg_MSE_odom_filtered = []
    avg_MSE_th = []
    avg_MSE_th_filtered = []
    plot_number = 0
    for dir in dirs:
        mypath = f'{path}/{dir}'
        bag_files = [f for f in listdir(mypath) if f.endswith(".db3")]
        # print(bag_files)
        for bag_file in bag_files:
            plot_number += 1
            conn = sqlite3.connect(f'{mypath}/{bag_file}')
            c = conn.cursor()
            c.execute('SELECT * FROM messages')
            message_data = c.fetchall()
            c.execute('SELECT * FROM topics')
            topic_data = c.fetchall()
            # Column 1 is the topic name, column 2 its type string.
            msg_types = list()
            topic_names = list()
            for row in topic_data:
                msg_types.append(row[2])
                topic_names.append(row[1])
            # Timestamp of the first message; plot times are relative
            # to it, in seconds.
            time0 = message_data[0][2]
            dfs = {k: {} for k in topic_names}
            for df in dfs:
                dfs[df] = {k: [] for k in ['x', 'y', 'th', 'time']}
            for row in message_data:
                # row[1] is the 1-based topic id.
                id = row[1] - 1
                msg_type = get_message(msg_types[id])
                x = deserialize_message(row[3], msg_type)
                pos, th = get_pose(msg_types, topic_names, x, id)
                dfs[topic_names[id]]['x'].append(pos.x)
                dfs[topic_names[id]]['y'].append(pos.y)
                # th == 0 is treated as "no reading": carry the previous
                # value forward. NOTE(review): raises IndexError if the
                # very first sample of a topic is 0 — TODO confirm.
                if th == 0:
                    dfs[topic_names[id]]['th'].append(
                        dfs[topic_names[id]]['th'][-1])
                else:
                    dfs[topic_names[id]]['th'].append(th)
                dfs[topic_names[id]]['time'].append((row[2] - time0) / 1e9)
            # Trajectory plot with a zoomed inset around the origin.
            fig, ax = plt.subplots()
            plt.title(f'Trajektoria')
            plt.xlabel('x [m]')
            plt.ylabel('y [m]')
            ax.plot(dfs['/odom/noisy']['x'], dfs['/odom/noisy']['y'],
                    label='odometria')
            ax.plot(dfs['/odom']['x'], dfs['/odom']['y'],
                    label='odometria przefiltrowana')
            ax.plot(dfs['/omnivelma/pose']['x'], dfs['/omnivelma/pose']['y'],
                    label='pozycja w symulatorze')
            # fig.legend()
            ax.legend(loc=3)
            plt.grid()
            axins = zoomed_inset_axes(ax, 15, loc=1)
            axins.plot(dfs['/odom/noisy']['x'], dfs['/odom/noisy']['y'],
                       label='odometria')
            axins.plot(dfs['/odom']['x'], dfs['/odom']['y'],
                       label='odometria przefiltrowana')
            axins.plot(dfs['/omnivelma/pose']['x'],
                       dfs['/omnivelma/pose']['y'],
                       label='pozycja w symulatorze')
            x1, x2, y1, y2 = -0.1, 0.1, -0.1, 0.1  # specify the limits
            axins.set_xlim(x1, x2)  # apply the x-limits
            axins.set_ylim(y1, y2)  # apply the y-limits
            axins.grid()
            plt.yticks(visible=True)
            plt.xticks(visible=True)
            mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
            plt.savefig(f'{mypath}/trajectory.pdf', bbox_inches='tight')
            # Position x over time.
            plt.figure(2)
            plt.grid()
            plt.plot(dfs['/odom/noisy']['time'], dfs['/odom/noisy']['x'],
                     label='odometria')
            plt.plot(dfs['/odom']['time'], dfs['/odom']['x'],
                     label='odometria przefiltrowana')
            plt.plot(dfs['/omnivelma/pose']['time'],
                     dfs['/omnivelma/pose']['x'],
                     label='pozycja w symulatorze')
            plt.xlabel('czas [s]')
            plt.ylabel('x [m]')
            plt.legend()
            plt.title(f'Wartość położenia w osi x')
            plt.savefig(f'{mypath}/x.pdf', bbox_inches='tight')
            # Position y over time.
            plt.figure(3)
            plt.grid()
            plt.plot(dfs['/odom/noisy']['time'], dfs['/odom/noisy']['y'],
                     label='odometria')
            plt.plot(dfs['/odom']['time'], dfs['/odom']['y'],
                     label='odometria przefiltrowana')
            plt.plot(dfs['/omnivelma/pose']['time'],
                     dfs['/omnivelma/pose']['y'],
                     label='pozycja w symulatorze')
            plt.xlabel('czas [s]')
            plt.ylabel('y [m]')
            plt.legend()
            plt.title(f'Wartość położenia w osi y')
            plt.savefig(f'{mypath}/y.pdf', bbox_inches='tight')
            # Orientation over time.
            plt.figure(4)
            plt.grid()
            plt.plot(dfs['/odom/noisy']['time'], dfs['/odom/noisy']['th'],
                     label='odometria')
            plt.plot(dfs['/odom']['time'], dfs['/odom']['th'],
                     label='odometria przefiltrowana')
            plt.plot(dfs['/omnivelma/pose']['time'],
                     dfs['/omnivelma/pose']['th'],
                     label='pozycja w symulatorze')
            plt.xlabel('czas [s]')
            plt.ylabel('kąt [rad]')
            plt.legend()
            plt.title(f'Wartość orientacji')
            plt.savefig(f'{mypath}/theta.pdf', bbox_inches='tight')
            # Absolute position error (euclidean norm of x/y errors).
            plt.figure(5)
            plt.grid()
            odom_x_error = np.asarray(dfs['/error/odom']['x'])
            odom_y_error = np.asarray(dfs['/error/odom']['y'])
            odom_th_error = np.asarray(dfs['/error/odom']['th'])
            odom_error = np.sqrt(
                np.add(np.power(odom_x_error, 2), np.power(odom_y_error, 2)))
            odom_filtered_x_error = np.asarray(
                dfs['/error/odom_filtered']['x'])
            odom_filtered_y_error = np.asarray(
                dfs['/error/odom_filtered']['y'])
            odom_filtered_th_error = np.asarray(
                dfs['/error/odom_filtered']['th'])
            odom_filtered_error = np.sqrt(
                np.add(np.power(odom_filtered_x_error, 2),
                       np.power(odom_filtered_y_error, 2)))
            MSE_odom = np.mean(np.power(odom_error, 2))
            MSE_Th_odom = np.mean(np.power(odom_th_error, 2))
            plt.plot(dfs['/error/odom']['time'], odom_error,
                     label='błąd odometrii')
            MSE_odom_filtered = np.mean(np.power(odom_filtered_error, 2))
            MSE_Th_odom_filtered = np.mean(np.power(odom_filtered_th_error,
                                                    2))
            plt.plot(dfs['/error/odom_filtered']['time'],
                     odom_filtered_error,
                     label='błąd przefiltrowanej odometrii')
            avg_MSE_odom.append(MSE_odom)
            avg_MSE_odom_filtered.append(MSE_odom_filtered)
            avg_MSE_th.append(MSE_Th_odom)
            avg_MSE_th_filtered.append(MSE_Th_odom_filtered)
            # last_x = np.asarray(dfs['/omnivelma/pose']['x'])[-1]
            # print(f'Last x: {odom_x_error[-1]:.4} , last y: {odom_y_error[-1]:.4} las th: {odom_th_error[-1]:.4}, path length: {last_x:.4}')
            plt.xlabel('czas [s]')
            plt.ylabel('błąd [m]')
            plt.legend()
            plt.title(f'Absolutny błąd położenia')
            plt.savefig(f'{mypath}/error_abs.pdf', bbox_inches='tight')
            # Per-axis error subplots.
            plt.figure(6)
            fig, axs = plt.subplots(2, sharex=True, sharey=False)
            axs[0].plot(dfs['/error/odom']['time'], dfs['/error/odom']['x'],
                        label='odometria')
            axs[1].plot(dfs['/error/odom']['time'], dfs['/error/odom']['y'],
                        label='odometria')
            axs[0].plot(dfs['/error/odom_filtered']['time'],
                        dfs['/error/odom_filtered']['x'],
                        label='przefiltrowana odometria')
            axs[1].plot(dfs['/error/odom_filtered']['time'],
                        dfs['/error/odom_filtered']['y'],
                        label='przefiltrowana odometria')
            plt.xlabel('czas [s]')
            for ax in axs.flat:
                ax.set(ylabel='błąd [m]')
                ax.grid()
                ax.legend()
            axs[0].set_title('Błąd x')
            axs[1].set_title('Błąd y')
            plt.savefig(f'{mypath}/error.pdf', bbox_inches='tight')
            # Orientation error, line plot.
            plt.figure(10)
            plt.plot(dfs['/error/odom']['time'], dfs['/error/odom']['th'],
                     label='odometria', linewidth=1)
            plt.plot(dfs['/error/odom_filtered']['time'],
                     dfs['/error/odom_filtered']['th'],
                     label='przefiltrowana odometria', linewidth=1)
            plt.xlabel('time [s]')
            # NOTE(review): this second xlabel overwrites the first one;
            # it was likely intended to be plt.ylabel('błąd [rad]').
            plt.xlabel('błąd [rad]')
            plt.grid()
            plt.legend()
            plt.title(f'Błąd orientacji')
            plt.savefig(f'{mypath}/error_theta.pdf', bbox_inches='tight')
            # Orientation error, scatter plot.
            plt.figure(11)
            plt.scatter(dfs['/error/odom']['time'], dfs['/error/odom']['th'],
                        s=1, label='odometria')
            plt.scatter(dfs['/error/odom_filtered']['time'],
                        dfs['/error/odom_filtered']['th'], s=1,
                        label='przefiltrowana odometria')
            plt.xlabel('time [s]')
            # NOTE(review): same xlabel/ylabel issue as figure 10.
            plt.xlabel('błąd [rad]')
            plt.grid()
            plt.legend()
            plt.title(f'Błąd orientacji')
            plt.savefig(f'{mypath}/error_theta_scatter.pdf',
                        bbox_inches='tight')
            plt.close('all')
    # Emit the aggregate MSE tables as LaTeX rows.
    print("---Polozenie ---")
    for i in range(len(avg_MSE_odom)):
        print(
            f"{i+1} & {avg_MSE_odom[i]:.4} & {avg_MSE_odom_filtered[i]:.4} \\\ \hline"
        )
    print(
        f"Średnia & {statistics.mean(avg_MSE_odom):.4} & {statistics.mean(avg_MSE_odom_filtered):.4} \\\ \hline"
    )
    print("---Orientacja ---")
    for i in range(len(avg_MSE_odom)):
        print(
            f"{i+1} & {avg_MSE_th[i]:.4} & {avg_MSE_th_filtered[i]:.4} \\\ \hline"
        )
    print(
        f"Średnia & {statistics.mean(avg_MSE_th):.4} & {statistics.mean(avg_MSE_th_filtered):.4} \\\ \hline"
    )