def test_sequential_reader():
    """Read the 'talker' test bag: first filtered to '/topic', then unfiltered."""
    bag_path = str(Path(__file__).parent.parent / 'resources' / 'talker')
    storage_options, converter_options = get_rosbag_options(bag_path)

    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    # Map topic name -> type name for quick lookup during deserialization.
    type_map = {
        meta.name: meta.type for meta in reader.get_all_topics_and_types()
    }

    # Restrict reading to the String topic only.
    reader.set_filter(rosbag2_py.StorageFilter(topics=['/topic']))

    expected_index = 0

    while reader.has_next():
        topic, data, _ = reader.read_next()
        msg = deserialize_message(data, get_message(type_map[topic]))

        assert isinstance(msg, String)
        assert msg.data == f'Hello, world! {expected_index}'

        expected_index += 1

    # Clear the filter, then re-open and read every message in the bag.
    reader.reset_filter()

    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    expected_index = 0

    while reader.has_next():
        topic, data, _ = reader.read_next()
        msg = deserialize_message(data, get_message(type_map[topic]))

        assert isinstance(msg, (Log, String))

        if isinstance(msg, String):
            assert msg.data == f'Hello, world! {expected_index}'
            expected_index += 1
def test_set_float32():
    """Test message serialization/deserialization of float32 type."""
    # (De)serialization converts through a C float before producing a
    # PyObject, which can lose precision relative to Python's doubles.
    exact = BasicTypes()
    exact.float32_value = 1.125  # exactly representable as a C float
    roundtripped = deserialize_message(serialize_message(exact), BasicTypes)
    assert exact.float32_value == roundtripped.float32_value

    inexact = BasicTypes()
    inexact.float32_value = 3.14  # NOT exactly representable as a C float
    roundtripped = deserialize_message(serialize_message(inexact), BasicTypes)
    assert inexact.float32_value == round(roundtripped.float32_value, 2)
# Beispiel #3
# 0
def get_position_ik(request: GetPositionIK.Request, approximate=False):
    """Solve inverse kinematics for *request* via the MoveIt bindings.

    Returns the deserialized GetPositionIK.Response.
    """
    global ik_fk_state
    # Lazily create the bindings singleton on first use.
    if ik_fk_state is None:
        ik_fk_state = BitbotsMoveitBindings([])
    raw_response = ik_fk_state.getPositionIK(
        serialize_message(request), approximate)
    return deserialize_message(raw_response, GetPositionIK.Response)
# Beispiel #4
# 0
    def test_sequential_reader(self):
        """Verify bag metadata, then sequentially read back all Int32 messages.

        Fix: use `assertEqual` instead of the deprecated `assertEquals`
        alias, which was removed in Python 3.12.
        """
        info = baggie._BagInfo()
        meta = info.read_metadata(self.s_opt.uri, self.s_opt.storage_id)
        self.assertEqual(meta.storage_identifier, self.s_opt.storage_id)
        self.assertEqual(meta.message_count, N_MSGS)
        self.assertEqual(meta.compression_format,
                         self.comp_opt.compression_format)
        self.assertEqual(meta.compression_mode,
                         self.comp_opt.mode_to_string())

        reader = baggie._Reader(self.comp_opt)
        reader.open(self.s_opt, self.c_opt)

        i = 0
        while reader.has_next():
            topic, ser_msg, ts = reader.read_next()
            msg = deserialize_message(ser_msg, Int32)

            self.assertIsInstance(msg, Int32)
            self.assertEqual(i, msg.data)
            self.assertEqual(self.stamps[i], ts)

            i += 1

        # Every message written must have been read back.
        self.assertEqual(i, N_MSGS)
# Beispiel #5
# 0
 def filter_msg(self, msg):
     """Rewrite header.frame_id on the configured topic; pass others through.

     *msg* is a (topic, serialized_data, timestamp) tuple; the return value
     has the same shape.
     """
     topic, data, stamp = msg
     if topic != self._topic:
         return msg
     decoded = deserialize_message(data, self._message_type)
     assert hasattr(decoded, 'header')
     decoded.header.frame_id = self._frame_id
     return (topic, serialize_message(decoded), stamp)
# Beispiel #6
# 0
    def step_open_loop(self, dt: float, cmdvel_msg: Twist):
        """Advance the walk engine open-loop by *dt* seconds.

        Returns the resulting PoseArray.
        """
        # A zero dt triggers spline interpolation edge cases; clamp to 1 ms.
        effective_dt = 0.001 if dt == 0.0 else dt
        raw_result = self.py_walk_wrapper.step_open_loop(
            effective_dt, serialize_message(cmdvel_msg))
        return deserialize_message(raw_result, PoseArray)
# Beispiel #7
# 0
 def filter_msg(self, msg):
     """Deserialize messages on the configured topic and delegate to
     filter_typed_msg, re-serializing the (possibly modified) result.

     A FilterResult returned by filter_typed_msg is forwarded unchanged;
     messages on other topics pass through untouched.
     """
     topic, data, stamp = msg
     if topic != self._topic:
         return msg
     typed = deserialize_message(data, self._message_type)
     outcome = self.filter_typed_msg((topic, typed, stamp))
     if isinstance(outcome, FilterResult):
         return outcome
     out_topic, out_msg, out_stamp = outcome
     return (out_topic, serialize_message(out_msg), out_stamp)
# Beispiel #8
# 0
    def step_relative(self, dt: float, step_msg: Twist, imu_msg,
                      jointstate_msg, pressure_left, pressure_right):
        """Advance the walk engine by *dt* seconds using sensor feedback.

        All message arguments are serialized before crossing into the C++
        wrapper; the wrapper's result is deserialized into a JointCommand.
        """
        # A zero dt triggers spline interpolation edge cases; clamp to 1 ms.
        effective_dt = 0.001 if dt == 0.0 else dt
        raw_result = self.py_walk_wrapper.step_relative(
            effective_dt,
            serialize_message(step_msg),
            serialize_message(imu_msg),
            serialize_message(jointstate_msg),
            serialize_message(pressure_left),
            serialize_message(pressure_right))
        return deserialize_message(raw_result, JointCommand)
def test_sequential_writer(tmp_path):
    """Write ten String messages to a fresh bag, then read them back and
    verify payloads and timestamps.

    Fix: drop the redundant `str(i)` inside the f-string — f-strings format
    integers directly, and the read-back assertion already relies on that.
    """
    bag_path = str(tmp_path / 'tmp_write_test')

    storage_options, converter_options = get_rosbag_options(bag_path)

    writer = rosbag2_py.SequentialWriter()
    writer.open(storage_options, converter_options)

    # create topic
    topic_name = '/chatter'
    create_topic(writer, topic_name, 'std_msgs/msg/String')

    for i in range(10):
        msg = String()
        msg.data = f'Hello, world! {i}'
        time_stamp = i * 100

        writer.write(topic_name, serialize_message(msg), time_stamp)

    # close bag and create new storage instance
    del writer
    storage_options, converter_options = get_rosbag_options(bag_path)

    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    # Map topic name -> type name for quicker lookup
    type_map = {
        meta.name: meta.type for meta in reader.get_all_topics_and_types()
    }

    msg_counter = 0
    while reader.has_next():
        topic, data, t = reader.read_next()
        msg_type = get_message(type_map[topic])
        msg_deserialized = deserialize_message(data, msg_type)

        assert isinstance(msg_deserialized, String)
        assert msg_deserialized.data == f'Hello, world! {msg_counter}'
        assert t == msg_counter * 100

        msg_counter += 1
# Beispiel #10
# 0
def read_all_messages_of_topic(bag_path, topic, type):
    """Read all messages of given topic and type from a rosbag into a list."""
    # NOTE(review): the parameter name `type` shadows the builtin; kept
    # unchanged for interface compatibility with existing callers.
    from rosbag2_py import StorageOptions, ConverterOptions, SequentialReader
    from rclpy.serialization import deserialize_message

    storage_options = StorageOptions(uri=bag_path, storage_id='sqlite3')
    converter_options = ConverterOptions(input_serialization_format='cdr',
                                         output_serialization_format='cdr')

    reader = SequentialReader()
    reader.open(storage_options, converter_options)

    messages = []
    while reader.has_next():
        current_topic, raw_data, _ = reader.read_next()
        if current_topic == topic:
            messages.append(deserialize_message(raw_data, type))
    return messages
# Beispiel #11
# 0
def test_reframe_filter():
    """ReframeFilter rewrites header.frame_id on the configured topic.

    Fix: rename the local `filter` so it no longer shadows the builtin,
    and use plain `assert expr` instead of the parenthesized call-like form.
    """
    reframe_filter = ReframeFilter()

    parser = argparse.ArgumentParser('reframe')
    reframe_filter.add_arguments(parser)
    args = parser.parse_args(['-t', '/data', '--frame', 'frame1'])
    reframe_filter.set_args(None, args)

    topic_metadata = TopicMetadata(
        '/data', 'diagnostic_msgs/msg/DiagnosticArray', 'cdr')
    assert reframe_filter.filter_topic(topic_metadata) == topic_metadata

    msg = DiagnosticArray()
    msg.header.frame_id = 'frame0'
    msg.header.stamp.sec = 0
    msg.header.stamp.nanosec = 1

    # timestamp within the bag and cut duration
    bag_msg = ('/data', serialize_message(msg), 1)
    (_, data, _) = reframe_filter.filter_msg(bag_msg)
    new_msg = deserialize_message(data, DiagnosticArray)
    assert new_msg.header.frame_id == 'frame1'
def main():
    """Open the hard-coded bag file and print every message it contains."""
    # A hard-coded bag path is acceptable here because the recommended way
    # to run this script is via the Dockerfile.
    bag_path = "/bag.db3"
    storage_options, converter_options = get_rosbag_options(bag_path)
    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    # Map topic name -> type name so each message can be deserialized.
    type_map = {
        meta.name: meta.type for meta in reader.get_all_topics_and_types()
    }

    msg_counter = 0
    while reader.has_next():
        print("\n-----------------")
        print("msg id {}".format(msg_counter))
        topic, data, _ = reader.read_next()
        print(topic)
        msg = deserialize_message(data, get_message(type_map[topic]))
        print(msg)
        msg_counter += 1
# Beispiel #13
# 0
def test_replace_filter():
    """ReplaceFilter substitutes message contents loaded from a YAML file.

    Fix: rename the local `filter` so it no longer shadows the builtin,
    and use plain `assert expr` instead of the parenthesized call-like form.
    """
    replace_filter = ReplaceFilter()

    parser = argparse.ArgumentParser('replace')
    replace_filter.add_arguments(parser)
    args = parser.parse_args(['-t', '/data', '-v', 'test/data.yaml'])
    replace_filter.set_args(None, args)

    string_msg = String()
    string_msg.data = 'in'
    msg = ('/data', serialize_message(string_msg), 0)

    with pytest.raises(RuntimeError):
        # if topic hasn't been found, filter_msg fails with error
        replace_filter.filter_msg(msg)

    topic_metadata = TopicMetadata(
        '/data', 'example_interfaces/msg/String', 'cdr')
    assert replace_filter.filter_topic(topic_metadata) == topic_metadata
    (topic, result_data, t) = replace_filter.filter_msg(msg)
    assert topic == '/data'
    result_msg = deserialize_message(result_data, String)
    assert result_msg.data == 'out'
    assert t == 0
def main():
    """Analyze odometry bags: plot trajectory/position/error figures per bag
    and print MSE summary tables (as LaTeX rows) for raw vs. filtered odometry.

    Relies on module-level names defined elsewhere in this file: walk,
    listdir, sqlite3, plt, np, statistics, get_message, deserialize_message,
    get_pose, zoomed_inset_axes, mark_inset.
    """
    path = '../bags/test'
    # First-level subdirectories of `path`, one per experiment run.
    dirs = next(walk(path))[1]
    dirs.sort()
    print(dirs)

    # Per-bag mean-squared errors, collected for the summary tables below.
    avg_MSE_odom = []
    avg_MSE_odom_filtered = []
    avg_MSE_th = []
    avg_MSE_th_filtered = []

    plot_number = 0
    for dir in dirs:  # NOTE(review): `dir` shadows the builtin
        mypath = f'{path}/{dir}'

        bag_files = [f for f in listdir(mypath) if f.endswith(".db3")]
        # print(bag_files)

        for bag_file in bag_files:
            plot_number += 1
            # Read the rosbag2 sqlite3 storage directly.
            conn = sqlite3.connect(f'{mypath}/{bag_file}')
            c = conn.cursor()

            c.execute('SELECT * FROM messages')
            message_data = c.fetchall()

            # topics table: row[1] is the topic name, row[2] its type name.
            c.execute('SELECT * FROM topics')
            topic_data = c.fetchall()
            msg_types = list()
            topic_names = list()
            for row in topic_data:
                msg_types.append(row[2])
                topic_names.append(row[1])

            # Timestamp of the first message, used as the time origin.
            time0 = message_data[0][2]

            # Per-topic series: x, y, heading (th), and relative time [s].
            dfs = {k: {} for k in topic_names}
            for df in dfs:
                dfs[df] = {k: [] for k in ['x', 'y', 'th', 'time']}

            for row in message_data:
                # messages.topic_id is 1-based; convert to a 0-based index.
                id = row[1] - 1  # NOTE(review): `id` shadows the builtin
                msg_type = get_message(msg_types[id])
                x = deserialize_message(row[3], msg_type)
                pos, th = get_pose(msg_types, topic_names, x, id)
                dfs[topic_names[id]]['x'].append(pos.x)
                dfs[topic_names[id]]['y'].append(pos.y)
                if th == 0:
                    # th == 0 is treated as "no heading": repeat the previous
                    # value. NOTE(review): IndexError if the very first th is 0.
                    dfs[topic_names[id]]['th'].append(
                        dfs[topic_names[id]]['th'][-1])
                else:
                    dfs[topic_names[id]]['th'].append(th)
                # Bag timestamps are nanoseconds; convert to seconds.
                dfs[topic_names[id]]['time'].append((row[2] - time0) / 1e9)

            # --- Figure 1: x/y trajectory with a zoomed inset --------------
            fig, ax = plt.subplots()
            plt.title(f'Trajektoria')
            plt.xlabel('x [m]')
            plt.ylabel('y [m]')
            ax.plot(dfs['/odom/noisy']['x'],
                    dfs['/odom/noisy']['y'],
                    label='odometria')
            ax.plot(dfs['/odom']['x'],
                    dfs['/odom']['y'],
                    label='odometria przefiltrowana')
            ax.plot(dfs['/omnivelma/pose']['x'],
                    dfs['/omnivelma/pose']['y'],
                    label='pozycja w symulatorze')
            # fig.legend()
            ax.legend(loc=3)
            plt.grid()
            # 15x magnified inset around the origin, mirroring the main axes.
            axins = zoomed_inset_axes(ax, 15, loc=1)
            axins.plot(dfs['/odom/noisy']['x'],
                       dfs['/odom/noisy']['y'],
                       label='odometria')
            axins.plot(dfs['/odom']['x'],
                       dfs['/odom']['y'],
                       label='odometria przefiltrowana')
            axins.plot(dfs['/omnivelma/pose']['x'],
                       dfs['/omnivelma/pose']['y'],
                       label='pozycja w symulatorze')
            x1, x2, y1, y2 = -0.1, 0.1, -0.1, 0.1  # specify the limits
            axins.set_xlim(x1, x2)  # apply the x-limits
            axins.set_ylim(y1, y2)  # apply the y-limits
            axins.grid()

            plt.yticks(visible=True)
            plt.xticks(visible=True)
            mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
            plt.savefig(f'{mypath}/trajectory.pdf', bbox_inches='tight')

            # --- Figure 2: x position over time ----------------------------
            plt.figure(2)
            plt.grid()
            plt.plot(dfs['/odom/noisy']['time'],
                     dfs['/odom/noisy']['x'],
                     label='odometria')
            plt.plot(dfs['/odom']['time'],
                     dfs['/odom']['x'],
                     label='odometria przefiltrowana')
            plt.plot(dfs['/omnivelma/pose']['time'],
                     dfs['/omnivelma/pose']['x'],
                     label='pozycja w symulatorze')
            plt.xlabel('czas [s]')
            plt.ylabel('x [m]')
            plt.legend()
            plt.title(f'Wartość położenia w osi x')
            plt.savefig(f'{mypath}/x.pdf', bbox_inches='tight')

            # --- Figure 3: y position over time ----------------------------
            plt.figure(3)
            plt.grid()
            plt.plot(dfs['/odom/noisy']['time'],
                     dfs['/odom/noisy']['y'],
                     label='odometria')
            plt.plot(dfs['/odom']['time'],
                     dfs['/odom']['y'],
                     label='odometria przefiltrowana')
            plt.plot(dfs['/omnivelma/pose']['time'],
                     dfs['/omnivelma/pose']['y'],
                     label='pozycja w symulatorze')
            plt.xlabel('czas [s]')
            plt.ylabel('y [m]')
            plt.legend()
            plt.title(f'Wartość położenia w osi y')
            plt.savefig(f'{mypath}/y.pdf', bbox_inches='tight')

            # --- Figure 4: orientation over time ---------------------------
            plt.figure(4)
            plt.grid()
            plt.plot(dfs['/odom/noisy']['time'],
                     dfs['/odom/noisy']['th'],
                     label='odometria')
            plt.plot(dfs['/odom']['time'],
                     dfs['/odom']['th'],
                     label='odometria przefiltrowana')
            plt.plot(dfs['/omnivelma/pose']['time'],
                     dfs['/omnivelma/pose']['th'],
                     label='pozycja w symulatorze')
            plt.xlabel('czas [s]')
            plt.ylabel('kąt [rad]')
            plt.legend()
            plt.title(f'Wartość orientacji')
            plt.savefig(f'{mypath}/theta.pdf', bbox_inches='tight')

            # --- Figure 5: absolute position error -------------------------
            plt.figure(5)
            plt.grid()

            odom_x_error = np.asarray(dfs['/error/odom']['x'])
            odom_y_error = np.asarray(dfs['/error/odom']['y'])
            odom_th_error = np.asarray(dfs['/error/odom']['th'])
            # Euclidean position error of the raw odometry.
            odom_error = np.sqrt(
                np.add(np.power(odom_x_error, 2), np.power(odom_y_error, 2)))

            odom_filtered_x_error = np.asarray(
                dfs['/error/odom_filtered']['x'])
            odom_filtered_y_error = np.asarray(
                dfs['/error/odom_filtered']['y'])
            odom_filtered_th_error = np.asarray(
                dfs['/error/odom_filtered']['th'])
            # Euclidean position error of the filtered odometry.
            odom_filtered_error = np.sqrt(
                np.add(np.power(odom_filtered_x_error, 2),
                       np.power(odom_filtered_y_error, 2)))

            MSE_odom = np.mean(np.power(odom_error, 2))
            MSE_Th_odom = np.mean(np.power(odom_th_error, 2))
            plt.plot(dfs['/error/odom']['time'],
                     odom_error,
                     label='błąd odometrii')

            MSE_odom_filtered = np.mean(np.power(odom_filtered_error, 2))
            MSE_Th_odom_filtered = np.mean(np.power(odom_filtered_th_error, 2))
            plt.plot(dfs['/error/odom_filtered']['time'],
                     odom_filtered_error,
                     label='błąd przefiltrowanej odometrii')

            # Collect per-bag MSEs for the summary tables printed at the end.
            avg_MSE_odom.append(MSE_odom)
            avg_MSE_odom_filtered.append(MSE_odom_filtered)
            avg_MSE_th.append(MSE_Th_odom)
            avg_MSE_th_filtered.append(MSE_Th_odom_filtered)

            # last_x = np.asarray(dfs['/omnivelma/pose']['x'])[-1]
            # print(f'Last x: {odom_x_error[-1]:.4} , last y: {odom_y_error[-1]:.4} las th: {odom_th_error[-1]:.4}, path length: {last_x:.4}')

            plt.xlabel('czas [s]')
            plt.ylabel('błąd [m]')
            plt.legend()
            plt.title(f'Absolutny błąd położenia')
            plt.savefig(f'{mypath}/error_abs.pdf', bbox_inches='tight')

            # --- Figure 6: per-axis error subplots -------------------------
            plt.figure(6)
            fig, axs = plt.subplots(2, sharex=True, sharey=False)
            axs[0].plot(dfs['/error/odom']['time'],
                        dfs['/error/odom']['x'],
                        label='odometria')
            axs[1].plot(dfs['/error/odom']['time'],
                        dfs['/error/odom']['y'],
                        label='odometria')
            axs[0].plot(dfs['/error/odom_filtered']['time'],
                        dfs['/error/odom_filtered']['x'],
                        label='przefiltrowana odometria')
            axs[1].plot(dfs['/error/odom_filtered']['time'],
                        dfs['/error/odom_filtered']['y'],
                        label='przefiltrowana odometria')

            plt.xlabel('czas [s]')

            for ax in axs.flat:
                ax.set(ylabel='błąd [m]')
                ax.grid()
                ax.legend()

            axs[0].set_title('Błąd x')
            axs[1].set_title('Błąd y')

            plt.savefig(f'{mypath}/error.pdf', bbox_inches='tight')

            # --- Figure 10: orientation error (line) -----------------------
            plt.figure(10)
            plt.plot(dfs['/error/odom']['time'],
                     dfs['/error/odom']['th'],
                     label='odometria',
                     linewidth=1)
            plt.plot(dfs['/error/odom_filtered']['time'],
                     dfs['/error/odom_filtered']['th'],
                     label='przefiltrowana odometria',
                     linewidth=1)
            plt.xlabel('time [s]')
            # NOTE(review): this second xlabel overwrites the previous one —
            # almost certainly meant plt.ylabel('błąd [rad]'). Confirm intent.
            plt.xlabel('błąd [rad]')

            plt.grid()
            plt.legend()
            plt.title(f'Błąd orientacji')
            plt.savefig(f'{mypath}/error_theta.pdf', bbox_inches='tight')

            # --- Figure 11: orientation error (scatter) --------------------
            plt.figure(11)
            plt.scatter(dfs['/error/odom']['time'],
                        dfs['/error/odom']['th'],
                        s=1,
                        label='odometria')
            plt.scatter(dfs['/error/odom_filtered']['time'],
                        dfs['/error/odom_filtered']['th'],
                        s=1,
                        label='przefiltrowana odometria')
            plt.xlabel('time [s]')
            # NOTE(review): same duplicated xlabel here — likely meant ylabel.
            plt.xlabel('błąd [rad]')

            plt.grid()
            plt.legend()
            plt.title(f'Błąd orientacji')
            plt.savefig(f'{mypath}/error_theta_scatter.pdf',
                        bbox_inches='tight')

            plt.close('all')

    # Summary tables as LaTeX rows.
    # NOTE(review): '\\\ \hline' relies on invalid escape sequences ('\ ',
    # '\h'); a raw string or '\\\\' would express the intended LaTeX '\\'.
    print("---Polozenie ---")
    for i in range(len(avg_MSE_odom)):
        print(
            f"{i+1} & {avg_MSE_odom[i]:.4} & {avg_MSE_odom_filtered[i]:.4} \\\ \hline"
        )

    print(
        f"Średnia & {statistics.mean(avg_MSE_odom):.4} & {statistics.mean(avg_MSE_odom_filtered):.4} \\\ \hline"
    )

    print("---Orientacja ---")
    for i in range(len(avg_MSE_odom)):
        print(
            f"{i+1} & {avg_MSE_th[i]:.4} & {avg_MSE_th_filtered[i]:.4} \\\ \hline"
        )
    print(
        f"Średnia & {statistics.mean(avg_MSE_th):.4} & {statistics.mean(avg_MSE_th_filtered):.4} \\\ \hline"
    )
# Beispiel #15
# 0
def main():
    """Compute path length and pose count for each recorded global plan and
    plot the Dijkstra vs. A* trajectories for comparison.

    Fixes: drop f-string prefixes with no placeholders, remove the unused
    enumerate index, rename locals that shadowed the builtin `dir` and the
    non-conventional `Dist`.
    """
    path = '../bags/global_plan/path1'
    # First-level subdirectories of `path`, one per planner run.
    dirs = next(walk(path))[1]
    dirs.sort()
    print(dirs)

    list_x = list()
    list_y = list()
    distances = []
    n_poses = []
    for run_dir in dirs:

        mypath = f'{path}/{run_dir}'

        bag_files = [f for f in listdir(mypath) if f.endswith(".db3")]
        print(bag_files)

        for bag_file in bag_files:
            total_dist = 0
            y_poses = list()
            x_poses = list()
            # Read the rosbag2 sqlite3 storage directly.
            conn = sqlite3.connect(f'{mypath}/{bag_file}')
            c = conn.cursor()

            c.execute('SELECT * FROM messages')
            message_data = c.fetchall()

            # Only the first message (the published nav_msgs/Path) is used.
            message = message_data[0][3]
            msg_type = get_message('nav_msgs/msg/Path')

            plan = deserialize_message(message, msg_type)
            # Sum Euclidean distances between consecutive poses.
            for i, pose_stamped in enumerate(plan.poses[:-1]):
                dx = plan.poses[i + 1].pose.position.x - pose_stamped.pose.position.x
                dy = plan.poses[i + 1].pose.position.y - pose_stamped.pose.position.y
                total_dist = total_dist + math.sqrt(dx * dx + dy * dy)
                x_poses.append(pose_stamped.pose.position.x)
                y_poses.append(pose_stamped.pose.position.y)
            # The loop above stops one short; append the final pose too.
            x_poses.append(plan.poses[-1].pose.position.x)
            y_poses.append(plan.poses[-1].pose.position.y)

            list_x.append(x_poses)
            list_y.append(y_poses)
            n_poses.append(len(plan.poses))
            distances.append(total_dist)

    print(n_poses)
    print(distances)
    fig, ax = plt.subplots()
    plt.title('trajektoria')
    plt.xlabel('x [m]')
    plt.ylabel('y [m]')

    # Assumes exactly two recorded runs: Dijkstra first, then A*.
    ax.plot(list_x[0], list_y[0], label='Djikstra')
    ax.plot(list_x[1], list_y[1], label='A*')

    # fig.legend()
    ax.legend(loc=0)
    plt.grid()
    # NOTE: saves into the directory of the last bag processed.
    plt.savefig(f'{mypath}/trajectory.pdf')

    plt.close('all')
# Beispiel #16
# 0
def get_joint_states():
    """Return the current JointState from the MoveIt bindings singleton."""
    global current_state
    # Lazily create the bindings on first use.
    if current_state is None:
        current_state = BitbotsMoveitBindings([])
    raw_states = current_state.get_joint_states()
    return deserialize_message(raw_states, JointState)
# Beispiel #17
# 0
def get_bioik_ik():
    """Return the BioIK solution from the MoveIt bindings singleton."""
    global ik_fk_state
    # Lazily create the bindings on first use.
    if ik_fk_state is None:
        ik_fk_state = BitbotsMoveitBindings([])
    raw_response = ik_fk_state.getBioIKIK()
    return deserialize_message(raw_response, GetIK.Response)
# Beispiel #18
# 0
def get_position_fk(request: GetPositionFK.Request):
    """Run forward kinematics for *request* via the MoveIt bindings.

    Returns the deserialized GetPositionFK.Response.
    """
    global ik_fk_state
    # Lazily create the bindings on first use.
    if ik_fk_state is None:
        ik_fk_state = BitbotsMoveitBindings([])
    raw_response = ik_fk_state.getPositionFK(serialize_message(request))
    return deserialize_message(raw_response, GetPositionFK.Response)
# Beispiel #19
# 0
    def main(self, *, args):  # noqa: D102
        """Summarize a bag: per-topic frame_ids, frequency, and write delays.

        Reads every (optionally topic-filtered) message, tracks header
        frame_ids and write delays, then prints a per-topic report.
        """
        if not os.path.exists(args.bag_file):
            return print_error("bag file '{}' does not exist!".format(
                args.bag_file))

        # Normalize "no topics given" to an empty list (meaning: all topics).
        if not args.topic:
            args.topic = []

        reader = SequentialReader()
        in_storage_options = StorageOptions(uri=args.bag_file,
                                            storage_id=args.storage)
        in_converter_options = ConverterOptions(
            input_serialization_format=args.serialization_format,
            output_serialization_format=args.serialization_format)
        reader.open(in_storage_options, in_converter_options)

        # Message counts and duration come from the bag metadata, not from
        # reading the messages themselves.
        info = Info()
        metadata = info.read_metadata(args.bag_file, args.storage)
        message_counts = {}
        for entry in metadata.topics_with_message_count:
            message_counts[entry.topic_metadata.name] = entry.message_count
        bag_duration_s = metadata.duration.total_seconds()

        # type name -> message class (cached so each type loads only once)
        type_name_to_type_map = {}
        # topic name -> message class
        topic_to_type_map = {}
        # topic name -> accumulated summary data
        summaries = {}

        for topic_metadata in reader.get_all_topics_and_types():
            # Skip topics excluded by the --topic filter.
            if args.topic and topic_metadata.name not in args.topic:
                continue
            if topic_metadata.type not in type_name_to_type_map:
                try:
                    type_name_to_type_map[topic_metadata.type] = get_message(
                        topic_metadata.type)
                except (AttributeError, ModuleNotFoundError, ValueError):
                    raise RuntimeError(
                        f"Cannot load message type '{topic_metadata.type}'")
            topic_to_type_map[topic_metadata.name] = type_name_to_type_map[
                topic_metadata.type]
            summaries[topic_metadata.name] = {
                'frame_ids': set(),
                'write_delays_ns': [],
                'custom': default_summary_output(topic_metadata.type)
            }

        reader.set_filter(StorageFilter(topics=args.topic))

        progress = ProgressTracker()
        if args.progress:
            progress.add_estimated_work(metadata, 1.0)

        # Single pass over all (filtered) messages, updating summaries.
        while reader.has_next():
            (topic, data, t) = reader.read_next()
            msg_type = topic_to_type_map[topic]
            msg = deserialize_message(data, msg_type)
            for custom in summaries[topic]['custom']:
                custom.update(msg)
            if hasattr(msg, 'header'):
                summaries[topic]['frame_ids'].add(msg.header.frame_id)
                # Delay between the message's own stamp and its bag write time.
                delay = t - Time.from_msg(msg.header.stamp).nanoseconds
                summaries[topic]['write_delays_ns'].append(delay)
            if args.progress:
                progress.print_update(progress.update(topic), every=100)

        if args.progress:
            progress.print_finish()

        # Print the per-topic report.
        for topic, summary in summaries.items():
            print(topic)
            if not message_counts[topic]:
                print('\tNo messages')
                continue
            frame_id_str = ', '.join(summary['frame_ids'])
            print(f'\tframe_id: {frame_id_str}')
            freq = message_counts[topic] / bag_duration_s
            print(f'\tfrequency: {freq:.2f} hz')
            if summary['write_delays_ns']:
                # only messages with header.stamp have delays
                write_delays = np.array(summary['write_delays_ns'])
                delay_ms_mean = np.mean(write_delays) / 1000 / 1000
                delay_ms_stddev = np.std(write_delays) / 1000 / 1000
                print(
                    f'\twrite delay: {delay_ms_mean:.2f}ms (stddev {delay_ms_stddev:.2f})'
                )
                for custom in summaries[topic]['custom']:
                    custom.write()
def main():
    """Compare DWB and TEB navigation runs: plot the driven trajectory and
    commanded velocities, then print average speeds and run durations.

    Assumes exactly two recorded runs (DWB first, TEB second). Relies on
    module-level names defined elsewhere in this file: walk, listdir,
    sqlite3, plt, np, statistics, get_message, deserialize_message,
    get_pose, zoomed_inset_axes, mark_inset.
    """
    path = '../bags/TEB'
    # First-level subdirectories of `path`, one per run.
    dirs = next(walk(path))[1]
    dirs.sort()
    print(dirs)
    iter = 0  # NOTE(review): shadows the builtin `iter`
    # NOTE(review): these three accumulators are never used below.
    avg_x_vel = []
    avg_y_vel = []
    avg_th_vel = []
    list_x = list()  # per-run filtered pose series
    list_y = list()  # per-run commanded velocity series
    plot_number = 0
    for dir in dirs:  # NOTE(review): `dir` shadows the builtin
        mypath = f'{path}/{dir}'

        bag_files = [f for f in listdir(mypath) if f.endswith(".db3")]

        for bag_file in bag_files:
            plot_number += 1
            # Read the rosbag2 sqlite3 storage directly.
            conn = sqlite3.connect(f'{mypath}/{bag_file}')
            c = conn.cursor()

            c.execute('SELECT * FROM messages')
            message_data = c.fetchall()

            # topics table: row[1] is the topic name, row[2] its type name.
            c.execute('SELECT * FROM topics')
            topic_data = c.fetchall()
            msg_types = list()
            topic_names = list()
            for row in topic_data:
                msg_types.append(row[2])
                topic_names.append(row[1])

            # Timestamp of the first message, used as the time origin.
            time0 = message_data[0][2]

            # Per-topic series: x, y, heading (th), and relative time [s].
            dfs = {k: {} for k in topic_names}
            for df in dfs:
                dfs[df] = {k: [] for k in ['x', 'y', 'th', 'time']}

            for row in message_data:
                # messages.topic_id is 1-based; convert to a 0-based index.
                id = row[1] - 1  # NOTE(review): `id` shadows the builtin
                msg_type = get_message(msg_types[id])
                x = deserialize_message(row[3], msg_type)
                pos, th = get_pose(msg_types, topic_names, x, id)
                dfs[topic_names[id]]['x'].append(pos.x)
                dfs[topic_names[id]]['y'].append(pos.y)
                dfs[topic_names[id]]['th'].append(th)
                # Bag timestamps are nanoseconds; convert to seconds.
                dfs[topic_names[id]]['time'].append((row[2] - time0) / 1e9)
            list_x.append(dfs['/amcl/filtered'])
            list_y.append(dfs['/cmd_vel'])
            iter += 1

    # --- Figure 1: trajectory (DWB at index 0, TEB at index 1) -------------
    fig, ax = plt.subplots()
    plt.title(f'Trajektoria')
    plt.xlabel('x [m]')
    plt.ylabel('y [m]')
    ax.plot(list_x[0]['x'], list_x[0]['y'], label='DWB')
    ax.plot(list_x[1]['x'], list_x[1]['y'], label='TEB')
    # fig.legend()
    ax.legend(loc=4)
    plt.grid()
    # 5x magnified inset of the region of interest.
    axins = zoomed_inset_axes(ax, 5, loc=2, borderpad=3.0)
    axins.plot(list_x[0]['x'], list_x[0]['y'], label='DWB')
    axins.plot(list_x[1]['x'], list_x[1]['y'], label='TEB')
    x1, x2, y1, y2 = 36, 40, 29, 33  # specify the limits
    axins.set_xlim(x1, x2)  # apply the x-limits
    axins.set_ylim(y1, y2)  # apply the y-limits
    axins.grid()

    plt.yticks(visible=True)
    plt.xticks(visible=True)
    mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
    # NOTE: saves into the directory of the last bag processed.
    plt.savefig(f'{mypath}/trajectory.pdf', bbox_inches='tight')

    # --- Figure 2: commanded linear velocity x ------------------------------
    plt.figure(2)
    plt.grid()
    plt.plot(list_y[0]['time'], list_y[0]['x'], label='DWB')
    plt.plot(list_y[1]['time'], list_y[1]['x'], label='TEB')
    plt.xlabel('czas [s]')
    plt.ylabel('v_x [m/s]')
    plt.legend()
    plt.title(f'Zadana prędkość liniowa x')
    plt.savefig(f'{mypath}/x.pdf', bbox_inches='tight')

    # --- Figure 3: commanded linear velocity y ------------------------------
    plt.figure(3)
    plt.grid()
    plt.plot(list_y[0]['time'], list_y[0]['y'], label='DWB')
    plt.plot(list_y[1]['time'], list_y[1]['y'], label='TEB')
    plt.xlabel('czas [s]')
    plt.ylabel('v_y [m/s]')
    plt.legend()
    plt.title(f'Zadana prędkość liniowa y')
    plt.savefig(f'{mypath}/y.pdf', bbox_inches='tight')

    # --- Figure 4: commanded angular velocity -------------------------------
    plt.figure(4)
    plt.grid()
    plt.plot(list_y[0]['time'], list_y[0]['th'], label='DWB')
    plt.plot(list_y[1]['time'], list_y[1]['th'], label='TEB')
    plt.xlabel('czas [s]')
    plt.ylabel('omega [rad/s]')
    plt.legend()
    plt.title(f'Zadana prędkość kątowa')
    plt.savefig(f'{mypath}/theta.pdf', bbox_inches='tight')

    plt.close('all')

    # Average absolute commanded velocities and run durations per planner.
    avg_x_TEB = statistics.mean(np.absolute(np.asarray(list_y[1]['x'])))
    avg_y_TEB = statistics.mean(np.absolute(np.asarray(list_y[1]['y'])))
    avg_th_TEB = statistics.mean(np.absolute(np.asarray(list_y[1]['th'])))
    time_TEB = (list_y[1]['time'][-1] - list_y[1]['time'][0])

    avg_x_DWB = statistics.mean(np.absolute(np.asarray(list_y[0]['x'])))
    avg_y_DWB = statistics.mean(np.absolute(np.asarray(list_y[0]['y'])))
    avg_th_DWB = statistics.mean(np.absolute(np.asarray(list_y[0]['th'])))
    time_DWB = (list_y[0]['time'][-1] - list_y[0]['time'][0])

    print(f"time TEB: {time_TEB} time DWB: {time_DWB}")
    print(f"x TEB: {avg_x_TEB} x DWB: {avg_x_DWB}")
    print(f"y TEB: {avg_y_TEB} y DWB: {avg_y_DWB}")
    print(f"th TEB: {avg_th_TEB} th DWB: {avg_th_DWB}")
# Beispiel #21
# 0
def test_sequential_reader_seek():
    """Exercise SequentialReader.seek() both without and with a storage filter.

    Verifies that seeking to a known timestamp positions the reader on the
    expected messages, that a filter set after a seek takes effect, and that
    a subsequent seek keeps the active filter.
    """
    bag_path = str(RESOURCES_PATH / 'talker')
    storage_options, converter_options = get_rosbag_options(bag_path)

    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)

    topic_types = reader.get_all_topics_and_types()

    # Create a map for quicker lookup
    type_map = {topic_type.name: topic_type.type for topic_type in topic_types}

    # Seek No Filter
    reader = rosbag2_py.SequentialReader()
    reader.open(storage_options, converter_options)
    reader.seek(1585866237113147888)

    msg_counter = 5

    (topic, data, t) = reader.read_next()
    msg_type = get_message(type_map[topic])
    msg = deserialize_message(data, msg_type)

    assert isinstance(msg, Log)

    (topic, data, t) = reader.read_next()
    msg_type = get_message(type_map[topic])
    msg = deserialize_message(data, msg_type)

    # Bug fix: these were bare `isinstance(...)` expressions (no-ops); they
    # must be assertions to actually check the message type.
    assert isinstance(msg, String)
    assert msg.data == f'Hello, world! {msg_counter}'
    msg_counter += 1

    # Set Filter will continue
    storage_filter = rosbag2_py.StorageFilter(topics=['/topic'])
    reader.set_filter(storage_filter)

    (topic, data, t) = reader.read_next()
    msg_type = get_message(type_map[topic])
    msg = deserialize_message(data, msg_type)
    assert isinstance(msg, String)
    assert msg.data == f'Hello, world! {msg_counter}'

    # Seek will keep filter
    reader.seek(1585866239113147888)

    msg_counter = 8

    (topic, data, t) = reader.read_next()
    msg_type = get_message(type_map[topic])
    msg = deserialize_message(data, msg_type)
    assert isinstance(msg, String)
    assert msg.data == f'Hello, world! {msg_counter}'
    msg_counter += 1

    (topic, data, t) = reader.read_next()
    msg_type = get_message(type_map[topic])
    msg = deserialize_message(data, msg_type)
    assert isinstance(msg, String)
    assert msg.data == f'Hello, world! {msg_counter}'
Beispiel #22
0
 def get_odom(self):
     """Return the current odometry from the walk wrapper as an Odometry message."""
     serialized = self.py_walk_wrapper.get_odom()
     return deserialize_message(serialized, Odometry)
Beispiel #23
0
 def get_messages(self, topic_name):
     """Return all recorded messages for *topic_name*.

     Parameters
     ----------
     topic_name : str
       Name of the topic; must be a key of ``self.topic_id`` and
       ``self.topic_msg_message``.

     Returns
     -------
     list of (timestamp, msg)
       One tuple per stored row, with the data blob deserialized into the
       topic's message type.
     """
     topic_id = self.topic_id[topic_name]
     # Hoist the type lookup out of the comprehension; it is loop-invariant.
     msg_type = self.topic_msg_message[topic_name]
     # Parameterized query instead of str.format: avoids SQL injection and
     # lets sqlite reuse the prepared statement.
     rows = self.cursor.execute(
         'SELECT timestamp, data FROM messages WHERE topic_id = ?',
         (topic_id,)).fetchall()
     return [(timestamp, deserialize_message(data, msg_type))
             for timestamp, data in rows]
Beispiel #24
0
 def get_right_foot_pose(self):
     """Return the right foot pose from the walk wrapper as a Pose message."""
     serialized = self.py_walk_wrapper.get_right_foot_pose()
     return deserialize_message(serialized, Pose)
Beispiel #25
0
 def filter_msg(self, serialized_msg):
     """Replace the tuple's timestamp with the message header stamp, if present.

     Deserializes the payload; when the message carries a ``header``, the
     returned timestamp is taken from ``header.stamp`` (in nanoseconds)
     instead of the original bag timestamp.
     """
     topic, data, timestamp = serialized_msg
     message = deserialize_message(data, self._topic_type_map[topic])
     if hasattr(message, 'header'):
         # Prefer the stamp embedded in the message over the bag receive time.
         timestamp = Time.from_msg(message.header.stamp).nanoseconds
     return (topic, data, timestamp)
def test_serialize_deserialize(msgs, msg_type):
    """Round-trip each message through (de)serialization and compare."""
    for original in msgs:
        restored = deserialize_message(serialize_message(original), msg_type)
        assert original == restored
Beispiel #27
0
    def read_messages(self, topics=None, start_time=None, end_time=None):
        """
        Yield every message, in sequence, from a Baggie opened as a reader
        ("r" mode).

        Parameters
        ----------
        topics : list of str
          Fully qualified topic names to include; topics not listed are
          omitted. Combines with `start_time` and `end_time`.

        start_time : rclpy.time.Time
          Messages stamped earlier than this are skipped. Combines with
          `topics` and `end_time`.

        end_time : rclpy.time.Time
          Messages stamped later than this are skipped. Combines with
          `topics` and `start_time`.

        Yields
        ------
        3-tuple : str, msg, rclpy.time.Time
          [0] topic name
          [1] deserialized message
          [2] message timestamp

        Raises a baggie.BaggieException when the Baggie was opened as a
        writer ("w" mode).
        """
        if self.reader_ is None:
            raise (BaggieException("Cannot read in write-only mode"))

        # Default to the widest possible window, then narrow it with any
        # caller-supplied rclpy Time bounds (non-Time values are ignored,
        # matching the original behavior).
        t_lo = Time(nanoseconds=0)
        t_hi = Time(nanoseconds=sys.maxsize)
        if isinstance(start_time, Time):
            t_lo = start_time
        if isinstance(end_time, Time):
            t_hi = end_time

        self.reader_.reset_filter()
        if topics is not None:
            topic_filter = _StorageFilter()
            topic_filter.topics = topics
            self.reader_.set_filter(topic_filter)

        # Topic name -> Python message class, restricted to requested topics.
        msg_types = {}
        for meta in self.reader_.get_all_topics_and_types():
            if topics is None or meta.name in topics:
                msg_types[meta.name] = baggie.util.typestr2msgtype(meta.type)

        while self.reader_.has_next():
            topic, raw, stamp_ns = self.reader_.read_next()
            stamp = Time(nanoseconds=stamp_ns)
            if stamp < t_lo or stamp > t_hi:
                continue
            yield topic, deserialize_message(raw, msg_types[topic]), stamp
Beispiel #28
0
 def __next__(self):
     """Return the next (topic, message, timestamp) tuple, or stop iteration."""
     if not self._reader.has_next():
         raise StopIteration()
     topic, raw, stamp = self._reader.read_next()
     message = deserialize_message(raw, self._topic_type_map[topic])
     return (topic, message, stamp)
Beispiel #29
0
 def get_support_state(self):
     """Return the current support state from the walk wrapper as a Phase message."""
     raw = self.py_walk_wrapper.get_support_state()
     return deserialize_message(raw, Phase)