def _process_position(self, pose: Pose):
     # print roughly every 500 ms: only when the first fractional digit of the time is 0 or 5
     if float(f"{rospy.get_time()}".split('.')[-1][0]) % 5 == 0:
         cprint(f'received pose {pose}',
                self._logger,
                msg_type=MessageType.debug)
     robot_global_translation = np.asarray(
         [pose.position.x, pose.position.y, pose.position.z])
     robot_global_orientation = rotation_from_quaternion(
         (pose.orientation.x, pose.orientation.y, pose.orientation.z,
          pose.orientation.w))
     points_global_frame = transform(points=self._local_frame,
                                     orientation=robot_global_orientation,
                                     translation=robot_global_translation)
     points_camera_frame = transform(
         points=points_global_frame,
         orientation=self._camera_global_orientation,
         translation=self._camera_global_translation,
         invert=True
     )  # camera_global transformation is given, but should be inverted
     points_image_frame = project(points=points_camera_frame,
                                  fx=self._fx,
                                  fy=self._fy,
                                  cx=self._cx,
                                  cy=self._cy)
     if self._previous_position is None:
         self._previous_position = points_image_frame[0]  # store projected origin as previous position
         self._frame_points.append(points_image_frame)
     elif get_distance(self._previous_position,
                       points_image_frame[0]) > self._minimum_distance_px:
         self._previous_position = points_image_frame[0]  # store projected origin as previous position
         self._frame_points.append(points_image_frame)
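# A minimal, standalone sketch of the pinhole projection assumed by the project() call
# above: a camera-frame point (x, y, z) maps to pixel coordinates
# u = fx * x / z + cx and v = fy * y / z + cy. The helper name `project_points`
# and the Nx3/Nx2 layout are illustrative assumptions, not the repository's actual
# implementation.
import numpy as np

def project_points(points, fx: float, fy: float, cx: float, cy: float) -> np.ndarray:
    """Project Nx3 camera-frame points onto the image plane as Nx2 pixel coordinates."""
    points = np.asarray(points, dtype=float)
    u = fx * points[:, 0] / points[:, 2] + cx
    v = fy * points[:, 1] / points[:, 2] + cy
    return np.stack([u, v], axis=-1)

# Example: a point straight ahead of the camera projects onto the principal point (cx, cy).
assert np.allclose(project_points([[0., 0., 1.]], fx=100, fy=100, cx=320, cy=240), [[320., 240.]])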
    def _reference_update(self, pose_ref: PointStamped):
        if pose_ref.header.frame_id == "agent":
            cprint(f'got pose_ref in agent frame: {pose_ref.point}', self._logger)
            # TODO: use the time stamp of the message to project the reference point to the
            # global frame corresponding to the agent's pose at that time.
            # As references are only sent about once per second, the small delay probably won't matter much.

            # transform from agent to world frame.
            # Note that the agent frame is not necessarily the same as agent_horizontal, so a small error
            # is induced here, as pose_est.pose.orientation only incorporates the yaw rotation because the
            # KF does not maintain roll and pitch.
            pose_ref.point = transform(points=[pose_ref.point],
                                       orientation=self.pose_est.pose.orientation,
                                       translation=self.pose_est.pose.position)[0]
            pose_ref.header.frame_id = "global"
            cprint(f'mapped to global frame: {pose_ref.point}', self._logger)
        if pose_ref != self.pose_ref:
            # reset pose error when new reference comes in.
            self.prev_pose_error = PointStamped(header=Header(frame_id="global"))
            self.prev_vel_error = PointStamped(header=Header(frame_id="global_rotated"))
            self.last_cmd = TwistStamped(header=Header(stamp=rospy.Time().now()))
            self.pose_ref = pose_ref
            _, _, yaw = euler_from_quaternion(self.pose_est.pose.orientation)
            self.desired_yaw = yaw + calculate_relative_orientation(robot_pose=self.pose_est,
                                                                    reference_pose=self.pose_ref)
            cprint(f'set pose_ref: {self.pose_ref.point}', self._logger)
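# A minimal sketch of the rigid-body transform convention these snippets rely on:
# the forward direction maps a point from the child frame into the parent frame as
# p' = R @ p + t, and invert=True applies the inverse, p' = R.T @ (p - t).
# The helper below is an illustrative assumption, not the transform() imported here
# (which also accepts geometry_msgs Points and quaternions).
import numpy as np
from scipy.spatial.transform import Rotation as R

def transform_points(points, orientation, translation=np.zeros(3), invert=False):
    rotation = np.asarray(orientation)
    translation = np.asarray(translation)
    if invert:
        return [rotation.T @ (np.asarray(p) - translation) for p in points]
    return [rotation @ np.asarray(p) + translation for p in points]

# Example: a reference 1 m ahead of an agent located at (2, 0, 1) and yawed 90 degrees
# ends up at roughly (2, 1, 1) in the global frame.
yaw_90 = R.from_euler('XYZ', (0, 0, np.pi / 2)).as_matrix()
print(transform_points([np.array([1., 0., 0.])], yaw_90, np.array([2., 0., 1.])))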
 def _store_datapoint(self, data: Union[Odometry, TwistStamped], label: str = "observed"):
     if not self.show_graph:
         return
     if isinstance(data, Odometry):
         # The pose is expressed in the global frame, so rotate it into the yaw-rotated global frame;
         # the velocity is already expressed in that rotated frame (following the yaw).
         stamp = float(data.header.stamp.to_sec())
         _, _, yaw = euler_from_quaternion((data.pose.pose.orientation.x,
                                            data.pose.pose.orientation.y,
                                            data.pose.pose.orientation.z,
                                            data.pose.pose.orientation.w))
         rotated_position = transform(points=[data.pose.pose.position],
                                      orientation=self.pose_est.pose.orientation,
                                      invert=True)[0]
         self.data['pose']['x'][label].append((stamp, rotated_position.x))
         self.data['pose']['y'][label].append((stamp, rotated_position.y))
         self.data['pose']['z'][label].append((stamp, rotated_position.z))
         self.data['pose']['yaw'][label].append((stamp, yaw))
         self.data['velocity']['x'][label].append((stamp, data.twist.twist.linear.x))
         self.data['velocity']['y'][label].append((stamp, data.twist.twist.linear.y))
         self.data['velocity']['z'][label].append((stamp, data.twist.twist.linear.z))
         self.data['velocity']['yaw'][label].append((stamp, data.twist.twist.angular.z))
     elif isinstance(data, TwistStamped):
         stamp = float(data.header.stamp.to_sec())
         if len(self.data['command']['x']['observed']) > 1 and \
                 abs(stamp - self.data['command']['x']['observed'][-1][0]) < 0.1:
             return  # skip commands arriving within 100 ms of the last observed command
         self.data['command']['x'][label].append((stamp, data.twist.linear.x))
         self.data['command']['y'][label].append((stamp, data.twist.linear.y))
         self.data['command']['z'][label].append((stamp, data.twist.linear.z))
         self.data['command']['yaw'][label].append((stamp, data.twist.angular.z))
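# _store_datapoint above assumes a nested dictionary of time series. A minimal sketch
# of how such a structure could be initialised (the exact set of groups, axes and
# labels is an assumption based on the keys used above):
def _initialise_graph_data():
    labels = ['observed', 'adjusted']
    return {
        group: {axis: {label: [] for label in labels}
                for axis in ['x', 'y', 'z', 'yaw']}
        for group in ['pose', 'velocity', 'command']
    }

# Each leaf list collects (stamp, value) tuples, e.g.
# data['pose']['x']['observed'] == [(12.3, 0.05), ...]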
    def _feedback(self):
        '''Whenever the target is reached, apply position feedback to the
        desired end position to remain in the correct spot and compensate for
        drift. Tustin discretized PID controller for x, y and z (a standalone
        sketch of the discretised recursion follows this method).
        Returns a twist containing the control command.
        '''
        cmd = Twist()
        if self.pose_ref is None:
            return cmd
        # PID
        prev_pose_error = self.prev_pose_error

        pose_error = PointStamped()
        pose_error.point.x = (self.pose_ref.point.x - self.pose_est.pose.position.x)
        pose_error.point.y = (self.pose_ref.point.y - self.pose_est.pose.position.y)
        pose_error.point.z = (self.pose_ref.point.z - self.pose_est.pose.position.z)
        pose_error.point = transform(points=[pose_error.point],
                                     orientation=self.pose_est.pose.orientation,
                                     invert=True)[0]
        pose_error.header.frame_id = "global_rotated"

        prev_vel_error = self.prev_vel_error
        vel_error = PointStamped()
        vel_error.header.frame_id = "global_rotated"
        vel_error.point.x = self.vel_ref.twist.linear.x - self.vel_est.point.x
        vel_error.point.y = self.vel_ref.twist.linear.y - self.vel_est.point.y

        # calculations happen in global_rotated frame
        cmd.linear.x = max(-self.max_input, min(self.max_input, (
                self.last_cmd.twist.linear.x +
                (self.Kp_x + self.Ki_x * self._control_period / 2) * pose_error.point.x +
                (-self.Kp_x + self.Ki_x * self._control_period / 2) * prev_pose_error.point.x +
                self.Kd_x * (vel_error.point.x - prev_vel_error.point.x))))

        cmd.linear.y = max(-self.max_input, min(self.max_input, (
                self.last_cmd.twist.linear.y +
                (self.Kp_y + self.Ki_y * self._control_period / 2) * pose_error.point.y +
                (-self.Kp_y + self.Ki_y * self._control_period / 2) * prev_pose_error.point.y +
                self.Kd_y * (vel_error.point.y - prev_vel_error.point.y))))

        cmd.linear.z = max(-self.max_input, min(self.max_input, (
                self.last_cmd.twist.linear.z +
                (self.Kp_z + self.Ki_z * self._control_period / 2) * pose_error.point.z +
                (-self.Kp_z + self.Ki_z * self._control_period / 2) * prev_pose_error.point.z +
                self.Kd_z * (vel_error.point.z - prev_vel_error.point.z))))  # Added derivative term

        # yaw feedback: steer towards the desired yaw stored when the reference was set
        _, _, yaw = euler_from_quaternion(self.pose_est.pose.orientation)
        if self.desired_yaw is not None:  # in case there is a desired yaw
            angle_error = ((((self.desired_yaw - yaw) - np.pi) % (2*np.pi)) - np.pi)
            K_theta = self.K_theta + (np.pi - abs(angle_error))/np.pi*0.2
            cmd.angular.z = (K_theta * angle_error)

        self.prev_pose_error = pose_error
        self.prev_vel_error = vel_error
        return cmd
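# The docstring above names a Tustin (bilinear) discretisation. As a minimal,
# self-contained sketch (gain values and sample time below are made up): a continuous
# PI controller u(t) = Kp*e(t) + Ki*integral(e) discretised with the bilinear
# transform yields exactly the per-axis recursion used above,
#   u[k] = u[k-1] + (Kp + Ki*T/2)*e[k] + (-Kp + Ki*T/2)*e[k-1],
# with a term Kd*(vel_error[k] - vel_error[k-1]) added on top of it.
def tustin_pi_step(u_prev, error, prev_error, kp, ki, period):
    return (u_prev
            + (kp + ki * period / 2) * error
            + (-kp + ki * period / 2) * prev_error)

# Example: with Kp=0.5, Ki=0.1, T=0.02 and a constant unit error, the first step
# jumps by roughly Kp and every later step integrates Ki*T on top.
u, prev_e = 0.0, 0.0
for _ in range(3):
    u = tustin_pi_step(u, 1.0, prev_e, kp=0.5, ki=0.1, period=0.02)
    prev_e = 1.0
print(u)  # ~0.505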
def save(reference_pos, experience, json_data, hdf5_data,
         prediction) -> Union[float, None]:
    loss = None
    # only save in running state
    if experience.done != TerminationType.NotDone:
        return loss

    # store previous observation
    hdf5_data["observation"].append(deepcopy(experience.observation))

    # store velocity
    action = experience.action.value
    json_data["velocities"].append(
        [action[0], action[1], action[2], action[5]])

    # store prediction
    json_data["predictions"].append([float(p) for p in prediction])

    # store relative target location
    relative_reference_position = transform(
        points=[np.asarray(reference_pos)],
        orientation=experience.info["position"][3:],
        translation=np.asarray(experience.info["position"][:3]),
        invert=True,
    )[0]
    if TARGET == "line":  # scale to fixed length
        norm = np.sqrt(np.sum(relative_reference_position**2))
        ref_distance = 0.5
        relative_reference_position = relative_reference_position * ref_distance / norm

    json_data["relative_target_location"].append(
        list(relative_reference_position))

    # store global target location
    json_data["global_target_location"].append(list(reference_pos))

    # store global drone location
    json_data["global_drone_pose"].append(list(experience.info["position"]))

    # calculate loss
    if DS_TASK == "velocities":
        output = [
            prediction[0], prediction[1], prediction[2], 0, 0, prediction[3]
        ]
        loss = np.sqrt(
            np.mean([(action[x] - output[x])**2 for x in [0, 1, 2, 5]]))
    else:
        loss = np.sqrt(
            np.mean([(relative_reference_position[x] - prediction[x])**2
                     for x in [0, 1, 2]]))
    json_data["rmse"].append(loss)
    return loss
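# When DS_TASK == "velocities", the loss above is the root-mean-square error over the
# commanded axes (linear x, y, z and yaw, i.e. action indices 0, 1, 2 and 5); otherwise
# it is the RMSE over the relative target position. A tiny self-contained check of the
# first formula (the numbers are made up):
import numpy as np

action = [0.2, 0.0, -0.1, 0.0, 0.0, 0.3]
output = [0.1, 0.1, -0.1, 0.0, 0.0, 0.2]
rmse = np.sqrt(np.mean([(action[i] - output[i]) ** 2 for i in [0, 1, 2, 5]]))
print(round(rmse, 3))  # 0.087 == sqrt((0.01 + 0.01 + 0.0 + 0.01) / 4)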
    def test_transform_points(self):
        pos_a = PointStamped()
        pos_a.header.frame_id = 'world_a'
        pos_a.point.x = 1
        pos_b = PointStamped()
        pos_b.header.frame_id = 'world_b'
        pos_b.point.y = 1
        pos_a.point, pos_b.point = transform([pos_a.point, pos_b.point],
                                             R.from_euler(
                                                 'XYZ', (0, 0, 3.14 / 2),
                                                 degrees=False).as_matrix())
        results_array = transform(
            [np.asarray([1, 0, 0]),
             np.asarray([0, 1, 0])],
            R.from_euler('XYZ', (0, 0, 3.14 / 2), degrees=False).as_matrix())

        self.assertAlmostEqual(pos_a.point.x, results_array[0][0])
        self.assertAlmostEqual(pos_a.point.y, results_array[0][1])
        self.assertAlmostEqual(pos_a.point.z, results_array[0][2])

        self.assertAlmostEqual(pos_b.point.x, results_array[1][0])
        self.assertAlmostEqual(pos_b.point.y, results_array[1][1])
        self.assertAlmostEqual(pos_b.point.z, results_array[1][2])
def save(reference_pos, experience, json_data, hdf5_data, mask=None):
    # only save in running state
    if experience.done != TerminationType.NotDone:
        return

    if mask is None:
        # calculate mask
        mask = deepcopy(experience.observation)
        mask = np.mean(mask, axis=2)
        mask[mask == 1] = 0
        mask[mask != 0] = 1

    # don't save if the target is not in view
    if np.sum(mask) < (1000 if TARGET == 'gate' else 20):
        return

    # store mask
    hdf5_data["mask"].append(mask)

    # store previous observation
    hdf5_data["observation"].append(deepcopy(experience.observation))

    # store velocity
    action = experience.action.value
    json_data["velocities"].append(
        [action[0], action[1], action[2], action[5]])

    # store relative target location
    relative_reference_position = transform(
        points=[np.asarray(reference_pos)],
        orientation=experience.info['position'][3:],
        translation=np.asarray(experience.info['position'][:3]),
        invert=True)[0]
    if TARGET == 'line' or TARGET == 'red_line':  # scale to fixed length
        norm = np.sqrt(np.sum(relative_reference_position**2))
        ref_distance = 0.5
        relative_reference_position = relative_reference_position * ref_distance / norm

    json_data["relative_target_location"].append(
        list(relative_reference_position))

    # store global target location
    json_data["global_target_location"].append(list(reference_pos))

    # store global drone location
    json_data["global_drone_pose"].append(list(experience.info['position']))
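# A standalone sketch of the mask heuristic used above: average the image over its
# colour channels, treat exactly-white pixels (value 1) as background and everything
# else as foreground, then require a minimum number of foreground pixels before saving.
# The toy 4x4 image below is an illustrative assumption.
import numpy as np

observation = np.ones((4, 4, 3))      # all-white frame
observation[1:3, 1:3, :] = 0.2        # small dark target patch
mask = np.mean(observation, axis=2)
mask[mask == 1] = 0
mask[mask != 0] = 1
print(int(np.sum(mask)))              # 4 foreground pixels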
    def _select_next_waypoint_transform(self) -> Union[PointStamped, None]:
        # transform detected tags into the agent's base_link frame
        tags_in_base_link = {}
        for k in self._tag_transforms.keys():
            tags_in_base_link[k] = transform(
                points=[self._tag_transforms[k].pose.pose.position],
                orientation=self._optical_to_base_transformation['rotation'],
                translation=np.asarray(
                    self._optical_to_base_transformation['translation']),
                invert=True)[0]
        print(f'tags_in_base_link: {tags_in_base_link}')
        # for all tag transforms lying in front (x>0 in base_link frame), measure the distance
        distances = {
            k:
            np.sqrt(sum([tags_in_base_link[k].x**2,
                         tags_in_base_link[k].y**2]))
            for k in self._tag_transforms.keys() if tags_in_base_link[k].x > 0
        }
        # ignore tags that are already closer than the waypoint-reached distance
        further_distances = {
            k: distances[k]
            for k in distances.keys()
            if distances[k] > self._waypoint_reached_distance
        }
        print(f'distances: {distances}')
        # TODO: ignore tags with a too large covariance

        # sort tags and take closest
        sorted_distances = dict(
            sorted(further_distances.items(), key=lambda item: item[1]))
        if len(sorted_distances) == 0:
            return None
        else:
            tag_id = list(sorted_distances.keys())[0]
        # create a PointStamped message
        print(f'tag_id: {tag_id}')
        reference_point = PointStamped(point=Point(
            x=tags_in_base_link[tag_id].x,
            y=tags_in_base_link[tag_id].y,
            z=0  # the tag probably lies on the ground, so fly towards and above it
        ))
        reference_point.header.frame_id = 'agent'
        reference_point.header.stamp = self._tag_transforms[
            tag_id].header.stamp
        return reference_point
 def _reference_update(self, message: Union[PointStamped, Float32MultiArray]) -> None:
     if isinstance(message, PointStamped):
         pose_ref = message
         if message.header.frame_id == "agent":
             # transform from agent to world frame.
             pose_ref.point = transform(
                 points=[pose_ref.point],
                 orientation=R.from_euler('XYZ', (0, 0, self.real_yaw),
                                          degrees=False).as_matrix(),
                 translation=self.pose_est.position)[0]
     elif isinstance(message,
                     Float32MultiArray):  # in case of global waypoint
         pose_ref = message.data
         pose_ref = PointStamped(
             point=Point(x=pose_ref[0],
                         y=pose_ref[1],
                         z=1 if len(pose_ref) == 2 else pose_ref[2]))
     if pose_ref != self.pose_ref:  # reset pose error when new reference comes in.
         self.prev_pose_error = PointStamped()
         self.prev_vel_error = PointStamped()
         self.prev_cmd = Twist()
     self.pose_ref = pose_ref
    def _process_odometry(self, msg: Odometry, args: tuple) -> None:
        sensor_topic, sensor_stats = args
        # throttle to at most 7 Hz to avoid processing 1000 Hz simulation updates
        if self.last_measurement is not None:
            difference = get_timestamp(msg) - self.last_measurement

            if difference < 1./7:
                return
        self.last_measurement = get_timestamp(msg)

        # simulated robots use /gt_states, where the odometry twist is expressed in the global frame instead of the local frame
        if 'real' not in self._robot:
            _, _, yaw = euler_from_quaternion(msg.pose.pose.orientation)
            msg.twist.twist.linear = transform(points=[msg.twist.twist.linear],
                                               orientation=R.from_euler('XYZ', (0, 0, yaw)).as_matrix(),
                                               invert=True)[0]
        result = self.filter.kalman_correction(msg, self._control_period)
        if result is not None:
            self._store_datapoint(msg, 'observed')
            self._store_datapoint(result, 'adjusted')
            self.pose_est.pose = result.pose.pose
            self.vel_est.point.x = result.twist.twist.linear.x
            self.vel_est.point.y = result.twist.twist.linear.y
            self.vel_est.point.z = result.twist.twist.linear.z
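# _process_odometry above rotates the globally expressed /gt_states twist into the
# yaw-aligned body frame. A minimal numpy equivalent of that step, with the message
# replaced by a plain vector (an illustrative assumption):
import numpy as np
from scipy.spatial.transform import Rotation as R

def global_to_body_velocity(v_global, yaw):
    """Rotate a world-frame velocity into the yaw-rotated body frame."""
    rotation = R.from_euler('XYZ', (0, 0, yaw)).as_matrix()
    return rotation.T @ np.asarray(v_global)  # invert=True above corresponds to multiplying by R.T

# A robot yawed 90 degrees and moving along global +y moves along its own +x axis:
print(global_to_body_velocity([0.0, 1.0, 0.0], np.pi / 2))  # ~[1, 0, 0]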
    def _feedback(self):
        '''Whenever the target is reached, apply position feedback to the
        desired end position to remain in the correct spot and compensate for
        drift. Tustin discretized PID controller for x, y and z.
        Returns a twist containing the control command.
        '''

        # PID
        prev_pose_error = self.prev_pose_error

        pose_error = PointStamped()
        pose_error.header.frame_id = "world"
        pose_error.point.x = (self.pose_ref.point.x - self.pose_est.position.x)
        pose_error.point.y = (self.pose_ref.point.y - self.pose_est.position.y)
        pose_error.point.z = (self.pose_ref.point.z - self.pose_est.position.z)

        prev_vel_error = self.prev_vel_error
        vel_error = PointStamped()
        vel_error.header.frame_id = "world"
        vel_error.point.x = self.vel_ref.linear.x - self.vel_est.x
        vel_error.point.y = self.vel_ref.linear.y - self.vel_est.y

        pose_error.point, vel_error.point = transform(
            points=[pose_error.point, vel_error.point],
            orientation=R.from_euler('XYZ', (0, 0, -self.real_yaw),
                                     degrees=False).as_matrix())

        cmd = Twist()
        cmd.linear.x = max(
            -self.max_input,
            min(self.max_input,
                (self.prev_cmd.linear.x +
                 (self.Kp_x + self.Ki_x * self._sample_time / 2) *
                 pose_error.point.x +
                 (-self.Kp_x + self.Ki_x * self._sample_time / 2) *
                 prev_pose_error.point.x + self.Kd_x *
                 (vel_error.point.x - prev_vel_error.point.x))))

        cmd.linear.y = max(
            -self.max_input,
            min(self.max_input,
                (self.prev_cmd.linear.y +
                 (self.Kp_y + self.Ki_y * self._sample_time / 2) *
                 pose_error.point.y +
                 (-self.Kp_y + self.Ki_y * self._sample_time / 2) *
                 prev_pose_error.point.y + self.Kd_y *
                 (vel_error.point.y - prev_vel_error.point.y))))

        cmd.linear.z = max(
            -self.max_input,
            min(self.max_input,
                (self.prev_cmd.linear.z +
                 (self.Kp_z + self.Ki_z * self._sample_time / 2) *
                 pose_error.point.z +
                 (-self.Kp_z + self.Ki_z * self._sample_time / 2) *
                 prev_pose_error.point.z + self.Kd_z *
                 (vel_error.point.z - prev_vel_error.point.z)
                 )))  # Added derivative term

        # if target is more than 1m away, look in that direction
        if np.sqrt(pose_error.point.x**2 + pose_error.point.y**2) > 1:
            # arctan2 covers all quadrants (and x == 0) and already returns the
            # smallest angle in (-pi, pi]
            angle_error = np.arctan2(pose_error.point.y, pose_error.point.x)
            cmd.angular.z = (self.K_theta * angle_error)
            self.desired_yaw = self.real_yaw  # update desired looking direction
        else:  # else look at reference point
            if self.desired_yaw is not None:  # in case there is a desired yaw
                angle_error = ((((self.desired_yaw - self.real_yaw) - np.pi) %
                                (2 * np.pi)) - np.pi)
                K_theta = self.K_theta + (np.pi -
                                          abs(angle_error)) / np.pi * 0.2
                cmd.angular.z = (K_theta * angle_error)

        self.prev_pose_error = pose_error
        self.prev_vel_error = vel_error
        self.prev_cmd = cmd
        return cmd
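# Both _feedback variants wrap the yaw error before applying the proportional yaw gain.
# A minimal helper capturing that wrap (the helper name is an assumption):
import numpy as np

def wrap_angle(angle: float) -> float:
    """Map any angle to the equivalent value in [-pi, pi)."""
    return ((angle - np.pi) % (2 * np.pi)) - np.pi

# Example: a 350-degree yaw error becomes a short -10-degree turn rather than a long
# positive one.
print(np.degrees(wrap_angle(np.radians(350))))  # ~ -10.0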
    def test_april_tag_detector_gazebo_KF(self):
        self.output_dir = f'{get_data_dir(os.environ["CODEDIR"])}/test_dir/{get_filename_without_extension(__file__)}'
        os.makedirs(self.output_dir, exist_ok=True)

        self._config = {
            'output_path': self.output_dir,
            'world_name': 'april_tag',
            'robot_name': 'drone_sim_down_cam',
            'gazebo': True,
            'fsm': True,
            'fsm_mode': 'TakeOverRun',
            'control_mapping': True,
            'control_mapping_config': 'mathias_controller_keyboard',
            'april_tag_detector': True,
            'altitude_control': False,
            'mathias_controller_with_KF': True,
            'starting_height': 1.,
            'keyboard': True,
            'mathias_controller_config_file_path_with_extension':
                f'{os.environ["CODEDIR"]}/src/sim/ros/config/actor/mathias_controller_with_KF.yml',
        }

        # spinoff roslaunch
        self._ros_process = RosWrapper(launch_file='load_ros.launch',
                                       config=self._config,
                                       visible=True)

        # subscribe to command control
        self.visualisation_topic = '/actor/mathias_controller/visualisation'
        subscribe_topics = [
            TopicConfig(
                topic_name=rospy.get_param('/robot/position_sensor/topic'),
                msg_type=rospy.get_param('/robot/position_sensor/type')),
            TopicConfig(topic_name='/fsm/state', msg_type='String'),
            TopicConfig(topic_name='/reference_pose', msg_type='PointStamped'),
            TopicConfig(topic_name=self.visualisation_topic, msg_type='Image')
        ]
        publish_topics = [
            TopicConfig(topic_name='/fsm/reset', msg_type='Empty'),
        ]

        self.ros_topic = TestPublisherSubscriber(
            subscribe_topics=subscribe_topics, publish_topics=publish_topics)

        safe_wait_till_true(
            '"/fsm/state" in kwargs["ros_topic"].topic_values.keys()',
            True,
            235,
            0.1,
            ros_topic=self.ros_topic)
        self.assertEqual(self.ros_topic.topic_values['/fsm/state'].data,
                         FsmState.Unknown.name)

        index = 0
        while True:
            print(f'start loop: {index} with resetting')
            # publish reset
            self.ros_topic.publishers['/fsm/reset'].publish(Empty())
            rospy.sleep(0.5)

            print('waiting in overtake state')
            while self.ros_topic.topic_values[
                    "/fsm/state"].data != FsmState.Running.name:
                rospy.sleep(0.5)

            # safe_wait_till_true('"/reference_ground_point" in kwargs["ros_topic"].topic_values.keys()',
            #                    True, 10, 0.1, ros_topic=self.ros_topic)
            waypoints = []
            print('waiting in running state')
            while self.ros_topic.topic_values[
                    "/fsm/state"].data != FsmState.TakenOver.name:
                if '/reference_pose' in self.ros_topic.topic_values.keys() \
                        and '/bebop/odom' in self.ros_topic.topic_values.keys():
                    odom = self.ros_topic.topic_values[rospy.get_param(
                        '/robot/position_sensor/topic')]
                    point = transform(
                        [self.ros_topic.topic_values['/reference_pose'].point],
                        orientation=odom.pose.pose.orientation,
                        translation=odom.pose.pose.position)[0]
                    waypoints.append(point)
                rospy.sleep(0.5)
            if len(waypoints) != 0:
                plt.clf()
                fig = plt.figure()
                ax = fig.add_subplot(111, projection='3d')
                ax.scatter([_.x for _ in waypoints], [_.y for _ in waypoints],
                           [_.z for _ in waypoints],
                           label='waypoints')
                ax.legend()
                plt.savefig(os.path.join(self.output_dir,
                                         f'image_{index}.jpg'))
                # plt.show()
            index += 1
    def tweak_combined_axis_keyboard(self, measured_data, index,
                                     point_ref_drone):
        initial_point_ref_drone = copy.deepcopy(point_ref_drone)
        # wait for the FSM to reach the TakenOver state
        safe_wait_till_true(
            'kwargs["ros_topic"].topic_values["/fsm/state"].data',
            FsmState.TakenOver.name,
            20,
            0.1,
            ros_topic=self.ros_topic)

        # once the drone is in a good starting position, invoke 'go' with key m
        while self.ros_topic.topic_values[
                "/fsm/state"].data != FsmState.Running.name:
            # while taking off, update reference point for PID controller to remain at same height
            self.ros_topic.publishers[self._reference_topic].publish(
                PointStamped(header=Header(frame_id="agent"),
                             point=Point(x=point_ref_drone[0],
                                         y=point_ref_drone[1],
                                         z=point_ref_drone[2])))
            last_pose = self.get_pose()
            rospy.sleep(0.5)

        measured_data[index] = {'x': [], 'y': [], 'z': [], 'yaw': []}
        point_ref_global = transform(points=[np.asarray(point_ref_drone)],
                                     orientation=R.from_euler(
                                         'XYZ', (0, 0, last_pose[-1]),
                                         degrees=False).as_matrix(),
                                     translation=np.asarray(last_pose[:3]))[0]

        # Mathias controller should keep drone in steady pose
        while self.ros_topic.topic_values[
                "/fsm/state"].data != FsmState.TakenOver.name:
            point_drone_global = self.get_pose()
            point_ref_drone = Point(
                x=point_ref_global[0] - point_drone_global[0],
                y=point_ref_global[1] - point_drone_global[1],
                z=point_ref_global[2] - point_drone_global[2])
            # rotate pose error to rotated frame
            pose_error_local = transform(points=[point_ref_drone],
                                         orientation=R.from_euler(
                                             'XYZ', (0, 0, -last_pose[-1]),
                                             degrees=False).as_matrix())[0]
            measured_data[index]['x'].append(pose_error_local.x)
            measured_data[index]['y'].append(pose_error_local.y)
            measured_data[index]['z'].append(pose_error_local.z)
            # measured_data[index]['yaw'].append(last_pose[-1])
            rospy.sleep(0.5)

        if False and '/bebop/land' in self.ros_topic.publishers.keys():  # landing disabled; set to True to land automatically
            self.ros_topic.publishers['/bebop/land'].publish(Empty())

        colors = ['C0', 'C1', 'C2', 'C3', 'C4']
        styles = {'x': '-', 'y': '--', 'z': ':', 'yaw': '-.'}
        fig = plt.figure(figsize=(15, 15))
        for key in measured_data.keys():
            for a in measured_data[key].keys():
                if len(measured_data[key][a]) != 0:
                    plt.plot(measured_data[key][a],
                             linestyle=styles[a],
                             linewidth=3 if key == index else 1,
                             color=colors[key % len(colors)],
                             label=f'{key}: {a}')
        plt.legend()
        #plt.savefig(os.path.join(self.output_dir, 'measured_data.jpg'))
        plt.show()

        # print visualisation if it's in ros topic:
        if self.visualisation_topic in self.ros_topic.topic_values.keys():
            frame = process_image(
                self.ros_topic.topic_values[self.visualisation_topic])
            plt.figure(figsize=(15, 20))
            plt.imshow(frame)
            plt.axis('off')
            plt.tight_layout()
            plt.savefig(
                f'{os.environ["HOME"]}/kf_study/pid_tweak_{initial_point_ref_drone[0]}_{initial_point_ref_drone[1]}_{initial_point_ref_drone[2]}.png'
            )
        plt.clf()
        plt.close()

        index += 1
        index %= len(colors)
        return index