Example #1
def do_transform_transform(trafo, transform):
    # Compose two transforms: apply `transform` on top of `trafo` and return
    # the result as a TransformStamped carrying `transform`'s header.
    f = transform_to_kdl(transform) * transform_to_kdl(trafo)
    res = geometry_msgs.msg.TransformStamped()
    res.transform.translation.x = f[0, 3]
    res.transform.translation.y = f[1, 3]
    res.transform.translation.z = f[2, 3]
    (res.transform.rotation.x, res.transform.rotation.y,
     res.transform.rotation.z, res.transform.rotation.w) = f.M.GetQuaternion()
    res.header = transform.header
    return res
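Usage note: a minimal sketch (hypothetical frame names and values) showing how the helper above composes two TransformStamped messages; the result carries the header of the second argument.

import geometry_msgs.msg

# Hypothetical inputs: map->odom and odom->base transforms (identity rotations).
t_map_odom = geometry_msgs.msg.TransformStamped()
t_map_odom.header.frame_id = 'map'
t_map_odom.transform.translation.x = 1.0
t_map_odom.transform.rotation.w = 1.0

t_odom_base = geometry_msgs.msg.TransformStamped()
t_odom_base.header.frame_id = 'odom'
t_odom_base.transform.translation.y = 2.0
t_odom_base.transform.rotation.w = 1.0

# map->base: translation (1.0, 2.0, 0.0); header is taken from t_map_odom.
t_map_base = do_transform_transform(t_odom_base, t_map_odom)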
Example #2
    def set_workspace_shape(self, req):
        ray_tf = self.pointing_model.pointing_ray()

        req_ws_shape = self.ws_shape

        if req.robot_frame:
            self.robot_frame = req.robot_frame

        if ray_tf:
            ray_kdl_frame = transform_to_kdl(ray_tf)
            robot_kdl_frame = self.frame_from_tf(self.human_frame, self.robot_frame) #self.ws_shape.frame_id, self.robot_frame)
            ray_origin_kdl_frame = self.frame_from_tf(self.human_frame, self.ray_origin_frame)

            if robot_kdl_frame and ray_origin_kdl_frame:
                if req.workspace_shape == SetWorkspaceShapeRequest.WORKSPACE_XY_PLANE:
                    req_ws_shape = self.xy_plane
                elif req.workspace_shape == SetWorkspaceShapeRequest.WORKSPACE_VISUAL_PLANE:
                    req_ws_shape = self.vis_plane
                elif req.workspace_shape == SetWorkspaceShapeRequest.WORKSPACE_CYLINDER:
                    req_ws_shape = self.cylinder
                elif req.workspace_shape == SetWorkspaceShapeRequest.WORKSPACE_SPHERE:
                    req_ws_shape = self.sphere
                else:
                    raise rospy.ServiceException('Unsupported shape type')
                    # return None

                cur_pointer_pose = self.get_pointer(self.ws_shape, ray_kdl_frame, self.ws_shape.point)

                if self.switch_at_pointer:
                    switch_point = copy.deepcopy(tfc.fromMsg(cur_pointer_pose).p)
                else:
                    switch_point = copy.deepcopy(robot_kdl_frame.p)


                new_pointer_pose = self.get_pointer(req_ws_shape, ray_kdl_frame, switch_point)

                eyes_robot_vector = kdl.Vector(robot_kdl_frame.p - ray_origin_kdl_frame.p)

                robot_elev_frame = self.pointing_model._frame_from_direction(ray_origin_kdl_frame.p, eyes_robot_vector)
                _, pitch, _ = robot_elev_frame.M.GetRPY()

                if math.fabs(pitch) < self.min_elevation_angle:
                    raise rospy.ServiceException('Drone elevation angle is less than {} deg: {}. Ignoring'
                        .format(math.degrees(self.min_elevation_angle), math.degrees(pitch)))
                else:
                    # Since the safety check succeeded we can update the pass_point of the shape
                    new_pointer_pose = self.get_pointer(req_ws_shape, ray_kdl_frame, switch_point, cache = True)

            else:
                raise rospy.ServiceException('Unable to obtain robot\'s frame. Is robot localized?')
                # return None

        else:
            raise rospy.ServiceException('Internal error: failed to get ray frame')
            # return None

        self.ws_shape = req_ws_shape

        return SetWorkspaceShapeResponse()
Example #3
    def _cb_object(self, msg_bbox_array, msg_tracking_labels):

        if msg_bbox_array.header.frame_id != self._frame_fixed:
            rospy.logerr(
                'frame_id of bbox array ({}) must match `~frame_fixed` ({})'
                .format(msg_bbox_array.header.frame_id, self._frame_fixed))
            return

        time_observed = msg_tracking_labels.header.stamp

        try:
            pykdl_transform_fixed_to_robot = tf2_geometry_msgs.transform_to_kdl(
                self._tf_buffer.lookup_transform(self._frame_fixed,
                                                 self._frame_robot,
                                                 time_observed,
                                                 timeout=rospy.Duration(
                                                     self._timeout_transform)))
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            rospy.logerr('lookup transform failed. {}'.format(e))
            return

        with self._lock_for_dict_objects:
            # add new object and update existing object state
            for bbox, tracking_label in zip(msg_bbox_array.boxes,
                                            msg_tracking_labels.labels):

                if bbox.label not in self._thresholds_distance.keys():
                    continue

                if tracking_label.id not in self._dict_objects:
                    if len(self._dict_objects) < self._num_max_track:
                        self._dict_objects[tracking_label.id] = \
                            TrackedObject(
                                bbox.label,
                                convert_msg_point_to_kdl_vector(bbox.pose.position),
                                time_observed
                                )
                    else:
                        rospy.logwarn(
                            'number of objects exceeds max track. dropped.')
                else:
                    self._dict_objects[tracking_label.id].update(
                        convert_msg_point_to_kdl_vector(bbox.pose.position),
                        time_observed, pykdl_transform_fixed_to_robot.p,
                        self._threshold_velocity,
                        self._thresholds_distance[str(bbox.label)])
            # check if there is lost object
            to_be_removed = []
            for key in self._dict_objects:
                is_lost = self._dict_objects[key].checkLost(
                    time_observed, self._threshold_lost)
                if is_lost:
                    to_be_removed.append(key)
            # remove lost object from dict
            for key in to_be_removed:
                self._dict_objects.pop(key)
Example #4
    def get_robot_pose(self):

        try:
            frame_odom_to_base = tf2_geometry_msgs.transform_to_kdl(
                self.tf_buffer.lookup_transform('odom', 'base_link',
                                                rospy.Time()))
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            return None
        return frame_odom_to_base
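A hedged usage sketch, assuming a hypothetical instance `node` of the class above: the lookup may fail, in which case the method returns None.

frame = node.get_robot_pose()
if frame is not None:
    x, y = frame.p.x(), frame.p.y()   # planar position of base_link in odom
    _, _, yaw = frame.M.GetRPY()      # heading about the z axis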
Example #5
def get_initial_state(bag: rosbag.Bag):
    i = 0
    j = 0
    tf_table = None
    tf_puck_start = None
    tf_puck_10 = None
    for topic, msg, t in bag.read_messages("/tf"):
        if msg.transforms[0].child_frame_id == "Puck":
            if i == 0:
                tf_puck_start = msg.transforms[0]
            if i == 10:
                tf_puck_10 = msg.transforms[0]
                break
            i += 1

        if msg.transforms[0].child_frame_id == "Table":
            if j == 0:
                tf_table = msg.transforms[0]
            j += 1

    puck_start_T = tf2_geometry_msgs.transform_to_kdl(tf_table).Inverse() * \
                   tf2_geometry_msgs.transform_to_kdl(tf_puck_start)
    puck_10_T = tf2_geometry_msgs.transform_to_kdl(tf_table).Inverse() * \
                tf2_geometry_msgs.transform_to_kdl(tf_puck_10)
    _, _, yaw_start = puck_start_T.M.GetRPY()
    _, _, yaw_10 = puck_10_T.M.GetRPY()

    t_start = tf_puck_start.header.stamp.to_sec()
    t_10 = tf_puck_10.header.stamp.to_sec()

    p_start = np.array([puck_start_T.p.x(), puck_start_T.p.y()])
    p_10 = np.array([puck_10_T.p.x(), puck_10_T.p.y()])
    lin_vel_start = (p_10 - p_start) / (t_10 - t_start)
    ang_vel_start = angles.shortest_angular_distance(yaw_start,
                                                     yaw_10) / (t_10 - t_start)

    return np.hstack([p_start,
                      yaw_start]), np.hstack([lin_vel_start,
                                              ang_vel_start]), tf_table
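A hedged usage sketch, assuming a hypothetical bag file that contains /tf messages for the Puck and Table frames:

import rosbag

with rosbag.Bag('air_hockey_run.bag') as bag:  # hypothetical file name
    pose0, vel0, tf_table = get_initial_state(bag)
    # pose0 = [x, y, yaw] and vel0 = [vx, vy, yaw_rate], both in the Table frame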
Example #6
    def run(self):
        loop_rate = rospy.Rate(self.publish_rate)

        while not rospy.is_shutdown():
            try:
                loop_rate.sleep()

                if self.ws_shape:
                    if self.ws_shape != self.last_ws_shape:
                        self.pub_workspace_shape.publish(self.get_shape_name(self.ws_shape))
                        self.last_ws_shape = self.ws_shape

                ray_tf = self.pointing_model.pointing_ray()

                if ray_tf:
                    ray_kdl_frame = transform_to_kdl(ray_tf)
                    self.tf_br.sendTransform(ray_tf)

                    ray_pose = PoseStamped()
                    ray_pose.header = ray_tf.header
                    ray_pose.pose = tfc.toMsg(ray_kdl_frame)
                    self.pub_pointing_ray.publish(ray_pose)

                    ray_origin_kdl_frame = self.frame_from_tf(self.human_frame, self.ray_origin_frame)
                    shape_origin_kdl_frame = self.frame_from_tf(self.human_frame, self.ws_shape.frame_id)

                    if ray_origin_kdl_frame and shape_origin_kdl_frame:
                        pointer_pose = self.get_pointer(self.ws_shape, ray_kdl_frame)

                        if pointer_pose:
                            m_msg = self.create_rviz_marker(self.ws_shape, self.ws_shape.point - copy.deepcopy(shape_origin_kdl_frame.p))

                            if m_msg:
                                self.pub_markers.publish(m_msg)

                            pose_msg = PoseStamped()
                            pose_msg.header = ray_tf.header
                            pose_msg.pose = pointer_pose

                            self.pub_arm_pointer.publish(pose_msg)

                            eyes_pointer = tfc.fromMsg(pointer_pose).p - ray_origin_kdl_frame.p
                            pr_marker_msg = self.create_pointing_ray_marker(self.pointing_model.frame_id, eyes_pointer.Norm())

                            if pr_marker_msg:
                                self.pub_markers.publish(pr_marker_msg)

            except rospy.ROSException as e:
                if str(e) == 'ROS time moved backwards':
                    rospy.logwarn('Saw a negative time change, resetting.')
Example #7
                def pose_cb(msg):
                    # Extract the actual message
                    if eval_func:
                        real_msg = copy.deepcopy(eval_func(msg))
                    else:
                        real_msg = copy.deepcopy(msg)

                    hor_d, full_d, yaw = self.distance(real_msg,
                                                       self.robot_current_pose)
                    dist = full_d

                    if goal.ignore_z or goal.override_z > 0.0:
                        try:
                            # Find transform from given frame to 'World' frame
                            trans = self.tf_buff.lookup_transform(
                                self.robot_current_pose.header.frame_id,
                                real_msg.header.frame_id, rospy.Time())
                        except (tf2_ros.LookupException,
                                tf2_ros.ConnectivityException,
                                tf2_ros.ExtrapolationException) as e:
                            rospy.logerr_throttle(10.0, e)
                            return

                        # Convert pose to 'World' frame
                        pose_trans = tf2_geometry_msgs.do_transform_pose(
                            real_msg, trans)
                        # Override its altitude
                        if goal.override_z > 0.0:
                            pose_trans.pose.position.z = goal.override_z
                        else:
                            pose_trans.pose.position.z = self.robot_current_pose.pose.position.z

                        ## Convert the pose back to its original frame
                        # Get inversed transform as PyKDL.Frame
                        k = tf2_geometry_msgs.transform_to_kdl(trans).Inverse()
                        trans_inv = self.kdl_to_transform(k)

                        old_header = real_msg.header
                        real_msg = tf2_geometry_msgs.do_transform_pose(
                            pose_trans, trans_inv)
                        real_msg.header = old_header
                        dist = hor_d
Example #8
    def _cb_bbox_array(self, msg):

        frame_id_msg = msg.header.frame_id
        frame_id_robot = self._frame_id_robot

        try:
            frame_fixed_to_robot = tf2_geometry_msgs.transform_to_kdl(
                self._tf_buffer.lookup_transform(frame_id_robot,
                                                 frame_id_msg,
                                                 msg.header.stamp,
                                                 timeout=rospy.Duration(
                                                     self._timeout_transform)))
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            rospy.logerr('lookup transform failed. {}'.format(e))
            return

        # check if there is a person
        is_person_visible = False
        people_pose_array = PoseArray()
        people_pose_array.header.stamp = msg.header.stamp
        people_pose_array.header.frame_id = frame_id_robot
        for bbox in msg.boxes:
            if bbox.label == self._label_person:
                vector_bbox = convert_msg_point_to_kdl_vector(
                    bbox.pose.position)
                vector_bbox_robotbased = frame_fixed_to_robot * vector_bbox
                rospy.logdebug('Person found at the distance {}'.format(
                    vector_bbox_robotbased.Norm()))
                if vector_bbox_robotbased.Norm() < self._dist_visible:
                    pose = Pose()
                    pose.position.x = vector_bbox_robotbased[0]
                    pose.position.y = vector_bbox_robotbased[1]
                    pose.position.z = vector_bbox_robotbased[2]
                    pose.orientation.w = 1.0
                    people_pose_array.poses.append(pose)
                    is_person_visible = True
                    break
        self._pub_people_pose_array.publish(people_pose_array)
        self._pub_visible.publish(Bool(is_person_visible))
Example #9
class FsmNode():
    def __init__(self):
        self.beacons_set = set(rospy.get_param('~beacons_set', [1, 2, 3]))
        self.beacons_ns = rospy.get_param('~beacons_ns', '/')
        self.beacons_prefix = rospy.get_param('~beacons_prefix', 'beacon')

        # Next target
        c_next = rospy.get_param('~color_next', {
            'index': 255,
            'color': {
                'r': 0.0,
                'g': 1.0,
                'b': 1.0,
                'a': 0.1
            }
        })
        self.color_next = NeoPixelColor(index=c_next['index'],
                                        color=ColorRGBA(**c_next['color']))

        # Wait for confirmation
        c_wait = rospy.get_param('~color_wait', {
            'index': 255,
            'color': {
                'r': 1.0,
                'g': 0.4,
                'b': 0.0,
                'a': 0.1
            }
        })
        self.color_wait = NeoPixelColor(index=c_wait['index'],
                                        color=ColorRGBA(**c_wait['color']))

        # Confirmed
        c_confirm = rospy.get_param('~color_confirm', {
            'index': 255,
            'color': {
                'r': 0.0,
                'g': 1.0,
                'b': 0.0,
                'a': 0.2
            }
        })
        self.color_confirm = NeoPixelColor(
            index=c_confirm['index'], color=ColorRGBA(**c_confirm['color']))

        # Complete
        c_complete = rospy.get_param('~color_complete', {
            'index': 255,
            'color': {
                'r': 0.0,
                'g': 1.0,
                'b': 1.0,
                'a': 0.2
            }
        })
        self.color_complete = NeoPixelColor(
            index=c_complete['index'], color=ColorRGBA(**c_complete['color']))

        # Clear pixels
        self.color_clear = NeoPixelColor(index=255, color=ColorRGBA())

        relloc_fsm_state_topic = rospy.get_param('~relloc_fsm_state_topic',
                                                 'drone_relloc_fsm/state')
        self.sub_relloc_fsm_state = rospy.Subscriber(relloc_fsm_state_topic,
                                                     String,
                                                     self.relloc_fsm_state_cb)
        self.relloc_fsm_event = threading.Event()
        self.relloc_fsm_state = None

        self.tf_buff = tf2_ros.Buffer()
        self.tf_ls = tf2_ros.TransformListener(self.tf_buff)

        robot_current_pose_topic = rospy.get_param('~robot_current_pose_topic',
                                                   '/optitrack/robot')
        self.sub_current_pose = rospy.Subscriber(robot_current_pose_topic,
                                                 PoseStamped,
                                                 self.robot_current_pose_cb)
        self.robot_current_pose = None

        self.ignore_z = rospy.get_param('~ignore_z', True)
        self.target_threshold_xy = rospy.get_param('~target_threshold_xy',
                                                   0.10)  # 10cm
        self.target_threshold_z = rospy.get_param('~target_threshold_z',
                                                  0.20)  # 20cm

        self.state_name_topic = rospy.get_param('~state_name_topic', '~state')
        self.pub_state_name = rospy.Publisher(self.state_name_topic,
                                              String,
                                              queue_size=10,
                                              latch=True)

        self.pubs_beacon_color = {
            it: rospy.Publisher(self.beacons_ns + self.beacons_prefix +
                                str(it) + '/color',
                                NeoPixelColor,
                                queue_size=10)
            for it in self.beacons_set
        }

        self.sm = smach.StateMachine(outcomes=['FINISH'])

        self.sm.userdata.targets_list = self.generate_targets_list(
            self.beacons_set)

        rospy.loginfo('Targets list: {}'.format(self.sm.userdata.targets_list))

        with self.sm:
            smach.StateMachine.add('INIT_ALL_CLEAR',
                                   smach.CBState(self.set_all_color,
                                                 cb_kwargs={
                                                     'context': self,
                                                     'color': self.color_clear,
                                                     'delay': 1.0
                                                 }),
                                   transitions={
                                       'done': 'WAIT_FOR_RELLOC',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('WAIT_FOR_RELLOC',
                                   smach.CBState(
                                       self.wait_for_relloc_fsm_state,
                                       cb_kwargs={
                                           'context': self,
                                           'expected_state': 'FOLLOW_POINTING'
                                       }),
                                   transitions={
                                       'ready': 'CHOOSE_NEXT',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('CHOOSE_NEXT',
                                   smach.CBState(self.choose_next_target,
                                                 cb_kwargs={'context': self}),
                                   transitions={
                                       'done': 'INDICATE_NEXT',
                                       'empty': 'WAIT_FOR_LANDING',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('INDICATE_NEXT',
                                   smach.CBState(self.set_color,
                                                 cb_kwargs={
                                                     'context': self,
                                                     'color': self.color_next
                                                 }),
                                   transitions={
                                       'done': 'WAIT_TO_ENTER',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('WAIT_TO_ENTER',
                                   smach.CBState(self.wait_to_enter,
                                                 cb_kwargs={'context': self}),
                                   transitions={
                                       'entered': 'INDICATE_WAIT',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('INDICATE_WAIT',
                                   smach.CBState(self.set_color,
                                                 cb_kwargs={
                                                     'context': self,
                                                     'color': self.color_wait
                                                 }),
                                   transitions={
                                       'done': 'WAIT_TO_CONFIRM',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('WAIT_TO_CONFIRM',
                                   smach.CBState(self.wait_to_confirm,
                                                 cb_kwargs={'context': self}),
                                   transitions={
                                       'confirmed': 'INDICATE_CONFIRMED',
                                       'canceled': 'INDICATE_NEXT',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('INDICATE_CONFIRMED',
                                   smach.CBState(self.set_color,
                                                 cb_kwargs={
                                                     'context': self,
                                                     'color':
                                                     self.color_confirm,
                                                     'delay': 0.5
                                                 }),
                                   transitions={
                                       'done': 'INDICATE_CLEAR',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('INDICATE_CLEAR',
                                   smach.CBState(self.set_color,
                                                 cb_kwargs={
                                                     'context': self,
                                                     'color': self.color_clear
                                                 }),
                                   transitions={
                                       'done': 'CHOOSE_NEXT',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('WAIT_FOR_LANDING',
                                   smach.CBState(
                                       self.wait_for_relloc_fsm_state,
                                       cb_kwargs={
                                           'context': self,
                                           'expected_state': 'LAND'
                                       }),
                                   transitions={
                                       'ready': 'INDICATE_ALL_COMPLETE',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('INDICATE_ALL_COMPLETE',
                                   smach.CBState(self.set_all_color,
                                                 cb_kwargs={
                                                     'context': self,
                                                     'color':
                                                     self.color_complete,
                                                     'delay': 3.0
                                                 }),
                                   transitions={
                                       'done': 'INDICATE_ALL_CLEAR',
                                       'preempted': 'FINISH'
                                   })

            smach.StateMachine.add('INDICATE_ALL_CLEAR',
                                   smach.CBState(self.set_all_color,
                                                 cb_kwargs={
                                                     'context': self,
                                                     'color': self.color_clear
                                                 }),
                                   transitions={
                                       'done': 'WAIT_FOR_RELLOC',
                                       'preempted': 'FINISH'
                                   })

            # smach.StateMachine.add('FINISH',
            #     smach.CBState(self.set_all_color, cb_kwargs = {'context': self, 'color': self.color_clear}),
            #     transitions = {'done': 'OVER',
            #                    'preempted': 'OVER'})

        self.sm.register_start_cb(self.state_transition_cb, cb_args=[self.sm])
        self.sm.register_transition_cb(self.state_transition_cb,
                                       cb_args=[self.sm])
        self.sis = smach_ros.IntrospectionServer('smach_server', self.sm,
                                                 '/SM_BEACONS')

    def generate_targets_list(self, targets):
        tt = np.random.choice(list(targets), size=len(targets), replace=False)
        targets_list = tt.tolist()
        targets_list.append(
            tt[0]
        )  # Add the first target again since we are discarding the first segment
        return targets_list

    def relloc_fsm_state_cb(self, msg):
        self.relloc_fsm_state = msg.data
        self.relloc_fsm_event.set()

    def state_transition_cb(self, *args, **kwargs):
        states = args[2].get_active_states()
        s = '-'.join(states)

        # rospy.loginfo(args[0].__dict__)

        self.pub_state_name.publish(String(s))

    def robot_current_pose_cb(self, msg):
        # E.g. /optitrack/bebop in 'World' frame
        self.robot_current_pose = msg

    def distance_to_target(self, target_id):
        target_frame_id = self.beacons_prefix + str(target_id)

        if not self.robot_current_pose:
            return float('inf'), float('inf')

        try:
            # Beacon in robot's frame
            t = self.tf_buff.lookup_transform(
                self.robot_current_pose.header.frame_id, target_frame_id,
                rospy.Time())
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            rospy.logerr_throttle(1.0, e)
            return float('inf'), float('inf')

        robot_p = tfc.fromMsg(self.robot_current_pose.pose)
        beacon_p = tf2_geometry_msgs.transform_to_kdl(t)

        # Ignore z-axis?
        # if self.ignore_z:
        d_z = np.fabs(beacon_p.p[2] - robot_p.p[2])

        robot_p.p[2] = 0.0
        beacon_p.p[2] = 0.0

        d_xy = (beacon_p.p - robot_p.p).Norm()

        return d_xy, d_z
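A hedged sketch of how distance_to_target might be checked against the node's thresholds, assuming a hypothetical FsmNode instance `node` and a beacon id from ~beacons_set:

d_xy, d_z = node.distance_to_target(1)
if d_xy < node.target_threshold_xy and d_z < node.target_threshold_z:
    rospy.loginfo('Robot reached beacon 1')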
Example #10
class MocapRellocNode:
    def __init__(self):
        action_ns = rospy.get_param('~action_ns', '')
        robot_name = rospy.get_param('~robot_name')
        self.lock = threading.RLock()

        self.publish_rate = rospy.get_param('~publish_rate', 50)

        sec = rospy.get_param('~tf_exp_time', 90.0)
        self.tf_exp_time = rospy.Duration(sec)

        if self.tf_exp_time <= rospy.Duration(sys.float_info.epsilon):
            rospy.logwarn(
                "tf_exp_time is set to 0.0, the relloc transform will never expire!"
            )

        self.mocap_frame = rospy.get_param('~mocap_frame', 'World')

        self.human_frame = rospy.get_param('~human_frame_id',
                                           'human_footprint')
        self.robot_root_frame = rospy.get_param('~robot_root_frame',
                                                robot_name + '/odom')

        pointing_ray_topic = rospy.get_param('~pointing_ray_topic',
                                             'pointing_ray')
        self.sub_pointing_ray = rospy.Subscriber(pointing_ray_topic,
                                                 PoseStamped,
                                                 self.pointing_ray_cb)
        self.pointing_ray_msg = None

        self.ray_direction_frame = rospy.get_param('~ray_direction_frame',
                                                   'pointer')

        human_pose_topic = rospy.get_param('~human_pose_topic',
                                           '/optitrack/head')
        self.sub_human_pose = rospy.Subscriber(human_pose_topic, PoseStamped,
                                               self.human_pose_cb)
        self.human_pose_msg = None

        robot_pose_topic = rospy.get_param('~robot_pose_topic',
                                           '/optitrack/' + robot_name)
        self.sub_robot_pose = rospy.Subscriber(robot_pose_topic, PoseStamped,
                                               self.robot_pose_cb)
        self.robot_pose_msg = None

        self.cached_yaw = 0.0

        self.reset_state()

        self.tf_buff = tf2_ros.Buffer()
        self.tf_ls = tf2_ros.TransformListener(self.tf_buff)
        self.tf_br = tf2_ros.TransformBroadcaster()

        self.mocap_relloc_server = actionlib.SimpleActionServer(
            action_ns + '/relloc_action', EmptyAction, self.execute_relloc,
            False)
        self.mocap_relloc_server.start()

    def kdl_to_transform(self, k):
        t = TransformStamped()
        t.transform.translation.x = k.p.x()
        t.transform.translation.y = k.p.y()
        t.transform.translation.z = k.p.z()
        (t.transform.rotation.x, t.transform.rotation.y,
         t.transform.rotation.z, t.transform.rotation.w) = k.M.GetQuaternion()

        return t

    def reset_state(self):
        with self.lock:
            self.cached_tf = None
            self.estimated = False
            self.tf_expired = True

    def human_pose_cb(self, msg):
        self.human_pose_msg = msg

    def robot_pose_cb(self, msg):
        self.robot_pose_msg = msg

    def pointing_ray_cb(self, msg):
        self.pointing_ray_msg = msg

    def frame_from_tf(self, fixed_frame, target_frame):
        if fixed_frame == target_frame:
            return kdl.Frame.Identity()

        try:
            tf_frame = self.tf_buff.lookup_transform(fixed_frame, target_frame,
                                                     rospy.Time())
            dt = (rospy.Time.now() - tf_frame.header.stamp)
            if dt > rospy.Duration(5.0):
                rospy.logwarn_throttle(
                    10.0,
                    'Transformation [{}] -> [{}] is too old. Last seen {:.3f}s ago. Ignoring'
                    .format(fixed_frame, target_frame, dt.to_sec()))
                return None
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            rospy.logerr_throttle(10.0, str(e))
            return None

        kdl_frame = transform_to_kdl(tf_frame)

        return kdl_frame
Example #11
    def execute_followme(self, goal):
        loop_rate = rospy.Rate(50)  # 50 Hz

        if not (self.last_known_flight_state == FlightState.Hovering
                or self.last_known_flight_state == FlightState.Flying):
            rospy.logerr(
                'Drone is neither flying nor hovering. Current state: {}'.
                format(flight_state_names[self.last_known_flight_state]))
            self.followme_server.set_aborted()
            return
        else:
            try:
                topic_class, real_topic, eval_func = rostopic.get_topic_class(
                    rospy.resolve_name(goal.topic))

                def pose_cb(msg):
                    # Extract the actual message
                    if eval_func:
                        real_msg = copy.deepcopy(eval_func(msg))
                    else:
                        real_msg = copy.deepcopy(msg)

                    hor_d, full_d, yaw = self.distance(real_msg,
                                                       self.robot_current_pose)
                    dist = full_d

                    if goal.ignore_z or goal.override_z > 0.0:
                        try:
                            # Find transform from given frame to 'World' frame
                            trans = self.tf_buff.lookup_transform(
                                self.robot_current_pose.header.frame_id,
                                real_msg.header.frame_id, rospy.Time())
                        except (tf2_ros.LookupException,
                                tf2_ros.ConnectivityException,
                                tf2_ros.ExtrapolationException) as e:
                            rospy.logerr_throttle(10.0, e)
                            return

                        # Convert pose to 'World' frame
                        pose_trans = tf2_geometry_msgs.do_transform_pose(
                            real_msg, trans)
                        # Override its altitude
                        if goal.override_z > 0.0:
                            pose_trans.pose.position.z = goal.override_z
                        else:
                            pose_trans.pose.position.z = self.robot_current_pose.pose.position.z

                        ## Convert the pose back to its original frame
                        # Get inversed transform as PyKDL.Frame
                        k = tf2_geometry_msgs.transform_to_kdl(trans).Inverse()
                        trans_inv = self.kdl_to_transform(k)

                        old_header = real_msg.header
                        real_msg = tf2_geometry_msgs.do_transform_pose(
                            pose_trans, trans_inv)
                        real_msg.header = old_header
                        dist = hor_d

                    if goal.lookat:
                        try:
                            # Find transform from given frame to 'World' frame
                            trans = self.tf_buff.lookup_transform(
                                self.robot_current_pose.header.frame_id,
                                real_msg.header.frame_id, rospy.Time())
                        except (tf2_ros.LookupException,
                                tf2_ros.ConnectivityException,
                                tf2_ros.ExtrapolationException) as e:
                            rospy.logerr_throttle(10.0, e)
                            return

                        # Convert pose to 'World' frame
                        pose_trans = tf2_geometry_msgs.do_transform_pose(
                            real_msg, trans)
                        # Target pose
                        tp = kdl.Vector(pose_trans.pose.position.x,
                                        pose_trans.pose.position.y, 0.0)
                        # Current pose
                        cp = kdl.Vector(
                            self.robot_current_pose.pose.position.x,
                            self.robot_current_pose.pose.position.y, 0.0)
                        # Direction vector
                        dir_v = tp - cp

                        dir_yaw = math.atan2(dir_v.y(), dir_v.x())
                        quat = kdl.Rotation.RPY(0.0, 0.0,
                                                dir_yaw).GetQuaternion()

                        pose_trans.pose.orientation.x = quat[0]
                        pose_trans.pose.orientation.y = quat[1]
                        pose_trans.pose.orientation.z = quat[2]
                        pose_trans.pose.orientation.w = quat[3]

                        ## Convert the pose back to its original frame
                        # Get inversed transform as PyKDL.Frame
                        k = tf2_geometry_msgs.transform_to_kdl(trans).Inverse()
                        trans_inv = self.kdl_to_transform(k)

                        old_header = real_msg.header
                        real_msg = tf2_geometry_msgs.do_transform_pose(
                            pose_trans, trans_inv)
                        real_msg.header = old_header

                        _, _, yaw = self.distance(real_msg,
                                                  self.robot_current_pose)

                    if goal.margin > 0.0:
                        if dist <= goal.margin:
                            return

                    f = FollowMeFeedback()
                    f.distance = dist
                    f.yaw = yaw
                    self.followme_server.publish_feedback(f)

                    self.last_desired_pose = real_msg
                    self.pub_desired_pose.publish(real_msg)

                # Set the source
                self.set_target_source(self.robot_desired_pose_topic)

                # Subscribe to the topic
                sub_pose = rospy.Subscriber(real_topic, topic_class, pose_cb)
Example #12
class PointingNode:
    def __init__(self):
        self.publish_rate = rospy.get_param('~publish_rate', 50.0)

        self.ray_origin_frame = rospy.get_param('~ray_origin_frame', 'eyes')
        self.ray_pass_frame = rospy.get_param('~ray_pass_frame', 'finger')

        self.human_frame = rospy.get_param('~human_frame', 'human_footprint')
        self.robot_frame = rospy.get_param('~robot_frame', 'robot_base_link')

        arm_pointer_topic = rospy.get_param('~arm_pointer_topic', 'arm_pointer')
        self.pub_arm_pointer = rospy.Publisher(arm_pointer_topic, PoseStamped, queue_size = 100)

        pointing_ray_topic = rospy.get_param('~pointing_ray_topic', 'pointing_ray')
        self.pub_pointing_ray = rospy.Publisher(pointing_ray_topic, PoseStamped, queue_size = 100)

        self.pointer_switch_margin = rospy.get_param('~pointer_switch_margin', 0.10) # 10 cm
        self.switch_at_pointer = rospy.get_param('~switch_at_pointer', False)

        min_elev_angle_deg = rospy.get_param('~min_elev_angle_deg', 5.0) # 5 deg
        self.min_elevation_angle = math.radians(min_elev_angle_deg)

        markers_topic = rospy.get_param('~markers_topic', 'markers')
        self.pub_markers = rospy.Publisher(markers_topic, Marker, queue_size = 10)

        self.tf_buff = tf2_ros.Buffer()
        self.tf_ls = tf2_ros.TransformListener(self.tf_buff)
        self.tf_br = tf2_ros.TransformBroadcaster()

        self.pointing_model = HeadFingerModel(self.tf_buff, self.human_frame, self.ray_origin_frame, self.ray_pass_frame, 'pointer')

        self.xy_plane = Plane(self.human_frame, normal=kdl.Vector(0.0, 0.0, 1.0)) #, point=kdl.Vector(0.0, 0.0, 0.6))
        self.vis_plane = Plane(self.human_frame)
        self.cylinder = Cylinder(self.human_frame, axis=kdl.Vector(0.0, 0.0, 1.0))
        self.sphere = Sphere(self.ray_origin_frame)

        # self.ws_shape = self.xy_plane
        self.ws_shape = self.cylinder
        # self.ws_shape = self.sphere
        self.last_ws_shape = None

        self.srv_set_workspace_shape = rospy.Service('set_workspace_shape', SetWorkspaceShape, self.set_workspace_shape)

        self.pub_workspace_shape = rospy.Publisher('workspace_shape', String, queue_size = 10, latch = True)

    def get_pointer(self, ws_shape, ray_kdl_frame, pass_point=None, cache=False):
        robot_kdl_frame = self.frame_from_tf(self.human_frame, self.robot_frame)

        if not pass_point:
            if not ws_shape.point:
                if robot_kdl_frame:
                    # Initialize with robot's current position
                    ws_shape.point = copy.deepcopy(robot_kdl_frame.p)
                else:
                    return None

            pass_point = ws_shape.point


        ray_origin_kdl_frame = self.frame_from_tf(self.human_frame, self.ray_origin_frame)
        shape_origin_kdl_frame = self.frame_from_tf(self.human_frame, ws_shape.frame_id)

        pointer_pose = None

        if pass_point and ray_origin_kdl_frame and shape_origin_kdl_frame:
            actual_pass_point = pass_point - copy.deepcopy(shape_origin_kdl_frame.p)

            if cache: # or not ws_shape.point:
                ws_shape.point = pass_point

            if isinstance(ws_shape, Plane):
                normal = None

                if ws_shape is self.vis_plane:
                    tmp_point = copy.deepcopy(ray_origin_kdl_frame.p)
                    robot_dir = actual_pass_point - tmp_point
                    normal = kdl.Vector(robot_dir.x(), robot_dir.y(), 0.0)

                    if cache:
                        ws_shape.normal = normal

                pointer_pose = ws_shape.ray_to_location(ray_kdl_frame, normal=normal, point=actual_pass_point)
            else:
                pointer_pose = ws_shape.ray_to_location(ray_kdl_frame, point=actual_pass_point)

        return pointer_pose

    def set_workspace_shape(self, req):
        ray_tf = self.pointing_model.pointing_ray()

        req_ws_shape = self.ws_shape

        if req.robot_frame:
            self.robot_frame = req.robot_frame

        if ray_tf:
            ray_kdl_frame = transform_to_kdl(ray_tf)
            robot_kdl_frame = self.frame_from_tf(self.human_frame, self.robot_frame) #self.ws_shape.frame_id, self.robot_frame)
            ray_origin_kdl_frame = self.frame_from_tf(self.human_frame, self.ray_origin_frame)

            if robot_kdl_frame and ray_origin_kdl_frame:
                if req.workspace_shape == SetWorkspaceShapeRequest.WORKSPACE_XY_PLANE:
                    req_ws_shape = self.xy_plane
                elif req.workspace_shape == SetWorkspaceShapeRequest.WORKSPACE_VISUAL_PLANE:
                    req_ws_shape = self.vis_plane
                elif req.workspace_shape == SetWorkspaceShapeRequest.WORKSPACE_CYLINDER:
                    req_ws_shape = self.cylinder
                elif req.workspace_shape == SetWorkspaceShapeRequest.WORKSPACE_SPHERE:
                    req_ws_shape = self.sphere
                else:
                    raise rospy.ServiceException('Unsupported shape type')
                    # return None

                cur_pointer_pose = self.get_pointer(self.ws_shape, ray_kdl_frame, self.ws_shape.point)

                if self.switch_at_pointer:
                    switch_point = copy.deepcopy(tfc.fromMsg(cur_pointer_pose).p)
                else:
                    switch_point = copy.deepcopy(robot_kdl_frame.p)


                new_pointer_pose = self.get_pointer(req_ws_shape, ray_kdl_frame, switch_point)

                eyes_robot_vector = kdl.Vector(robot_kdl_frame.p - ray_origin_kdl_frame.p)

                robot_elev_frame = self.pointing_model._frame_from_direction(ray_origin_kdl_frame.p, eyes_robot_vector)
                _, pitch, _ = robot_elev_frame.M.GetRPY()

                if math.fabs(pitch) < self.min_elevation_angle:
                    raise rospy.ServiceException('Drone elevation angle is less than {} deg: {}. Ignoring'
                        .format(math.degrees(self.min_elevation_angle), math.degrees(pitch)))
                else:
                    # Since the safety check succeeded we can update the pass_point of the shape
                    new_pointer_pose = self.get_pointer(req_ws_shape, ray_kdl_frame, switch_point, cache = True)

            else:
                raise rospy.ServiceException('Unable to obtain robot\'s frame. Is robot localized?')
                # return None

        else:
            raise rospy.ServiceException('Internal error: failed to get ray frame')
            # return None

        self.ws_shape = req_ws_shape

        return SetWorkspaceShapeResponse()

    def frame_from_tf(self, fixed_frame, target_frame):
        if fixed_frame == target_frame:
            return kdl.Frame.Identity()

        try:
            tf_frame = self.tf_buff.lookup_transform(fixed_frame, target_frame, rospy.Time())
            dt = (rospy.Time.now() - tf_frame.header.stamp)
            if dt > rospy.Duration(5.0):
                rospy.logwarn_throttle(10.0, 'Transformation [{}] -> [{}] is too old. Last seen {:.3f}s ago. Ignoring'
                    .format(fixed_frame, target_frame, dt.to_sec()))
                return None
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
            rospy.logerr_throttle(10.0, str(e))
            return None

        kdl_frame = transform_to_kdl(tf_frame)

        return kdl_frame
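A hedged usage sketch of frame_from_tf, assuming a hypothetical PointingNode instance `node`: the returned PyKDL.Frame can be inspected and composed directly, and None signals a failed or stale lookup.

robot_frame = node.frame_from_tf(node.human_frame, node.robot_frame)
if robot_frame:
    # Position of the robot expressed in the human frame.
    x, y, z = robot_frame.p.x(), robot_frame.p.y(), robot_frame.p.z()
    _, _, yaw = robot_frame.M.GetRPY()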