Example #1
    def execute(self, userdata=None):

        # Store current base position
        base_loc = self._robot.base.get_location()
        base_pose = base_loc.frame

        # Create automatic side detection state and execute
        # self._robot.speech.speak("I am now going to look for the table", block=False)
        # automatic_side_detection = AutomaticSideDetection2(self._robot)
        # side = automatic_side_detection.execute({})
        # self._robot.head.look_at_standing_person()

        # self._robot.speech.speak("The {} is to my {}".format(self._location_id, side))

        # self._robot.head.cancel_goal()

        # if side == "left":
        #     base_pose.M.DoRotZ(math.pi / 2)
        # elif side == "right":
        #     base_pose.M.DoRotZ(-math.pi / 2)

        # Add to param server
        loc_dict = {'x': base_pose.p.x(), 'y': base_pose.p.y(), 'phi': base_pose.M.GetRPY()[2]}
        rospy.set_param("/restaurant_locations/{name}".format(name=self._location_id), loc_dict)

        self._visualize_location(base_pose, self._location_id)
        self._robot.ed.update_entity(id=self._location_id, frame_stamped=FrameStamped(base_pose, "/map"),
                                     type="waypoint")

        return "done"
Example #2
    def execute(self, userdata=None):
        return "succeeded"
        e = self._trashbin.resolve()

        if not e:
            raise Exception("trashbin designator is empty")

        # get original entity pose
        frame_original = self._robot.ed.get_entity(id=e.id)._pose

        # inspect and update entity
        states.look_at_segmentation_area(self._robot, self._robot.ed.get_entity(id=e.id), 'on_top_of')
        self._robot.ed.update_kinect("{} {}".format('on_top_of', e.id))
        frame_updated = self._robot.ed.get_entity(id=e.id)._pose

        # update entity with original orientation
        frame_updated.M = frame_original.M
        new_frame = FrameStamped(frame_updated, "map")

        # new_frame.header.stamp
        self._robot.ed.update_entity(id=e.id, frame_stamped=new_frame)
        if e:
            return "succeeded"
        else:
            return "failed"
Example #3
    def execute(self, userdata=None):
        self._robot.speech.speak("Is the bar on my left or on my right?")

        self._robot.head.look_at_standing_person()
        base_pose = self._robot.base.get_location().frame
        result = None
        while not result:
            result = self._robot.ears.recognize(
                '<side>', {'side': ['left', 'right']},
                time_out=rospy.Duration(10))  # Wait at most 10 seconds

        if result.result == "left":
            base_pose.M.DoRotZ(math.pi / 2)
        elif result.result == "right":
            base_pose.M.DoRotZ(-math.pi / 2)
        else:
            print "\n\n WHUT?? No left or right lol? \n\n"
            print result
            print "\n"

        self._robot.ed.update_entity(id="beverages",
                                     kdl_frame_stamped=FrameStamped(
                                         base_pose, "/map"),
                                     type="waypoint")
        return "done"
Example #4
            def move_table(userdata=None, manipulate_machine=None):
                """ Moves the entities for this challenge to the correct poses"""
                # Determine where to perform the challenge
                robot_pose = robot.base.get_location()
                ENTITY_POSES.sort(key=lambda tup:
                                  (tup[0].frame.p - robot_pose.frame.p).Norm())
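                # ENTITY_POSES is now sorted by distance from the robot, so index 0 is the closest configuration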
                cabinet_id = ENTITY_POSES[0][2]
                table_id = ENTITY_POSES[0][3]

                # Update the world model
                robot.ed.update_entity(id="balcony_shelf",
                                       frame_stamped=FrameStamped(
                                           kdl.Frame(kdl.Rotation(),
                                                     kdl.Vector(0.0, 3.0,
                                                                0.0)),
                                           frame_id="map"))
                robot.ed.update_entity(id=cabinet_id,
                                       frame_stamped=ENTITY_POSES[0][0])
                robot.ed.update_entity(id=table_id,
                                       frame_stamped=ENTITY_POSES[0][1])

                # Update designators
                cabinet.id_ = ENTITY_POSES[0][2]
                room.id_ = ENTITY_POSES[0][4]

                # Update manipulate machine
                manipulate_machine.place_entity_designator.id_ = cabinet_id
                manipulate_machine.place_designator._area = ENTITY_POSES[0][5]
                manipulate_machine.place_designator.place_location_designator.id = cabinet_id
                manipulate_machine.table_designator.id_ = table_id
                manipulate_machine.cabinet.id_ = ENTITY_POSES[0][2]

                return "done"
Example #5
 def test_describe_near_objects4(self):
     self.robot.base.get_location = lambda: FrameStamped(
         kdl.Frame(kdl.Rotation().Identity(), kdl.Vector(7.5, 2, 0)), "/map"
     )
     self.assertEqual("We now enter the bedroom",
                      self.tour_guide.describe_near_objects())
     self.assertEqual("On our right you can see the bookcase",
                      self.tour_guide.describe_near_objects())
     self.assertEqual("", self.tour_guide.describe_near_objects())
Example #6
        def execute(self, userdata=None):
            # Stop the base
            self._robot.base.local_planner.cancelCurrentPlan()

            base_pose = self._robot.base.get_location().frame

            choices = knowledge.guiding_choices

            self._robot.head.look_at_standing_person()
            self._robot.speech.speak("Location and side?")
            result = self._robot.ears.recognize(
                knowledge.guiding_spec, choices,
                time_out=rospy.Duration(10))  # Wait at most 10 seconds
            self._robot.head.cancel_goal()

            if result:
                if "continue" in result.choices:
                    return "continue"
                if "side" in result.choices and "location" in result.choices:
                    side = result.choices["side"]
                    location = result.choices["location"]

                    self._robot.head.look_at_standing_person()
                    # self._robot.speech.speak("%s %s?"%(location, side))
                    # result = self._robot.ears.recognize("(yes|no)",{})
                    # self._robot.head.cancel_goal()
                    # if not result or result.result == "no":
                    #     self._robot.speech.speak("Sorry", block=False)
                    #     return "continue"

                    self._robot.speech.speak("%s %s, it is!" %
                                             (location, side))
                    self._robot.head.cancel_goal()

                    if side == "left":
                        base_pose.M.DoRotZ(math.pi / 2)
                    elif side == "right":
                        base_pose.M.DoRotZ(-math.pi / 2)

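                    # Store the 2D pose (x, y, yaw) on the parameter server so it can be retrieved later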
                    loc_dict = {
                        'x': base_pose.p.x(),
                        'y': base_pose.p.y(),
                        'phi': base_pose.M.GetRPY()[2]
                    }
                    rospy.set_param(
                        "/restaurant_locations/{name}".format(name=location),
                        loc_dict)
                    visualize_location(base_pose, location)
                    self._robot.ed.update_entity(
                        id=location,
                        kdl_frame_stamped=FrameStamped(base_pose, "/map"),
                        type="waypoint")

                    return "done"

            return "continue"
Example #7
            def store_pose(userdata=None):
                base_loc = robot.base.get_location()
                base_pose = base_loc.frame
                location_id = INFORMATION_POINT_ID
                robot.ed.update_entity(id=location_id,
                                       frame_stamped=FrameStamped(
                                           base_pose, "/map"),
                                       type="waypoint")

                return "succeeded"
Example #8
    def _resolve(self):
        # ToDo: Make this happen for the bin in the chosen room...

        try:
            frame = self._entity_des.resolve()._pose
        except Exception as e:
            rospy.logerr(e)
            return None

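        # Keep the entity's x and y, but overwrite z with the configured drop height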
        frame.p.z(self._drop_height)

        return FrameStamped(frame, "/map")
Example #9
    def _resolve(self):
        frame = None

        # Query ed
        try:
            frame = self._robot.ed.get_entity(id="trashbin")._pose
        except Exception:
            return None

        frame.p.z(self._drop_height)

        return FrameStamped(frame, "/map")
Example #10
 def __init__(self, *args, **kwargs):
     self.move = mock.MagicMock()
     self.force_drive = mock.MagicMock()
     self.get_location = lambda: FrameStamped(random_kdl_frame(), "/map")
     self.set_initial_pose = mock.MagicMock()
     self.go = mock.MagicMock()
     self.reset_costmap = mock.MagicMock()
     self.cancel_goal = mock.MagicMock()
     self.analyzer = mock.MagicMock()
     self.global_planner = mock.MagicMock()
     self.local_planner = mock.MagicMock()
     self.local_planner.getStatus = mock.MagicMock(return_value="arrived") #always arrive for now
     self.global_planner.getPlan = mock.MagicMock(return_value=["dummy_plan"]) #always arrive for now
Example #11
def load_waypoints(robot, filename="/param/locations.yaml"):
        rp = rospkg.rospack.RosPack()
        restaurant_package = rp.get_path("challenge_restaurant")
        locations = rosparam.load_file(restaurant_package+filename)
        rospy.set_param("/restaurant_locations/", locations)

        for tablename in tables.values():
            location = locations[0][0][0][0]['locations'][tablename] #Don't ask why there's so many subindices to use...
            base_pose = msg_constructors.PoseStamped(location['x'], location['y'], z=0)
            base_pose.pose.orientation = transformations.euler_z_to_quaternion(location['phi'])

            visualize_location(base_pose, tablename)
            robot.ed.update_entity(id=tablename, kdlFrameStamped=FrameStamped(base_pose, "/map"), type="waypoint")
Example #12
 def _set_waypoint(self, intersection_point, person_position):
     """ Puts the goal as a waypoint in ED
     :param intersection_point: geometry_msgs PointStamped of the last raytrace intersection (in map)
     :param person_position: geometry_msgs PointStamped of the last measured person position (in map)
     """
     yaw = math.atan2(person_position.point.y - intersection_point.point.y,
                      person_position.point.x - intersection_point.point.x)
     position = kdl.Vector(intersection_point.point.x,
                           intersection_point.point.y, 0.0)
     orientation = kdl.Rotation.RPY(0.0, 0.0, yaw)
     waypoint = FrameStamped(frame=kdl.Frame(orientation, position),
                             frame_id="/map")
     self.robot.ed.update_entity(id=self.waypoint_id,
                                 type="waypoint",
                                 frame_stamped=waypoint)
     # import ipdb;ipdb.set_trace()
     return
Example #13
 def __init__(self, robot_name, tf_listener, *args, **kwargs):
     super(Base, self).__init__(robot_name, tf_listener)
     self.move = mock.MagicMock()
     self.turn_towards = mock.MagicMock()
     self.force_drive = mock.MagicMock()
     self.get_location = lambda: FrameStamped(random_kdl_frame(), "/map")
     self.set_initial_pose = mock.MagicMock()
     self.wait_for_motion_done = mock.MagicMock()
     self.go = mock.MagicMock()
     self.reset_costmap = mock.MagicMock()
     self.cancel_goal = mock.MagicMock()
     self.analyzer = mock.MagicMock()
     self.global_planner = mock.MagicMock()
     self.local_planner = mock.MagicMock()
     self.local_planner.getStatus = mock.MagicMock(
         return_value="arrived")  # always arrive for now
     self.global_planner.getPlan = mock.MagicMock(
         return_value=["dummy_plan"])  # always arrive for now
Example #14
 def test_initialize(self):
     # initialize() already ran in setUp, but robot.base.get_location has been replaced, so run it again
     self.robot.base.get_location = lambda: FrameStamped(
         kdl.Frame(kdl.Rotation().Identity(), kdl.Vector(3.5, 3.5, 0)),
         "/map")
     self.tour_guide.initialize()
     self.assertListEqual(self.tour_guide._passed_room_ids,
                          [self._kitchen.id])
     self.assertListEqual(self.tour_guide._passed_furniture_ids,
                          [self._cabinet.id])
     self.assertListEqual(sorted(self.tour_guide._furniture_entities),
                          sorted([self._cabinet, self._bookcase]))
     self.assertListEqual(sorted(self.tour_guide._room_entities),
                          sorted([self._kitchen, self._bedroom]))
     self.assertDictEqual(self.tour_guide._furniture_entities_room, {
         self._kitchen: [self._cabinet],
         self._bedroom: [self._bookcase]
     })
Example #15
    def _resolve(self):
        """
        :return: Where can an object be placed
        :returns: FrameStamped
        """
        place_location = self.place_location_designator.resolve()
        place_frame = FrameStamped(frame=place_location._pose, frame_id="/map")

        # points_of_interest = []
        if self._area:
            vectors_of_interest = self.determine_points_of_interest_with_area(place_location, self._area)
        else:
            vectors_of_interest = self.determine_points_of_interest(place_frame.frame, z_max=place_location.shape.z_max,
                                                                    convex_hull=place_location.shape.convex_hull)

        assert all(isinstance(v, FrameStamped) for v in vectors_of_interest)

        open_POIs = filter(self.is_poi_occupied, vectors_of_interest)

        return self.select_best_feasible_poi(open_POIs)
Example #16
            def move_table(userdata=None):
                """ 'Locks' a locking designator """
                # For now, don't do anything
                return "done"

                # Move away the cabinet
                robot.ed.update_entity(id="cabinet",
                                       frame_stamped=FrameStamped(frame=kdl.Frame(kdl.Rotation(),
                                                                                  kdl.Vector(12.0, 0, 0)),
                                                                  frame_id="map"))

                # Determine where to perform the challenge
                robot_pose = robot.base.get_location()
                ENTITY_POSES.sort(key=lambda tup: (tup[0].frame.p - robot_pose.frame.p).Norm())

                # Update the world model
                robot.ed.update_entity(id=CABINET, frame_stamped=ENTITY_POSES[0][0])
                robot.ed.update_entity(id=TABLE, frame_stamped=ENTITY_POSES[0][1])

                return "done"
Example #17
    def _resolve(self):
        """
        :return: Where can an object be placed
        :returns: FrameStamped
        """
        place_location = self.place_location_designator.resolve()
        place_frame = FrameStamped(frame=place_location._pose, frame_id="/map")

        # points_of_interest = []
        if self._area:
            vectors_of_interest = self._determine_points_of_interest_with_area(place_location, self._area)
        else:
            vectors_of_interest = self._determine_points_of_interest(place_frame.frame, z_max=place_location.shape.z_max,
                                                                    convex_hull=place_location.shape.convex_hull)

        assert all(isinstance(v, FrameStamped) for v in vectors_of_interest)

        open_POIs = filter(lambda pose: self._is_poi_unoccupied(pose, place_location), vectors_of_interest)

        base_pose = self.robot.base.get_location()
        arm = self.arm_designator.resolve()
        open_POIs_dist = [(poi, self._distance_to_poi_area_heuristic(poi, base_pose, arm)) for poi in open_POIs]

        # We don't care about small differences
        open_POIs_dist.sort(key=lambda tup:tup[1])
        open_POIs_dist = [f for f in open_POIs_dist if (f[1] - open_POIs_dist[0][1]) < self._nav_threshold]

        open_POIs_dist.sort(key=lambda tup: tup[0].edge_score, reverse=True) # sorts in place

        for poi in open_POIs_dist:
            if self._distance_to_poi_area(poi[0], arm):
                selection = self._create_selection_marker(poi[0])
                self.marker_pub.publish(MarkerArray([selection]))
                return poi[0]

        rospy.logerr("Could not find an empty spot")
        return None
Example #18
    def _resolve(self):
        place_location = self.place_location_designator.resolve()
        place_frame = FrameStamped(frame=place_location._pose, frame_id="/map")

        # points_of_interest = []
        if self._area:
            vectors_of_interest = self.determine_points_of_interest_with_area(
                place_location, self._area)
        else:
            vectors_of_interest = self.determine_points_of_interest(
                place_frame.frame,
                z_max=place_location.shape.z_max,
                convex_hull=place_location.shape.convex_hull)

        assert all(isinstance(v, FrameStamped) for v in vectors_of_interest)

        def is_poi_unoccupied(frame_stamped):
            # True if no entities are found within self._spacing of this point of interest
            entities_at_poi = self.robot.ed.get_entities(
                center_point=frame_stamped.extractVectorStamped(),
                radius=self._spacing)
            return not any(entities_at_poi)

        open_POIs = filter(is_poi_unoccupied, vectors_of_interest)

        def distance_to_poi_area(frame_stamped):
            # Derived from navigate_to_place
            radius = math.hypot(self.robot.grasp_offset.x,
                                self.robot.grasp_offset.y)
            x = frame_stamped.frame.p.x()
            y = frame_stamped.frame.p.y()
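            # Constrain the base to a 0.15 m wide ring at the grasp radius around this point of interest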
            ro = "(x-%f)^2+(y-%f)^2 < %f^2" % (x, y, radius + 0.075)
            ri = "(x-%f)^2+(y-%f)^2 > %f^2" % (x, y, radius - 0.075)
            pos_constraint = PositionConstraint(constraint=ri + " and " + ro,
                                                frame=frame_stamped.frame_id)

            plan_to_poi = self.robot.base.global_planner.getPlan(
                pos_constraint)

            if plan_to_poi:
                distance = len(plan_to_poi)
                # print "Distance to {fs}: {dist}".format(dist=distance, fs=frame_stamped.frame.p)
            else:
                distance = None
            return distance

        # List with tuples containing both the POI and the distance the
        # robot needs to travel in order to place there
        open_POIs_dist = [(poi, distance_to_poi_area(poi))
                          for poi in open_POIs]

        # Feasible POIs: discard those for which the global planner could not find a plan
        feasible_POIs = []
        for tup in open_POIs_dist:
            if tup[1]:
                feasible_POIs.append(tup)

        if any(feasible_POIs):
            feasible_POIs.sort(key=lambda tup: tup[1])  # sorts in place

            # We don't care about small differences
            nav_threshold = 0.5 / 0.05  # Distance (0.5 m) divided by resolution (0.05)
            feasible_POIs = [
                f for f in feasible_POIs
                if (f[1] - feasible_POIs[0][1]) < nav_threshold
            ]

            feasible_POIs.sort(key=lambda tup: tup[0].edge_score, reverse=True)
            best_poi = feasible_POIs[0][0]  # Get the POI of the best match

            selection = self.create_selection_marker(best_poi)
            self.marker_pub.publish(MarkerArray([selection]))

            return best_poi
        else:
            rospy.logerr("Could not find an empty spot")
            return None
Example #19
# System
import math

# ROS
import PyKDL as kdl

# TU/e Robotics
from robot_skills.util.kdl_conversions import FrameStamped
from robocup_knowledge import knowledge_loader
from collections import namedtuple

# Common knowledge
common = knowledge_loader.load_knowledge("common")

# Detection
cabinet_amcl = ["cabinet", "storage_shelf"]
cabinet_poses = [
    FrameStamped(frame=kdl.Frame(kdl.Rotation.RPY(0.0, 0.0, 0),
                                 kdl.Vector(0.144, 3.274, 0.0)),
                 frame_id="map"),
    FrameStamped(frame=kdl.Frame(kdl.Rotation.RPY(0.0, 0.0, math.pi),
                                 kdl.Vector(4.8, 2.5, 0.0)),
                 frame_id="map")
]
object_shelves = ["shelf3", "shelf4", "shelf5"]
object_types = [obj["name"] for obj in common.objects]

# Placing
default_place_entity = cabinet_amcl
"""
EntityConfiguration defines a name, a pose estimate for the Entity and which volumes of that entity to use for
manipulation
"""
EntityConfiguration = namedtuple(
Example #20
# ROS
import PyKDL as kdl

# TU/e Robotics
from robot_skills.util.kdl_conversions import FrameStamped
from robocup_knowledge import knowledge_loader

# Common knowledge
common = knowledge_loader.load_knowledge("common")

# Detection
cabinet_amcl = ["kitchen_shelf", "kitchen_shelf"]
cabinet_poses = [
    FrameStamped(frame=kdl.Frame(kdl.Rotation.RPY(0.0, -0.0, 0.1292036732),
                                 kdl.Vector(2.39, -4.125, 0)),
                 frame_id="map"),
    FrameStamped(frame=kdl.Frame(kdl.Rotation(), kdl.Vector(-2.27, -7.55,
                                                            0.0)),
                 frame_id="map")
]
object_shelves = ["shelf2", "shelf3", "shelf4", "shelf5"]
object_types = [obj["name"] for obj in common.objects]

# Grasping
grasp_surface = ["kitchen_table", "bistro_table"]
room = ["kitchen", "balcony"]
#
# Placing
default_place_entity = cabinet_amcl
default_place_area = ["shelf3", "shelf3"]
Example #21
import argparse

import rospy
import PyKDL as kdl

import robot_smach_states.util.designators as ds
from robot_smach_states import Grab
from robot_skills.get_robot import get_robot  # assumed module path for the get_robot helper
from robot_skills.util.kdl_conversions import FrameStamped


if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="Put an imaginary object in the world model and grasp it using the "
                                                 "'Grab' smach state")
    parser.add_argument("x", type=float, help="x-coordinate (in map) of the imaginary object")
    parser.add_argument("y", type=float, help="y-coordinate (in map) of the imaginary object")
    parser.add_argument("z", type=float, help="z-coordinate (in map) of the imaginary object")
    parser.add_argument("--robot", default="hero", help="Robot name (amigo, hero, sergio)")
    args = parser.parse_args()

    rospy.init_node("test_grasping")

    robot = get_robot(args.robot)

    entity_id = "test_item"
    pose = FrameStamped(frame=kdl.Frame(kdl.Rotation.RPY(0.0, 0.0, 0.0), kdl.Vector(args.x, args.y, args.z)),
                        frame_id="/map")

    robot.ed.update_entity(id=entity_id, frame_stamped=pose)

    item = ds.EdEntityDesignator(robot, id=entity_id)

    arm = ds.UnoccupiedArmDesignator(robot, {})

    grab_state = Grab(robot, item, arm)
    grab_state.execute()
Example #22
 def pose(self):
     """ Returns the pose of the Entity as a FrameStamped"""
     return FrameStamped(frame=self._pose, frame_id=self.frame_id)
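
For context, a minimal usage sketch of how this property is typically consumed, assuming an already constructed robot object with the ed interface used in the examples above; the entity and waypoint ids are hypothetical:

def republish_entity_as_waypoint(robot, entity_id="dinner_table", waypoint_id="dinner_table_waypoint"):
    """ Illustrative only: read an entity's pose via the property above and store it again as a waypoint """
    entity = robot.ed.get_entity(id=entity_id)
    if entity is None:
        return False
    fs = entity.pose  # FrameStamped wrapping the entity's kdl.Frame and its frame_id
    robot.ed.update_entity(id=waypoint_id, frame_stamped=fs, type="waypoint")
    return True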
Example #23
    def get_grasp_pose(self, entity, arm):
        """ Computes the most suitable grasp pose to grasp the specified entity with the specified arm

        :param entity: entity to grasp
        :param arm: arm to use
        :return FrameStamped with grasp pose in map frame
        """
        candidates = []
        starttime = rospy.Time.now()
        # ToDo: divide into functions
        ''' Create a grasp vector for every side of the convex hull '''
        ''' First: check if container actually has a convex hull '''
        if entity.shape is None:
            rospy.logerr(
                "Entity {0} has no shape. We need to do something with this".
                format(entity.id))
            return False
        ''' Second: turn points into KDL objects and offset chull to get it in map frame '''
        center_pose = entity._pose  #TODO: Access to private member

        # chull_obj = [point_msg_to_kdl_vector(p) for p in entity.shape._convex_hull]   # convex hull in object frame
        # chull = offsetConvexHull(chull_obj, center_pose)    # convex hull in map frame
        chull = offsetConvexHull(entity.shape.convex_hull,
                                 center_pose)  # convex hull in map frame
        # import ipdb;ipdb.set_trace()
        ''' Get robot pose as a kdl frame (is required later on) '''
        robot_frame = self._robot.base.get_location()
        robot_frame_inv = robot_frame.frame.Inverse()
        ''' Loop over lines of chull '''
        for i in xrange(len(chull)):
            j = (i + 1) % len(chull)

            dx = chull[j].x() - chull[i].x()
            dy = chull[j].y() - chull[i].y()
            if math.hypot(dx, dy) < 0.0001:
                # Points are probably too close to get a decent angle estimate
                continue

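            # Candidate approach direction: perpendicular to this edge of the convex hull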
            yaw = math.atan2(dx, -dy)
            ''' Filter on object width '''
            # Normalize
            n = math.hypot(dx, dy)  # Norm
            vx = dx / n
            vy = dy / n

            # Loop over all points
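            # Project every hull point onto the edge direction (vx, vy) to measure the object's extent along this edge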
            wmin = 0
            wmax = 0
            for c in chull:

                # Compute vector
                tx = c.x() - chull[i].x()
                ty = c.y() - chull[i].y()

                # Perform projection
                # ToDo: continue when offset in x direction is too large
                offset = tx * vx + ty * vy

                # Update min and max
                wmin = min(wmin, offset)
                wmax = max(wmax, offset)

            width = wmax - wmin

            if width > self._width_treshold:
                continue
            else:
                score = 1.0
            ''' Compute candidate vector '''
            # Middle between point i and point j
            # x = 0.5 * ( chull[i].x() + chull[j].x() )
            # y = 0.5 * ( chull[i].y() + chull[j].y() )
            # cvec = kdl.Frame(kdl.Rotation.RPY(0, 0, yaw),
            #                kdl.Vector(x, y, entity.pose.position.z))

            # Divide width in two
            # * kdl.Vector(0.5 * (wmin+wmax, 0, 0)
            tvec = FrameStamped(kdl.Frame(
                kdl.Rotation.RPY(0, 0, yaw),
                kdl.Vector(chull[i].x(), chull[i].y(),
                           entity.pose.frame.p.z())),
                                frame_id="/map")  # Helper frame

            cvec = FrameStamped(kdl.Frame(
                kdl.Rotation.RPY(0, 0, yaw),
                tvec.frame * kdl.Vector(0, -0.5 * (wmin + wmax), 0)),
                                frame_id="/map")
            ''' Optimize over yaw offset w.r.t. robot '''
            # robot_in_map * entity_in_robot = entity_in_map
            # --> entity_in_robot = robot_in_map^-1 * entity_in_map
            gvec = robot_frame_inv * cvec.frame
            (R, P, Y) = gvec.M.GetRPY()

            rscore = 1.0 - (abs(Y) / math.pi)
            score = min(score, rscore)

            candidates.append({'vector': cvec, 'score': score})

        candidates = sorted(candidates,
                            key=lambda candidate: candidate['score'],
                            reverse=True)

        self.visualize(candidates)
        rospy.loginfo("GPD took %f seconds" %
                      (rospy.Time.now() - starttime).to_sec())

        return candidates[0]['vector']
Example #24
    def execute(self, userdata=None):

        point_entity = self.point_entity_designator.resolve()
        if not point_entity:
            rospy.logerr("Could not resolve grab_entity")
            return "failed"

        arm = self.arm_designator.resolve()
        if not arm:
            rospy.logerr("Could not resolve arm")
            return "failed"

        goal_map = VectorStamped(0, 0, 0, frame_id=point_entity.id)

        try:
            # Transform to base link frame
            goal_bl = goal_map.projectToFrame(self.robot.base_link_frame, tf_listener=self.robot.tf_listener)
            if goal_bl is None:
                rospy.logerr('Transformation of goal to base failed')
                return 'failed'
        except tf2_ros.TransformException as tfe:
            rospy.logerr('Transformation of goal to base failed: {0}'.format(tfe))
            return 'failed'

        # Make sure the torso and the arm are done
        self.robot.torso.wait_for_motion_done(cancel=True)
        arm.wait_for_motion_done(cancel=True)

        # This is needed because the head is not entirely still when the
        # look_at_point function finishes
        rospy.sleep(rospy.Duration(0.5))

        # Resolve the entity again because we want the latest pose
        updated_point_entity = self.point_entity_designator.resolve()

        rospy.loginfo("ID to update: {0}".format(point_entity.id))
        if not updated_point_entity:
            rospy.logerr("Could not resolve the updated grab_entity, "
                         "this should not happen [CHECK WHY THIS IS HAPPENING]")
            point_entity = self.associate(original_entity=point_entity)
        else:
            rospy.loginfo("Updated pose of entity (dx, dy, dz) : (%f, %f, %f)" %
                          (updated_point_entity.pose.frame.p.x() - point_entity.pose.frame.p.x(),
                           updated_point_entity.pose.frame.p.y() - point_entity.pose.frame.p.y(),
                           updated_point_entity.pose.frame.p.z() - point_entity.pose.frame.p.z()))
            point_entity = updated_point_entity

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Grasp point determination
        goal_bl = self._get_pointing_pose(point_entity)

        # Grasp
        rospy.loginfo('Start pointing')
        for x_offset in [-0.15, 0.0]:  # Hack because Hero does not pre-grasp reliably
            _goal_bl = FrameStamped(goal_bl.frame * kdl.Frame(kdl.Vector(x_offset, 0.0, 0.0)), goal_bl.frame_id)
            if not arm.send_goal(_goal_bl, timeout=20, pre_grasp=False, allowed_touch_objects=[point_entity.id]):
                self.robot.speech.speak('I am sorry but I cannot move my arm to the object position', block=False)
                rospy.logerr('Grasp failed')
                arm.reset()
                return 'failed'

        # Retract
        rospy.loginfo('Start retracting')
        roll = 0.0

        goal_bl.frame.p.x(goal_bl.frame.p.x() - 0.1)  # Retract 10 cm
        goal_bl.frame.p.z(goal_bl.frame.p.z() + 0.05)  # Go 5 cm higher
        goal_bl.frame.M = kdl.Rotation.RPY(roll, 0.0, 0.0)  # Update the roll
        rospy.loginfo("Start retract")
        if not arm.send_goal(goal_bl, timeout=0.0, allowed_touch_objects=[point_entity.id]):
            rospy.logerr('Failed retract')
        arm.wait_for_motion_done()
        self.robot.base.force_drive(-0.125, 0, 0, 2.0)

        # Close gripper
        arm.wait_for_motion_done(cancel=True)

        # Carrying pose
        arm.send_joint_goal('carrying_pose', timeout=0.0)

        # Reset head
        self.robot.head.cancel_goal()

        return "succeeded"
Example #25
def main():
    rospy.init_node("demo")
    random.seed()

    skip = rospy.get_param('~skip', False)
    restart = rospy.get_param('~restart', False)
    robot_name = rospy.get_param('~robot_name')
    no_of_tasks = rospy.get_param('~number_of_tasks', 0)
    time_limit = rospy.get_param('~time_limit', 0)
    if no_of_tasks == 0:
        no_of_tasks = 999

    rospy.loginfo("[DEMO] Parameters:")
    rospy.loginfo("[DEMO] robot_name = {}".format(robot_name))
    if skip:
        rospy.loginfo("[DEMO] skip = {}".format(skip))
    if no_of_tasks:
        rospy.loginfo("[DEMO] number_of_tasks = {}".format(no_of_tasks))
    if restart:
        rospy.loginfo("[DEMO] running a restart")

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    if robot_name == 'amigo':
        from robot_skills.amigo import Amigo as Robot
    elif robot_name == 'sergio':
        from robot_skills.sergio import Sergio as Robot
    elif robot_name == 'hero':
        from robot_skills.hero import Hero as Robot
    else:
        raise ValueError('unknown robot')

    robot = Robot()

    action_client = ActionClient(robot.robot_name)

    knowledge = load_knowledge('challenge_demo')

    no_of_tasks_performed = 0

    user_instruction = "What can I do for you?"
    report = ""

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Start

    if not skip and not restart:

        # Wait for door, enter arena
        s = StartChallengeRobust(robot, knowledge.initial_pose)
        s.execute()

        # Move to the start location
        robot.speech.speak("Let's see if my operator has a task for me!",
                           block=False)

    if restart:
        robot.speech.speak(
            "Performing a restart. So sorry about that last time!",
            block=False)

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    finished = False
    start_time = rospy.get_time()

    trigger = WaitForTrigger(robot, ["gpsr"], "/" + robot_name + "/trigger")

    while True:
        # Navigate to the GPSR meeting point
        if not skip:
            robot.speech.speak("Moving to the meeting point.", block=False)
            nwc = NavigateToWaypoint(robot=robot,
                                     waypoint_designator=EntityByIdDesignator(
                                         robot=robot,
                                         id=knowledge.starting_pose),
                                     radius=0.3)
            nwc.execute()
            # Report to the user and ask for a new task

        # Report to the user
        robot.head.look_at_standing_person()
        robot.speech.speak(report, block=True)
        timeout_count = 0

        while True:
            rospy.loginfo("Waiting for trigger")
            trigger.execute()

            base_loc = robot.base.get_location()
            base_pose = base_loc.frame
            print base_pose
            location_id = "starting_point"
            robot.ed.update_entity(id=location_id,
                                   frame_stamped=FrameStamped(
                                       base_pose, "/map"),
                                   type="waypoint")

            robot.head.look_at_standing_person()

            robot.speech.speak(user_instruction, block=True)
            # Listen for the new task
            while True:
                try:
                    sentence, semantics = robot.hmi.query(
                        description="",
                        grammar=knowledge.grammar,
                        target=knowledge.grammar_target)
                    timeout_count = 0
                    break
                except hmi.TimeoutException:
                    robot.speech.speak(
                        random.sample(knowledge.not_understood_sentences,
                                      1)[0])
                    if timeout_count >= 3:
                        robot.hmi.restart_dragonfly()
                        timeout_count = 0
                        rospy.logwarn("[GPSR] Dragonfly restart")
                    else:
                        timeout_count += 1
                        rospy.logwarn(
                            "[GPSR] Timeout_count: {}".format(timeout_count))

            # check if we have heard this correctly
            robot.speech.speak('I heard %s, is this correct?' % sentence)
            try:
                if 'no' == robot.hmi.query('', 'T -> yes | no', 'T').sentence:
                    robot.speech.speak('Sorry')
                    continue
            except hmi.TimeoutException:
                # robot did not hear the confirmation, so let's assume it's correct
                break

            break

        # Dump the output json object to a string
        task_specification = json.dumps(semantics)

        # Send the task specification to the action server
        task_result = action_client.send_task(task_specification)

        print task_result.missing_field
        # # Ask for missing information
        # while task_result.missing_field:
        #     request_missing_field(knowledge.task_result.missing_field)
        #     task_result = action_client.send_task(task_specification)

        # Write a report to bring to the operator
        report = task_result_to_report(task_result)

        robot.lights.set_color(0, 0, 1)  # be sure lights are blue

        robot.head.look_at_standing_person()
        for arm in robot.arms.itervalues():
            arm.reset()
            arm.send_gripper_goal('close', 0.0)
        robot.torso.reset()

        rospy.loginfo("Driving back to the starting point")
        nwc = NavigateToWaypoint(robot=robot,
                                 waypoint_designator=EntityByIdDesignator(
                                     robot=robot, id=location_id),
                                 radius=0.3)
        nwc.execute()
Example #26
            smach.StateMachine.add('PREPARE_POINT', states.PrepareEdGrasp(robot, arm, item),
                                   transitions={'succeeded': 'POINT',
                                                'failed': 'RESET_FAILURE'})

            smach.StateMachine.add('POINT', PointAt(robot, arm, item),
                                   transitions={'succeeded': 'done',
                                                'failed': 'RESET_FAILURE'})

            smach.StateMachine.add("RESET_FAILURE", states.ResetOnFailure(robot, arm),
                                   transitions={'done': 'failed'})


if __name__ == "__main__":

    rospy.init_node("test_grasping")

    robot = get_robot_from_argv(index=1)

    entity_id = "test_item"
    pose = FrameStamped(frame=kdl.Frame(kdl.Rotation.RPY(0, 0, -1.57), kdl.Vector(2.6, -0.95, 0.8)), frame_id="/map")

    robot.ed.update_entity(id=entity_id, frame_stamped=pose)

    item = ds.EdEntityDesignator(robot, id=entity_id)

    arm = ds.UnoccupiedArmDesignator(robot, {})

    grab_state = IdentifyObject(robot, item, arm)
    grab_state.execute()
Example #27
# ROS
import PyKDL as kdl

# TU/e Robotics
from robot_skills.util.kdl_conversions import FrameStamped
from robocup_knowledge import knowledge_loader

# Common knowledge
common = knowledge_loader.load_knowledge("common")

# Detection
cabinet_amcl = "bookcase"
cabinet_poses = [
    FrameStamped(frame=kdl.Frame(kdl.Rotation(), kdl.Vector(0.144, 3.574,
                                                            0.0)),
                 frame_id="map"),
    FrameStamped(frame=kdl.Frame(kdl.Rotation.RPY(0.0, 0.0, 1.570796),
                                 kdl.Vector(2.271, -1.258, 0.0)),
                 frame_id="map")
]
object_shelves = ["shelf2", "shelf3", "shelf4", "shelf5"]
object_types = [obj["name"] for obj in common.objects]

# Grasping
grasp_surface = "cabinet"
room = "dining_room"

# Placing
default_place_entity = cabinet_amcl
default_place_area = "shelf2"
Example #28
 def test_initialize2(self):
     self.robot.base.get_location = lambda: FrameStamped(
         kdl.Frame(kdl.Rotation().Identity(), kdl.Vector(20, 20, 2)), "/map"
     )
     self.tour_guide.initialize()
     self.assertListEqual(self.tour_guide._passed_room_ids, [])
Example #29
 def test_describe_near_objects2(self):
     self.robot.base.get_location = lambda: FrameStamped(
         kdl.Frame(kdl.Rotation().Identity(), kdl.Vector(6, 4, 0)), "/map")
     self.assertEqual("We now enter the bedroom",
                      self.tour_guide.describe_near_objects())
Example #30
 def setUp(self):
     self.robot.base.get_location = lambda: FrameStamped(
         kdl.Frame(kdl.Rotation().Identity(), kdl.Vector(-1, -1, 0)), "/map"
     )
     self.tour_guide.initialize()