예제 #1
0
    def determine_points_of_interest(self, center_frame, z_max, convex_hull):
        """
        Determine candidates for place poses.

        Samples points along every edge of the convex hull, spaced
        ``self._spacing`` apart, shifted ``self._edge_distance`` inwards
        (perpendicular to the edge) and placed at height ``z_max`` above the
        entity center. Also publishes visualization markers for the candidates.

        :param center_frame: kdl.Frame, center of the Entity to place on top of
        :param z_max: float, height of the entity to place on, w.r.t. the entity
        :param convex_hull: [kdl.Vector], convex hull of the entity
        :return: [FrameStamped] of candidates for placing
        """
        points = []

        if not convex_hull:
            rospy.logerr('determine_points_of_interest: Empty convex hull')
            return []

        # Convert convex hull to map frame
        ch = offsetConvexHull(convex_hull, center_frame)

        # Markers are rebuilt from scratch on every call
        self.marker_array.markers = []

        for i in range(len(ch)):  # 'range' works on both Python 2 and 3 (was 'xrange')
            j = (i + 1) % len(ch)  # wrap around: the last point connects to the first

            dx = ch[j].x() - ch[i].x()
            dy = ch[j].y() - ch[i].y()

            length = kdl.diff(ch[j], ch[i]).Norm()

            if length <= 2 * self._edge_distance:
                # Edge too short to keep the required margin on both ends
                # (the sampling loop below would not run anyway; this also
                # makes the division by 'length' obviously safe)
                continue

            d = self._edge_distance
            while d < (length - self._edge_distance):
                # Point on edge
                xs = ch[i].x() + d / length * dx
                ys = ch[i].y() + d / length * dy

                # Shift point inwards (perpendicular to the edge) and fill message
                fs = kdl_frame_stamped_from_XYZRPY(
                    x=xs - dy / length * self._edge_distance,
                    y=ys + dx / length * self._edge_distance,
                    z=center_frame.p.z() + z_max,
                    frame_id="/map")

                # It's nice to put an object on the middle of a long edge. In case of
                # a cabinet, e.g., this might prevent the robot from hitting the edges
                setattr(fs, 'edge_score', min(d, length - d))

                points.append(fs)

                self.marker_array.markers.append(
                    self.create_marker(fs.frame.p.x(), fs.frame.p.y(),
                                       fs.frame.p.z()))

                # ToDo: check if still within hull???
                d += self._spacing

        self.marker_pub.publish(self.marker_array)

        return points
예제 #2
0
    def determine_points_of_interest(self, center_frame, z_max, convex_hull):
        """
        Determine candidates for place poses.

        :param center_frame: kdl.Frame, center of the Entity to place on top of
        :param z_max: float, height of the entity to place on, w.r.t. the entity
        :param convex_hull: [kdl.Vector], convex hull of the entity
        :return: [FrameStamped] of candidates for placing
        """
        if len(convex_hull) == 0:
            rospy.logerr('determine_points_of_interest: Empty convex hull')
            return []

        # Express the convex hull in the map frame
        hull = offsetConvexHull(convex_hull, center_frame)

        candidates = []
        self.marker_array.markers = []

        # These do not change per edge, so compute them once
        place_height = center_frame.p.z() + z_max
        margin = self._edge_distance

        # Walk over consecutive pairs of hull points; the last edge wraps
        # around to the first point again.
        num_points = len(hull)
        idx = 0
        while idx < num_points:
            start = hull[idx]
            end = hull[(idx + 1) % num_points]
            idx += 1

            delta_x = end.x() - start.x()
            delta_y = end.y() - start.y()
            edge_length = kdl.diff(end, start).Norm()

            offset = margin
            while offset < (edge_length - margin):
                # Interpolate along the edge
                px = start.x() + offset / edge_length * delta_x
                py = start.y() + offset / edge_length * delta_y

                # Shift the sample inwards, perpendicular to the edge
                candidate = kdl_frame_stamped_from_XYZRPY(
                    x=px - delta_y / edge_length * margin,
                    y=py + delta_x / edge_length * margin,
                    z=place_height,
                    frame_id="/map")

                # Placing in the middle of a long edge is preferred; e.g. for a
                # cabinet this helps the robot keep clear of the cabinet edges.
                setattr(candidate, 'edge_score', min(offset, edge_length - offset))

                candidates += [candidate]

                self.marker_array.markers.append(
                    self.create_marker(candidate.frame.p.x(),
                                       candidate.frame.p.y(),
                                       candidate.frame.p.z()))

                # ToDo: check if still within hull???
                offset += self._spacing

        self.marker_pub.publish(self.marker_array)

        return candidates
예제 #3
0
    def get_grasp_pose(self, entity, arm):
        """ Computes the most suitable grasp pose to grasp the specified entity with the specified arm

        Creates a grasp candidate for every edge of the entity's convex hull
        that is narrow enough (width filter), scores each candidate by how
        little the robot would have to rotate towards it, and returns the
        best-scoring one.

        :param entity: entity to grasp
        :param arm: arm to use (currently unused; kept for interface compatibility)
        :return: FrameStamped with grasp pose in map frame, or False if no
                 grasp pose could be determined
        """
        candidates = []
        starttime = rospy.Time.now()
        # ToDo: divide into functions

        # First: check if the entity actually has a shape / convex hull
        if entity.shape is None:
            rospy.logerr(
                "Entity {0} has no shape. We need to do something with this".
                format(entity.id))
            return False

        # Second: offset the convex hull to get it in map frame
        center_pose = entity._pose  # TODO: Access to private member
        chull = offsetConvexHull(entity.shape.convex_hull,
                                 center_pose)  # convex hull in map frame

        # Get the robot pose as a kdl frame (required to score candidates below)
        robot_frame = self._robot.base.get_location()
        robot_frame_inv = robot_frame.frame.Inverse()

        # Loop over the edges of the hull
        for i in range(len(chull)):  # 'range' works on Python 2 and 3 (was 'xrange')
            j = (i + 1) % len(chull)

            dx = chull[j].x() - chull[i].x()
            dy = chull[j].y() - chull[i].y()
            n = math.hypot(dx, dy)  # Edge length (was computed twice before)
            if n < 0.0001:
                # Points are probably too close to get a decent angle estimate
                continue

            yaw = math.atan2(dx, -dy)

            # Filter on object width: project all hull points onto the
            # normalized edge direction and keep this edge only if the
            # projected extent is small enough.
            vx = dx / n
            vy = dy / n

            wmin = 0
            wmax = 0
            for c in chull:
                # Vector from the edge start point to this hull point
                tx = c.x() - chull[i].x()
                ty = c.y() - chull[i].y()

                # Perform projection
                # ToDo: continue when offset in x direction is too large
                offset = tx * vx + ty * vy

                # Update min and max
                wmin = min(wmin, offset)
                wmax = max(wmax, offset)

            width = wmax - wmin
            if width > self._width_treshold:  # NOTE: attribute keeps its existing (misspelled) name
                continue
            score = 1.0

            # Compute candidate vector: start at hull point i and shift to the
            # middle of the projected width along the edge direction
            tvec = FrameStamped(kdl.Frame(
                kdl.Rotation.RPY(0, 0, yaw),
                kdl.Vector(chull[i].x(), chull[i].y(),
                           entity.pose.frame.p.z())),
                                frame_id="/map")  # Helper frame

            cvec = FrameStamped(kdl.Frame(
                kdl.Rotation.RPY(0, 0, yaw),
                tvec.frame * kdl.Vector(0, -0.5 * (wmin + wmax), 0)),
                                frame_id="/map")

            # Prefer grasp poses that require little rotation w.r.t. the robot:
            # robot_in_map * entity_in_robot = entity_in_map
            # --> entity_in_robot = robot_in_map^-1 * entity_in_map
            gvec = robot_frame_inv * cvec.frame
            (R, P, Y) = gvec.M.GetRPY()

            rscore = 1.0 - (abs(Y) / math.pi)
            score = min(score, rscore)

            candidates.append({'vector': cvec, 'score': score})

        if not candidates:
            # All edges were either degenerate or too wide to grasp; without
            # this guard 'candidates[0]' below raises an IndexError
            rospy.logerr("Could not determine a grasp pose for entity {0}".
                         format(entity.id))
            return False

        candidates = sorted(candidates,
                            key=lambda candidate: candidate['score'],
                            reverse=True)

        self.visualize(candidates)
        rospy.loginfo("GPD took %f seconds" %
                      (rospy.Time.now() - starttime).to_sec())

        return candidates[0]['vector']
    def get_grasp_pose(self, entity, arm):
        """ Computes the most suitable grasp pose to grasp the specified entity with the specified arm

        Creates a grasp candidate for every edge of the entity's convex hull that is narrow enough
        (width filter), scores each candidate by how little the robot would have to rotate towards it
        and returns the best-scoring one.

        :param entity: entity to grasp
        :param arm: arm to use (currently unused; kept for interface compatibility)
        :return: FrameStamped with grasp pose in map frame, or False if no grasp pose could be determined
        """
        candidates = []
        starttime = rospy.Time.now()
        # ToDo: divide into functions

        ''' First: check if container actually has a convex hull '''
        if entity.shape is None:
            # rospy logging instead of a bare Python 2 print, so the message reaches the ROS log
            rospy.logerr('Entity {0} has no shape. We need to do something with this'.format(entity.id))
            return False

        ''' Second: turn points into KDL objects and offset chull to get it in map frame '''
        center_pose = entity._pose  # TODO: Access to private member

        chull = offsetConvexHull(entity.shape.convex_hull, center_pose)  # convex hull in map frame

        ''' Get robot pose as a kdl frame (is required later on) '''
        robot_frame = self._robot.base.get_location()
        robot_frame_inv = robot_frame.frame.Inverse()

        ''' Loop over lines of chull '''
        for i in range(len(chull)):  # 'range' works on Python 2 and 3 (was 'xrange')
            j = (i + 1) % len(chull)

            dx = chull[j].x() - chull[i].x()
            dy = chull[j].y() - chull[i].y()
            n = math.hypot(dx, dy)  # Edge length (was computed twice before)
            if n < 0.0001:
                # Points are probably too close to get a decent angle estimate
                continue

            yaw = math.atan2(dx, -dy)

            ''' Filter on object width '''
            # Project all hull points onto the normalized edge direction and keep
            # this edge only if the projected extent is small enough
            vx = dx / n
            vy = dy / n

            wmin = 0
            wmax = 0
            for c in chull:
                # Vector from the edge start point to this hull point
                tx = c.x() - chull[i].x()
                ty = c.y() - chull[i].y()

                # Perform projection
                # ToDo: continue when offset in x direction is too large
                offset = tx * vx + ty * vy

                # Update min and max
                wmin = min(wmin, offset)
                wmax = max(wmax, offset)

            width = wmax - wmin
            if width > self._width_treshold:  # NOTE: attribute keeps its existing (misspelled) name
                continue
            score = 1.0

            ''' Compute candidate vector '''
            # Start at hull point i and shift to the middle of the projected width
            # along the edge direction
            tvec = FrameStamped(kdl.Frame(kdl.Rotation.RPY(0, 0, yaw),
                                kdl.Vector(chull[i].x(), chull[i].y(), entity.pose.frame.p.z())),
                                frame_id="/map")  # Helper frame

            cvec = FrameStamped(kdl.Frame(kdl.Rotation.RPY(0, 0, yaw),
                                tvec.frame * kdl.Vector(0, -0.5 * (wmin + wmax), 0)),
                                frame_id="/map")

            ''' Optimize over yaw offset w.r.t. robot '''
            # robot_in_map * entity_in_robot = entity_in_map
            # --> entity_in_robot = robot_in_map^-1 * entity_in_map
            gvec = robot_frame_inv * cvec.frame
            (R, P, Y) = gvec.M.GetRPY()

            rscore = 1.0 - (abs(Y) / math.pi)
            score = min(score, rscore)

            candidates.append({'vector': cvec, 'score': score})

        if not candidates:
            # All edges were either degenerate or too wide to grasp; without this
            # guard 'candidates[0]' below raises an IndexError
            rospy.logerr('Could not determine a grasp pose for entity {0}'.format(entity.id))
            return False

        candidates = sorted(candidates, key=lambda candidate: candidate['score'], reverse=True)

        self.visualize(candidates)
        # rospy logging instead of a bare Python 2 print, consistent with the logerr calls above
        rospy.loginfo("GPD took %f seconds" % (rospy.Time.now() - starttime).to_sec())

        return candidates[0]['vector']