Example No. 1
def get_obj_frames(blue_obj, red_obj, base_tf, base2kinect_tf):
    base_pose = Pose()
    base_pose.position = base_tf.transform.translation
    base_pose.orientation = base_tf.transform.rotation
    base_kdl = tf_conversions.fromMsg(base_pose)
    base_unitX = base_kdl.M.UnitX()
    base_unitY = base_kdl.M.UnitY()
    base_unitZ = base_kdl.M.UnitZ()

    ### Frame for Blue Object

    blue_center_kinect = PointStamped()
    blue_center_kinect.header = base2kinect_tf.header
    blue_center_kinect.point = blue_obj.bb_center
    blue_center = tf2_geometry_msgs.do_transform_point(blue_center_kinect,
                                                       base2kinect_tf)

    blue_pose = Pose()
    blue_pose.position = blue_center.point
    blue_pose.position.z = blue_pose.position.z - blue_obj.bb_dims.z / 2
    blue_pose.orientation = base_tf.transform.rotation
    blue_kdl = tf_conversions.fromMsg(blue_pose)
    blue_pos = blue_kdl.p
    blue_rot = PyKDL.Rotation(-base_unitX, -base_unitY, base_unitZ)
    blue_kdl = PyKDL.Frame(blue_rot, blue_pos)
    blue_pose = tf_conversions.toMsg(blue_kdl)

    blue_frame = TransformStamped()
    blue_frame.header.frame_id = base_frame
    blue_frame.header.stamp = rospy.Time.now()
    blue_frame.child_frame_id = 'blue_frame'
    blue_frame.transform.translation = blue_pose.position
    blue_frame.transform.rotation = blue_pose.orientation

    ### Frame for Red Object

    red_center_kinect = PointStamped()
    red_center_kinect.header = base2kinect_tf.header
    red_center_kinect.point = red_obj.bb_center
    red_center = tf2_geometry_msgs.do_transform_point(red_center_kinect,
                                                      base2kinect_tf)

    red_pose = Pose()
    red_pose.position = red_center.point
    red_pose.position.z = red_pose.position.z - red_obj.bb_dims.z / 2
    red_pose.orientation = base_tf.transform.rotation
    red_kdl = tf_conversions.fromMsg(red_pose)
    red_pos = red_kdl.p
    red_rot = PyKDL.Rotation(-base_unitX, -base_unitY, base_unitZ)
    red_kdl = PyKDL.Frame(red_rot, red_pos)
    red_pose = tf_conversions.toMsg(red_kdl)

    red_frame = TransformStamped()
    red_frame.header.frame_id = base_frame
    red_frame.header.stamp = rospy.Time.now()
    red_frame.child_frame_id = 'red_frame'
    red_frame.transform.translation = red_pose.position
    red_frame.transform.rotation = red_pose.orientation

    return blue_frame, red_frame
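The TransformStamped messages returned above are ready to be broadcast on tf. A minimal usage sketch, assuming the detected objects and transforms are already available (blue_obj, red_obj, base_tf and base2kinect_tf are placeholders here, not part of the original snippet):

import rospy
import tf2_ros

# Hypothetical usage: broadcast the two object frames built by get_obj_frames().
rospy.init_node('object_frame_broadcaster')
broadcaster = tf2_ros.TransformBroadcaster()
blue_frame, red_frame = get_obj_frames(blue_obj, red_obj, base_tf, base2kinect_tf)
broadcaster.sendTransform(blue_frame)
broadcaster.sendTransform(red_frame)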
Example No. 2
    def visualize(self, path, start, goal):
        '''
        Publish various visualization messages.
        '''

        #rospy.loginfo('visualizing start')
        s = PointStamped()
        s.header = Utils.make_header("/map")
        s.point.x = start[0]
        s.point.y = start[1]
        s.point.z = 0
        self.start_pub.publish(s)

        #rospy.loginfo('visualizing goal')
        g = PointStamped()
        g.header = Utils.make_header("/map")
        g.point.x = goal[0]
        g.point.y = goal[1]
        g.point.z = 0
        self.goal_pub.publish(g)

        #rospy.loginfo('visualizing path')
        p = Path()
        p.header = Utils.make_header("/map")
        for point in path:
            pose = PoseStamped()
            pose.header = Utils.make_header("/map")
            pose.pose.position.x = point[0]
            pose.pose.position.y = point[1]
            pose.pose.orientation = Utils.angle_to_quaternion(0)
            p.poses.append(pose)
        self.path_pub.publish(p)
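Utils.make_header and Utils.angle_to_quaternion are helpers not shown in this snippet; a minimal sketch of what they would have to provide, assuming standard std_msgs/tf conversions (this is an illustration, not the original implementation):

import rospy
import tf.transformations
from std_msgs.msg import Header
from geometry_msgs.msg import Quaternion

def make_header(frame_id, stamp=None):
    # Build a std_msgs/Header for the given frame, stamped "now" by default.
    header = Header()
    header.frame_id = frame_id
    header.stamp = stamp if stamp is not None else rospy.Time.now()
    return header

def angle_to_quaternion(angle):
    # Convert a planar yaw angle (radians) into a geometry_msgs/Quaternion.
    q = tf.transformations.quaternion_from_euler(0.0, 0.0, angle)
    return Quaternion(x=q[0], y=q[1], z=q[2], w=q[3])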
Example No. 3
    def filter_scan(self, scan):
        # A callback to filter the input scan and output a new filtered scan

        # If a polygon has been initialized, filter the scan against it;
        # otherwise (else branch) just pass the received scan through unchanged
        if self._poly_init:
            self.new_scan = scan
            self.new_scan.range_min = 0.1
            self.new_scan.range_max = 30.0
            self.new_scan.ranges = list(scan.ranges)

            self._cloud = self._laser_projector.projectLaser(scan)
            # gen = pc2.read_points(self._cloud, skip_nans = True, field_names=("x", "y", "z"))
            # self.xyz_generator = gen

            laser_point = PointStamped()
            laser_point.header = scan.header

            # If the polygon and the points being read are in different frames we'll transform the polygon
            if self._current_poly.header.frame_id != laser_point.header.frame_id:
                # print("Tranforming the polygon")
                # tf_listener.waitForTransform(point.header.frame_id, polygon.header.frame_id, rospy.Time(), rospy.Duration(0.10))
                i = 0
                temp_poly_point = PointStamped()
                for v in self._current_poly.polygon.points:
                    temp_poly_point.header = self._current_poly.header
                    temp_poly_point.point = v  # self._current_poly.polygon.points[i]
                    temp_poly_point = tf_listener.transformPoint(
                        laser_point.header.frame_id, temp_poly_point)
                    self._current_poly.polygon.points[
                        i] = temp_poly_point.point
                    i = i + 1
                self._current_poly.header.frame_id = laser_point.header.frame_id

            i = 0
            for p in pc2.read_points(self._cloud,
                                     field_names=("x", "y", "z"),
                                     skip_nans=True):
                # print " x : %f  y: %f  z: %f" %(p[0],p[1],p[2])
                laser_point.point.x = p[0]
                laser_point.point.y = p[1]
                laser_point.point.z = p[2]

                # The projected cloud skips inf readings from the original scan,
                # so advance the index past them to keep it aligned with the cloud
                while scan.ranges[i] == numpy.inf:
                    i = i + 1

                # Check to see in the laser x and y are within the polygon
                in_poly = point_in_poly(self._current_poly, laser_point)

                # If the point is within the polygon we should throw it out (give it a value of inf)
                if in_poly: self.new_scan.ranges[i] = numpy.inf
                i = i + 1

        else:
            self.new_scan = scan
        self._send_msg = True
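The point_in_poly helper called above is not part of the snippet; a sketch of a standard even-odd (ray-casting) test that would fit the call site, assuming the polygon and the point are expressed in the same frame (which filter_scan guarantees by transforming the polygon first):

def point_in_poly(poly_stamped, point_stamped):
    # Even-odd ray-casting test in the XY plane; z is ignored.
    x = point_stamped.point.x
    y = point_stamped.point.y
    vertices = poly_stamped.polygon.points
    inside = False
    j = len(vertices) - 1
    for i in range(len(vertices)):
        xi, yi = vertices[i].x, vertices[i].y
        xj, yj = vertices[j].x, vertices[j].y
        # Toggle whenever a ray cast in +x from the point crosses this edge
        if ((yi > y) != (yj > y)) and x < (xj - xi) * (y - yi) / (yj - yi) + xi:
            inside = not inside
        j = i
    return inside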
Example No. 4
    def face_callback(self, msg):
        if not self.found_face:
            face = PointStamped()
            face.header = msg.people[0].header
            face.point = msg.people[0].pos
            self.face_parent_frame = msg.people[0].header.frame_id
            # self.listener.waitForTransform(face.header.frame_id, 'base_link', rospy.Time.now(), rospy.Duration(5.0))
            d = sqrt(face.point.x * face.point.x + face.point.y * face.point.y)
       
            # Change the axes from camera-type axes 
            self.quaternion = quaternion_from_euler(pi/2, pi/2, 0.0)
            pose = PoseStamped()
            pose.header = face.header
            pose.pose.position = face.point
            pose.pose.orientation.x = self.quaternion[0]
            pose.pose.orientation.y = self.quaternion[1]
            pose.pose.orientation.z = self.quaternion[2]
            pose.pose.orientation.w = self.quaternion[3]

            # Transform to base_link
            # pose = self.listener.transformPose('base_link', pose)
            face = pose.pose.position
            self.quaternion = (pose.pose.orientation.x, pose.pose.orientation.y, pose.pose.orientation.z, pose.pose.orientation.w)


            self.origin = (face.x, face.y, face.z)

            # Flip the bit
            self.found_face = True
Example No. 5
def euroc_object_to_odom_combined(euroc_object):
    header = Header(0, rospy.Time(0), euroc_object.frame_id)

    # Convert the centroid
    camera_point = PointStamped()
    camera_point.header = header
    camera_point.point = euroc_object.c_centroid
    odom_point = manipulation.transform_to(camera_point, '/odom_combined')
    euroc_object.c_centroid = odom_point.point
    euroc_object.frame_id = '/odom_combined'

    # Convert the cuboid
    if euroc_object.c_cuboid_success:
        cuboid_posestamped = PoseStamped(
            header, euroc_object.object.primitive_poses[0])
        cuboid_posestamped = manipulation.transform_to(cuboid_posestamped,
                                                       '/odom_combined')
        euroc_object.object.primitive_poses[0] = cuboid_posestamped.pose
        euroc_object.object.header.frame_id = '/odom_combined'

    # Convert the mpe_object
    if euroc_object.mpe_success:
        for i in range(0, len(euroc_object.mpe_object.primitive_poses)):
            posestamped = PoseStamped(
                header, euroc_object.mpe_object.primitive_poses[i])
            posestamped = manipulation.transform_to(posestamped,
                                                    '/odom_combined')
            euroc_object.mpe_object.primitive_poses[i] = posestamped.pose
            euroc_object.mpe_object.header.frame_id = '/odom_combined'
Example No. 6
 def _obj_detection_cb(self, msg):
     try:
         self._client.wait_for_service(timeout=_CONNECTION_TIMEOUT)
     except Exception as e:
         rospy.logerr(e)
         return
     # Get the position of the target object from a camera image
     req = InversePerspectiveTransformRequest()
     req.target_frame = _SENSOR_FRAME_ID
     req.depth_registration = True
     for detection in msg.detections:
         target_point = Point2DStamped()
         target_point.point.x = int(detection.x)
         target_point.point.y = int(detection.y)
         req.points_2D.append(target_point)
     try:
         res = self._client(req)
         rospy.loginfo(res)
         if self._visualize:
             for point in res.points_3D:
                 point_3d = PointStamped()
                 point_3d.header = point.header
                 point_3d.point =  point.point
                 self._result_pub.publish(point_3d)
                 rospy.sleep(.01)
     except rospy.ServiceException as e:
         rospy.logerr(e)
         return
     if len(res.points_3D) < 1:
         rospy.logerr('There is no detected point')
         return
Example No. 7
 def point_stamped_cb(self, msg):
     if not self.is_camera_arrived:
         return
     pose_stamped = PoseStamped()
     pose_stamped.pose.position.x = msg.point.x
     pose_stamped.pose.position.y = msg.point.y
     pose_stamped.pose.position.z = msg.point.z
     try:
         transform = self.tf_buffer.lookup_transform(
             self.frame_id, msg.header.frame_id, rospy.Time(0),
             rospy.Duration(1.0))
     except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
             tf2_ros.ExtrapolationException) as e:
         rospy.logerr('lookup_transform failed: {}'.format(e))
         return
     position_transformed = tf2_geometry_msgs.do_transform_pose(
         pose_stamped, transform).pose.position
     pub_point = (position_transformed.x, position_transformed.y,
                  position_transformed.z)
     u, v = self.cameramodels.project3dToPixel(pub_point)
     rospy.logdebug("u, v : {}, {}".format(u, v))
     pub_msg = PointStamped()
     pub_msg.header = msg.header
     pub_msg.header.frame_id = self.frame_id
     pub_msg.point.x = u
     pub_msg.point.y = v
     pub_msg.point.z = 0
     self.pub.publish(pub_msg)
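The cameramodels object used for project3dToPixel above is presumably an image_geometry.PinholeCameraModel initialised from the camera calibration; a minimal setup sketch (the topic name is an assumption):

import rospy
from image_geometry import PinholeCameraModel
from sensor_msgs.msg import CameraInfo

cameramodels = PinholeCameraModel()

def camera_info_cb(info_msg):
    # Initialise the pinhole model from the CameraInfo calibration message.
    cameramodels.fromCameraInfo(info_msg)

rospy.Subscriber('/camera/rgb/camera_info', CameraInfo, camera_info_cb, queue_size=1)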
Example No. 8
 def publish(self, imu_msg, nav_msg):
     if self.tfBuffer.can_transform(nav_msg.header.frame_id, "odom",
                                    rospy.Time.now(),
                                    rospy.Duration.from_sec(0.5)):
         if self.tfBuffer.can_transform(imu_msg.header.frame_id,
                                        "base_link", rospy.Time.now(),
                                        rospy.Duration.from_sec(0.5)):
             unfiltered_msg = Odometry()
             unfiltered_msg.header.frame_id = "odom"
             unfiltered_msg.child_frame_id = "base_link"
             imu_q = QuaternionStamped()
             imu_q.quaternion = imu_msg.orientation
             imu_q.header = imu_msg.header
             unfiltered_msg.pose.pose.orientation = self.tfListener.transformQuaternion(
                 "base_link",
                 imu_q).quaternion  #TF2 FOR KINETIC JUST AIN'T WORKING
             nav_p = PointStamped()
             nav_p.point = nav_msg.pose.pose.position
             nav_p.header = nav_msg.header
             unfiltered_msg.pose.pose.position = self.tfListener.transformPoint(
                 "odom", nav_p).point
             self.pub.publish(unfiltered_msg)
         else:
             rospy.logwarn("{} to base_link tf unavailable!".format(
                 imu_msg.header.frame_id))
     else:
         rospy.logwarn("{} to odom tf unavailable!".format(
             nav_msg.header.frame_id))
Example No. 9
    def move(self, group, target="", pose=None):

        assert target != "" or pose is not None

        # group.set_workspace([minX, minY, minZ, maxX, maxY, maxZ])

        if pose is not None:
            group.set_pose_target(pose)
            pt = PointStamped()
            pt.header = pose.header
            pt.point = pose.pose.position
            self.look_at_cb(pt)
        else:

            group.set_named_target(target)



        group.allow_looking(False)
        group.allow_replanning(False)
        group.set_num_planning_attempts(1)

        cnt = 3

        while cnt > 0:

            if group.go(wait=True):
                return True

            cnt -= 1
            rospy.sleep(1)

        return False
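The group argument above appears to be a MoveIt move group; a hedged setup sketch for calling this method (the group name 'arm' and the owning object are assumptions):

import sys
import moveit_commander
from geometry_msgs.msg import PoseStamped

moveit_commander.roscpp_initialize(sys.argv)
group = moveit_commander.MoveGroupCommander('arm')

target = PoseStamped()
target.header.frame_id = group.get_planning_frame()
target.pose.position.x = 0.4
target.pose.position.z = 0.6
target.pose.orientation.w = 1.0
# node.move(group, pose=target)  # 'node' is whatever object owns move()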
Example No. 10
def to_msg_vector(vector):
    msg = PointStamped()
    msg.header = vector.header
    msg.point.x = vector[0]
    msg.point.y = vector[1]
    msg.point.z = vector[2]
    return msg
Example No. 11
def cb(msg):
    out_msg = PointStamped()
    out_msg.header = msg.header
    out_msg.point.x = pt_x
    out_msg.point.y = pt_y
    out_msg.point.z = pt_z
    pub.publish(out_msg)
Example No. 12
    def _obstacle_callback(self, msg):
        try:
            transform = self.tf_buffer.lookup_transform(
                self.map_frame, msg.header.frame_id, msg.header.stamp,
                rospy.Duration(1.0))
        except (tf2.ConnectivityException, tf2.LookupException,
                tf2.ExtrapolationException) as e:
            rospy.logwarn(e)
            return

        for o in msg.obstacles:
            point = PointStamped()
            point.header = msg.header
            point.point = o.pose.pose.pose.position
            # Transfrom robot to map frame
            point = tf2_geometry_msgs.do_transform_point(point, transform)
            # Update robots that are close together
            cleaned_robots = []
            for old_robot in self.robots:
                # Check if there is another robot in memory close to it
                if np.linalg.norm(
                        ros_numpy.numpify(point.point) - ros_numpy.numpify(
                            old_robot.point)) > self.robot_merge_distance:
                    cleaned_robots.append(old_robot)
            # Update our robot list with a new list that does not contain the duplicates
            self.robots = cleaned_robots
            # Append our new robots
            self.robots.append(point)
Example No. 13
    def poses_callback(self, pose_msg, identity):
        """Callback for poses of other agents"""
        rospy.loginfo_once('Got other poses callback')
        if self.pose is None:
            return

        target_frame = f'drone_{self.my_id}/base_link'
        source_frame = 'world'

        try:
            transform = self.buffer.lookup_transform(target_frame=target_frame,
                                                     source_frame=source_frame,
                                                     time=rospy.Time(0))
        except Exception as e:
            print(e)
            return

        point = PointStamped()
        point.header = pose_msg.header
        point.point.x = pose_msg.pose.position.x
        point.point.y = pose_msg.pose.position.y
        point.point.z = pose_msg.pose.position.z

        point_tf = tf2_geometry_msgs.do_transform_point(point, transform)
        p = point_tf.point

        self.poses[identity] = np.array([p.x, p.y, p.z])
Example No. 14
    def _create_initial_frontier_goal(self):
        frontier_gh = ExploreTaskGoal()
        curr_ts = rospy.Time.now()

        curr_header = Header()
        curr_header.frame_id = "map"
        curr_header.seq = 1
        curr_header.stamp = curr_ts
        
        # Point Stamped
        pt_s = PointStamped()
        pt_s.header = curr_header
        pt_s.point.x = 1.0
        # Polygon Stamped
        # The polygon bounds the exploration area; leaving it empty would make
        # the exploration unbounded
        pg_s = PolygonStamped()
        pg_s.header = curr_header
        vertices = [(5.0, 5.0), (-5.0, 5.0), (-5.0, -5.0), (5.0, -5.0)]
        for vertex in vertices:
            pg_s.polygon.points.append(Point32(x=vertex[0], y=vertex[1]))

        #frontier_gh.header = curr_header
        #frontier_gh.goal_id.stamp = curr_ts
        #frontier_gh.goal_id.id = 'initial_frontier_marco'
        frontier_gh.explore_boundary = pg_s
        frontier_gh.explore_center = pt_s
        
        rospy.loginfo(frontier_gh)  
        return frontier_gh  
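The ExploreTaskGoal built above would normally be sent to the frontier_exploration action server; a hedged usage sketch (the server name 'explore_server' and the object owning the method are assumptions):

import actionlib
import rospy
from frontier_exploration.msg import ExploreTaskAction

client = actionlib.SimpleActionClient('explore_server', ExploreTaskAction)
client.wait_for_server()
client.send_goal(node._create_initial_frontier_goal())  # 'node' owns the method above
client.wait_for_result(rospy.Duration(300.0))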
Example No. 15
def main():

    rospy.init_node('kobuki_exploration')

    area_to_explore = PolygonStamped()
    center_point = PointStamped()

    now = rospy.Time.now()

    area_to_explore.header.stamp = now
    area_to_explore.header.frame_id = "map"
    points = [
        Point32(-2.65, -2.56, 0.0),
        Point32(5.41, -2.7, 0.0),
        Point32(5.57, 4.44, 0.0),
        Point32(-2.75, 4.37, 0.0)
    ]

    area_to_explore.polygon = Polygon(points)

    center_point.header = area_to_explore.header
    center_point.point.x = 1.83
    center_point.point.y = 1.57
    center_point.point.z = 0.0

    brain = PR2RobotBrain(area_to_explore, center_point)
    brain.getReady()
    brain.loop()
Example No. 16
	def runFilter(self):

		r = rospy.Rate(self.filterRate) 
		while not rospy.is_shutdown():
			if self.flag_reset:
				self.kf.reset(self.getReset())
				self.flag_reset = 0

			self.kf.iteration(self.getMeasures())

			self.pose_pub_.publish(self.kf.getState())
			
			person_point = PointStamped()
			person_point.header = self.kf.getState().header
			person_point.header.stamp = rospy.Time.now()
			person_point.point = self.kf.getState().pose.position
			self.point_pub_.publish(person_point)

			self.tf_person.sendTransform((self.kf.getState().pose.position.x,self.kf.getState().pose.position.y,0),
                     		(self.kf.getState().pose.orientation.x,self.kf.getState().pose.orientation.y,self.kf.getState().pose.orientation.z,self.kf.getState().pose.orientation.w),
                     		rospy.Time.now(),
                     		"person_link",
                     		self.kf.getState().header.frame_id)

			r.sleep()
Example No. 17
    def transform_object_pose_to_robot_rf(self, object_pose):
        # The Kinect camera axes are not the same as the robot axes, so we first
        # perform the transforms needed to align both axes and then transform
        # the camera reference frame into the robot's reference frame.
        # goal_pose is the final pose of the marker w.r.t. the robot's rf.

        transform = TransformStamped()
        transform.child_frame_id = 'camera_rgb_optical_frame'
        transform.transform.translation.x = 0.188441686871
        transform.transform.translation.y = -0.0448522594062
        transform.transform.translation.z = 0.80968

        transform.transform.rotation.x = -0.701957989835
        transform.transform.rotation.y = 0.701150861488
        transform.transform.rotation.z = -0.0883868019887
        transform.transform.rotation.w = 0.0884885482708

        trans_pose = tf2_geometry_msgs.do_transform_pose(
            object_pose, transform)
        #finetuning
        trans_pose.pose.position.x += 0.08
        trans_pose.pose.position.z -= 0.07
        trans_pose.pose.orientation.x = 1.0
        trans_pose.pose.orientation.y = 0
        trans_pose.pose.orientation.z = 0
        trans_pose.pose.orientation.w = 0
        self.transposearray_pub.publish(trans_pose)
        point = PointStamped()
        point.header = object_pose.header
        point.header.frame_id = "base"
        point.point = trans_pose.pose.position
        self.testtranspoint_pub.publish(point)
        return trans_pose
Example No. 18
 def ar_cb(self, markers):
     for m in markers.markers:
         try:
             self.listener.waitForTransform(self.target_frame,
                                            '/RotMarker%02d' % m.id,
                                            m.header.stamp,
                                            rospy.Duration(1.0))
             self.listener.waitForTransform("/%s/ground" % self.name,
                                            m.header.frame_id,
                                            m.header.stamp,
                                            rospy.Duration(1.0))
             ((x, y, z),
              rot) = self.listener.lookupTransform(self.target_frame,
                                                   '/RotMarker%02d' % m.id,
                                                   m.header.stamp)
             L = vstack([x, y])
             # Transform the measurement into the rover reference frame
             m_pose = PointStamped()
             m_pose.header = m.header
             m_pose.point = m.pose.pose.position
             m_pose = self.listener.transformPoint("/%s/ground" % self.name,
                                                   m_pose)
             # Call the filter with the observed landmark
             Z = vstack([m_pose.point.x, m_pose.point.y])
             self.filter.update_ar(Z, L, self.ar_precision)
             self.filter.publish(self.pose_pub, self.target_frame,
                                 m.header.stamp)
             #self.kf.broadcast(self.broadcaster, self.target_frame, m.header.stamp)
         except:
             continue
Example No. 19
 def ar_cb(self, markers):
     for m in markers.markers:
         try:
             self.listener.waitForTransform(self.target_frame,
                                            '/%s/ground' % self.name,
                                            m.header.stamp,
                                            rospy.Duration(1.0))
             self.listener.waitForTransform("/%s/ground" % self.name,
                                            m.header.frame_id,
                                            m.header.stamp,
                                            rospy.Duration(1.0))
             ((x, y, z),
              rot) = self.listener.lookupTransform(self.target_frame,
                                                   '/%s/ground' % self.name,
                                                   m.header.stamp)
             euler = euler_from_quaternion(rot)
             X = vstack([x, y, euler[2]])
             m_pose = PointStamped()
             m_pose.header = m.header
             m_pose.point = m.pose.pose.position
             m_pose = self.listener.transformPoint("/%s/ground" % self.name,
                                                   m_pose)
             Z = vstack([m_pose.point.x, m_pose.point.y])
             self.mapper.update_ar(Z, X, m.id, self.ar_precision)
         except:
             continue
     self.mapper.publish(self.target_frame, markers.header.stamp)
Example No. 20
def estimation(msg):
    uwbdis = msg.distance

    point = PointStamped()
    point.header = Header()
    point.header.frame_id = "vicon"
    point.header.stamp = rospy.Time.now()
    point.point = msg.position

 
    listener.waitForTransform("ned", "vicon", rospy.Time.now(), rospy.Duration(0.05))
    point_tf = listener.transformPoint("ned", point)
    uwbanchor = array([point_tf.point.x, point_tf.point.y, point_tf.point.z])

    imp = array(msg.point)
    global q,a,r,xe
    
    #xe, _ = uwb.locate(xe, Q, 1.0/100, uwbdis, uwbanchor, q, a, r)
    xe, _ = vision.locate(xe, Q, 1.0/100, imp, visionanchor, q, a, r)
 
    
    x_msg = state()
    x_msg.state.header = Header()
    x_msg.state.header.frame_id = "ned"
    x_msg.state.header.stamp = rospy.Time.now()
    x_msg.state.pose.position = Point(xe[0], xe[1], xe[2])
    x_msg.state.pose.orientation = Quaternion(xe[3], xe[4], xe[5], xe[6])
    x_msg.velocity = Vector3(xe[7], xe[8], xe[9])
    x_msg.bias = xe[10]
    pub.publish(x_msg)
Example No. 22
    def pose_cb(self, pose):
        m_pose = PointStamped()
        m_pose.header = pose.header
        m_pose.point = pose.pose.position
        m_pose = self.listener.transformPoint(self.reference_frame, m_pose)
        self.goal_x = m_pose.point.x
        self.goal_y = m_pose.point.y
        print "New goal: %.2f %.2f" % (self.goal_x, self.goal_y)

        marker = Marker()
        marker.header = pose.header
        marker.ns = "goal_marker"
        marker.id = 10
        marker.type = Marker.CYLINDER
        marker.action = Marker.ADD
        marker.pose = pose.pose
        marker.scale.x = 0.1
        marker.scale.y = 0.1
        marker.scale.z = 0.5
        marker.color.a = 1.0
        marker.color.r = 1.0
        marker.color.g = 1.0
        marker.color.b = 0.0
        marker.lifetime.secs = -1.0
        self.marker_pub.publish(marker)
Example No. 23
 def getAveragePosByTime(self, dTime):
     if len(self.positions) == 0:
         return None, 0
     now = time.time()
     avgX, avgY, avgZ, num = 0, 0, 0, 0
     for point in self.positions[-1::-1]:  # reverse order
         if now - point.header.stamp.secs <= dTime:
             avgX += point.point.x
             avgY += point.point.y
             avgZ += point.point.z
             num += 1
         else:
             break  # Assume times are in non-decreasing order
     if num == 0:
         return None, num
     avgX /= num
     avgY /= num
     avgZ /= num
     pointMsg = PointStamped()
     pointMsg.header = self.positions[-1].header
     pointMsg.point = Point()
     pointMsg.point.x = avgX
     pointMsg.point.y = avgY
     pointMsg.point.z = avgZ
     return pointMsg, num
Example No. 24
    def get_explored_region(self):
        """ Get all the explored cells on the grid map"""

        # TODO refactor the code, right now hardcoded to quickly solve the problem of non-common areas.
        # TODO more in general use matrices.
        def nearest_multiple(number, res=0.2):
            return np.round(res * np.floor(round(number / res, 2)), 1)

        p_in_sender = PointStamped()
        p_in_sender.header = self.header

        poses = set()
        self.tf_listener.waitForTransform("robot_0/map", self.header.frame_id,
                                          rospy.Time(), rospy.Duration(4.0))
        p_in_sender.header.stamp = rospy.Time()
        for x in range(self.grid.shape[1]):
            for y in range(self.grid.shape[0]):
                if self.is_free(x, y):
                    p = self.grid_to_pose((x, y))
                    p_in_sender.point.x = p[0]
                    p_in_sender.point.y = p[1]

                    p_in_common_ref_frame = self.tf_listener.transformPoint(
                        "robot_0/map", p_in_sender).point
                    poses.add((nearest_multiple(p_in_common_ref_frame.x),
                               nearest_multiple(p_in_common_ref_frame.y)))
        return poses
Example No. 25
def main():
    rospy.init_node("hallucinated_lookat")
    wait_for_time()

    start = PoseStamped()
    start.header.frame_id = 'base_link'
                                                                               
    reader = ArTagReader(tf.TransformListener())
    rospy.sleep(0.5)
    sub = rospy.Subscriber("/ar_pose_marker", AlvarMarkers, callback=reader.callback, queue_size=10) # Subscribe to AR tag poses, use reader.callback
    
    while len(reader.markers) == 0: 
        rospy.sleep(0.1)

    head = robot_api.FullBodyLookAt(tf_listener=reader._tf_listener)
    sp = PointStamped()
    for marker in reader.markers:
        # TODO: get the pose to move to
        sp.header = marker.header
        sp.point = marker.pose.pose.position
        # print(sp)
        error = head.look_at(sp)
        if error:
            rospy.loginfo('Moved to marker {}'.format(marker.id))
            # return
        else:
            rospy.logwarn('Failed to move to marker {}'.format(marker.id))
    rospy.logerr('Failed to move to any markers!')
Example No. 26
    def _transform_object(self, box_msg, obj_pose, target_frame):
        """ Transform object poses and box message to a target frame """
        try:
            common_time = self._tf_listener.getLatestCommonTime(
                target_frame, obj_pose.header.frame_id)
            obj_pose.header.stamp = common_time
            self._tf_listener.waitForTransform(target_frame,
                                               obj_pose.header.frame_id,
                                               obj_pose.header.stamp,
                                               rospy.Duration(1))
            # transform object pose
            old_header = obj_pose.header
            obj_pose = self._tf_listener.transformPose(target_frame, obj_pose)
            # box center is the object position as defined in bounding_box_wrapper.cpp
            box_msg.center = obj_pose.pose.position

            # transform box vertices
            for vertex in box_msg.vertices:
                stamped = PointStamped()
                stamped.header = old_header
                stamped.point = vertex
                transformed = self._tf_listener.transformPoint(
                    target_frame, stamped)
                vertex.x = transformed.point.x
                vertex.y = transformed.point.y
                vertex.z = transformed.point.z

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.logerr('Unable to transform %s -> %s' %
                         (obj_pose.header.frame_id, target_frame))

        return box_msg, obj_pose
Example No. 27
 def _find_stair_base(self, cloud, distance, center, width):
     """ Try to find the base of the stairs really exactly """
     _, details = self.zarj.eyes.get_cloud_image_with_details(cloud)
     occ = 0
     pnt = None
     while pnt is None:
         if not np.isnan(details[distance][center - occ][0]):
             pnt = details[distance][center - occ]
             break
         if not np.isnan(details[distance][center + occ][0]):
             pnt = details[distance][center + occ]
             break
         occ += 1
         if occ >= width / 6:
             occ = 0
             distance -= 1
     if pnt is not None:
         point = PointStamped()
         point.header = cloud.header
         point.point.x = pnt[0]
         point.point.y = pnt[1]
         point.point.z = pnt[2]
         pnt = self.zarj.transform.tf_buffer.transform(
             point, self.zarj.walk.lfname)
         return pnt.point.x
     log("ERROR: Could not find stair base")
     return None
Example No. 29
def get_rightwall_endpoint(laser_data):
    p1 = get_lidar_point_at_angle(laser_data, -90)
    p2 = get_lidar_point_at_angle(laser_data, -90 + 20)
    delta_y = p2[1] - p1[1]
    delta_x = p2[0] - p1[0]
    denominator = math.sqrt(delta_y**2 + delta_x**2)
    numerator_const_term = p2[0] * p1[1] - p2[1] * p1[0]

    wall_at_infinity = p1 + (p2 - p1) * 40
    rightwall = PolygonStamped()
    rightwall.header = Header(stamp=rospy.Time.now(), frame_id="laser")
    rightwall.polygon.points = [
        Point32(x=p1[0], y=p1[1]),
        Point32(x=wall_at_infinity[0], y=wall_at_infinity[1])
    ]
    pub_rightwall.publish(rightwall)

    angle = -90 + 20
    endpoint = get_lidar_point_at_angle(laser_data, angle)
    for i in range(20):
        angle += 5
        candid = get_lidar_point_at_angle(laser_data, angle)
        distance = abs(delta_y * candid[0] - delta_x * candid[1] +
                       numerator_const_term) / denominator
        if distance < 0.5:
            endpoint = candid
        else:
            break

    endpoint_msg = PointStamped()
    endpoint_msg.header = Header(stamp=rospy.Time.now(), frame_id="laser")
    endpoint_msg.point = Point(endpoint[0], endpoint[1], 0)
    pub_rightwall_endpoint.publish(endpoint_msg)

    return endpoint
Example No. 30
    def face_callback(self, msg):
        if not self.found_face:
            face = PointStamped()
            face.header = msg.people[0].header
            face.point = msg.people[0].pos
            self.face_parent_frame = msg.people[0].header.frame_id
            # self.listener.waitForTransform(face.header.frame_id, 'base_link', rospy.Time.now(), rospy.Duration(5.0))
            d = sqrt(face.point.x * face.point.x + face.point.y * face.point.y)

            # Change the axes from camera-type axes
            self.quaternion = quaternion_from_euler(pi / 2, pi / 2, 0.0)
            pose = PoseStamped()
            pose.header = face.header
            pose.pose.position = face.point
            pose.pose.orientation.x = self.quaternion[0]
            pose.pose.orientation.y = self.quaternion[1]
            pose.pose.orientation.z = self.quaternion[2]
            pose.pose.orientation.w = self.quaternion[3]

            # Transform to base_link
            # pose = self.listener.transformPose('base_link', pose)
            face = pose.pose.position
            self.quaternion = (pose.pose.orientation.x,
                               pose.pose.orientation.y,
                               pose.pose.orientation.z,
                               pose.pose.orientation.w)

            self.origin = (face.x, face.y, face.z)

            # Flip the bit
            self.found_face = True
Example No. 31
    def transformMap(self, odometry):

        odom_position = PointStamped()
        odom_position.header = odometry.header
        odom_position.point = odometry.pose.pose.position

        try:
            # Get transform
            self._listener.listener().waitForTransform(
                self._target_frame, odometry.header.frame_id,
                odometry.header.stamp, self._timeout)
            return self._listener.listener().transformPoint(
                self._target_frame, odom_position)

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException) as e:
            Logger.logwarn(
                'Failed to get the transformation to target_frame\n   %s' %
                str(e))
            self._failed = True
            return None
        except Exception as e:
            Logger.logwarn(
                'Failed to get the transformation to target frame due to unknown error\n   %s'
                % str(e))
            self._failed = True
            return None
Example No. 32
def calculate_goalpoint_distance_from_wall(laser_data, distance):
    p1 = get_lidar_point_at_angle(laser_data, -90)
    p2 = get_lidar_point_at_angle(laser_data, -80)
    p3 = get_lidar_point_at_angle(laser_data, -70)

    def calc_point(p1, p2, d):
        mp = (p1+p2)/2
        slopeinv = (p1[0]-p2[0])/(p1[1]-p2[1])
        dx = abs(np.sqrt((d**2)/(1+(1/slopeinv**2))))
        dy = abs(np.sqrt((d**2)/(1+(slopeinv**2))))
        rp = np.array([-1*(mp[0]-dx), (mp[1]-dy)])
        # print("GP Details: P1= "+str(p1), "P2="+str(p2), "MP= "+str(mp), "Dx= "+str(dx), "Dy= "+str(dy), "RP= "+str(rp))
        return rp

    gp1 = calc_point(p1, p2, distance)
    gp2 = calc_point(p2, p3, distance)

    # Publish for visualization

    center = PointStamped()
    center.header = Header(stamp=rospy.Time.now(), frame_id="laser")
    center.point = Point(gp1[0], gp1[1], 0)
    pub_point1.publish(center)
    center.point = Point(gp2[0], gp2[1], 0)
    pub_point2.publish(center)
    
    return (gp1 + gp2)/2
Example No. 33
    def compute_center(self, point_array, header):
        mean_x = 0.0
        mean_y = 0.0
        mean_z = 0.0
        length = len(point_array)
        for p in point_array:
            mean_x += p.x
            mean_y += p.y
            mean_z += p.z
        mean_x /= length
        mean_y /= length
        mean_z /= length

        if -0.3 < mean_y < 0:  #(sqrt((mean_x**2) + (mean_y**2) + (mean_z**2))) > 0.8 and (sqrt((mean_x**2) + (mean_y**2) + (mean_z**2))) < 2.0:
            po = PointStamped()
            po.header = header
            po.point.x = mean_x
            po.point.y = mean_y
            po.point.z = mean_z

            p = PoseStamped()
            p.header = header
            p.pose.position.x = mean_x
            p.pose.position.y = mean_y
            p.pose.position.z = mean_z
            self.point_pub.publish(po)
            pose = self.transform_object_pose_to_robot_rf(p)
            return pose
Example No. 34
 def timer_callback(self):
     self.get_logger().info('Timer callback: "%d"' % self.i)
     self.act_gen_admin_request(self.i)
     if self.i == 3:
         self.i = 0
     else:
         self.i += 1
     self.state_gen_admin_request()
     goal = PointStamped()
     goal.header = Header()
     goal.point = Point()
     goal.point.x = 0.1
     goal.point.y = 0.1
     goal.point.z = 0.1
     entt = self.get_entity("Suicide")
     if (entt == None):
         print("No entity found")
         return
     else:
         self.move_entity_to_goal(entt, goal)
     enn = self.get_enemy("Sniper")
     if (enn == None):
         print("No ennemy found")
         return
     else:
         self.check_line_of_sight_request(entt, enn)
     self.get_all_possible_ways_request(entt, goal.point)
Example No. 35
def main():
  
    rospy.init_node('pr2_state_machine')
    brain = PR2RobotBrain()
    brain.getReady()
    
    area_to_explore = PolygonStamped()
    center_point = PointStamped()
    
    now = rospy.Time.now()
    
    area_to_explore.header.stamp = now
    area_to_explore.header.frame_id = "map"
    points = [Point32(-1.65, -1.56, 0.0),
              Point32(5.41, -1.7, 0.0),
              Point32(5.57, 4.44, 0.0),
              Point32(-1.75, 4.37, 0.0)]
              
    area_to_explore.polygon = Polygon(points)
    
    center_point.header = area_to_explore.header
    center_point.point.x = 1.83
    center_point.point.y = 1.57
    center_point.point.z = 0.0
    
    brain = PR2RobotBrain(area_to_explore, center_point)
    brain.loop()
Example No. 36
    def runFilter(self):

        r = rospy.Rate(self.filterRate)
        while not rospy.is_shutdown():
            if self.flag_reset:
                self.kf.reset(self.getReset())
                self.flag_reset = 0

            self.kf.iteration(self.getMeasures())

            self.pose_pub_.publish(self.kf.getState())

            person_point = PointStamped()
            person_point.header = self.kf.getState().header
            person_point.header.stamp = rospy.Time.now()
            person_point.point = self.kf.getState().pose.position
            self.point_pub_.publish(person_point)

            self.tf_person.sendTransform(
                (self.kf.getState().pose.position.x,
                 self.kf.getState().pose.position.y, 0),
                (self.kf.getState().pose.orientation.x,
                 self.kf.getState().pose.orientation.y,
                 self.kf.getState().pose.orientation.z,
                 self.kf.getState().pose.orientation.w), rospy.Time.now(),
                "person_link",
                self.kf.getState().header.frame_id)

            r.sleep()
Example No. 37
    def pose_cb(self, pose):
        m_pose = PointStamped()
        m_pose.header = pose.header
        m_pose.point = pose.pose.position
        m_pose = self.listener.transformPoint(self.reference_frame,m_pose)
        self.goal_x = m_pose.point.x
        self.goal_y = m_pose.point.y
        print "New goal: %.2f %.2f" % (self.goal_x, self.goal_y)

        marker = Marker()
        marker.header = pose.header
        marker.ns = "goal_marker"
        marker.id = 10
        marker.type = Marker.CYLINDER
        marker.action = Marker.ADD
        marker.pose = pose.pose
        marker.scale.x = 0.1
        marker.scale.y = 0.1
        marker.scale.z = 0.5
        marker.color.a = 1.0
        marker.color.r = 1.0
        marker.color.g = 1.0
        marker.color.b = 0.0
        marker.lifetime.secs = -1.0
        self.marker_pub.publish(marker)
Example No. 38
 def point_cb(self, msg):
     #self.point = msg
     cloud = read_points_np(msg)
     point = PointStamped()
     point.header = msg.header
     if cloud.shape[1] == 0: return
     point.point.x, point.point.y, point.point.z = cloud[0][0]
     self.point = point
Example No. 39
def do_transform_point(point, transform):
    p = transform_to_kdl(transform) * PyKDL.Vector(point.point.x, point.point.y, point.point.z)
    res = PointStamped()
    res.point.x = p[0]
    res.point.y = p[1]
    res.point.z = p[2]
    res.header = transform.header
    return res
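This do_transform_point is essentially the tf2_geometry_msgs implementation (transform_to_kdl is assumed to come from tf2_kdl, or to be defined alongside it as in tf2_geometry_msgs). A short usage sketch with a hand-built transform and purely illustrative values:

from geometry_msgs.msg import PointStamped, TransformStamped

# Illustrative transform: translate 1 m along x, no rotation.
tf_msg = TransformStamped()
tf_msg.header.frame_id = 'map'
tf_msg.child_frame_id = 'base_link'
tf_msg.transform.translation.x = 1.0
tf_msg.transform.rotation.w = 1.0

pt = PointStamped()
pt.header.frame_id = 'base_link'
pt.point.x = 2.0

out = do_transform_point(pt, tf_msg)
print(out.point.x)  # 3.0, now expressed in the 'map' frame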
Example No. 40
def people_cb(meas):
    global mouth_center
    ps = PointStamped()
    ps.point = meas.pos
    ps.header = meas.header
    ps.header.stamp = rospy.Time(0)
    point = tfl.transformPoint(model.tf_frame, ps)
    mouth_center = model.project3dToPixel((point.point.x, point.point.y + mouth_offset, point.point.z))
Example No. 41
 def publish(self, msg):
     
     pt = PointStamped()
     
     pt.header = msg.header
     pt.point = msg.pos   
     
     self._point_pub.publish(pt)
Example No. 42
 def transform_pos(self, pt, hdr):
     ps = PointStamped()
     ps.point = pt
     ps.header = hdr
     #ps.header.stamp = hdr.stamp
     print "set:"+str(ps)
     self.tfl.waitForTransform(ps.header.frame_id, '/map', ps.header.stamp, rospy.Duration(3.0))
     point_in_map = self.tfl.transformPoint('/map', ps)
     return point_in_map
Example No. 43
 def handle_detection(self,detection):
     if self.done: return
     for i in range(len(detection.ids)):
         pt = PointStamped()
         pt.header = detection.header
         pt.point.x = detection.xs[i]
         pt.point.y = detection.ys[i]
         pt.point.z = detection.zs[i] + detection.hs[i]
         self.cylinders[detection.ids[i]].append((pt,detection.rs[i], detection.hs[i]))
     self.num_detections += 1
Example No. 44
def callback(data):
    #height = data.differential_height
    height = data.height
    
    rospy.loginfo(rospy.get_caller_id() + "Pressure: %f", height)
    pt = PointStamped()
    pt.header = data.header
    pt.point.z = height
    pub = rospy.Publisher('pressure_height_point', PointStamped, queue_size=1)
    pub.publish(pt)
Example No. 45
def poseStampedToPointStamped(poseStamped):
    """
    Converts PoseStamped to PointStamped
    """
    pointStamped = PointStamped()
    pointStamped.header = poseStamped.header
    pointStamped.point.x = poseStamped.pose.position.x
    pointStamped.point.y = poseStamped.pose.position.y
    pointStamped.point.z = poseStamped.pose.position.z

    return pointStamped
Example No. 46
    def move_to_point(self, pose):

        pt = PointStamped()
        pt.header = pose.header
        pt.point = pose.pose.position
        self.look_at_pub.publish(pt)

        self.group.set_pose_target(pose)
        self.move_to_pose_pub.publish(pose)
        if not self.group.go(wait=True):
            return False
        return True
Example No. 47
def location_from_cluster(cluster):
    pt = cloud2np(cluster).mean(axis=0)
    pt_stamped = PointStamped()
    pt_stamped.header = cluster.header
    pt_stamped.point.x, pt_stamped.point.y, pt_stamped.point.z = pt

    try:
        ptt = transform_point_stamped("/table", pt_stamped)
        return np.array([ptt.point.x, ptt.point.y, 0])
    except tf.Exception as e:
        print "exception..."
        return np.array([np.nan] * 3)
Example No. 48
 def ar_cb(self, markers):
     for m in markers.markers:
         self.listener.waitForTransform(self.target_frame,'/RotMarker%02d'% m.id, m.header.stamp, rospy.Duration(1.0))
         self.listener.waitForTransform("/%s/ground"%self.name,m.header.frame_id, m.header.stamp, rospy.Duration(1.0))
         ((x,y,z),rot) = self.listener.lookupTransform(self.target_frame,'/RotMarker%02d'% m.id, m.header.stamp)
         L = vstack([x,y])
         m_pose = PointStamped()
         m_pose.header = m.header
         m_pose.point = m.pose.pose.position
         m_pose = self.listener.transformPoint("/%s/ground"%self.name,m_pose)
         Z = vstack([m_pose.point.x,m_pose.point.y])
         # TODO
         self.filter.update_ar(Z,L,self.ar_precision)
Example No. 49
 def calculate_entropy(self, cov):
     cov = np.array(cov)*10**10
     H = PointStamped()
     h = std_msgs.msg.Header()
     h.stamp = rospy.Time.now()
     h.frame_id = 'odom'
     H.header = h
     n = float(cov.shape[0])
     inner = 2*math.pi*math.e
     det = np.linalg.det(cov)
      # Differential entropy of an n-dimensional Gaussian: 0.5 * ln((2*pi*e)^n * det(cov))
      H.point.x = np.log(np.power(inner**n*det,0.5))
     #print(n,np.power(inner**n*det,0.5))
     return H
Example No. 50
def callbackPoly(data):
  global listener
  polyfile = open("polygon_log.txt", 'w')
  listener.waitForTransform(data.header.frame_id, "r_sole", data.header.stamp, rospy.Duration(0.5))
  for p in data.polygon.points:
    p_s = PointStamped()
    p_s.point = p
    p_s.header = data.header
    p_st = listener.transformPoint('r_sole',p_s)
    polyfile.write("%f %f\n" % (p_st.point.x, p_st.point.y))
    rospy.loginfo("Writing Poly: %f %f",p_st.point.x, p_st.point.y);
  
  polyfile.close()
Example No. 51
 def ar_cb(self, markers):
     for m in markers.markers:
         self.listener.waitForTransform(self.target_frame,'/%s/ground'% self.name, m.header.stamp, rospy.Duration(1.0))
         self.listener.waitForTransform("/%s/ground"%self.name,m.header.frame_id, m.header.stamp, rospy.Duration(1.0))
         ((x,y,z),rot) = self.listener.lookupTransform(self.target_frame,'/%s/ground'%self.name, m.header.stamp)
         euler = euler_from_quaternion(rot)
         X = vstack([x,y,euler[2]])
         m_pose = PointStamped()
         m_pose.header = m.header
         m_pose.point = m.pose.pose.position
         m_pose = self.listener.transformPoint("/%s/ground"%self.name,m_pose)
         Z = vstack([m_pose.point.x,m_pose.point.y])
         self.mapper.update_ar(Z,X,m.id,self.ar_precision)
     self.mapper.publish(self.target_frame,markers.header.stamp)
Example No. 52
def to_msg_vector(vector):
    """Convert a PyKDL Vector to a geometry_msgs PointStamped message.

    :param vector: The vector to convert.
    :type vector: PyKDL.Vector
    :return: The converted vector/point.
    :rtype: geometry_msgs.msg.PointStamped
    """
    msg = PointStamped()
    msg.header = vector.header
    msg.point.x = vector[0]
    msg.point.y = vector[1]
    msg.point.z = vector[2]
    return msg
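to_msg_vector expects a PyKDL vector that also carries a header; a hedged usage sketch with a small hypothetical wrapper providing that header (tf2_ros has similar stamping helpers, but the class below is purely illustrative):

import rospy
import PyKDL
from std_msgs.msg import Header

class StampedVector(PyKDL.Vector):
    # Hypothetical wrapper: a PyKDL Vector that also carries the std_msgs Header
    # which to_msg_vector() reads as vector.header.
    def __init__(self, x, y, z, frame_id, stamp=None):
        PyKDL.Vector.__init__(self, x, y, z)
        self.header = Header(frame_id=frame_id,
                             stamp=stamp if stamp is not None else rospy.Time(0))

v = StampedVector(1.0, 2.0, 3.0, 'base_link')
print(to_msg_vector(v))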
Example No. 53
  def real_size_check(self,hull,header):
    rect = cv2.boundingRect(hull)
    #top_left = np.array([rect[0],rect[1]])
    bot_left = np.array([rect[0],rect[1]+rect[3]])
    bot_right = np.array([rect[0]+rect[2],rect[1]+rect[3]])

    #rospy.logdebug("Top Left: %s", top_left)
    #rospy.logdebug("Bot Right: %s", bot_right)

    bot_left_point = PointStamped()
    bot_left_point.header = copy.deepcopy(header)
    bot_left_point.point.x = bot_left[0]
    bot_left_point.point.y = bot_left[1]
    bot_left_point.point.z = 1.0
    bot_right_point = PointStamped()
    bot_right_point.header = copy.deepcopy(header)
    bot_right_point.point.x = bot_right[0]
    bot_right_point.point.y = bot_right[1]
    bot_right_point.point.z = 1.0

    #rospy.logdebug("Top Left Point: %s", top_left_point)
    #rospy.logdebug("Bot Right Point: %s", bot_right_point)

    bot_left_ground, bot_left_odom = self.cast_ray(bot_left_point,self.tf,'bot_left')
    bot_right_ground, bot_right_odom = self.cast_ray(bot_right_point,self.tf,'bot_right')

    #rospy.logdebug("Top Left Ground: %s", top_left_ground)
    #rospy.logdebug("Bot Right Ground: %s", bot_right_ground)

    width = np.array([0.,0.])
    width[0] = bot_left_ground.point.x - bot_right_ground.point.x
    width[1] = bot_left_ground.point.y - bot_right_ground.point.y
    rospy.logdebug("Width: %s", width)
    size = scipy.linalg.norm(width)
    rospy.logdebug("Size: %s", size)
    return size
Example No. 54
 def ar_cb(self, markers):
     for m in markers.markers:
         if m.id > 32:
             continue
         self.listener.waitForTransform("/%s/ground"%self.name,m.header.frame_id, m.header.stamp, rospy.Duration(1.0))
         m_pose = PointStamped()
         m_pose.header = m.header
         m_pose.point = m.pose.pose.position
         m_pose = self.listener.transformPoint("/%s/ground"%self.name,m_pose)
         Z = vstack([m_pose.point.x,m_pose.point.y])
         if self.ignore_id:
             self.mapper.update_ar(Z,0,self.ar_precision)
         else:
             self.mapper.update_ar(Z,m.id,self.ar_precision)
     self.mapper.publish(self.target_frame,markers.header.stamp)
Example No. 55
 def ar_cb(self, markers):
     for m in markers.markers:
         self.listener.waitForTransform(self.target_frame,'/RotMarker%02d'% m.id, m.header.stamp, rospy.Duration(1.0))
         self.listener.waitForTransform("/%s/ground"%self.name,m.header.frame_id, m.header.stamp, rospy.Duration(1.0))
         ((x,y,z),rot) = self.listener.lookupTransform(self.target_frame,'/RotMarker%02d'% m.id, m.header.stamp)
         L = vstack([x,y])
         # Transform the measurement into the rover reference frame
         m_pose = PointStamped()
         m_pose.header = m.header
         m_pose.point = m.pose.pose.position
         m_pose = self.listener.transformPoint("/%s/ground"%self.name,m_pose)
         # Call the filter with the observed landmark
         Z = vstack([m_pose.point.x,m_pose.point.y])
         self.filter.update_ar(Z,L,self.ar_precision)
         self.filter.publish(self.pose_pub,self.target_frame,m.header.stamp)
Example No. 56
def callback(data):
    im = bridge.imgmsg_to_cv2(data, desired_encoding='bgr8')
    # im = v.undistort(im, MTX, DIST)

    found, corners = find_chessboard_corners(im, SCALE)

    if found:
        message = BoardMessage()

        # rotation and translation of board relative to camera
        rvec, tvec, _ = v.solvePnPRansac(OBJP, corners, MTX, DIST)

        # grab & reformat the corners and get the correct order for them
        outer, _ = v.projectPoints(OUTER, rvec, tvec, MTX, DIST)
        outer = np.float32(outer.reshape((4,2)))
        order = corner_order(outer)

        # undistort the image and put it in the message
        M = v.getPerspectiveTransform(outer[order], DSTPTS)
        un = v.warpPerspective(im, M, IMSIZE)
        message.unperspective = bridge.cv2_to_imgmsg(un, encoding='bgr8')
        message.unperspective.header.stamp = rospy.Time.now()

        if PUBLISH_UNDISTORT:
            impub.publish(message.unperspective)

        R, _ = v.Rodrigues(rvec)
        G = np.hstack([R, tvec.reshape((3,1))])
        outer_c = OUTERH[order].dot(G.T)

        print '\n\n==============={}================='.format(video_lock)
        fields = 'topleft topright botleft botright'.split()
        for i in range(4):
            point = PointStamped()
            point.point = Point()
            point.point.x, point.point.y, point.point.z = outer_c[i]
            point.header = Header()
            point.header.frame_id = 'left_hand_camera'
            point.header.stamp = rospy.Time(0)
            p = tfl.transformPoint(BASE_FRAME, point).point
            setattr(message, fields[i], p)
            print [p.x, p.y, p.z]
        if video_lock != 1:    
            pub.publish(message)

    if PUBLISH_DRAWPOINTS:
        v.drawChessboardCorners(im, (7,7), corners, found)
        ptpub.publish(bridge.cv2_to_imgmsg(im, encoding='bgr8'))
Example No. 57
 def ar_cb(self, markers):
     for m in markers.markers:
         if m.id > 32:
             continue
         self.listener.waitForTransform(self.body_frame,m.header.frame_id, m.header.stamp, rospy.Duration(1.0))
         m_pose = PointStamped()
         m_pose.header = m.header
         m_pose.point = m.pose.pose.position
         m_pose = self.listener.transformPoint(self.body_frame,m_pose)
         Z = vstack([m_pose.point.x,m_pose.point.y])
         self.lock.acquire()
         if self.ignore_id:
             self.update_ar(Z,0,self.ar_precision)
         else:
             self.update_ar(Z,m.id,self.ar_precision)
         self.lock.release()
Example No. 58
  def publish(self, header, pt1, pt2, depth):
  	
    pts = PointStamped()
      
    pts.header = header
    
    (cx, cy) = self.get_centres(pt1,pt2)
    
    ray = self._cam_model.projectPixelTo3dRay((cx, cy))
    pt = np.dot(ray, depth)
      
    pts.point.x = pt[0]
    pts.point.y = pt[1]
    pts.point.z = pt[2]

    #print pts
    
    self._point_pub.publish(pts)
Example No. 59
def transformPolygonStamped(tf_listener, frame, polygon):

  transformed_polygon = PolygonStamped()
  transformed_polygon.header.frame_id = frame

  try:
    tf_listener.waitForTransform(frame, polygon.header.frame_id, rospy.Time.now(), rospy.Duration(4.0))

    old_point = PointStamped()
    old_point.header = polygon.header
    for point in polygon.polygon.points:
      old_point.point = point
      new_point = tf_listener.transformPoint(frame, old_point)
      transformed_polygon.polygon.points.append(new_point.point)

    return transformed_polygon

  except tf.Exception as e:
    print "some tf exception happened %s" % e