Example 1
    def publish_odom(self, *args):
        if self.last_odom is None or self.position_offset is None:
            return

        msg = self.last_odom
        if self.target in msg.name:
            header = mil_ros_tools.make_header(frame='/map')

            target_index = msg.name.index(self.target)
            twist = msg.twist[target_index]

            # Add position offset to make the start position (0, 0, -depth)
            position_np, orientation_np = mil_ros_tools.pose_to_numpy(
                msg.pose[target_index])
            pose = mil_ros_tools.numpy_quat_pair_to_pose(
                position_np - self.position_offset, orientation_np)

            self.state_pub.publish(header=header,
                                   child_frame_id='/base_link',
                                   pose=PoseWithCovariance(pose=pose),
                                   twist=TwistWithCovariance(twist=twist))

            header = mil_ros_tools.make_header(frame='/world')
            twist = msg.twist[target_index]
            pose = msg.pose[target_index]
            self.world_state_pub.publish(
                header=header,
                child_frame_id='/base_link',
                pose=PoseWithCovariance(pose=pose),
                twist=TwistWithCovariance(twist=twist))
        else:
            # fail
            return
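Note: nearly every example on this page builds its message headers with mil_ros_tools.make_header, whose source is not shown here. The following is only a minimal sketch inferred from the call sites (frame passed positionally or as frame=..., optional stamp=...); the default frame value is a placeholder and the real mil_ros_tools helper may differ.

import rospy
from std_msgs.msg import Header


def make_header(frame='/body', stamp=None):
    # Stamp with the current ROS time unless an explicit stamp is given.
    if stamp is None:
        stamp = rospy.Time.now()
    return Header(stamp=stamp, frame_id=frame)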
Example 2
 def publish_target_pose(self, position, orientation):
     self.target_pose_pub.publish(
         PoseStamped(header=mil_ros_tools.make_header('/map'),
                     pose=Pose(
                         position=mil_ros_tools.numpy_to_point(position),
                         orientation=mil_ros_tools.numpy_to_quaternion(
                             orientation))))
     self.distance_marker.header = mil_ros_tools.make_header('/map')
     self.distance_marker.text = 'XY: ' + str(self.target_distance) + 'm\n' +\
                                 'Z: ' + str(self.target_depth) + 'm'
     self.distance_marker.pose = Pose(
         position=mil_ros_tools.numpy_to_point(position),
         orientation=mil_ros_tools.numpy_to_quaternion(orientation))
     self.target_distance_pub.publish(self.distance_marker)
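The conversion helpers used above (mil_ros_tools.numpy_to_point and numpy_to_quaternion) are likewise not part of this listing. A plausible minimal version, assuming a length-3 position array and a length-4 quaternion in (x, y, z, w) order; the real helpers may do extra validation:

from geometry_msgs.msg import Point, Quaternion


def numpy_to_point(vector):
    # Assumes an array-like of (x, y, z).
    return Point(*vector)


def numpy_to_quaternion(np_quaternion):
    # Assumes an array-like of (x, y, z, w).
    return Quaternion(*np_quaternion)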
Example 3
    def range_callback(self, msg):
        '''Handle range data grabbed from dvl'''
        # future: should be /base_link/dvl, no?
        frame = '/dvl'
        distance = msg.range

        # Color a sharper red if we're in danger
        if distance < 1.0:
            color = (1.0, 0.1, 0.0, 0.9)
        else:
            color = (0.2, 0.8, 0.0, 0.7)

        marker = self.make_cylinder_marker(
            np.array([0.0, 0.0, 0.0]),  # place at origin
            length=distance,
            color=color,  # red or green, chosen above
            frame=frame,
            up_vector=np.array([0.0, 0.0, -1.0]),  # up is down in range world
            id=1  # Keep these guys from overwriting each other
        )
        self.depth_marker.ns = 'sub'
        self.depth_marker.header = mil_ros_tools.make_header(frame='/dvl')
        self.depth_marker.pose = marker.pose
        self.depth_marker.text = str(round(distance, 3)) + 'm'
        self.depth_marker.id = 1

        self.rviz_pub_t.publish(self.depth_marker)
        self.rviz_pub.publish(marker)
Example 5
    def make_cylinder_marker(self,
                             base,
                             length,
                             color,
                             frame='/base_link',
                             up_vector=np.array([0.0, 0.0, 1.0]),
                             **kwargs):
        '''Handle the frustration that Rviz cylinders are designated by their center, not base'''

        center = base + (up_vector * (length / 2))

        pose = Pose(position=mil_ros_tools.numpy_to_point(center),
                    orientation=mil_ros_tools.numpy_to_quaternion(
                        [0.0, 0.0, 0.0, 1.0]))

        marker = visualization_msgs.Marker(
            ns='sub',
            header=mil_ros_tools.make_header(frame=frame),
            type=visualization_msgs.Marker.CYLINDER,
            action=visualization_msgs.Marker.ADD,
            pose=pose,
            color=ColorRGBA(*color),
            scale=Vector3(0.2, 0.2, length),
            lifetime=rospy.Duration(),
            **kwargs)
        return marker
Example 6
 def publish_target_pose(self, position, orientation):
     self.target_pose_pub.publish(
         PoseStamped(
             header=mil_ros_tools.make_header('/map'),
             pose=Pose(
                 position=mil_ros_tools.numpy_to_point(position),
                 orientation=mil_ros_tools.numpy_to_quaternion(orientation)
             )
         )
     )
     self.distance_marker.header = mil_ros_tools.make_header('/map')
     self.distance_marker.text = 'XY: ' + str(self.target_distance) + 'm\n' +\
                                 'Z: ' + str(self.target_depth) + 'm'
     self.distance_marker.pose = Pose(position=mil_ros_tools.numpy_to_point(position),
                                      orientation=mil_ros_tools.numpy_to_quaternion(orientation))
     self.target_distance_pub.publish(self.distance_marker)
Example 7
    def moveto_action(self, position, orientation):
        self.client.cancel_goal()
        rospy.logwarn("Going to waypoint")
        rospy.logwarn("Found server")

        # Stay under the water
        if position[2] > 0.3:
            rospy.logwarn("Waypoint set too high!")
            position[2] = -0.5

        goal = mil_msgs.MoveToGoal(
            header=mil_ros_tools.make_header('/map'),
            posetwist=mil_msgs.PoseTwist(
                pose=geom_msgs.Pose(
                    position=mil_ros_tools.numpy_to_point(position),
                    orientation=mil_ros_tools.numpy_to_quaternion(orientation)
                )
            ),
            speed=0.2,
            linear_tolerance=0.1,
            angular_tolerance=0.1
        )
        self.client.send_goal(goal)
        # self.client.wait_for_result()
        rospy.logwarn("Go to waypoint")
Example 8
    def request_bin(self, srv):
        self.bin_type = srv.target_name
        if self.last_image is not None:
            print 'requesting', srv
            response = self.find_single_bin(np.copy(self.last_image),
                                            srv.target_name)

            if response is False or response is None:
                print 'did not find'
                resp = VisionRequest2DResponse(
                    header=mil_ros_tools.make_header(frame='/down'),
                    found=False)

            else:
                # Fill in
                center, radius = response
                resp = VisionRequest2DResponse(
                    header=Header(stamp=self.last_image_time,
                                  frame_id='/down'),
                    pose=Pose2D(x=center[0], y=center[1], theta=radius),
                    max_x=self.last_image.shape[0],
                    max_y=self.last_image.shape[1],
                    camera_info=self.image_sub.camera_info,
                    found=True)
            return resp
Example 9
    def publish_pose(self):
        '''TODO:
            Publish velocity in body frame
        '''
        linear_vel = self.body.getRelPointVel((0.0, 0.0, 0.0))
        # TODO: Not 100% on this transpose
        angular_vel = self.orientation.dot(self.angular_vel)
        # angular_vel = self.orientation.transpose().dot(self.angular_vel)

        quaternion = self.body.getQuaternion()

        translation = self.body.getPosition()

        header = mil_ros_tools.make_header(frame='/map')

        pose = geometry.Pose(
            position=geometry.Point(*translation),
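            # getQuaternion() appears to return (w, x, y, z); reorder to the
            # (x, y, z, w) convention expected by geometry_msgs/Quaternion.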
            orientation=geometry.Quaternion(quaternion[1], quaternion[2],
                                            quaternion[3], quaternion[0]),
        )

        twist = geometry.Twist(linear=geometry.Vector3(*linear_vel),
                               angular=geometry.Vector3(*angular_vel))

        odom_msg = Odometry(header=header,
                            child_frame_id='/base_link',
                            pose=geometry.PoseWithCovariance(pose=pose),
                            twist=geometry.TwistWithCovariance(twist=twist))

        self.truth_odom_pub.publish(odom_msg)
Example 11
 def publish_height(self, msg):
     '''Sim DVL uses laserscan message to relay height'''
     self.dvl_pub.publish(
         RangeStamped(
             header=mil_ros_tools.make_header(),
             range=msg.range
         )
     )
Example 12
    def publish_odom(self, *args):
        if self.last_odom is None or self.position_offset is None:
            return

        msg = self.last_odom
        if self.target in msg.name:
            header = mil_ros_tools.make_header(frame='/map')

            target_index = msg.name.index(self.target)
            twist = msg.twist[target_index]

            # Add position offset to make the start position (0, 0, -depth)
            position_np, orientation_np = mil_ros_tools.pose_to_numpy(
                msg.pose[target_index])
            pose = mil_ros_tools.numpy_quat_pair_to_pose(
                position_np - self.position_offset, orientation_np)

            self.state_pub.publish(
                header=header,
                child_frame_id='/base_link',
                pose=PoseWithCovariance(
                    pose=pose
                ),
                twist=TwistWithCovariance(
                    twist=twist
                )
            )

            header = mil_ros_tools.make_header(frame='/world')
            twist = msg.twist[target_index]
            pose = msg.pose[target_index]
            self.world_state_pub.publish(
                header=header,
                child_frame_id='/base_link',
                pose=PoseWithCovariance(
                    pose=pose
                ),
                twist=TwistWithCovariance(
                    twist=twist
                )
            )
        else:
            # fail
            return
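publish_odom above also leans on mil_ros_tools.pose_to_numpy and numpy_quat_pair_to_pose to apply the position offset. Their implementations are not shown on this page; below is a minimal sketch consistent with how they are called, assuming (x, y, z, w) quaternion order.

import numpy as np
from geometry_msgs.msg import Pose, Point, Quaternion


def pose_to_numpy(pose):
    # Split a geometry_msgs/Pose into (position, orientation) numpy arrays.
    position = np.array([pose.position.x, pose.position.y, pose.position.z])
    orientation = np.array([pose.orientation.x, pose.orientation.y,
                            pose.orientation.z, pose.orientation.w])
    return position, orientation


def numpy_quat_pair_to_pose(np_position, np_quaternion):
    # Rebuild a Pose from a numpy position and an (x, y, z, w) quaternion.
    return Pose(position=Point(*np_position),
                orientation=Quaternion(*np_quaternion))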
Example 13
 def send_wrench(self, force, torque):
     '''
     Specify wrench in Newtons, Newton-meters
         (Are you supposed to say Newtons-meter? Newtons-Meters?)
     '''
     wrench_msg = geometry_msgs.WrenchStamped(
         header=mil_ros_tools.make_header(),
         wrench=geometry_msgs.Wrench(force=geometry_msgs.Vector3(*force),
                                     torque=geometry_msgs.Vector3(*torque)))
     self.wrench_pub.publish(wrench_msg)

 def set_target(self, pos, orientation=(0.0, 0.0, 0.0, 1.0)):
     '''TODO:
         Add linear/angular velocity
         Publish a trajectory
     '''
     # self.target = make_pose_stamped(position=pos, orientation=orientation)
     target_msg = Trajectory(
         header=mil_ros_tools.make_header(frame='/body'),
         trajectory=[
             Waypoint(pose=geometry_msgs.Pose(
                 position=geometry_msgs.Vector3(*pos),
                 orientation=geometry_msgs.Quaternion(*orientation)))
         ])
     self.target_pub.publish(target_msg)
Example 15
def make_ray(base, direction, length, color, frame='/base_link', m_id=0, **kwargs):
    '''Draw a ray (line strip) from base toward direction * length'''
    marker = visualization_msgs.Marker(
        ns='wamv',
        id=m_id,
        header=mil_ros_tools.make_header(frame=frame),
        type=visualization_msgs.Marker.LINE_STRIP,
        action=visualization_msgs.Marker.ADD,
        color=ColorRGBA(*color),
        scale=Vector3(0.05, 0.05, 0.05),
        points=map(lambda o: Point(*o), [base, direction * length]),
        lifetime=rospy.Duration(),
        **kwargs
    )
    return marker
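For context, a hypothetical call to the make_ray helper above; marker_pub stands in for a visualization_msgs/Marker publisher that is not part of this example.

import numpy as np

# marker_pub is assumed to exist, e.g. rospy.Publisher('markers', Marker, queue_size=1)
ray = make_ray(base=np.array([0.0, 0.0, 0.0]),
               direction=np.array([1.0, 0.0, 0.0]),
               length=2.0,
               color=(1.0, 0.0, 0.0, 0.8),
               frame='/base_link',
               m_id=3)
marker_pub.publish(ray)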
Example 16
File: rviz.py Project: DSsoto/Sub8
 def make_ray(self, base, direction, length, color, frame='/base_link', _id=100, timestamp=None, **kwargs):
     '''Draw a ray (line strip) from base toward direction * length'''
     marker = visualization_msgs.Marker(
         ns='sub',
         id=_id,
         header=mil_ros_tools.make_header(frame=frame, stamp=timestamp),
         type=visualization_msgs.Marker.LINE_STRIP,
         action=visualization_msgs.Marker.ADD,
         color=ColorRGBA(*color),
         scale=Vector3(0.05, 0.05, 0.05),
         points=map(lambda o: Point(*o), [base, direction * length]),
         lifetime=rospy.Duration(),
         **kwargs
     )
     return marker
Example 17
 def set_target(self, pos, orientation=(0.0, 0.0, 0.0, 1.0)):
     '''TODO:
         Add linear/angular velocity
         Publish a trajectory
     '''
     # self.target = make_pose_stamped(position=pos, orientation=orientation)
     target_msg = Trajectory(
         header=mil_ros_tools.make_header(frame='/body'),
         trajectory=[Waypoint(
             pose=geometry_msgs.Pose(
                 position=geometry_msgs.Vector3(*pos),
                 orientation=geometry_msgs.Quaternion(*orientation)
             )
         )]
     )
     self.target_pub.publish(target_msg)
Example 18
 def visualize_pose_est(self):
     marker = visualization_msgs.Marker(
         ns='torpedo_board/pose_est',
         id=0,
         header=mil_ros_tools.make_header(frame=self.current_req.frame_id),
         type=visualization_msgs.Marker.LINE_STRIP,
         action=visualization_msgs.Marker.ADD,
         color=ColorRGBA(0.0, 1.0, 1.0, 0.7),
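         # Note: for LINE_STRIP markers RViz uses only scale.x (line width);
         # scale.y and scale.z are ignored.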
         scale=Vector3(0.05, 0.0, 0.0),
         lifetime=rospy.Duration())
     pts = self.generate_hom_tb_corners_from_cam(self.minimization_result.x)
     marker.points.append(Point(x=pts[0, 0], y=pts[1, 0], z=pts[2, 0]))
     marker.points.append(Point(x=pts[0, 1], y=pts[1, 1], z=pts[2, 1]))
     marker.points.append(Point(x=pts[0, 2], y=pts[1, 2], z=pts[2, 2]))
     marker.points.append(Point(x=pts[0, 3], y=pts[1, 3], z=pts[2, 3]))
     marker.points.append(Point(x=pts[0, 0], y=pts[1, 0], z=pts[2, 0]))
     self.marker_pub.publish(marker)
Example 19
File: rviz.py Project: DSsoto/Sub8
    def draw_sphere(self, position, color, scaling=(0.11, 0.11, 0.11), _id=4, frame='/front_stereo'):
        pose = Pose(
            position=mil_ros_tools.numpy_to_point(position),
            orientation=mil_ros_tools.numpy_to_quaternion([0.0, 0.0, 0.0, 1.0])
        )

        marker = visualization_msgs.Marker(
            ns='sub',
            id=_id,
            header=mil_ros_tools.make_header(frame=frame),
            type=visualization_msgs.Marker.SPHERE,
            action=visualization_msgs.Marker.ADD,
            pose=pose,
            color=ColorRGBA(*color),
            scale=Vector3(*scaling),
            lifetime=rospy.Duration(),
        )
        self.rviz_pub.publish(marker)
Example 20
def draw_sphere(position, color, scaling=(0.11, 0.11, 0.11), m_id=4, frame='/base_link'):
    pose = Pose(
        position=mil_ros_tools.numpy_to_point(position),
        orientation=mil_ros_tools.numpy_to_quaternion([0.0, 0.0, 0.0, 1.0])
    )

    marker = visualization_msgs.Marker(
        ns='wamv',
        id=m_id,
        header=mil_ros_tools.make_header(frame=frame),
        type=visualization_msgs.Marker.SPHERE,
        action=visualization_msgs.Marker.ADD,
        pose=pose,
        color=ColorRGBA(*color),
        scale=Vector3(*scaling),
        lifetime=rospy.Duration(),
    )
    rviz_pub.publish(marker)
Example 21
 def visualize_pose_est(self):
     marker = visualization_msgs.Marker(
         ns='torpedo_board/pose_est',
         id=0,
         header=mil_ros_tools.make_header(frame=self.current_req.frame_id),
         type=visualization_msgs.Marker.LINE_STRIP,
         action=visualization_msgs.Marker.ADD,
         color=ColorRGBA(0.0, 1.0, 1.0, 0.7),
         scale=Vector3(0.05, 0.0, 0.0),
         lifetime=rospy.Duration()
     )
     pts = self.generate_hom_tb_corners_from_cam(self.minimization_result.x)
     marker.points.append(Point(x=pts[0, 0], y=pts[1, 0], z=pts[2, 0]))
     marker.points.append(Point(x=pts[0, 1], y=pts[1, 1], z=pts[2, 1]))
     marker.points.append(Point(x=pts[0, 2], y=pts[1, 2], z=pts[2, 2]))
     marker.points.append(Point(x=pts[0, 3], y=pts[1, 3], z=pts[2, 3]))
     marker.points.append(Point(x=pts[0, 0], y=pts[1, 0], z=pts[2, 0]))
     self.marker_pub.publish(marker)
Example 22
    def depth_callback(self, msg):
        '''Handle depth data sent from depth sensor'''
        frame = '/depth'
        distance = msg.depth
        marker = self.make_cylinder_marker(
            np.array([0.0, 0.0, 0.0]),  # place at origin
            length=distance,
            color=(0.0, 1.0, 0.2, 0.7),  # green,
            frame=frame,
            id=0  # Keep these guys from overwriting each other
        )
        self.surface_marker.ns = 'sub'
        self.surface_marker.header = mil_ros_tools.make_header(frame='/depth')
        self.surface_marker.pose = marker.pose
        self.surface_marker.text = str(round(distance, 3)) + 'm'
        self.surface_marker.id = 0

        self.rviz_pub.publish(marker)
        self.rviz_pub_t.publish(self.surface_marker)
Example 24
    def publish_dvl(self):
        """Publishes dvl sensor data - twist message, and array of 4 dvl velocities based off of ray orientations"""
        correlation = -1

        header = mil_ros_tools.make_header(frame='/body')

        vel_dvl_body = self.body.vectorFromWorld(
            self.body.getRelPointVel(self.dvl_position))

        velocity_measurements = []

        for ray in self.dvl_ray_orientations:
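            # Project the body-frame velocity onto each beam direction
            # (assuming unit-length ray vectors) to get the along-beam speed.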
            velocity_measurements.append(
                VelocityMeasurement(direction=geometry.Vector3(*ray),
                                    velocity=np.dot(ray, vel_dvl_body),
                                    correlation=correlation))

        dvl_msg = VelocityMeasurements(
            header=header, velocity_measurements=velocity_measurements)

        self.dvl_sensor_pub.publish(dvl_msg)
Example 25
    def publish_imu(self, dt):
        """Publishes imu sensor information - orientation, angular velocity, and linear acceleration"""
        orientation_matrix = self.pose[:3, :3]
        sigma = 0.01
        noise = np.random.normal(0.0, sigma, 3)

        # Work on a better way to get this
        # We can't get this nicely from body.getForce()
        g = self.body.vectorFromWorld(self.g)
        # Get current velocity of IMU in body frame
        cur_vel = np.array(self.body.vectorFromWorld(self.body.getRelPointVel(self.imu_position)))
        linear_acc = orientation_matrix.dot(cur_vel - self.last_vel) / dt
        linear_acc += g + (noise * dt)

        # TODO: Fix frame
        angular_vel = orientation_matrix.dot(self.body.getAngularVel()) + (noise * dt)

        header = mil_ros_tools.make_header(frame='/body')

        linear = geometry.Vector3(*linear_acc)
        angular = geometry.Vector3(*angular_vel)

        covariance = [sigma ** 2, 0., 0.,
                      0., sigma ** 2, 0.,
                      0., 0., sigma ** 2]

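        # Per the sensor_msgs/Imu convention, setting element 0 of a
        # covariance matrix to -1 marks that estimate (orientation here)
        # as not provided.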
        orientation_covariance = [-1, 0., 0.,
                                  0., 0., 0.,
                                  0., 0., 0.]

        imu_msg = Imu(
            header=header,
            angular_velocity=angular,
            angular_velocity_covariance=covariance,
            linear_acceleration=linear,
            linear_acceleration_covariance=covariance,
            orientation_covariance=orientation_covariance
        )

        self.imu_sensor_pub.publish(imu_msg)
Example 26
    def make_cylinder_marker(self, base, length, color, frame='/base_link',
                             up_vector=np.array([0.0, 0.0, 1.0]), **kwargs):
        '''Handle the frustration that Rviz cylinders are designated by their center, not base'''

        center = base + (up_vector * (length / 2))

        pose = Pose(
            position=mil_ros_tools.numpy_to_point(center),
            orientation=mil_ros_tools.numpy_to_quaternion([0.0, 0.0, 0.0, 1.0])
        )

        marker = visualization_msgs.Marker(
            ns='sub',
            header=mil_ros_tools.make_header(frame=frame),
            type=visualization_msgs.Marker.CYLINDER,
            action=visualization_msgs.Marker.ADD,
            pose=pose,
            color=ColorRGBA(*color),
            scale=Vector3(0.2, 0.2, length),
            lifetime=rospy.Duration(),
            **kwargs
        )
        return marker
Example 27
    def publish_dvl(self):
        """Publishes dvl sensor data - twist message, and array of 4 dvl velocities based off of ray orientations"""
        correlation = -1

        header = mil_ros_tools.make_header(frame='/body')

        vel_dvl_body = self.body.vectorFromWorld(self.body.getRelPointVel(self.dvl_position))

        velocity_measurements = []

        for ray in self.dvl_ray_orientations:
            velocity_measurements.append(VelocityMeasurement(
                direction=geometry.Vector3(*ray),
                velocity=np.dot(ray, vel_dvl_body),
                correlation=correlation
            ))

        dvl_msg = VelocityMeasurements(
            header=header,
            velocity_measurements=velocity_measurements
        )

        self.dvl_sensor_pub.publish(dvl_msg)
Example 28
    def publish_imu(self, dt):
        """Publishes imu sensor information - orientation, angular velocity, and linear acceleration"""
        orientation_matrix = self.pose[:3, :3]
        sigma = 0.01
        noise = np.random.normal(0.0, sigma, 3)

        # Work on a better way to get this
        # We can't get this nicely from body.getForce()
        g = self.body.vectorFromWorld(self.g)
        # Get current velocity of IMU in body frame
        cur_vel = np.array(
            self.body.vectorFromWorld(
                self.body.getRelPointVel(self.imu_position)))
        linear_acc = orientation_matrix.dot(cur_vel - self.last_vel) / dt
        linear_acc += g + (noise * dt)

        # TODO: Fix frame
        angular_vel = orientation_matrix.dot(
            self.body.getAngularVel()) + (noise * dt)

        header = mil_ros_tools.make_header(frame='/body')

        linear = geometry.Vector3(*linear_acc)
        angular = geometry.Vector3(*angular_vel)

        covariance = [sigma**2, 0., 0., 0., sigma**2, 0., 0., 0., sigma**2]

        orientation_covariance = [-1, 0., 0., 0., 0., 0., 0., 0., 0.]

        imu_msg = Imu(header=header,
                      angular_velocity=angular,
                      angular_velocity_covariance=covariance,
                      linear_acceleration=linear,
                      linear_acceleration_covariance=covariance,
                      orientation_covariance=orientation_covariance)

        self.imu_sensor_pub.publish(imu_msg)
Example 29
    def publish_pose(self):
        '''TODO:
            Publish velocity in body frame
        '''
        linear_vel = self.body.getRelPointVel((0.0, 0.0, 0.0))
        # TODO: Not 100% on this transpose
        angular_vel = self.orientation.dot(self.angular_vel)
        # angular_vel = self.orientation.transpose().dot(self.angular_vel)

        quaternion = self.body.getQuaternion()

        translation = self.body.getPosition()

        header = mil_ros_tools.make_header(frame='/map')

        pose = geometry.Pose(
            position=geometry.Point(*translation),
            orientation=geometry.Quaternion(quaternion[1], quaternion[2], quaternion[3], quaternion[0]),
        )

        twist = geometry.Twist(
            linear=geometry.Vector3(*linear_vel),
            angular=geometry.Vector3(*angular_vel)
        )

        odom_msg = Odometry(
            header=header,
            child_frame_id='/base_link',
            pose=geometry.PoseWithCovariance(
                pose=pose
            ),
            twist=geometry.TwistWithCovariance(
                twist=twist
            )
        )

        self.truth_odom_pub.publish(odom_msg)
Example 30
    def do_observe(self, *args):
        resp = self.make_request(name='totem')

        # If at least one totem was found, start observing
        if resp.found:
            # Time of the database request
            time_of_marker = resp.objects[0].header.stamp  # - ros_t(1)
            fprint("Looking for image at {}".format(time_of_marker.to_sec()), msg_color='yellow')
            image_holder = self.image_history.get_around_time(time_of_marker)
            if not image_holder.contains_valid_image:
                t = self.image_history.newest_image.time
                if t is None:
                    fprint("No images found.")
                    return

                fprint("No valid image found for t={} ({}) dt: {}".format(
                    time_of_marker.to_sec(), t.to_sec(), (rospy.Time.now() - t).to_sec()), msg_color='red')
                return
            header = make_header(frame='/enu', stamp=image_holder.time)
            image = image_holder.image
            self.debug.image = np.copy(image)

            cam_tf = self.camera_model.tfFrame()
            try:
                fprint("Getting transform between /enu and {}...".format(cam_tf))
                self.tf_listener.waitForTransform("/enu", cam_tf, time_of_marker, ros_t(1))
                t_mat44 = self.tf_listener.asMatrix(cam_tf, header)
            except tf.ExtrapolationException as e:
                fprint("TF error found and excepted: {}".format(e))
                return

            for obj in resp.objects:
                if len(obj.points) <= 1:
                    fprint("No points in object")
                    continue

                fprint("{} {}".format(obj.id, "=" * 50))

                # Get object position in px coordinates to determine if it's in frame
                object_cam = t_mat44.dot(np.append(point_to_numpy(obj.position), 1))
                object_px = map(int, self.camera_model.project3dToPixel(object_cam[:3]))
                if not self._object_in_frame(object_cam):
                    fprint("Object not in frame")
                    continue

                # Get enu points associated with this totem and remove ones that are too low
                points_np = np.array(map(point_to_numpy, obj.points))
                height = np.max(points_np[:, 2]) - np.min(points_np[:, 2])
                if height < .1:
                    # If the height of the object is too small, skip (units are meters)
                    fprint("Object too small")
                    continue

                threshold = np.min(points_np[:, 2]) + self.height_remove * height
                points_np = points_np[points_np[:, 2] > threshold]

                # Shove ones in there to make homogeneous points to get points in image frame
                points_np_homo = np.hstack((points_np, np.ones((points_np.shape[0], 1)))).T
                points_cam = t_mat44.dot(points_np_homo).T
                points_px = map(self.camera_model.project3dToPixel, points_cam[:, :3])

                [cv2.circle(self.debug.image, tuple(map(int, p)), 2, (255, 255, 255), -1) for p in points_px]

                # Get color information from the points
                roi = self._get_ROI_from_points(points_px)
                h, s, v = self._get_hsv_from_ROI(roi, image)

                # Compute parameters that go into the confidence
                boat_q = self.odom[1]
                target_q = self._get_solar_angle()
                q_err = self._get_quaternion_error(boat_q, target_q)

                dist = np.linalg.norm(self.odom[0] - point_to_numpy(obj.position))

                fprint("H: {}, S: {}, V: {}".format(h, s, v))
                fprint("q_err: {}, dist: {}".format(q_err, dist))

                # Add to database and setup debug image
                if s < self.saturation_reject or v < self.value_reject:
                    err_msg = "The colors aren't expressive enough s: {} ({}) v: {} ({}). Rejecting."
                    fprint(err_msg.format(s, self.saturation_reject, v, self.value_reject), msg_color='red')

                else:
                    if obj.id not in self.colored:
                        self.colored[obj.id] = Observation()

                    # Add this observation in
                    self.colored[obj.id] += h, v, dist, q_err
                    print self.colored[obj.id]

                rgb = (0, 0, 0)
                color = self.do_estimate(obj.id)
                # Do we have a valid color estimate
                if color:
                    fprint("COLOR IS VALID", msg_color='green')
                    rgb = self.database_color_map[color[0]]

                    cmd = '{name}={rgb[0]},{rgb[1]},{rgb[2]},{_id}'
                    self.make_request(cmd=cmd.format(name=obj.name, _id=obj.id, rgb=rgb))

                bgr = rgb[::-1]
                cv2.circle(self.debug.image, tuple(object_px), 10, bgr, -1)
                font = cv2.FONT_HERSHEY_SIMPLEX
                cv2.putText(self.debug.image, str(obj.id), tuple(object_px), font, 1, bgr, 2)
Example 31
#!/usr/bin/env python
import rospy
import numpy as np
import tf
import mil_ros_tools
from sensor_msgs.msg import PointCloud

rospy.init_node("ground_finder")

pub = rospy.Publisher("ground_est", PointCloud, queue_size=1)

listener = tf.TransformListener()
pc = PointCloud()
pc.header = mil_ros_tools.make_header(frame="map")
pc.points = []

rate = rospy.Rate(1.0)
while not rospy.is_shutdown():
    try:
        t = listener.waitForTransform('/map', '/ground', rospy.Time.now(),
                                      rospy.Duration(1))
        (trans, rot) = listener.lookupTransform('/map', '/ground',
                                                rospy.Time(0))
    except (tf.Exception, tf.LookupException, tf.ConnectivityException,
            tf.ExtrapolationException):
        rospy.logwarn("TF waitForTransform timeout")
        continue

    pc.points.append(mil_ros_tools.numpy_to_point(np.array(trans)))
    pub.publish(pc)
    rate.sleep()
Example 32
    def do_observe(self, *args):
        resp = self.make_request(name='totem')

        # If at least one totem was found, start observing
        if resp.found:
            # Time of the database request
            time_of_marker = resp.objects[0].header.stamp  # - ros_t(1)
            fprint("Looking for image at {}".format(time_of_marker.to_sec()),
                   msg_color='yellow')
            image_holder = self.image_history.get_around_time(time_of_marker)
            if not image_holder.contains_valid_image:
                t = self.image_history.newest_image.time
                if t is None:
                    fprint("No images found.")
                    return

                fprint("No valid image found for t={} ({}) dt: {}".format(
                    time_of_marker.to_sec(), t.to_sec(),
                    (rospy.Time.now() - t).to_sec()),
                       msg_color='red')
                return
            header = make_header(frame='/enu', stamp=image_holder.time)
            image = image_holder.image
            self.debug.image = np.copy(image)

            cam_tf = self.camera_model.tfFrame()
            try:
                fprint(
                    "Getting transform between /enu and {}...".format(cam_tf))
                self.tf_listener.waitForTransform("/enu", cam_tf,
                                                  time_of_marker, ros_t(1))
                t_mat44 = self.tf_listener.asMatrix(cam_tf, header)
            except tf.ExtrapolationException as e:
                fprint("TF error found and excepted: {}".format(e))
                return

            for obj in resp.objects:
                if len(obj.points) <= 1:
                    fprint("No points in object")
                    continue

                fprint("{} {}".format(obj.id, "=" * 50))

                # Get object position in px coordinates to determine if it's in frame
                object_cam = t_mat44.dot(
                    np.append(point_to_numpy(obj.position), 1))
                object_px = map(
                    int, self.camera_model.project3dToPixel(object_cam[:3]))
                if not self._object_in_frame(object_cam):
                    fprint("Object not in frame")
                    continue

                # Get enu points associated with this totem and remove ones that are too low
                points_np = np.array(map(point_to_numpy, obj.points))
                height = np.max(points_np[:, 2]) - np.min(points_np[:, 2])
                if height < .1:
                    # If the height of the object is too small, skip (units are meters)
                    fprint("Object too small")
                    continue

                threshold = np.min(points_np[:, 2]) + self.height_remove * height
                points_np = points_np[points_np[:, 2] > threshold]

                # Shove ones in there to make homogeneous points to get points in image frame
                points_np_homo = np.hstack(
                    (points_np, np.ones((points_np.shape[0], 1)))).T
                points_cam = t_mat44.dot(points_np_homo).T
                points_px = map(self.camera_model.project3dToPixel,
                                points_cam[:, :3])

                [
                    cv2.circle(self.debug.image, tuple(map(int, p)), 2,
                               (255, 255, 255), -1) for p in points_px
                ]

                # Get color information from the points
                roi = self._get_ROI_from_points(points_px)
                h, s, v = self._get_hsv_from_ROI(roi, image)

                # Compute parameters that go into the confidence
                boat_q = self.odom[1]
                target_q = self._get_solar_angle()
                q_err = self._get_quaternion_error(boat_q, target_q)

                dist = np.linalg.norm(self.odom[0] -
                                      point_to_numpy(obj.position))

                fprint("H: {}, S: {}, V: {}".format(h, s, v))
                fprint("q_err: {}, dist: {}".format(q_err, dist))

                # Add to database and setup debug image
                if s < self.saturation_reject or v < self.value_reject:
                    err_msg = "The colors aren't expressive enough s: {} ({}) v: {} ({}). Rejecting."
                    fprint(err_msg.format(s, self.saturation_reject, v,
                                          self.value_reject),
                           msg_color='red')

                else:
                    if obj.id not in self.colored:
                        self.colored[obj.id] = Observation()

                    # Add this observation in
                    self.colored[obj.id] += h, v, dist, q_err
                    print self.colored[obj.id]

                rgb = (0, 0, 0)
                color = self.do_estimate(obj.id)
                # Do we have a valid color estimate
                if color:
                    fprint("COLOR IS VALID", msg_color='green')
                    rgb = self.database_color_map[color[0]]

                    cmd = '{name}={rgb[0]},{rgb[1]},{rgb[2]},{_id}'
                    self.make_request(
                        cmd=cmd.format(name=obj.name, _id=obj.id, rgb=rgb))

                bgr = rgb[::-1]
                cv2.circle(self.debug.image, tuple(object_px), 10, bgr, -1)
                font = cv2.FONT_HERSHEY_SIMPLEX
                cv2.putText(self.debug.image, str(obj.id), tuple(object_px),
                            font, 1, bgr, 2)
Example 33
#!/usr/bin/env python
import rospy
import numpy as np
import tf
import mil_ros_tools
from sensor_msgs.msg import PointCloud


rospy.init_node("ground_finder")

pub = rospy.Publisher("ground_est", PointCloud, queue_size=1)

listener = tf.TransformListener()
pc = PointCloud()
pc.header = mil_ros_tools.make_header(frame="map")
pc.points = []

rate = rospy.Rate(1.0)
while not rospy.is_shutdown():
    try:
        t = listener.waitForTransform('/map', '/ground', rospy.Time.now(), rospy.Duration(1))
        (trans, rot) = listener.lookupTransform('/map', '/ground', rospy.Time(0))
    except (tf.Exception, tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        rospy.logwarn("TF waitForTransform timeout")
        continue

    pc.points.append(mil_ros_tools.numpy_to_point(np.array(trans)))
    pub.publish(pc)

    rate.sleep()