Example #1
	def test_create_axis_angle(self):
		axis = tft.random_vector(3)
		angle = random.random() * 2 * pi
		
		q = tft.quaternion_about_axis(angle, axis)
		
		q2 = tb_angles(axis,angle).quaternion
		
		self.assertTrue(np.allclose(q, q2) or np.allclose(-q, q2),msg='%s and %s not close!' % (list(q),list(q2)))
		
		q2 = tb_angles(angle,axis).quaternion
		
		self.assertTrue(np.allclose(q, q2) or np.allclose(-q, q2),msg='%s and %s not close!' % (list(q),list(q2)))
		
		q2 = tb_angles((axis,angle)).quaternion
		
		self.assertTrue(np.allclose(q, q2) or np.allclose(-q, q2),msg='%s and %s not close!' % (list(q),list(q2)))
		
		q2 = tb_angles((angle,axis)).quaternion
		
		self.assertTrue(np.allclose(q, q2) or np.allclose(-q, q2),msg='%s and %s not close!' % (list(q),list(q2)))
		
		for _ in xrange(1000):
			axis = tft.random_vector(3)
			angle = random.random() * 2 * pi
			
			q = tft.quaternion_about_axis(angle, axis)
			q2 = tb_angles(axis,angle).quaternion
			
			self.assertTrue(np.allclose(q, q2) or np.allclose(-q, q2),msg='%s and %s not close! for axis %s and angle %f' % (list(q),list(q2),tuple(axis),angle))
Example #2
    def align_poses(self, ps1, ps2):

        self.aul.update_frame(ps1)
        ps2.header.stamp = rospy.Time.now()
        self.tfl.waitForTransform(ps2.header.frame_id, 'lh_utility_frame',
                                  rospy.Time.now(), rospy.Duration(3.0))
        ps2_in_ps1 = self.tfl.transformPose('lh_utility_frame', ps2)

        ang = math.atan2(-ps2_in_ps1.pose.position.z,
                         -ps2_in_ps1.pose.position.y) + (math.pi / 2)
        q_st_rot = transformations.quaternion_about_axis(ang, (1, 0, 0))
        q_st_new = transformations.quaternion_multiply([
            ps1.pose.orientation.x, ps1.pose.orientation.y,
            ps1.pose.orientation.z, ps1.pose.orientation.w
        ], q_st_rot)
        ps1.pose.orientation = Quaternion(*q_st_new)

        self.aul.update_frame(ps2)
        ps1.header.stamp = rospy.Time.now()
        self.tfl.waitForTransform(ps1.header.frame_id, 'lh_utility_frame',
                                  rospy.Time.now(), rospy.Duration(3.0))
        ps1_in_ps2 = self.tfl.transformPose('lh_utility_frame', ps1)
        ang = math.atan2(ps1_in_ps2.pose.position.z,
                         ps1_in_ps2.pose.position.y) + (math.pi / 2)

        q_st_rot = transformations.quaternion_about_axis(ang, (1, 0, 0))
        q_st_new = transformations.quaternion_multiply([
            ps2.pose.orientation.x, ps2.pose.orientation.y,
            ps2.pose.orientation.z, ps2.pose.orientation.w
        ], q_st_rot)
        ps2.pose.orientation = Quaternion(*q_st_new)
        return ps1, ps2
    def __init__(self, full_dof=False):
        # Waypoint set
        self._waypoints = None

        # True if the path is generated for all degrees of freedom, otherwise
        # the path will be generated for (x, y, z, yaw) only
        self._is_full_dof = full_dof

        # The parametric variable to use as input for the interpolator
        self._s = list()
        self._segment_to_wp_map = list()
        self._cur_s = 0
        self._s_step = 0.0001

        self._start_time = None
        self._duration = None

        self._termination_by_time = True

        self._final_pos_tolerance = 0.1

        self._init_rot = quaternion_about_axis(0.0, [0, 0, 1])
        self._last_rot = quaternion_about_axis(0.0, [0, 0, 1])

        self._markers_msg = MarkerArray()
        self._marker_id = 0
Example #4
    def _compute_rot_quat(self, dx, dy, dz):
        rotq = quaternion_about_axis(
            np.arctan2(dy, dx),
            [0, 0, 1])

        if self._is_full_dof:
            rote = quaternion_about_axis(
                -1 * np.arctan2(dz, np.sqrt(dx**2 + dy**2)),
                [0, 1, 0])
            rotq = quaternion_multiply(rotq, rote)
        return rotq
Example #5
	def test_to_axis_angle(self):
		for _ in xrange(1000):
			axis = tft.random_vector(3)
			axis = axis / np.linalg.norm(axis)
			for angle in list(np.linspace(-pi, pi, 10)) + [0]:
			
				q = tft.quaternion_about_axis(angle, axis)
				axis2,angle2 = tb_angles(q).axis_angle
				q2 = tft.quaternion_about_axis(angle2, axis2)
				
				self.assertTrue(np.allclose(q, q2) or np.allclose(-q, q2),msg="axis %s and angle %f don't match %s, %f" % (tuple(axis),angle,tuple(axis2),angle2))
Example #6
def euler_to_quat_deg(roll, pitch, yaw):
    roll = roll * (math.pi / 180.0)
    pitch = pitch * (math.pi / 180.0)
    yaw = yaw * (math.pi / 180.0)
    xaxis = (1, 0, 0)
    yaxis = (0, 1, 0)
    zaxis = (0, 0, 1)
    qx = transformations.quaternion_about_axis(roll, xaxis)
    qy = transformations.quaternion_about_axis(pitch, yaxis)
    qz = transformations.quaternion_about_axis(yaw, zaxis)
    q = transformations.quaternion_multiply(qx, qy)
    q = transformations.quaternion_multiply(q, qz)
    print q
Example #7
    def _rotate(self, start_odometry, angle, reverse=False):
        logger.infof('start _rotate, angle={}', angle)

        start_qt = start_odometry.pose.pose.orientation
        target_zrot = euler_from_quaternion(
            quaternion_multiply(
                [start_qt.x, start_qt.y, start_qt.z, start_qt.w],
                quaternion_about_axis(angle, [0, 0, 1])))[2]

        twist = Twist()
        twist.angular.z = self.__params['turtlebot3']['polygon']['velocities'][
            'z']
        if reverse:
            twist.angular.z *= -1

        r = rospy.Rate(self.__params['turtlebot3']['rate_hz'])
        rot_threshold = self.__params['turtlebot3']['polygon']['thresholds'][
            'angular_rad']
        while not rospy.is_shutdown() and self.__is_moving:
            current_qt = self.__current_odometry.pose.pose.orientation
            current_zrot = euler_from_quaternion(
                [current_qt.x, current_qt.y, current_qt.z, current_qt.w])[2]
            if abs(current_zrot - target_zrot) < rot_threshold:
                break
            else:
                self.__turtlebot3_cmd_pub.publish(twist)
            r.sleep()

        self.__turtlebot3_cmd_pub.publish(Twist())
        logger.infof('end _rotate')
Example #8
    def __init__(self, plugin):
        super(PoseViewWidget, self).__init__()
        rp = rospkg.RosPack()
        ui_file = os.path.join(rp.get_path('rqt_pose_view'), 'resource',
                               'PoseViewWidget.ui')
        loadUi(ui_file, self)
        self._plugin = plugin

        self._position = (0.0, 0.0, 0.0)
        self._orientation = quaternion_about_axis(0.0, (1.0, 0.0, 0.0))
        self._topic_name = None
        self._subscriber = None

        # create GL view
        self._gl_view = GLWidget()
        self._gl_view.setAcceptDrops(True)

        # backup and replace original paint method
        self._gl_view.paintGL_original = self._gl_view.paintGL
        self._gl_view.paintGL = self._gl_view_paintGL

        # backup and replace original mouse release method
        self._gl_view.mouseReleaseEvent_original = self._gl_view.mouseReleaseEvent
        self._gl_view.mouseReleaseEvent = self._gl_view_mouseReleaseEvent

        # add GL view to widget layout
        self.layout().addWidget(self._gl_view)

        # init and start update timer with 40ms (25fps)
        self._update_timer = QTimer(self)
        self._update_timer.timeout.connect(self.update_timeout)
        self._update_timer.start(40)
Example #9
def aim_frame_to(target_pt, point_dir=(1,0,0)):
    goal_dir = np.array([target_pt.x, target_pt.y, target_pt.z])
    goal_norm = np.divide(goal_dir, np.linalg.norm(goal_dir))
    point_norm = np.divide(point_dir, np.linalg.norm(point_dir))
    axis = np.cross(point_norm, goal_norm)
    angle = np.arccos(np.vdot(goal_norm, point_norm))
    return tft.quaternion_about_axis(angle, axis)
    def cluster(self):
        if not self.hanging:
            self.hacky()
        data = np.array(self.detections)
        separators = []
        old_frame_id = self.knowrob.get_perceived_frame_id(
            self.current_floor_id)
        if len(data) == 0:
            rospy.logwarn('no separators detected')
        else:
            clusters = DBSCAN(eps=self.max_dist,
                              min_samples=self.min_samples).fit(data)
            labels = np.unique(clusters.labels_)
            rospy.loginfo('detected {} separators'.format(len(labels)))
            for i, label in enumerate(labels):
                if label != -1:
                    separator = PoseStamped()
                    separator.header.frame_id = old_frame_id
                    separator.pose.position = Point(*self.cluster_to_separator(
                        data[clusters.labels_ == label]))
                    separator.pose.orientation = Quaternion(
                        *quaternion_about_axis(-np.pi / 2, [0, 0, 1]))
                    if 0.0 <= separator.pose.position.x and separator.pose.position.x <= 1:
                        separators.append(separator)

        return separators
Example #11
def draw_axes(pub, id, ns, pose_stamped, scale=(0.05, 0.1, 0.1), text=""):
    ps = PointStamped()
    ps.header = pose_stamped.header
    ps.point = pose_stamped.pose.position
    q_x = quaternion_to_array(pose_stamped.pose.orientation)
    q_y = quaternion_multiply(q_x, quaternion_about_axis(pi / 2, (0, 1, 0)))
    q_z = quaternion_multiply(q_x, quaternion_about_axis(pi / 2, (0, 0, 1)))
    if id > 999:
        rospy.logerr('Currently can\'t visualize markers with id > 999.')
        return
    place_arrow(ps, pub, id, ns, (1, 0, 0),\
            scale, array_to_quaternion(q_x))
    place_arrow(ps, pub, id + 1000, ns, (0, 1, 0),\
            scale, array_to_quaternion(q_y))
    place_arrow(ps, pub, id + 2000, ns, (0, 0, 1),\
            scale, array_to_quaternion(q_z), text=text)
Example #12
def aim_pose_to(ps, pts, point_dir=(1, 0, 0)):
    if not (ps.header.frame_id.lstrip('/') == pts.header.frame_id.lstrip('/')):
        rospy.logerr("[Pose_Utils.aim_pose_to]:" +
                     "Pose and point must be in same frame: %s, %s" %
                     (ps.header.frame_id, pts.header.frame_id))
    target_pt = np.array((pts.point.x, pts.point.y, pts.point.z))
    base_pt = np.array(
        (ps.pose.position.x, ps.pose.position.y, ps.pose.position.z))
    base_quat = np.array((ps.pose.orientation.x, ps.pose.orientation.y,
                          ps.pose.orientation.z, ps.pose.orientation.w))

    b_to_t_vec = np.array(
        (target_pt[0] - base_pt[0], target_pt[1] - base_pt[1],
         target_pt[2] - base_pt[2]))
    b_to_t_norm = np.divide(b_to_t_vec, np.linalg.norm(b_to_t_vec))

    point_dir_hom = (point_dir[0], point_dir[1], point_dir[2], 1)
    base_rot_mat = tft.quaternion_matrix(base_quat)
    point_dir_hom = np.dot(point_dir_hom, base_rot_mat.T)
    point_dir = np.array((point_dir_hom[0] / point_dir_hom[3],
                          point_dir_hom[1] / point_dir_hom[3],
                          point_dir_hom[2] / point_dir_hom[3]))
    point_dir_norm = np.divide(point_dir, np.linalg.norm(point_dir))
    axis = np.cross(point_dir_norm, b_to_t_norm)
    angle = np.arccos(np.vdot(point_dir_norm, b_to_t_norm))
    quat = tft.quaternion_about_axis(angle, axis)
    new_quat = tft.quaternion_multiply(quat, base_quat)
    ps.pose.orientation = Quaternion(*new_quat)
Example #13
def aim_frame_to(target_pt, point_dir=(1, 0, 0)):
    goal_dir = np.array([target_pt.x, target_pt.y, target_pt.z])
    goal_norm = np.divide(goal_dir, np.linalg.norm(goal_dir))
    point_norm = np.divide(point_dir, np.linalg.norm(point_dir))
    axis = np.cross(point_norm, goal_norm)
    angle = np.arccos(np.vdot(goal_norm, point_norm))
    return tft.quaternion_about_axis(angle, axis)
Example #14
    def fk(self, link, joints):
        def value(joint):
            """Get joint value from joints, considering mimic joints"""
            if joint.mimic is None:
                return joints[joint.name]
            return joint.mimic.multiplier * value(joint.mimic.joint) + joint.mimic.offset

        def index(joint):
            """Get joint index (into self.active_joint) and the velocity scaling factor"""
            if joint.mimic is None:
                return next((i for i,j in enumerate(self.active_joints) if j is joint), None), 1.0
            idx, scale = index(joint.mimic.joint)
            return idx, joint.mimic.multiplier * scale

        T = numpy.eye(4)
        J = numpy.zeros((6, len(self.active_joints)))
        joint = self.links[link]
        while joint is not None:
            T_offset = joint.T  # fixed transform from parent to joint frame
            # TODO: combine with joint's motion transform (rotation / translation along joint axis)
            if joint.jtype == Joint.revolute:
                T_motion = tf.quaternion_matrix(tf.quaternion_about_axis(angle=value(joint), axis=joint.axis))
            elif joint.jtype == Joint.prismatic:
                T_motion = tf.translation_matrix(value(joint) * joint.axis)
            elif joint.jtype == Joint.fixed:
                pass
            else:
                raise Exception("unknown joint type: " + str(joint.jtype))
            # TODO: actually compute forward kinematics
            joint = self.links[joint.parent]
        return T, J
Example #15
    def __parser(self, msg: String):
        data = msg.data.split(',')
        imu = Imu()
        accel_x = float(data[0])
        accel_y = float(data[1])
        accel_z = float(data[2])
        gyro_x = float(data[3])
        gyro_y = float(data[4])
        gyro_z = float(data[5])
        accel = accel_x, accel_y, accel_z
        ref = np.array([0, 0, 1])
        acceln = accel / np.linalg.norm(accel)
        axis = np.cross(acceln, ref)
        angle = np.arccos(np.dot(acceln, ref))
        orientation = quaternion_about_axis(angle, axis)

        o = imu.orientation
        o.x, o.y, o.z, o.w = orientation
        imu.linear_acceleration.x = accel_x
        imu.linear_acceleration.y = accel_y
        imu.linear_acceleration.z = accel_z

        imu.angular_velocity.x = gyro_x
        imu.angular_velocity.y = gyro_y
        imu.angular_velocity.z = gyro_z

        imu.header.frame_id = "imu"
        imu.header.stamp = rospy.Time.now()
        self.pub_imu.publish(imu)
Example #16
    def __init__(self, plugin):
        super(Theta360ViewWidget, self).__init__()
        rp = rospkg.RosPack()
        ui_file = os.path.join(rp.get_path('theta_viewer'), 'resource',
                               'Theta360ViewWidget.ui')
        loadUi(ui_file, self)
        self.plugin = plugin

        self.position = (0.0, 0.0, 0.0)
        self.orientation = quaternion_about_axis(0.0, (1.0, 0.0, 0.0))
        self.topicName = None
        self.subscriber = None

        # create GL view
        self._glview = MyGLWidget()
        self._glview.setAcceptDrops(True)

        # backup and replace original paint method
        # self.glView.paintGL is callbacked from QGLWidget
        self._glview.paintGL_original = self._glview.paintGL
        self._glview.paintGL = self.glview_paintGL

        # backup and replace original mouse release method
        self._glview.mouseReleaseEvent_original = self._glview.mouseReleaseEvent
        self._glview.mouseReleaseEvent = self.glview_mouseReleaseEvent

        # add GL view to widget layout
        self.layout().addWidget(self._glview)

        # init and start update timer with 40ms (25fps)
        # updateTimeout is called with interval time
        self.update_timer = QTimer(self)
        self.update_timer.timeout.connect(self.update_timeout)
        self.update_timer.start(40)
        self.cnt = 0
Example #17
    def __init__(self,
                 full_dof=False,
                 use_finite_diff=True,
                 interpolation_method='cubic',
                 stamped_pose_only=False):
        """Class constructor."""
        self._logger = logging.getLogger('wp_trajectory_generator')
        out_hdlr = logging.StreamHandler(sys.stdout)
        out_hdlr.setFormatter(
            logging.Formatter(
                get_namespace().replace('/', '').upper() +
                ' -- %(asctime)s | %(levelname)s | %(module)s | %(message)s'))
        out_hdlr.setLevel(logging.INFO)
        self._logger.addHandler(out_hdlr)
        self._logger.setLevel(logging.INFO)

        self._path_generators = dict()
        self._logger.info('Waypoint interpolators available:')
        for gen in PathGenerator.get_all_generators():
            self._logger.info('\t - ' + gen.get_label())
            self._path_generators[gen.get_label()] = gen
            self._path_generators[gen.get_label()].set_full_dof(full_dof)
        # Time step between interpolated samples
        self._dt = None
        # Last time stamp
        self._last_t = None
        # Last interpolated point
        self._last_pnt = None
        self._this_pnt = None

        # Flag to generate only stamped pose, no velocity profiles
        self._stamped_pose_only = stamped_pose_only

        self._t_step = 0.001

        # Interpolation method
        self._interp_method = interpolation_method

        # True if the path is generated for all degrees of freedom, otherwise
        # the path will be generated for (x, y, z, yaw) only
        self._is_full_dof = full_dof

        # Use finite differentiation if true, otherwise use motion regression
        # algorithm
        self._use_finite_diff = use_finite_diff
        # Time window used for the regression method
        self._regression_window = 0.5
        # If the regression method is used, adjust the time step
        if not self._use_finite_diff:
            self._t_step = self._regression_window / 30

        # Flags to indicate that the interpolation process has started and
        # ended
        self._has_started = False
        self._has_ended = False

        # The parametric variable to use as input for the interpolator
        self._cur_s = 0

        self._init_rot = quaternion_about_axis(0.0, [0, 0, 1])
Example #18
 def move_absolute_xyz(self, frame_id, x, y, z, retry=True):
     target_pose = PoseStamped()
     target_pose.header.frame_id = frame_id
     target_pose.pose.position.x = x
     target_pose.pose.position.y = y
     target_pose.pose.orientation = Quaternion(*quaternion_about_axis(z, [0, 0, 1]))
     return self.move_absolute(target_pose, retry)
Example #19
def aim_pose_to(ps, pts, point_dir=(1,0,0)):
    if not (ps.header.frame_id.lstrip('/') == pts.header.frame_id.lstrip('/')):
        rospy.logerr("[Pose_Utils.aim_pose_to]:"+
                     "Pose and point must be in same frame: %s, %s"
                    %(ps.header.frame_id, pts.header.frame_id))
    target_pt = np.array((pts.point.x, pts.point.y, pts.point.z))
    base_pt = np.array((ps.pose.position.x,
                        ps.pose.position.y,
                        ps.pose.position.z)) 
    base_quat = np.array((ps.pose.orientation.x, ps.pose.orientation.y,
                          ps.pose.orientation.z, ps.pose.orientation.w))

    b_to_t_vec = np.array((target_pt[0]-base_pt[0],
                           target_pt[1]-base_pt[1],
                           target_pt[2]-base_pt[2]))
    b_to_t_norm = np.divide(b_to_t_vec, np.linalg.norm(b_to_t_vec))

    point_dir_hom = (point_dir[0], point_dir[1], point_dir[2], 1)
    base_rot_mat = tft.quaternion_matrix(base_quat)
    point_dir_hom = np.dot(point_dir_hom, base_rot_mat.T)
    point_dir = np.array((point_dir_hom[0]/point_dir_hom[3],
                         point_dir_hom[1]/point_dir_hom[3],
                         point_dir_hom[2]/point_dir_hom[3]))
    point_dir_norm = np.divide(point_dir, np.linalg.norm(point_dir))
    axis = np.cross(point_dir_norm, b_to_t_norm)
    angle = np.arccos(np.vdot(point_dir_norm, b_to_t_norm))
    quat = tft.quaternion_about_axis(angle, axis)
    new_quat = tft.quaternion_multiply(quat, base_quat)
    ps.pose.orientation = Quaternion(*new_quat)
Example #20
def draw_axes(pub, id, ns, pose_stamped, scale=(0.05, 0.1, 0.1), text=""):
    ps = PointStamped()
    ps.header = pose_stamped.header
    ps.point = pose_stamped.pose.position
    q_x = quaternion_to_array(pose_stamped.pose.orientation)
    q_y = quaternion_multiply(q_x, quaternion_about_axis(pi/2, (0, 1, 0)))
    q_z = quaternion_multiply(q_x, quaternion_about_axis(pi/2, (0, 0, 1)))
    if id > 999:
        rospy.logerr('Currently can\'t visualize markers with id > 999.')
        return
    place_arrow(ps, pub, id, ns, (1, 0, 0),\
            scale, array_to_quaternion(q_x))
    place_arrow(ps, pub, id + 1000, ns, (0, 1, 0),\
            scale, array_to_quaternion(q_y))
    place_arrow(ps, pub, id + 2000, ns, (0, 0, 1),\
            scale, array_to_quaternion(q_z), text=text)
Example #21
    def __init__(self,
                 pose=TransformStamped(
                     header=Header(frame_id='panda_link8'),
                     child_frame_id='target',
                     transform=Transform(rotation=Quaternion(
                         *tf.quaternion_about_axis(numpy.pi / 4, [0, 0, 1])),
                                         translation=Vector3(0, 0, 0.105)))):

        self.robot = RobotModel()
        self.robot._add(Joint(pose))  # add a fixed end-effector transform
        self.pub = rospy.Publisher('/target_joint_states',
                                   JointState,
                                   queue_size=10)
        self.joint_msg = JointState()
        self.joint_msg.name = [j.name for j in self.robot.active_joints]
        self.joint_msg.position = numpy.asarray([
            (j.min + j.max) / 2 + 0.1 * (j.max - j.min) * random.uniform(0, 1)
            for j in self.robot.active_joints
        ])
        self.target_link = pose.child_frame_id
        self.T, self.J = self.robot.fk(
            self.target_link,
            dict(zip(self.joint_msg.name, self.joint_msg.position)))

        self.im_server = MyInteractiveMarkerServer("controller", self.T)
    def generate_quat(self, s):
        s = max(0, s)
        s = min(s, 1)

        last_s = s - self._s_step
        if last_s < 0:
            last_s = 0

        if s == 0:
            self._last_rot = deepcopy(self._init_rot)
            return self._init_rot

        this_pos = self.generate_pos(s)
        last_pos = self.generate_pos(last_s)
        dx = this_pos[0] - last_pos[0]
        dy = this_pos[1] - last_pos[1]
        dz = this_pos[2] - last_pos[2]

        rotq = self._compute_rot_quat(dx, dy, dz)
        self._last_rot = deepcopy(rotq)

        # Calculating the step for the heading offset
        q_step = quaternion_about_axis(
            self._interp_fcns['heading'](s),
            np.array([0, 0, 1]))
        # Adding the heading offset to the rotation quaternion
        rotq = quaternion_multiply(rotq, q_step)
        return rotq
    def __init__(self, plugin):
        super(Theta360ViewWidget, self).__init__()
        rp = rospkg.RosPack()
        ui_file = os.path.join(rp.get_path('theta_viewer'), 'resource', 'Theta360ViewWidget.ui')
        loadUi(ui_file, self)
        self.plugin = plugin

        self.position = (0.0, 0.0, 0.0)
        self.orientation = quaternion_about_axis(0.0, (1.0, 0.0, 0.0))
        self.topicName = None
        self.subscriber = None

        # create GL view
        self._glview = MyGLWidget()
        self._glview.setAcceptDrops(True)

        # backup and replace original paint method
        # self.glView.paintGL is callbacked from QGLWidget
        self._glview.paintGL_original = self._glview.paintGL
        self._glview.paintGL = self.glview_paintGL

        # backup and replace original mouse release method
        self._glview.mouseReleaseEvent_original = self._glview.mouseReleaseEvent
        self._glview.mouseReleaseEvent = self.glview_mouseReleaseEvent

        # add GL view to widget layout
        self.layout().addWidget(self._glview)

        # init and start update timer with 40ms (25fps)
        # updateTimeout is called with interval time
        self.update_timer = QTimer(self)
        self.update_timer.timeout.connect(self.update_timeout)
        self.update_timer.start(40)
        self.cnt = 0
Example #24
    def get_start_guess(self):
        if 1:
            rod = matrix2rodrigues(self.base_cam.get_rotation())
            t = self.base_cam.get_translation()
            t.shape= (3,)
            rod.shape=(3,)
            return np.array(list(t)+list(rod),dtype=np.float)

        R = np.eye(4)
        R[:3,:3] = self.base_cam.get_rotation()

        angle, direction, point = rotation_from_matrix(R)
        q = quaternion_about_axis(angle,direction)
        #q = matrix2quaternion(R)
        if 1:
            R2 = rotation_matrix(angle, direction, point)
            #R2 = quaternion2matrix( q )
            try:
                assert np.allclose(R, R2)
            except:
                print
                print 'R'
                print R
                print 'R2'
                print R2
                raise

        C = self.base_cam.get_camcenter()
        result = list(C) + list(q)
        return result
Example #25
    def get_start_guess(self):
        if 1:
            rod = matrix2rodrigues(self.base_cam.get_rotation())
            t = self.base_cam.get_translation()
            t.shape= (3,)
            rod.shape=(3,)
            return np.array(list(t)+list(rod),dtype=np.float)

        R = np.eye(4)
        R[:3,:3] = self.base_cam.get_rotation()

        angle, direction, point = rotation_from_matrix(R)
        q = quaternion_about_axis(angle,direction)
        #q = matrix2quaternion(R)
        if 1:
            R2 = rotation_matrix(angle, direction, point)
            #R2 = quaternion2matrix( q )
            try:
                assert np.allclose(R, R2)
            except:
                print
                print 'R'
                print R
                print 'R2'
                print R2
                raise

        C = self.base_cam.get_camcenter()
        result = list(C) + list(q)
        return result
Example #26
    def draw_car_heading(self):
        marker = Marker()
        marker.header.stamp = rospy.get_rostime()
        marker.header.frame_id = "map"
        marker.id = self.MARKER_HEADING
        marker.type = Marker.ARROW
        marker.action = Marker.ADD

        q = quaternion_about_axis(self.car_position.theta, (0, 0, 1))
        marker.pose.position.x = self.car_position.x
        marker.pose.position.y = self.car_position.y
        marker.pose.position.z = 0
        marker.pose.orientation.x = q[0]
        marker.pose.orientation.y = q[1]
        marker.pose.orientation.z = q[2]
        marker.pose.orientation.w = q[3]

        max_arrow_length = 5.0
        arrow_length = self.car_speed / \
            (self.MAX_SPEED - self.MIN_SPEED) * max_arrow_length

        marker.scale.x = arrow_length
        marker.scale.y = 0.10
        marker.scale.z = 0.10

        marker.color.r = 1.0
        marker.color.g = 0.0
        marker.color.b = 0.0
        marker.color.a = 1.0

        self.pub_visualiser.publish(marker)
Example #27
    def generate_quat(self, s):
        s = max(0, s)
        s = min(s, 1)

        if s == 0:
            self._last_rot = deepcopy(self._init_rot)
            return self._init_rot

        last_s = max(0, s - self._s_step)

        this_pos = self.generate_pos(s)
        last_pos = self.generate_pos(last_s)

        dx = this_pos[0] - last_pos[0]
        dy = this_pos[1] - last_pos[1]
        dz = this_pos[2] - last_pos[2]

        rotq = self._compute_rot_quat(dx, dy, dz)
        self._last_rot = rotq
        # Calculating the step for the heading offset
        q_step = quaternion_about_axis(self._interp_fcns['heading'](s),
                                       np.array([0, 0, 1]))
        # Adding the heading offset to the rotation quaternion
        rotq = quaternion_multiply(rotq, q_step)
        return rotq
Example #28
    def test_pacour(self, kitchen_setup):
        start = self.createStampedPose(0, 0, 0, orienation=-0.5)
        goal = self.createStampedPose(-4.0, -9, 0, orienation=-0.5)
        room_pose = self.createStampedPose(-2.35, -5.01, 0.1, orentationaxis=0)
        #person_pose = self.createStampedPose(0.57, -2.98, 0, orentationaxis=0)
        person_pose = self.createStampedPose(0.39, -4.55, 0, orentationaxis=0)
        person_pose_2 = self.createStampedPose(-1, -5.52, 0, orentationaxis=0)
        sofa_pose = self.createStampedPose(-0.96, -1.28, 0, orentationaxis=0)

        goalpose = PoseStamped()
        goalpose.pose.position = Point(-2.35, -5.01, 0.1)
        orient_axis = [1, 0, 0]
        orienation = 0.5
        goalpose.pose.orientation = Quaternion(*quaternion_about_axis(np.pi * orienation, orient_axis))
        goalpose.header.frame_id = "map"
        kitchen_setup.add_mesh(u'room',
                                path=u'package://suturo_resources/meshes/pacour/kitchen_dining/meshes/kitchen_dining_conv_edit.obj',
                                pose=goalpose)

        kitchen_setup.add_mesh(u'person_2',
                               path=u'package://suturo_resources/meshes/person_standing/meshes/standing.obj',
                               pose=person_pose_2)

        kitchen_setup.add_mesh(u'person',
                               path=u'package://suturo_resources/meshes/person_standing/meshes/standing.obj',
                               pose=person_pose)

        kitchen_setup.add_mesh(u'sofa',
                              path=u'package://suturo_resources/meshes/sofa/meshes/sofa_seat.obj',
                              pose=sofa_pose)

        self.full_coverage_avoidance(start, goal, kitchen_setup)
        np.testing.assert_almost_equal(0, 0)
Example #29
    def __init__(self, plugin):
        super(PoseViewWidget, self).__init__()
        rp = rospkg.RosPack()
        ui_file = os.path.join(rp.get_path('rqt_pose_view'), 'resource', 'PoseViewWidget.ui')
        loadUi(ui_file, self)
        self._plugin = plugin

        self._position = (0.0, 0.0, 0.0)
        self._orientation = quaternion_about_axis(0.0, (1.0, 0.0, 0.0))
        self._topic_name = None
        self._subscriber = None

        # create GL view
        self._gl_view = GLWidget()
        self._gl_view.setAcceptDrops(True)

        # backup and replace original paint method
        self._gl_view.paintGL_original = self._gl_view.paintGL
        self._gl_view.paintGL = self._gl_view_paintGL

        # backup and replace original mouse release method
        self._gl_view.mouseReleaseEvent_original = self._gl_view.mouseReleaseEvent
        self._gl_view.mouseReleaseEvent = self._gl_view_mouseReleaseEvent

        # add GL view to widget layout
        self.layout().addWidget(self._gl_view)

        # init and start update timer with 40ms (25fps)
        self._update_timer = QTimer(self)
        self._update_timer.timeout.connect(self.update_timeout)
        self._update_timer.start(40)
    def cluster(self, visualize=False):
        """
        :param visualize: whether or not the debug plot should be shown; this might result in an exception because pyplot does not like multithreading!
        :type visualize: bool
        :return: list of PoseStamped
        :rtype: list
        """
        data = np.array(self.detections)
        separators = []
        # old_frame_id = self.get_frame_id()
        if len(data) == 0:
            print_with_prefix('no separators detected', self.prefix)
        else:
            clusters = DBSCAN(eps=self.max_dist,
                              min_samples=self.min_samples).fit(data)
            labels = np.unique(clusters.labels_)
            print_with_prefix('detected {} separators'.format(len(labels)),
                              self.prefix)
            for i, label in enumerate(labels):
                if label != -1:
                    separator = PoseStamped()
                    separator.header.frame_id = 'map'
                    separator.pose.position = Point(*self.cluster_to_separator(
                        data[clusters.labels_ == label]))
                    separator.pose.orientation = Quaternion(
                        *quaternion_about_axis(-np.pi / 2, [0, 0, 1]))
                    separators.append(separator)

            if visualize:
                detections = np.array(self.detections)
                self.visualize_detections(clusters.labels_, detections,
                                          self.pose_list_to_np(separators))
        return separators
Example #31
    def test_2(self):
        angle = .42
        axis = [0, 0, 1]
        r1 = spw.rotation3_axis_angle(axis, angle)
        r1 = np.asarray(r1.tolist()).reshape(r1.shape)

        r_goal = quaternion_matrix(quaternion_about_axis(angle, axis))
        np.testing.assert_array_almost_equal(r1, r_goal)
Example #32
 def test_rotate_gripper(self, zero_pose):
     """
     :type zero_pose: HSR
     """
     r_goal = PoseStamped()
     r_goal.header.frame_id = zero_pose.tip
     r_goal.pose.orientation = Quaternion(*quaternion_about_axis(pi, [0, 0, 1]))
     zero_pose.set_and_check_cart_goal(r_goal, zero_pose.tip)
Example #33
 def pitch_down(self, angle):
     return self.set_orientation(
         transformations.quaternion_multiply(
             transformations.quaternion_about_axis(
                 angle,
                 self.zero_roll().left_vector),
             self.orientation,
         ))
    def _calc_teleop_reference(self):
        """
        Compute pose and velocity reference using the joystick linear and angular velocity input.
        """
        # Check if there is already a timestamp for the last received reference message
        # from the teleop node
        if self._last_teleop_update is None:
            self._is_teleop_active = False

        # Compute time step
        self._dt = rospy.get_time() - self._last_teleop_update

        # Compute the pose and velocity reference if the computed time step is positive and
        # the twist teleop message is valid
        if self._dt > 0 and self._teleop_vel_ref is not None and self._dt < 0.1:
            speed = np.sqrt(self._teleop_vel_ref.linear.x**2 +
                            self._teleop_vel_ref.linear.y**2)
            vel = np.array([
                self._teleop_vel_ref.linear.x, self._teleop_vel_ref.linear.y,
                self._teleop_vel_ref.linear.z
            ])
            # Cap the forward speed if needed
            if speed > self._max_forward_speed:
                vel[0] *= self._max_forward_speed / speed
                vel[1] *= self._max_forward_speed / speed

            vel = np.dot(self._vehicle_pose.rot_matrix, vel)

            # Compute pose step
            step = uuv_trajectory_generator.TrajectoryPoint()
            step.pos = np.dot(self._vehicle_pose.rot_matrix, vel * self._dt)
            step.rotq = quaternion_about_axis(
                self._teleop_vel_ref.angular.z * self._dt, [0, 0, 1])

            # Compute new reference
            ref_pnt = uuv_trajectory_generator.TrajectoryPoint()
            ref_pnt.pos = self._vehicle_pose.pos + step.pos

            ref_pnt.rotq = quaternion_multiply(self._vehicle_pose.rotq,
                                               step.rotq)

            # Cap the pose reference in Z to stay underwater
            if ref_pnt.z > 0:
                ref_pnt.z = 0.0
                ref_pnt.vel = [
                    vel[0], vel[1], 0, 0, 0, self._teleop_vel_ref.angular.z
                ]
            else:
                ref_pnt.vel = [
                    vel[0], vel[1], vel[2], 0, 0,
                    self._teleop_vel_ref.angular.z
                ]

            ref_pnt.acc = np.zeros(6)
        else:
            self._is_teleop_active = False
            ref_pnt = deepcopy(self._vehicle_pose)
        return ref_pnt
  def cb_master_state(self, msg):
    self.master_real_pos = np.array([msg.pose.position.x, msg.pose.position.y, msg.pose.position.z])
    pos = np.array([msg.pose.position.x, msg.pose.position.y, msg.pose.position.z]) - self.center_pos
    vel = np.array([msg.velocity.x, msg.velocity.y, msg.velocity.z])
    self.master_pos = self.change_axes(pos)
    self.master_vel = self.change_axes(vel)

    # Rotate to use the same axes orientation between master and slave
    real_rot = np.array([msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w])
    q_1 = tr.quaternion_about_axis(self.angle_rotation_1, self.axes_rotation_1)
    aux_rot = tr.quaternion_multiply(q_1, real_rot)
    q_2 = tr.quaternion_about_axis(self.angle_rotation_2, self.axes_rotation_2)
    aux_rot_2 = tr.quaternion_multiply(q_2, aux_rot)
    q_3 = tr.quaternion_about_axis(self.angle_rotation_3, self.axes_rotation_3)
    self.master_rot = tr.quaternion_multiply(q_3, aux_rot_2)

    #Normalize velocity
    self.master_dir = self.normalize_vector(self.master_vel)
Example #36
 def createStampedPose(self, x, y, z, orienation=0.5, orentationaxis=2):
     goalpose = PoseStamped()
     goalpose.pose.position = Point(x, y, z)
     orient_axis = [0, 0, 0]
     orient_axis[orentationaxis] += 1
     goalpose.pose.orientation = Quaternion(*quaternion_about_axis(np.pi * orienation, orient_axis))
     # goalpose.pose.orientation.z = 1
     goalpose.header.frame_id = "map"
     return goalpose
    def _compute_rot_quat(self, dx, dy, dz):
        if np.isclose(dx, 0) and np.isclose(dy, 0):
            rotq = self._last_rot
        else:
            heading = np.arctan2(dy, dx)
            rotq = quaternion_about_axis(heading, [0, 0, 1])

        if self._is_full_dof:
            rote = quaternion_about_axis(
                -1 * np.arctan2(dz, np.sqrt(dx**2 + dy**2)), [0, 1, 0])
            rotq = quaternion_multiply(rotq, rote)

        # Certify that the next quaternion remains in the same half hemisphere
        d_prod = np.dot(self._last_rot, rotq)
        if d_prod < 0:
            rotq *= -1

        return rotq
 def compute_transforms(self, link_names, joints, joint_values):
     all_transforms = tf.msg.tfMessage()
     # We start with the identity
     T = tf.transformations.translation_matrix([0, 0, 0])
     for i, l in enumerate(link_names):
         joint = self.robot.joint_map[self.robot.parent_map[l][0]]
         # print "Processing: %s %s %s" % (joint.axis, joint.name, l)
         T = T.dot(tft.translation_matrix(joint.origin.position))
         if joint.type != 'fixed':
             joint_value = joint_values.position[joint_values.name.index(
                 joint.name)]
             R = tft.quaternion_matrix(
                 tft.quaternion_about_axis(joint_value, joint.axis))
             T = T.dot(R)
         all_transforms.transforms.append(
             convert_to_message(T, l, 'world_link'))
     return all_transforms
 def _odom_callback(self, odom_data):
     vehicle_position_x = odom_data.pose.pose.position.x
     vehicle_position_y = odom_data.pose.pose.position.y
     vehicle_position_z = odom_data.pose.pose.position.z
     self._position = (vehicle_position_x, vehicle_position_y, vehicle_position_z)
     self._mapDrawer.set_position(self._position)
     self._yaw = odom_data.pose.pose.orientation.z
     self._orientation = quaternion_about_axis(math.radians(self._yaw), (0.0, 0.0, 1.0))
     self._mapDrawer.set_orientation(self._orientation,self._yaw)
    def __init__(self, full_dof=False, use_finite_diff=True,
                 interpolation_method='cubic',
                 stamped_pose_only=False):
        """Class constructor."""
        self._logger = logging.getLogger('wp_trajectory_generator')
        out_hdlr = logging.StreamHandler(sys.stdout)
        out_hdlr.setFormatter(logging.Formatter(
            get_namespace().replace('/', '').upper() + ' -- %(asctime)s | %(levelname)s | %(module)s | %(message)s'))
        out_hdlr.setLevel(logging.INFO)
        self._logger.addHandler(out_hdlr)
        self._logger.setLevel(logging.INFO)

        self._path_generators = dict()
        self._logger.info('Waypoint interpolators available:')
        for gen in PathGenerator.get_all_generators():
            self._logger.info('\t - ' + gen.get_label())
            self._path_generators[gen.get_label()] = gen
            self._path_generators[gen.get_label()].set_full_dof(full_dof)
        # Time step between interpolated samples
        self._dt = None
        # Last time stamp
        self._last_t = None
        # Last interpolated point
        self._last_pnt = None
        self._this_pnt = None

        # Flag to generate only stamped pose, no velocity profiles
        self._stamped_pose_only = stamped_pose_only

        self._t_step = 0.001

        # Interpolation method
        self._interp_method = interpolation_method

        # True if the path is generated for all degrees of freedom, otherwise
        # the path will be generated for (x, y, z, yaw) only
        self._is_full_dof = full_dof

        # Use finite differentiation if true, otherwise use motion regression
        # algorithm
        self._use_finite_diff = use_finite_diff
        # Time window used for the regression method
        self._regression_window = 0.5
        # If the regression method is used, adjust the time step
        if not self._use_finite_diff:
            self._t_step = self._regression_window / 30

        # Flags to indicate that the interpolation process has started and
        # ended
        self._has_started = False
        self._has_ended = False

        # The parametric variable to use as input for the interpolator
        self._cur_s = 0

        self._init_rot = quaternion_about_axis(0.0, [0, 0, 1])
Example #41
    def double_click(self, item, value):
        self.object_name = str(item.text(0))
        self.object = self.gui.found_objects[self.object_name]

        graspable_object = self.object.graspable_object

        # draw a bounding box around the selected object
        (box_pose, box_dims) = self.call_find_cluster_bounding_box(
            graspable_object.cluster)
        if box_pose == None:
            return

        box_mat = pose_to_mat(box_pose.pose)
        rospy.logerr("box_pose %f %f %f  q: %f %f %f %f",
                     box_pose.pose.position.x, box_pose.pose.position.y,
                     box_pose.pose.position.z, box_pose.pose.orientation.x,
                     box_pose.pose.orientation.y, box_pose.pose.orientation.z,
                     box_pose.pose.orientation.w)
        box_ranges = [[-box_dims.x / 2, -box_dims.y / 2, -box_dims.z / 2],
                      [box_dims.x / 2, box_dims.y / 2, box_dims.z / 2]]
        self.box_pose = box_pose

        self.draw_functions.draw_rviz_box(box_mat,
                                          box_ranges,
                                          '/world',
                                          ns='bounding box',
                                          color=[0, 0, 1],
                                          opaque=0.25,
                                          duration=60)

        # call the pickup service
        res = self.pickup(graspable_object, self.object.graspable_object_name,
                          self.object_name)

        if res == 0:  #correctly picked up
            #TODO: set up place_location
            executed_grasp = self.pickup_result.grasp
            initial_pose = PoseStamped()
            initial_pose.header.stamp = rospy.get_rostime()
            initial_pose.header.frame_id = "/world"
            initial_pose.pose.position.x = self.box_pose.pose.position.x + 0.1
            initial_pose.pose.position.y = self.box_pose.pose.position.y - 0.3
            initial_pose.pose.position.z = self.box_pose.pose.position.z - box_dims.z / 2  # graspable object is from bottom but bounding box is at center !
            q = transformations.quaternion_about_axis(-0.05, (0, 0, 1))
            initial_pose.pose.orientation.x = self.box_pose.pose.orientation.x  #q[0]
            initial_pose.pose.orientation.y = self.box_pose.pose.orientation.y  #q[1]
            initial_pose.pose.orientation.z = self.box_pose.pose.orientation.z  #q[2]
            initial_pose.pose.orientation.w = self.box_pose.pose.orientation.w  #q[3]

            self.list_of_poses = self.compute_list_of_poses(
                initial_pose, graspable_object, executed_grasp)
            #print "list of pose",self.list_of_poses

            self.place_object(graspable_object,
                              self.object.graspable_object_name,
                              self.object_name, self.list_of_poses)
Example #42
 def test_single_objects_sofa(self, kitchen_setup):
     start = self.createStampedPose(2.20, 0, 0)
     goal = self.createStampedPose(2.20, 4, 0)
     sofa_pose = self.createStampedPose(2.10, 1.93, 0, orentationaxis=0)
     sofa_pose.pose.orientation = Quaternion( *(utils.quaternion_multiply(quaternion_about_axis(np.pi * 0.5, [1, 0 , 0]), quaternion_about_axis(np.pi * -0.12, [0, 1, 0]))))
     kitchen_setup.add_mesh(u'hydrant',
                           path=u'package://suturo_resources/meshes/sofa/meshes/sofa_seat.obj',
                           pose=sofa_pose)
     self.full_coverage_avoidance(start, goal, kitchen_setup)
     np.testing.assert_almost_equal(0, 0)
Example #43
    def __init__(self, full_dof=False):
        # Waypoint set
        self._waypoints = None

        # True if the path is generated for all degrees of freedom, otherwise
        # the path will be generated for (x, y, z, yaw) only
        self._is_full_dof = full_dof

        # The parametric variable to use as input for the interpolator
        self._s = list()
        self._segment_to_wp_map = list()
        self._cur_s = 0
        self._s_step = 0.0001

        self._start_time = None
        self._duration = None

        self._init_rot = quaternion_about_axis(0.0, [0, 0, 1])
        self._last_rot = quaternion_about_axis(0.0, [0, 0, 1])
Example #44
    def rot_r_wrist(self, pt):
        out_pose = deepcopy(self.curr_state[1])
        q_r = transformations.quaternion_about_axis(
            -pt.x, (1, 0, 0))  #Hand frame roll (hand roll)
        q_p = transformations.quaternion_about_axis(
            -pt.y, (0, 1, 0))  #Hand frame pitch (wrist flex)
        q_h = transformations.quaternion_multiply(q_r, q_p)
        q_f = transformations.quaternion_about_axis(
            -pt.y, (1, 0, 0))  #Forearm frame rot (forearm roll)

        if pt.x or pt.y:
            self.tf.waitForTransform(out_pose.header.frame_id,
                                     'r_wrist_roll_link',
                                     out_pose.header.stamp,
                                     rospy.Duration(3.0))
            hand_pose = self.tf.transformPose('r_wrist_roll_link', out_pose)
            q_hand_pose = (hand_pose.pose.orientation.x,
                           hand_pose.pose.orientation.y,
                           hand_pose.pose.orientation.z,
                           hand_pose.pose.orientation.w)
            q_h_rot = transformations.quaternion_multiply(
                q_h, q_hand_pose)
            hand_pose.pose.orientation = Quaternion(*q_h_rot)
            out_pose = self.tf.transformPose(out_pose.header.frame_id,
                                             hand_pose)

        if pt.z:
            self.tf.waitForTransform(out_pose.header.frame_id,
                                     'r_forearm_roll_link',
                                     out_pose.header.stamp,
                                     rospy.Duration(3.0))
            hand_pose = self.tf.transformPose('r_forearm_roll_link', out_pose)
            q_hand_pose = (hand_pose.pose.orientation.x,
                           hand_pose.pose.orientation.y,
                           hand_pose.pose.orientation.z,
                           hand_pose.pose.orientation.w)
            q_f_rot = transformations.quaternion_multiply(
                q_f, q_hand_pose)
            hand_pose.pose.orientation = Quaternion(*q_f_rot)
            out_pose = self.tf.transformPose(out_pose.header.frame_id,
                                             hand_pose)

        wrist_traj = self.build_trajectory(out_pose, arm=1)
Example #45
    def test_avoid_collision4(self, better_pose):
        """
        :type box_setup: PR2
        """
        better_pose.attach_cylinder(height=0.3,
                                    radius=0.025,
                                    frame_id=better_pose.gripper_tip,
                                    position=[0, 0, 0.13],
                                    orientation=[0, 0, 0, 1])
        p = PoseStamped()
        p.header.frame_id = better_pose.gripper_tip
        p.pose.position.x = 0
        p.pose.position.z = 0.25
        p.pose.position.y = 0.04
        p.pose.orientation = Quaternion(
            *quaternion_about_axis(np.pi / 2, [0, 1, 0]))
        better_pose.add_cylinder('fdown', [0.2, 0.01], p)
        p = PoseStamped()
        p.header.frame_id = better_pose.gripper_tip
        p.pose.position.x = 0
        p.pose.position.z = 0.25
        p.pose.position.y = -0.07
        p.pose.orientation = Quaternion(
            *quaternion_about_axis(np.pi / 2, [0, 1, 0]))
        better_pose.add_cylinder('fup', [0.2, 0.01], p)
        p = PoseStamped()
        p.header.frame_id = better_pose.gripper_tip
        p.pose.position.x = 0
        p.pose.position.z = 0.15
        p.pose.position.y = 0.07
        p.pose.orientation = Quaternion(
            *quaternion_about_axis(np.pi / 2, [0, 1, 0]))
        better_pose.add_cylinder('bdown', [0.2, 0.01], p)
        p = PoseStamped()
        p.header.frame_id = better_pose.gripper_tip
        p.pose.position.x = 0
        p.pose.position.z = 0.15
        p.pose.position.y = -0.04
        p.pose.orientation = Quaternion(
            *quaternion_about_axis(np.pi / 2, [0, 1, 0]))
        better_pose.add_cylinder('bup', [0.2, 0.01], p)

        better_pose.send_and_check_goal()
    def _compute_rot_quat(self, dx, dy, dz):
        if np.isclose(dx, 0) and np.isclose(dy, 0):
            rotq = self._last_rot
        else:
            heading = np.arctan2(dy, dx)
            rotq = quaternion_about_axis(heading, [0, 0, 1])

        if self._is_full_dof:
            rote = quaternion_about_axis(
                -1 * np.arctan2(dz, np.sqrt(dx**2 + dy**2)),
                [0, 1, 0])
            rotq = quaternion_multiply(rotq, rote)

        # Certify that the next quaternion remains in the same half hemisphere
        d_prod = np.dot(self._last_rot, rotq)
        if d_prod < 0:
            rotq *= -1

        return rotq
Example #47
File: util.py Project: oberzan/RIS2018
def quaternion_between(target, viewpoint):
    angle = angle_between((target.x - viewpoint.x, target.y - viewpoint.y, 0),
                          (1, 0, 0))
    if target.y < viewpoint.y:
        angle = -angle

    quaternion = quaternion_about_axis(angle, (0, 0, 1))
    quaternion_ros = Quaternion(quaternion[0], quaternion[1], quaternion[2],
                                quaternion[3])
    return quaternion_ros, quaternion
    def _getPokePose(self, pos, orient):

        print orient
        if numpy.dot(tft.quaternion_matrix(orient)[0:3,2], (0,0,1)) < 0:
            print "fliiping arm pose"
            orient = tft.quaternion_multiply(orient, (1,0,0,0))
        tilt_adjust = tft.quaternion_about_axis(self.tilt_angle[self.arm_using], (0,1,0))
        new_orient = tft.quaternion_multiply(orient, tilt_adjust)
        pos[2] += self.above #push above cm above table 
#DEBUG
        #print new_orient

        return (pos, orient, new_orient)
Example #49
    def rot_r_wrist(self, pt):
        out_pose = deepcopy(self.curr_state[1])
        q_r = transformations.quaternion_about_axis(-pt.x, (1, 0, 0))  # Hand frame roll (hand roll)
        q_p = transformations.quaternion_about_axis(-pt.y, (0, 1, 0))  # Hand frame pitch (wrist flex)
        q_h = transformations.quaternion_multiply(q_r, q_p)
        q_f = transformations.quaternion_about_axis(-pt.y, (1, 0, 0))  # Forearm frame rot (forearm roll)

        if pt.x or pt.y:
            self.tf.waitForTransform(
                out_pose.header.frame_id, "r_wrist_roll_link", out_pose.header.stamp, rospy.Duration(3.0)
            )
            hand_pose = self.tf.transformPose("r_wrist_roll_link", out_pose)
            q_hand_pose = (
                hand_pose.pose.orientation.x,
                hand_pose.pose.orientation.y,
                hand_pose.pose.orientation.z,
                hand_pose.pose.orientation.w,
            )
            q_h_rot = transformations.quaternion_multiply(q_h, q_hand_pose)
            hand_pose.pose.orientation = Quaternion(*q_h_rot)
            out_pose = self.tf.transformPose(out_pose.header.frame_id, hand_pose)

        if pt.z:
            self.tf.waitForTransform(
                out_pose.header.frame_id, "r_forearm_roll_link", out_pose.header.stamp, rospy.Duration(3.0)
            )
            hand_pose = self.tf.transformPose("r_forearm_roll_link", out_pose)
            q_hand_pose = (
                hand_pose.pose.orientation.x,
                hand_pose.pose.orientation.y,
                hand_pose.pose.orientation.z,
                hand_pose.pose.orientation.w,
            )
            q_f_rot = transformations.quaternion_multiply(q_f, q_hand_pose)
            hand_pose.pose.orientation = Quaternion(*q_f_rot)
            out_pose = self.tf.transformPose(out_pose.header.frame_id, hand_pose)

        wrist_traj = self.build_trajectory(out_pose, arm=1)
 def imu_cb(self, msg):
     q = (msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w)
     
     # N-W-U to E-N-U is simply a rotation of -90 about the Z axis.
     tform = quaternion_about_axis(math.pi/2, (0,0,1))
     enu = quaternion_multiply(q,tform)
     new_msg = PoseStamped()
     new_msg.header = msg.header
     new_msg.pose.position.x = 0
     new_msg.pose.position.y = 0
     new_msg.pose.position.z = 0
     new_msg.pose.orientation.x = enu[0]
     new_msg.pose.orientation.y = enu[1]
     new_msg.pose.orientation.z = enu[2]
     new_msg.pose.orientation.w = enu[3]
     self.publisher.publish(new_msg)
    def append_pose(self, pose, secs, nsecs):
        x = pose['position']['x']
        pose['position']['x'] = -pose['position']['y'] - 4.1 
        pose['position']['y'] = x - 3.75

        oldq = [pose['orientation']['x'], pose['orientation']['y'],
                pose['orientation']['z'], pose['orientation']['w']]
        rotq = quaternion_about_axis(1.57, [0,0,1])
        newq = quaternion_multiply(oldq, rotq)
        pose['orientation']['x'] = newq[0]
        pose['orientation']['y'] = newq[1]
        pose['orientation']['z'] = newq[2]
        pose['orientation']['w'] = newq[3]
        self.pose.append(pose)
        self.secs.append(secs)
        self.nsecs.append(nsecs)
def frameCallback( msg ):
    global counter, objects_name_list
    counter+=1

    if objects_name_list:
        for object_name in objects_name_list:
            int_marker = server.get(object_name)
            if int_marker:
                cur_pose = int_marker.pose
                quat = transformations.quaternion_about_axis(2 * 3.14159286 * (1.0 / 1000), (0,0,1))
                quat = transformations.quaternion_multiply([cur_pose.orientation.x, cur_pose.orientation.y, cur_pose.orientation.z, cur_pose.orientation.w], quat)
                cur_pose.orientation.x = quat[0]
                cur_pose.orientation.y = quat[1]
                cur_pose.orientation.z = quat[2]
                cur_pose.orientation.w = quat[3]
                server.setPose(int_marker.name, cur_pose);
        server.applyChanges()
  def publish_relative_frames2(self, data):
    tmp_dict = {}
    for segment in data.segments:
      if(segment.is_quaternion):
        tmp_dict[segment.id] = (segment.position,segment.quat,self.tr.fromTranslationRotation(segment.position,segment.quat))
      else:	  
        tmp_dict[segment.id] = (segment.position,segment.euler)
  
    for idx in tmp_dict.keys():
      p_idx = xsens_client.get_segment_parent_id(idx)
      child_data = tmp_dict[idx]
      if(p_idx==-1):
        continue
      elif(p_idx==0):
        helper = tft.quaternion_about_axis(1,(-1,0,0))
        new_quat = tft.quaternion_multiply(tft.quaternion_inverse(helper),child_data[1])#if(segment.is_quaternion): TODO Handle Euler
        #self.sendTransform(child_data[0],
        self.sendTransform(child_data[0],
            child_data[1],
            #(1.0,0,0,0),#FIXME
            rospy.Time.now(),
            xsens_client.get_segment_name(idx),
            self.ref_frame)
      elif(p_idx>0):
        parent_data = tmp_dict[p_idx]
        new_quat = tft.quaternion_multiply(tft.quaternion_inverse(parent_data[1]), child_data[1])
        tmp_trans = (parent_data[0][0] - child_data[0][0],
                     parent_data[0][1] - child_data[0][1],
                     parent_data[0][2] - child_data[0][2])
        new_trans = qv_mult(parent_data[1], tmp_trans)
        self.sendTransform(
            new_trans,
            new_quat,
            rospy.Time.now(),
            xsens_client.get_segment_name(idx),
            xsens_client.get_segment_name(p_idx))

      else:
        parent_data = tmp_dict[p_idx]
        # Compose the homogeneous transforms with a matrix product
        # (np.multiply would be an element-wise product, not a valid transform).
        this_data = np.dot(tft.inverse_matrix(child_data[2]), parent_data[2])
        self.sendTransform(
            tft.translation_from_matrix(this_data),
            tft.quaternion_from_matrix(this_data),
            rospy.Time.now(),
            xsens_client.get_segment_name(idx),
            xsens_client.get_segment_name(p_idx))
Example #54
0
 def publish_thread():
     try:
         while True:
             t = nh.get_time()
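             # Broadcast /parent -> /child: fixed translation, yaw increasing with time.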
             tf_broadcaster.send_transform(TransformStamped(
                 header=Header(
                     stamp=t,
                     frame_id='/parent',
                 ),
                 child_frame_id='/child',
                 transform=Transform(
                     translation=Vector3(*[1, 2, 3]),
                     rotation=Quaternion(*transformations.quaternion_about_axis(t.to_sec(), [0, 0, 1])),
                 ),
             ))
             yield nh.sleep(0.1)
     except Exception:
         traceback.print_exc()
  def cb_master_state(self, msg):
    self.master_real_pos = np.array([msg.pose.position.x, msg.pose.position.y, msg.pose.position.z])
    pos = np.array([msg.pose.position.x, msg.pose.position.y, msg.pose.position.z]) - self.center_pos
    vel = np.array([msg.velocity.x, msg.velocity.y, msg.velocity.z])
    self.master_pos = self.change_axes(pos)
    self.master_vel = self.change_axes(vel)
    
    real_rot = np.array([msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w])
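    # Up to three axis-angle corrections are applied on each side: the *_arot_*
    # terms post-multiply (rotation about the device's own axes), the *_mrot_*
    # terms pre-multiply (rotation about the fixed-frame axes).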

    # Synchronize rotation axis
    if (self.angle_arot_1 == 0.0):
        aux_arot_1 = real_rot
    else:
        q_a1 = tr.quaternion_about_axis(self.angle_arot_1, self.axes_arot_1)
        aux_arot_1 = tr.quaternion_multiply(real_rot, q_a1)
    
    if (self.angle_arot_2 == 0.0):
        aux_arot_2 = aux_arot_1
    else:
        q_a2 = tr.quaternion_about_axis(self.angle_arot_2, self.axes_arot_2)
        aux_arot_2 = tr.quaternion_multiply(aux_arot_1, q_a2)

    if (self.angle_arot_3 == 0.0):
        aux_arot_3 = aux_arot_2
    else:
        q_a3 = tr.quaternion_about_axis(self.angle_arot_3, self.axes_arot_3)
        aux_arot_3 = tr.quaternion_multiply(aux_arot_2, q_a3)
        
    # Synchronize rotation movement
    if (self.angle_mrot_1 == 0.0):
        aux_mrot_1 = aux_arot_3
    else:
        q_m1 = tr.quaternion_about_axis(self.angle_mrot_1, self.axes_mrot_1)
        aux_mrot_1 = tr.quaternion_multiply(q_m1, aux_arot_3)
    
    if (self.angle_mrot_2 == 0.0):
        aux_mrot_2 = aux_mrot_1
    else:
        q_m2 = tr.quaternion_about_axis(self.angle_mrot_2, self.axes_mrot_2)
        aux_mrot_2 = tr.quaternion_multiply(q_m2, aux_mrot_1)

    if (self.angle_mrot_3 == 0.0):
        aux_mrot_3 = aux_mrot_2
    else:
        q_m3 = tr.quaternion_about_axis(self.angle_mrot_3, self.axes_mrot_3)
        aux_mrot_3 = tr.quaternion_multiply(q_m3, aux_mrot_2)    
    
    self.master_rot = aux_mrot_3


    # Normalize the velocity into a unit direction vector
    self.master_dir = self.normalize_vector(self.master_vel)
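The correction chain in cb_master_state above hinges on multiplication order. A minimal standalone sketch of the difference between post- and pre-multiplication (using tf.transformations directly rather than the tr alias from the snippet; the example angles are arbitrary):

import numpy as np
from tf.transformations import quaternion_about_axis, quaternion_multiply, euler_from_quaternion

# Start from an orientation pitched 90 degrees about the fixed Y axis.
base = quaternion_about_axis(np.pi / 2, (0, 1, 0))
# A 90 degree correction about Z.
correction = quaternion_about_axis(np.pi / 2, (0, 0, 1))

# Post-multiplying applies the correction about the rotated (body) Z axis,
# as the *_arot_* corrections above do...
body_frame = quaternion_multiply(base, correction)
# ...while pre-multiplying applies it about the fixed Z axis, as the *_mrot_* corrections do.
fixed_frame = quaternion_multiply(correction, base)

print(np.round(euler_from_quaternion(body_frame), 3))
print(np.round(euler_from_quaternion(fixed_frame), 3))   # different result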
def callback_angle(data):
    global model_state

    # Compute the rotation quaternion: rotation about the z-axis.
    z_axis = (0, 0, 1)

    quaternion = transformations.quaternion_about_axis(
        angle=np.pi+np.pi*data.data, axis=z_axis)

    model_state.pose.orientation.x = quaternion[0]
    model_state.pose.orientation.y = quaternion[1]
    model_state.pose.orientation.z = quaternion[2]
    model_state.pose.orientation.w = quaternion[3]

    # Move the virtual camera along an arc of radius 1.9 centered at (0.48, -0.6);
    # with the yaw set above, the camera's x-axis keeps pointing back at the arc's center.
    model_state.pose.position.x = 1.9*math.cos(np.pi*data.data) + 0.48
    model_state.pose.position.y = 1.9*math.sin(np.pi*data.data) -0.6
    model_state.pose.position.z = 1.0

    move_camera()
    def message_callback(self, message, slot_paths):
        quaternion_slot_path = slot_paths[0]
        point_slot_path = slot_paths[1]

        if quaternion_slot_path is None:
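            # No orientation slot configured: fall back to the identity quaternion.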
            self._orientation = quaternion_about_axis(0.0, (1.0, 0.0, 0.0))
        else:
            orientation = message
            for slot_name in quaternion_slot_path:
                orientation = getattr(orientation, slot_name)
            self._orientation = (orientation.x, orientation.y, orientation.z, orientation.w)

        if point_slot_path is None:
            # if no point is given, set it to a fixed offset so the axes can be seen
            self._position = (2.0, 2.0, 2.0)
        else:
            position = message
            for slot_name in point_slot_path:
                position = getattr(position, slot_name)
            self._position = (position.x, position.y, position.z)
    def imu_cb(self, msg):
        try:
            (trans, rot) = self.listener.lookupTransform(msg.header.frame_id, '/base_footprint', rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException):
            rospy.logerr('Exception in TF lookup')
        q = (msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w)
        (r, p, y) = efq(q)
        
        # Raw Reading from the IMU:
        # 0 is north, and rotations in the CCW direction are positive.
        # I guess that means N-W-U
        rospy.loginfo("Raw: " + str(wrapTo360(180.0 * y / math.pi)))

        # N-W-U to E-N-U: the ENU axes are the NWU axes rotated -90 degrees about Z,
        # so NWU readings are converted with the +pi/2 rotation below.
        tform = quaternion_about_axis(math.pi/2, (0,0,1))
        enu = quaternion_multiply(q,tform)
        (r,p,y) = efq(enu)
        rospy.loginfo("ENU: " + str(radAndWrap(y)))

        y = (-y) + math.pi/2.0
        rospy.loginfo("NED: " + str(radAndWrap(y)))
    def test_april_tags_single_interval(self):
        #Setup the publisher and subscribers
        self.pub_april_tags = rospy.Publisher("visual_odometry_april_tags_node/april_tags", AprilTags, queue_size=1, latch=True)
        self.pub_wheels_cmd = rospy.Publisher("visual_odometry_april_tags_node/wheels_cmd", WheelsCmdStamped, queue_size=1, latch=True)

        # Wait for the forward_kinematics_node to finish starting up
        timeout = time.time()+5.0
        while (self.pub_wheels_cmd.get_num_connections() < 1 or self.pub_april_tags.get_num_connections() < 1) and \
                not rospy.is_shutdown() and not time.time()>timeout:
            rospy.sleep(0.1)

        # Publish a single wheels cmd, and two simple april tag messages
        msg_wheels_cmd = WheelsCmdStamped()
        msg_wheels_cmd.vel_left = 1
        msg_wheels_cmd.vel_right = 1
        self.pub_wheels_cmd.publish(msg_wheels_cmd)
        rospy.sleep(0.2)    #Wait so the tags come in the right order
        T1 = Transform()
        T2 = Transform()
        T1.translation.y = 2
        T2.translation.y = 2
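        # The second detection has the same position but is yawed -90 degrees about Z.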
        T2.rotation.x, T2.rotation.y, T2.rotation.z, T2.rotation.w = tr.quaternion_about_axis(-np.pi/2, (0,0,1))

        msg_tag1 = AprilTags()
        tag = TagDetection()
        tag.transform = T1
        msg_tag1.detections.append(tag)
        msg_tag1.header.stamp = rospy.Duration(0)
        self.pub_april_tags.publish(msg_tag1)
        rospy.sleep(0.2)    #Wait so the tags come in the right order
        msg_tag2 = AprilTags()
        msg_tag2.header.stamp = rospy.Duration(1)
        tag.transform = T2
        msg_tag2.detections.append(tag)
        self.pub_april_tags.publish(msg_tag2)

        # Wait for the output file to be written
        rospy.sleep(3)
        res = np.genfromtxt(rospy.get_param("visual_odometry_april_tags_node/filename", ''))
        assert_almost_equal(res, np.array([1,1,1,np.pi/2, 2, 2]))
    def donot_test_vicon_learning_sample_calculation(self):
        self.setup_publisher_and_subscriber()

        # publish the first wheels_cmd
        msg_wheels_cmd = WheelsCmdStamped()
        msg_wheels_cmd.vel_left = 1
        msg_wheels_cmd.vel_right = 1
        self.pub_wheels_cmd.publish(msg_wheels_cmd)

        # publish a vicon pose
        msg_pose = PoseStamped()
        msg_pose.pose.position.x = 1
        msg_pose.pose.position.y = 1
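        # Orientation: a +90 degree yaw, copied field-by-field into the Quaternion message.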
        q = tr.quaternion_about_axis(np.pi/2,(0,0,1))
        for i, attr in enumerate(['x', 'y', 'z', 'w']):
            msg_pose.pose.orientation.__setattr__(attr, q[i])
        self.pub_pose.publish(msg_pose)

        # publish second wheels_cmd pose
        msg_wheels_cmd.header.stamp = rospy.Time(1)
        self.pub_wheels_cmd.publish(msg_wheels_cmd)

        # Wait for the samples to come back
        timeout = time.time()+10.0
        while not (self.msg_v_received and self.msg_theta_dot_received) and not rospy.is_shutdown() and not time.time()>timeout:
            rospy.sleep(0.1)

        # Notify if the timeout struck
        self.assertLess(time.time(),timeout,msg="WARNING: Timeout reached, no samples received from vicon_learning_node.")

        self.assertAlmostEqual(self.msg_v_sample.d_R, 1)
        self.assertAlmostEqual(self.msg_v_sample.d_L, 1)
        self.assertAlmostEqual(self.msg_v_sample.dt, 1)
        self.assertAlmostEqual(self.msg_v_sample.x_axis_pose_delta, -1)
        self.assertAlmostEqual(self.msg_v_sample.y_axis_pose_delta, 1)
        self.assertAlmostEqual(self.msg_v_sample.theta_angle_pose_delta, np.pi/2)
        self.assertAlmostEqual(self.msg_theta_dot_sample.d_R, 1)
        self.assertAlmostEqual(self.msg_theta_dot_sample.d_L, 1)
        self.assertAlmostEqual(self.msg_theta_dot_sample.dt, 1)
        self.assertAlmostEqual(self.msg_theta_dot_sample.theta_angle_pose_delta, np.pi/2)