Example #1
def goal_diff(goal, transform):
    t1 = [
        goal.pose.position.x,
        goal.pose.position.y,
        goal.pose.position.z,
    ]
    t2 = [
        transform.transform.translation.x,
        transform.transform.translation.y,
        transform.transform.translation.z,
    ]
    rospy.loginfo('translation diff = {}m'.format(
        np.linalg.norm(np.array(t1) - np.array(t2))))
    q1 = [
        goal.pose.orientation.x,
        goal.pose.orientation.y,
        goal.pose.orientation.z,
        goal.pose.orientation.w,
    ]
    q2 = [
        transform.transform.rotation.x,
        transform.transform.rotation.y,
        transform.transform.rotation.z,
        transform.transform.rotation.w,
    ]
    q1 = np.array(q1)
    q2 = np.array(q2)
    angle1 = rotation_from_matrix(quaternion_matrix(q1))[0]
    angle2 = rotation_from_matrix(quaternion_matrix(q2))[0]
    # Note: this compares only the magnitudes of the two axis-angle rotations,
    # not the angle of the relative rotation between them.
    rospy.loginfo('rotation diff = {} rad'.format(abs(angle1 - angle2)))
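
A minimal sketch (not part of the original snippet) of the angle of the actual relative rotation between the two quaternions, using the same tf.transformations helpers:

from tf.transformations import (quaternion_multiply, quaternion_inverse,
                                quaternion_matrix, rotation_from_matrix)

def relative_rotation_angle(q1, q2):
    # Quaternion taking the orientation q1 onto q2, then its rotation angle.
    q_rel = quaternion_multiply(q2, quaternion_inverse(q1))
    angle, _, _ = rotation_from_matrix(quaternion_matrix(q_rel))
    return abs(angle)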
Example #2
class MoveToState(State):
    def __init__(self, target, v):  # type: (Callable[[], PoseStamped], float) -> None
        super(MoveToState, self).__init__(outcomes=['ok'])
        self.v = v
        self.target = target
        self.twist_pub = rospy.Publisher('/cmd_vel_mux/input/teleop', Twist, queue_size=10)
        self.target_pub = rospy.Publisher('/viz/moveto_target', PoseStamped, queue_size=1)
        self.odometry = SubscriberValue('/odom', Odometry)
        self.tf_listener = TransformListener()

    def execute(self, ud):
        while not rospy.is_shutdown():
            target = self.target()
            self.target_pub.publish(target)

            try:
                this_pose = PoseStamped()
                this_pose.header.frame_id = 'odom'
                this_pose.pose = self.odometry.value.pose.pose
                this_pose = self.tf_listener.transformPose('map', this_pose)
            except (tf.LookupException, tf.ExtrapolationException, tf.ConnectivityException):
                continue

            this_position = numpify(this_pose.pose.position)[0:2]
            target_position = numpify(target.pose.position)[0:2]

            if np.linalg.norm(this_position - target_position) < 0.01:
                self.twist_pub.publish(Twist())
                break

            target_angle = np.arctan2(target.pose.position.y - this_pose.pose.position.y, target.pose.position.x - this_pose.pose.position.x)
            this_angle, _, _ = transformations.rotation_from_matrix(transformations.quaternion_matrix(numpify(this_pose.pose.orientation)))

            t = Twist()
            t.linear.x = self.v
            t.angular.z = -4 * angle_diff(this_angle, target_angle)
            self.twist_pub.publish(t)

        while not rospy.is_shutdown():
            try:
                this_pose = PoseStamped()
                this_pose.header.frame_id = 'odom'
                this_pose.pose = self.odometry.value.pose.pose
                this_pose = self.tf_listener.transformPose('map', this_pose)
            except (tf.LookupException, tf.ExtrapolationException, tf.ConnectivityException):
                continue

            target_angle, _, _ = transformations.rotation_from_matrix(transformations.quaternion_matrix(numpify(self.target().pose.orientation)))
            this_angle, _, _ = transformations.rotation_from_matrix(transformations.quaternion_matrix(numpify(this_pose.pose.orientation)))

            if abs(angle_diff(this_angle, target_angle)) < 0.05:
                self.twist_pub.publish(Twist())
                return 'ok'

            t = Twist()
            t.angular.z = -2 * angle_diff(this_angle, target_angle)
            self.twist_pub.publish(t)
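
The snippet relies on an angle_diff helper that is not shown. A plausible stand-in (an assumption, not the original implementation) returning the signed difference wrapped to [-pi, pi]:

import numpy as np

def angle_diff(a, b):
    # Signed difference a - b, normalized into [-pi, pi].
    return (a - b + np.pi) % (2 * np.pi) - np.pi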
Example #3
    def get_start_guess(self):
        if 1:  # parameterize the guess as translation + Rodrigues rotation vector
            rod = matrix2rodrigues(self.base_cam.get_rotation())
            t = self.base_cam.get_translation()
            t.shape = (3,)
            rod.shape = (3,)
            return np.array(list(t) + list(rod), dtype=float)

        R = np.eye(4)
        R[:3,:3] = self.base_cam.get_rotation()

        angle, direction, point = rotation_from_matrix(R)
        q = quaternion_about_axis(angle,direction)
        #q = matrix2quaternion(R)
        if 1:  # sanity check: rebuilding the matrix must reproduce R
            R2 = rotation_matrix(angle, direction, point)
            #R2 = quaternion2matrix( q )
            try:
                assert np.allclose(R, R2)
            except AssertionError:
                print()
                print('R')
                print(R)
                print('R2')
                print(R2)
                raise

        C = self.base_cam.get_camcenter()
        result = list(C) + list(q)
        return result
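
matrix2rodrigues is an external helper that is not shown. A sketch of an equivalent, assuming the input is a 3x3 rotation matrix and using OpenCV's Rodrigues conversion (not necessarily the original implementation):

import cv2
import numpy as np

def matrix2rodrigues(R):
    # Rotation matrix -> 3x1 rotation vector (axis scaled by angle).
    rvec, _jacobian = cv2.Rodrigues(np.asarray(R, dtype=np.float64))
    return rvec  # the caller reshapes this (3, 1) array to (3,)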
Example #6
def turnRadians(angle_radians, angular_vel, clockwise):
    twist = Twist()
    cmd_vel = rospy.Publisher("cmd_vel_mux/input/teleop", Twist, queue_size=10)
    listener = TransformListener()
    listener.waitForTransform("/base_link", "/odom", rospy.Time(0),
                              rospy.Duration(1))
    (start_t, start_r) = listener.lookupTransform("/base_link", "/odom",
                                                  rospy.Time(0))
    start_transform = t.concatenate_matrices(t.translation_matrix(start_t),
                                             t.quaternion_matrix(start_r))
    if clockwise:
        twist.angular.z = -abs(angular_vel)
    else:
        twist.angular.z = abs(angular_vel)
    rate = rospy.Rate(10)
    done = False
    while not done:
        cmd_vel.publish(twist)
        rate.sleep()
        (curr_t, curr_r) = listener.lookupTransform("/base_link", "/odom",
                                                    rospy.Time(0))
        current_transform = t.concatenate_matrices(
            t.translation_matrix(curr_t), t.quaternion_matrix(curr_r))
        relative = numpy.dot(t.inverse_matrix(start_transform),
                             current_transform)
        rot_moved, dire, point = t.rotation_from_matrix(relative)
        print("angle=%s, moved=%s, stop=%s" %
              (str(angle_radians), str(rot_moved),
               str(abs(rot_moved) > abs(angle_radians))))
        if abs(rot_moved) > abs(angle_radians):
            done = True
            break

    return done, "Done!"
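
A usage sketch, assuming the snippet's own imports and a running ROS graph that publishes the /odom to /base_link transform and listens on the teleop velocity topic:

import rospy

if __name__ == '__main__':
    rospy.init_node('turn_demo')
    done, msg = turnRadians(angle_radians=1.57, angular_vel=0.5, clockwise=True)
    rospy.loginfo(msg)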
Example #7
def pose_similarity(pose1, pose2):

    # Relative transform between two 4x4 homogeneous pose matrices.
    pose_transform = numpy.linalg.inv(pose1).dot(pose2)
    position_distance = numpy.linalg.norm(numpy.array(pose_transform)[0:3, 3])

    angle, direction, point = rotation_from_matrix(pose_transform)

    return abs(angle), position_distance
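
A usage sketch with two homogeneous matrices built from tf.transformations; the numbers are purely illustrative:

import numpy
from tf.transformations import euler_matrix, translation_matrix

pose_a = numpy.eye(4)
pose_b = translation_matrix([0.1, 0.0, 0.0]).dot(euler_matrix(0, 0, 0.2))
angle_err, dist_err = pose_similarity(pose_a, pose_b)
print(angle_err, dist_err)  # roughly 0.2 rad and 0.1 m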
Example #8
def angle_diff_from_quaternions(q1, q2):
    tf1 = transformations.compose_matrix(
        angles=transformations.euler_from_quaternion(q1))
    tf2 = transformations.compose_matrix(
        angles=transformations.euler_from_quaternion(q2))
    angle, _, _ = transformations.rotation_from_matrix(
        np.dot(np.linalg.inv(tf1), tf2))
    return angle / np.pi * 180.
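
A usage sketch: the relative angle between the identity orientation and a 90-degree yaw should come out near 90 degrees (the sign follows the recovered rotation axis):

import numpy as np
from tf import transformations

q_a = [0.0, 0.0, 0.0, 1.0]  # identity quaternion (x, y, z, w)
q_b = transformations.quaternion_about_axis(np.pi / 2, (0, 0, 1))
print(angle_diff_from_quaternions(q_a, q_b))  # roughly +/-90.0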
Example #10
 def test_axis_angle_from_matrix2(self, expected_axis, expected_angle):
     m = rotation_matrix(expected_angle, expected_axis)
     actual_axis = w.compile_and_execute(
         lambda x: w.axis_angle_from_matrix(x)[0], [m])
     actual_angle = w.compile_and_execute(
         lambda x: w.axis_angle_from_matrix(x)[1], [m])
     expected_angle, expected_axis, _ = rotation_from_matrix(m)
     compare_axis_angle(actual_angle, actual_axis, expected_angle,
                        expected_axis)
Example #11
def _inverse_tuples(t):
    trans, rot = t
    euler = tr.euler_from_quaternion(rot)
    m = tr.compose_matrix(translate=trans, angles=euler)
    m_inv = tr.inverse_matrix(m)
    trans_inv = tr.translation_from_matrix(m_inv)
    rot_inv = tr.rotation_matrix(*tr.rotation_from_matrix(m_inv))
    quat_inv = tr.quaternion_from_matrix(rot_inv)
    return (trans_inv, quat_inv)
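
A round-trip sketch: composing a (translation, quaternion) tuple with its inverse should give the identity transform, up to floating-point error:

import numpy as np
from tf import transformations as tr

trans = (1.0, 2.0, 0.5)
rot = tr.quaternion_about_axis(0.3, (0, 0, 1))
inv_trans, inv_rot = _inverse_tuples((trans, rot))

m = tr.compose_matrix(translate=trans, angles=tr.euler_from_quaternion(rot))
m_inv = tr.compose_matrix(translate=inv_trans, angles=tr.euler_from_quaternion(inv_rot))
print(np.allclose(m.dot(m_inv), np.eye(4)))  # True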
Example #12
    def cb(self, move_base_goal):
        move_base_goal = self.tf.transformPose('map',
                                               move_base_goal.target_pose)
        goal_js = [
            move_base_goal.pose.position.x, move_base_goal.pose.position.y,
            rotation_from_matrix(
                quaternion_matrix([
                    move_base_goal.pose.orientation.x,
                    move_base_goal.pose.orientation.y,
                    move_base_goal.pose.orientation.z,
                    move_base_goal.pose.orientation.w
                ]))[0]
        ]

        current_js = rospy.wait_for_message('whole_body_controller/state',
                                            JointTrajectoryControllerState)
        goal = FollowJointTrajectoryGoal()
        goal.trajectory.header = move_base_goal.header
        goal.trajectory.joint_names = current_js.joint_names

        p = JointTrajectoryPoint()
        p.time_from_start = rospy.Duration(0.1)
        p.positions = current_js.actual.positions
        goal.trajectory.points.append(p)
        last_p = p

        def next_p(goal_p, last_p, f):
            diff = (goal_p - last_p)
            if diff > 0:
                diff = min(diff, self.max_vel * f)
            else:
                diff = max(diff, -self.max_vel * f)
            return last_p + (diff)

        while not np.allclose(np.array(goal_js), p.positions[:3]):
            p = JointTrajectoryPoint()
            p.time_from_start = last_p.time_from_start + rospy.Duration(
                1 * self.freq)
            p.positions = list(current_js.actual.positions)

            p.positions[0] = next_p(goal_js[0], last_p.positions[0], self.freq)
            p.positions[1] = next_p(goal_js[1], last_p.positions[1], self.freq)
            p.positions[2] = next_p(goal_js[2], last_p.positions[2], self.freq)

            goal.trajectory.points.append(p)
            last_p = p

        self.client.send_goal(goal)
        while not self.client.wait_for_result(rospy.Duration(.1)):
            if self.server.is_preempt_requested():
                rospy.loginfo('new goal, cancel old one')
                self.client.cancel_all_goals()
                self.server.set_preempted(MoveBaseResult())
                break
        # result = self.client.get_result()
        # if result.error_code == result.SUCCESSFUL:
        self.server.set_succeeded(MoveBaseResult())
Example #13
 def test_rotation_distance(self, q1, q2):
     m1 = quaternion_matrix(q1)
     m2 = quaternion_matrix(q2)
     actual_angle = w.compile_and_execute(w.rotation_distance, [m1, m2])
     expected_angle, _, _ = rotation_from_matrix(m1.T.dot(m2))
     try:
         self.assertAlmostEqual(shortest_angular_distance(actual_angle, expected_angle), 0, places=3)
     except AssertionError:
         self.assertAlmostEqual(shortest_angular_distance(actual_angle, -expected_angle), 0, places=3)
Example #14
 def test_axis_angle_from_rpy(self, roll, pitch, yaw):
     angle2, axis2, _ = rotation_from_matrix(euler_matrix(roll, pitch, yaw))
     axis = w.compile_and_execute(lambda r, p, y: w.axis_angle_from_rpy(r, p, y)[0], [roll, pitch, yaw])
     angle = w.compile_and_execute(lambda r, p, y: w.axis_angle_from_rpy(r, p, y)[1], [roll, pitch, yaw])
     if angle < 0:
         angle = -angle
         axis = [-x for x in axis]
     if angle2 < 0:
         angle2 = -angle2
         axis2 *= -1
     compare_axis_angle(angle, axis, angle2, axis2)
Example #15
def robot_pose_cb(data):
    """
    :type data: PoseWithCovarianceStamped
    :return:
    """
    global robot_pose
    robot_pose = [data.pose.pose.position.x,
                  data.pose.pose.position.y,
                  rotation_from_matrix(quaternion_matrix([data.pose.pose.orientation.x,
                                                          data.pose.pose.orientation.y,
                                                          data.pose.pose.orientation.z,
                                                          data.pose.pose.orientation.w]))[0]]
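
A wiring sketch for the callback above; the node and topic names are assumptions, and the snippet's own imports (quaternion_matrix, rotation_from_matrix) are taken as already in scope:

import rospy
from geometry_msgs.msg import PoseWithCovarianceStamped

robot_pose = None
rospy.init_node('robot_pose_listener')
rospy.Subscriber('/amcl_pose', PoseWithCovarianceStamped, robot_pose_cb)
rospy.spin()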
Example #16
def pcloud2ranges(point_cloud, tf_mat):
    angle, direc, point = rotation_from_matrix(tf_mat)
    R = rotation_matrix(angle, direc, point)
    rot_inv = R[np.ix_([0,1,2],[0,1,2])].transpose()

    t = translation_from_matrix(tf_mat)
    t_inv = rot_inv.dot(t)

    ranges = []
    for p in pc2.read_points(point_cloud, field_names = ("x", "y", "z"), skip_nans=True):
        p_part = rot_inv.dot(p) - t_inv
        ranges.append(np.linalg.norm(p_part[-2:]))

    return np.asarray(ranges)
Example #17
 def move_base(self, goal_pose):
     goal_pose = transform_pose(self.default_root, goal_pose)
     js = {
         u'odom_x_joint':
         goal_pose.pose.position.x,
         u'odom_y_joint':
         goal_pose.pose.position.y,
         u'odom_z_joint':
         rotation_from_matrix(
             quaternion_matrix([
                 goal_pose.pose.orientation.x, goal_pose.pose.orientation.y,
                 goal_pose.pose.orientation.z, goal_pose.pose.orientation.w
             ]))[0]
     }
     self.send_and_check_joint_goal(js)
Example #18
def cb(dt):
    pose = lookup_pose(odom, base_footprint)
    js = JointState()
    js.header.stamp = pose.header.stamp
    js.name = [x, y, z]
    js.position = [
        pose.pose.position.x, pose.pose.position.y,
        rotation_from_matrix(
            quaternion_matrix([
                pose.pose.orientation.x, pose.pose.orientation.y,
                pose.pose.orientation.z, pose.pose.orientation.w
            ]))[0]
    ]
    js.velocity = [0, 0, 0]
    js.effort = [0, 0, 0]
    pose_pub.publish(js)
Example #19
    def execute(self, ud):
        while True:
            target_pose = self.target()
            self.target_pub.publish(target_pose)

            try:
                this_pose = PoseStamped()
                this_pose.header.frame_id = 'odom'
                this_pose.pose = self.odometry.value.pose.pose
                this_pose = self.tf_listener.transformPose('map', this_pose)
            except (tf.LookupException, tf.ExtrapolationException,
                    tf.ConnectivityException):
                continue

            # cube_offset = 0.18 + self.cube.cube_side_length/2 + 0.3 + 0.05 - .30 + 0.09 + 0.05  # TODO: remove magic numbers
            cube_offset = 0.41
            this_position = numpify(this_pose.pose.position)[0:2]
            cube_position = this_position + qv_mult(
                numpify(this_pose.pose.orientation),
                [1, 0, 0])[0:2] * cube_offset
            target_position = numpify(target_pose.pose.position)[0:2]

            target_position_pub = PoseStamped()
            target_position_pub.header.frame_id = 'map'
            target_position_pub.pose.position.x = target_position[0]
            target_position_pub.pose.position.y = target_position[1]
            target_position_pub.pose.orientation.w = 1
            self.cube_pub.publish(target_position_pub)

            # print(target_position - this_position, target_position - cube_position)
            if (np.dot(target_position - this_position,
                       target_position - cube_position)) <= 0:
                self.twist_pub.publish(Twist())
                return 'ok'

            target_angle = np.arctan2(
                target_pose.pose.position.y - this_pose.pose.position.y,
                target_pose.pose.position.x - this_pose.pose.position.x)
            this_angle, _, _ = transformations.rotation_from_matrix(
                transformations.quaternion_matrix(
                    numpify(this_pose.pose.orientation)))

            t = Twist()
            t.linear.x = self.v
            t.angular.z = -2 * angle_diff(this_angle, target_angle)
            self.twist_pub.publish(t)
Example #20
 def test_rotation_distance(self, q1, q2):
     # assume(angle > 0.0001)
     # assume(angle < np.pi - 0.0001)
     m1 = quaternion_matrix(q1)
     m2 = quaternion_matrix(q2)
     angle = speed_up_and_execute(spw.rotation_distance, [m1, m2])[0]
     ref_angle, _, _ = rotation_from_matrix(m1.T.dot(m2))
     # axis2, angle2 = spw.diffable_axis_angle_from_matrix(spw.rotation_matrix_from_axis_angle(axis, angle))
     # angle = float(angle)
     # axis2 = np.array(axis2).astype(float).T[0]
     if angle < 0:
         angle = -angle
         # axis = [-x for x in axis]
     if ref_angle < 0:
         ref_angle = -ref_angle
         # axis2 *= -1
     self.assertAlmostEqual(angle, ref_angle, msg='{} != {}'.format(angle, ref_angle))
Example #21
 def teleport_base(self, goal_pose):
     goal_pose = transform_pose(self.default_root, goal_pose)
     js = {
         u'odom_x_joint':
         goal_pose.pose.position.x,
         u'odom_y_joint':
         goal_pose.pose.position.y,
         u'odom_z_joint':
         rotation_from_matrix(
             quaternion_matrix([
                 goal_pose.pose.orientation.x, goal_pose.pose.orientation.y,
                 goal_pose.pose.orientation.z, goal_pose.pose.orientation.w
             ]))[0]
     }
     goal = SetJointStateRequest()
     goal.state = dict_to_joint_states(js)
     self.set_base.call(goal)
Example #22
 def test_axis_angle_from_matrix_stable(self, axis, angle):
     assume(angle < np.pi - 0.0001)
     m = rotation_matrix(angle, axis)
     axis2 = w.compile_and_execute(lambda x: w.axis_angle_from_matrix(x)[0],
                                   [m])
     angle2 = w.compile_and_execute(
         lambda x: w.axis_angle_from_matrix(x)[1], [m])
     angle, axis, _ = rotation_from_matrix(m)
     if angle < 0:
         angle = -angle
         axis = [-x for x in axis]
     if angle2 < 0:
         angle2 = -angle2
         axis2 *= -1
     self.assertTrue(np.isclose(angle, angle2),
                     msg='{} != {}'.format(angle, angle2))
     self.assertTrue(np.isclose(axis, axis2).all(),
                     msg='{} != {}'.format(axis, axis2))
Example #23
def call_back(pose_stamped):
    """
    :type pose_stamped: PoseStamped
    """
    rospy.loginfo(u'received simple move base goal')
    goal = transform_pose(root, pose_stamped)
    js = {
        x_joint:
        goal.pose.position.x,
        y_joint:
        goal.pose.position.y,
        z_joint:
        rotation_from_matrix(
            quaternion_matrix([
                goal.pose.orientation.x, goal.pose.orientation.y,
                goal.pose.orientation.z, goal.pose.orientation.w
            ]))[0]
    }
    giskard.set_joint_goal(js)
    giskard.plan_and_execute(wait=False)
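
A wiring sketch for the callback; it assumes root, the *_joint names, giskard and transform_pose are already set up as in the original script, and the topic below is the usual RViz simple-goal topic (an assumption here):

import rospy
from geometry_msgs.msg import PoseStamped

rospy.init_node('simple_goal_to_joints')
rospy.Subscriber('/move_base_simple/goal', PoseStamped, call_back, queue_size=1)
rospy.spin()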
Example #24
def odom_cb(data):
    """
    :type data: Odometry
    :return:
    """
    global robot_pose
    js = JointState()
    js.header = data.header
    js.name = ['odom_x', 'odom_y', 'odom_t']
    js.position = [
        data.pose.pose.position.x, data.pose.pose.position.y,
        rotation_from_matrix(
            quaternion_matrix([
                data.pose.pose.orientation.x, data.pose.pose.orientation.y,
                data.pose.pose.orientation.z, data.pose.pose.orientation.w
            ]))[0]
    ]
    js.velocity = [
        data.twist.twist.linear.x, data.twist.twist.linear.y,
        data.twist.twist.angular.z
    ]
    js.effort = [0, 0, 0]
    js_pub.publish(js)
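
A wiring sketch for the callback; the node name, publisher and topic names below are illustrative assumptions:

import rospy
from nav_msgs.msg import Odometry
from sensor_msgs.msg import JointState

rospy.init_node('odom_to_joint_states')
js_pub = rospy.Publisher('/base/joint_states', JointState, queue_size=10)
rospy.Subscriber('/odom', Odometry, odom_cb, queue_size=1)
rospy.spin()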
Example #25
def matrix_to_axis_angle(rmat):
    # Embed the 3x3 rotation into a 4x4 homogeneous matrix for tf.transformations.
    hmat = np.eye(4)
    hmat[:3, :3] = rmat
    ang, direc, point = tft.rotation_from_matrix(hmat)
    return ang, direc
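
A round-trip sketch: build a 3x3 rotation from a known axis and angle and check that matrix_to_axis_angle recovers them (possibly with both angle and axis negated):

import tf.transformations as tft

R3 = tft.rotation_matrix(0.7, (0, 0, 1))[:3, :3]
ang, direc = matrix_to_axis_angle(R3)
print(ang, direc)  # roughly 0.7 and (0, 0, 1)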
Example #27
    def _pose_sending_func(self):
        '''
        Continually sends the desired pose to the controller.
        '''
        r = rospy.Rate(100)
        while not rospy.is_shutdown():
            try:
                r.sleep()
            except rospy.ROSInterruptException:
                break

            # get the desired pose as a transform matrix
            with self._state_lock:
                ps_des = copy.deepcopy(self._desired_pose_stamped)
                delta_dist = self._delta_dist

            if ps_des is None:
                # no desired pose to publish
                continue

            # convert desired pose to transform matrix from goal frame to CONTROLLER_FRAME
            self._transform_listener.waitForTransform(CONTROLLER_FRAME,
                                                      ps_des.header.frame_id,
                                                      rospy.Time(0),
                                                      rospy.Duration(4.0))
            #trans, rot = self._transform_listener.lookupTransform(CONTROLLER_FRAME, ps_des.header.frame_id, rospy.Time(0))
            #torso_H_wrist_des = geom.trans_rot_to_hmat(trans, rot)

            ps_des.header.stamp = rospy.Time(0)
            ps_des = self._transform_listener.transformPose(
                CONTROLLER_FRAME, ps_des)
            torso_H_wrist_des = conversions.pose_to_mat(ps_des.pose)

            if self.br is not None:
                p = ps_des.pose.position
                q = ps_des.pose.orientation
                self.br.sendTransform((p.x, p.y, p.z), (q.x, q.y, q.z, q.w),
                                      rospy.Time.now(), '/cci_goal',
                                      CONTROLLER_FRAME)

            # get the current pose as a transform matrix
            goal_link = self._hand_description.hand_frame

            # really shouldn't need to do waitForTransform here, since rospy.Time(0) means "use the most
            # recent available transform". but without this, tf sometimes raises an ExtrapolationException. -jdb
            self._transform_listener.waitForTransform(CONTROLLER_FRAME,
                                                      goal_link, rospy.Time(0),
                                                      rospy.Duration(4.0))
            trans, rot = self._transform_listener.lookupTransform(
                CONTROLLER_FRAME, goal_link, rospy.Time(0))
            torso_H_wrist = geom.trans_rot_to_hmat(trans, rot)

            # compute difference between actual and desired wrist pose
            wrist_H_wrist_des = np.dot(linalg.inv(torso_H_wrist),
                                       torso_H_wrist_des)
            trans_err, rot_err = geom.hmat_to_trans_rot(wrist_H_wrist_des)

            # scale the relative transform such that the translation is equal
            # to delta_dist. we're assuming that the scaled angular error will
            # be reasonable
            scale_factor = delta_dist / linalg.norm(trans_err)
            if scale_factor > 1.0:
                scale_factor = 1.0  # don't overshoot
            scaled_trans_err = scale_factor * trans_err
            angle_err, direc, point = transformations.rotation_from_matrix(
                wrist_H_wrist_des)
            scaled_angle_err = scale_factor * angle_err
            wrist_H_wrist_control = np.dot(
                transformations.translation_matrix(scaled_trans_err),
                transformations.rotation_matrix(scaled_angle_err, direc,
                                                point))

            # find incrementally moved pose to send to the controller
            torso_H_wrist_control = np.dot(torso_H_wrist,
                                           wrist_H_wrist_control)
            desired_pose = conversions.mat_to_pose(torso_H_wrist_control)
            desired_ps = PoseStamped()
            desired_ps.header.stamp = rospy.Time(0)
            desired_ps.header.frame_id = CONTROLLER_FRAME
            desired_ps.pose = desired_pose

            if self.br is not None:
                p = desired_ps.pose.position
                q = desired_ps.pose.orientation
                self.br.sendTransform((p.x, p.y, p.z), (q.x, q.y, q.z, q.w),
                                      rospy.Time.now(), '/cci_imm_goal',
                                      CONTROLLER_FRAME)

            # send incremental pose to controller
            self._desired_pose_pub.publish(desired_ps)
Example #28
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            continue

    raw_input("Plug in the new camera and press any key")

    rospy.loginfo("Listening for transform from {} to {}...".format(CAMERA_FRAME_NAME, BOARD_FRAME_NAME))

    while not rospy.is_shutdown():
        try:

            translation, rotation = tf_listener.lookupTransform(CAMERA_FRAME_NAME, BOARD_FRAME_NAME, rospy.Time(0))
            camera_to_board_matrix = np.dot(transformations.translation_matrix(translation), transformations.quaternion_matrix(rotation))
            root_to_camera_matrix = np.dot(root_to_board_matrix, np.linalg.inv(camera_to_board_matrix))

            translation = transformations.translation_from_matrix(root_to_camera_matrix)
            angle, direction, _ = transformations.rotation_from_matrix(root_to_camera_matrix)
            rotation = transformations.quaternion_about_axis(angle, direction)
            rospy.loginfo("Transform computed. Broadcasting transform...")

            tf_broadcaster.sendTransform(translation,
                        rotation,
                        rospy.Time.now(),
                        "camera_link",
                        "panda_link0")

            rospy.sleep(0.1)

        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            rospy.logerr("Error computing transform. Transform broadcasting offline")
            rospy.sleep(rospy.Duration(1))
            continue
Example #29
def stat_poses(csv_file):
    print('%s %s %s' % ('>' * 20, csv_file, '<' * 20))
    df = pandas.read_csv(csv_file)

    # Compute average of transformation matrix
    # ========================================
    n_fused = 0
    matrix_fused = None  # average matrix from base -> checkerboard pose
    for index, row in df.iterrows():
        # base -> pointIn (sensed calibboard pose)
        pos = row['position_x'], row['position_y'], row['position_z']
        matrix_pos = tff.translation_matrix(pos)
        rot = row['quaternion_x'], row['quaternion_y'], row['quaternion_z'], row['quaternion_w']
        matrix_rot = tff.quaternion_matrix(rot)
        matrix_pose = matrix_pos.dot(matrix_rot)

        if n_fused == 0:
            matrix_fused = matrix_pose
            n_fused += 1
            continue

        # Fuse transformation matrix
        # --------------------------
        # base -> pointFused (fused calibboard pose)
        # matrix_fused

        # pointFused -> pointIn = pointFused -> base -> pointIn
        matrix_rel = matrix_pose.dot(tff.inverse_matrix(matrix_fused))

        # weight of the pointIn is 1 / (n_fused + 1)
        weight_of_new_point = 1. / (n_fused + 1)

        # midpoint of rotation
        angle, direction, point = tff.rotation_from_matrix(matrix_rel)
        matrix_rel_rot_mid = tff.rotation_matrix(
            angle * weight_of_new_point, direction, point)
        # midpoint of translation
        trans_rel = tff.translation_from_matrix(matrix_rel)
        matrix_rel_pos_mid = tff.translation_matrix(
            [x * weight_of_new_point for x in trans_rel])
        # relative transformation for midpoint from pointFused
        matrix_rel_mid = matrix_rel_pos_mid.dot(matrix_rel_rot_mid)

        # base -> pointFused_new
        matrix_fused = matrix_rel_mid.dot(matrix_fused)
        n_fused += 1
        # -------------------------------------------------------------------------
    assert n_fused == len(df)

    print('%s Average %s' % ('-' * 30, '-' * 30))
    print('N fused: %d' % n_fused)
    print('Matrix: \n%s' % matrix_fused)
    trans = tff.translation_from_matrix(matrix_fused)
    print('Position: {}'.format(trans))
    pos_keys = ['position_%s' % x for x in 'xyz']
    print('Position (simple): {}'.format(df.mean()[pos_keys].values))
    euler = tff.euler_from_matrix(matrix_fused)
    print('Orientation: {}'.format(euler))

    # Compute variance of transformation matrix
    # =========================================

    N = len(df)
    keys = ['x', 'y', 'z', 'angle']
    variance = {k: 0 for k in keys}
    for index, row in df.iterrows():
        # base -> pointIn (sensed calibboard pose)
        pos = row['position_x'], row['position_y'], row['position_z']
        matrix_pos = tff.translation_matrix(pos)
        rot = row['quaternion_x'], row['quaternion_y'], row['quaternion_z'], row['quaternion_w']
        matrix_rot = tff.quaternion_matrix(rot)
        matrix_pose = matrix_pos.dot(matrix_rot)

        # pointFused -> pointIn = pointFused -> base -> pointIn
        matrix_rel = matrix_pose.dot(tff.inverse_matrix(matrix_fused))
        # compute distance for translation/rotation
        delta_x, delta_y, delta_z = tff.translation_from_matrix(matrix_rel)
        delta_angle, _, _ = tff.rotation_from_matrix(matrix_rel)

        variance['x'] += delta_x ** 2
        variance['y'] += delta_y ** 2
        variance['z'] += delta_z ** 2
        variance['angle'] += delta_angle ** 2
    for k in keys:
        variance[k] /= (N - 1)

    print('%s Std Deviation (Variance) %s' % ('-' * 22, '-' * 22))
    for k in keys:
        print('%s: %f (%f)' % (k, variance[k], math.sqrt(variance[k])))
        if k != 'angle':
            print('{} (simple): {}'.format(k, df.std()['position_%s' % k]))
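
A usage sketch showing the CSV schema stat_poses expects (position_{x,y,z} and quaternion_{x,y,z,w} columns, one sensed board pose per row); the values are made up:

import pandas

poses = pandas.DataFrame({
    'position_x': [0.50, 0.51], 'position_y': [0.00, 0.01], 'position_z': [0.30, 0.30],
    'quaternion_x': [0.0, 0.0], 'quaternion_y': [0.0, 0.0],
    'quaternion_z': [0.0, 0.04998], 'quaternion_w': [1.0, 0.99875],
})
poses.to_csv('/tmp/board_poses.csv', index=False)
stat_poses('/tmp/board_poses.csv')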
Example #30
 def __init__(self, logger=rospy):
   # Generic
   np.set_printoptions(precision=5, suppress=True)
   axes = []
   # Get required parameters
   has_serial, serial = read_parameter_err('/camera/serial')
   if not has_serial:
     raise Exception('Could not find all the required parameters')
   # Get optional parameters
   debug = read_parameter('~debug', False)
   robot_name = read_parameter('~robot_name', False)
   grid_spacing = read_parameter('~grid_spacing', 15.94)
   maxdist = read_parameter('~maxdist', 1.2)
   maxtilt = np.deg2rad( read_parameter('~maxtilt', 30) )
   samples = int( read_parameter('~samples', 50) )
   pausetime = read_parameter('~pausetime', 1.0)
   use_weights = read_parameter('~use_weights', True)
   decode = grid_spacing <= 0
   # Load OpenRAVE environment
   env = orpy.Environment()
   orpy.RaveSetDebugLevel(orpy.DebugLevel.Fatal)
   worldxml = 'worlds/bimanual_ikea_assembly.env.xml'
   if not env.Load(worldxml):
     raise Exception('World file does not exist: %s' % worldxml)
   robot = env.GetRobot('denso_{0}'.format(robot_name))
   if robot is None:
     raise Exception('Could not find robot: denso_{0}'.format(robot_name))
   try:
     manip = robot.SetActiveManipulator('denso_ft_sensor_gripper')
   except:
     raise Exception('Could not find manipulator: denso_ft_sensor_gripper')
   # Get camera transform
   table = env.GetKinBody('table')
   if table is None:
     raise Exception('Could not find table in the world')
   camera = env.GetKinBody('camera')
   if camera is None:
     raise Exception('Could not find camera in the world')
   # Remove unnecessary objects from the world
   criros.raveutils.remove_bodies(env, remove=['ikea_stick', 'pin', 'reference_frame'])
   # Configure motion planning
   logger.loginfo('Loading motion planning and control pipeline')
   motion = RoboMotion(manip)
   motion.change_active_manipulator('denso_ft_sensor_gripper', iktype=iktype6D)
   motion.scale_velocity_limits(0.3)
   motion.scale_acceleration_limits(0.2)
   motion.set_smoother_iterations(100)
   motion.set_planning_iterations(50)
   motion.enable_collision_checking(True)
   motion.update_link_stats()
   # Add the calibration plate to the world
   Tplate_wrt_gripper = tr.euler_matrix(0, -np.pi/2, np.pi)
   Tplate_wrt_gripper[0,3] = 0.0115 + 0.03
   Tplate_wrt_gripper[2,3] = 0.005
   Tgripper = motion.get_transform()
   Tplate = np.dot(Tgripper, Tplate_wrt_gripper)
   plate = env.ReadKinBodyXMLFile('objects/calibration_plate.kinbody.xml')
   if plate is None:
     raise Exception('Could not find calibration plate: objects/calibration_plate.kinbody.xml')
   with env:
     env.AddKinBody(plate)
     plate.SetName('plate')
     plate.SetTransform(Tplate)
   # Grasp the calibration plate in OpenRAVE
   motion.taskmanip.CloseFingers()
   motion.robot.Grab(plate)
   motion.taskmanip.ReleaseFingers()
   motion.taskmanip.CloseFingers()
   # Add extra protection to avoid hitting the table
   table_aabb = table.GetLink('base').ComputeAABB()
   boxdef = np.zeros(6)
   boxdef[:3] = table_aabb.pos() + np.array([0, 0, table_aabb.extents()[2]+0.05])
   boxdef[3:5] = table_aabb.extents()[:2] + 0.05
   boxdef[5] = 0.01
   boxdef.shape = (1,6)
   with env:
     paranoia = orpy.RaveCreateKinBody(env, '')
     paranoia.SetName('paranoia_cover')
     paranoia.InitFromBoxes(boxdef, True)
     env.Add(paranoia)
   # Move the origin to the robot base link
   criros.raveutils.move_origin_to_body(motion.robot)
   # Camera and pattern seeds
   Tcamera = camera.GetTransform()
   # Initial guess of the camera pose as Transformation. The pose must be given relative to 
   # the robot hand (for a moving camera setup), or relative to the robot origin (for the fixed camera setup).
   camera_seed = criros.conversions.to_pose(Tcamera)
   # Initial guess of the pattern pose as Transformation. This pose must be given relative to
   # the robot hand (for a fixed camera setup), or relative to the robot origin (for the moving camera setup).
   pattern_seed = criros.conversions.to_pose(Tplate_wrt_gripper)
   if debug:
     motion.draw_axes(Tcamera)
   # Trajectory controller
   controller = JointTrajectoryController(namespace=robot_name, log_level=rospy.ERROR)
   # Camera configuration client and services
   logger.loginfo('Connecting to the ensenso_driver')
   dynclient = dynamic_reconfigure.client.Client('/camera/ensenso_driver', timeout=30)
   dynclient.update_configuration({'Cloud':False, 'Images':True, 'Projector':False, 'FrontLight':False, 'TargetBrightness':70})
   logger.loginfo('Waiting for ensenso camera services')
   calibrate_srv = rospy.ServiceProxy('camera/calibrate_handeye', CalibrateHandEye)
   calibrate_srv.wait_for_service()
   pattern_srv = rospy.ServiceProxy('camera/estimate_pattern_pose', EstimatePatternPose)
   pattern_srv.wait_for_service()
   # Get left camera info
   self.left_cam_info = None
   sub = rospy.Subscriber('camera/left/camera_info', CameraInfo, self.cb_left_info)
   while not rospy.is_shutdown() and self.left_cam_info is None:
     rospy.sleep(0.1)
   camera_info = copy.deepcopy(self.left_cam_info)
   sub.unregister()
   # OpenRAVE viewer
   if debug:
     env.SetViewer('QtCoin')
     T = tr.euler_matrix(-2, 0, np.pi/2.)
     T[:3,3] = [2.4, 0, 1.57]
     T[1,3] = Tcamera[1,3]
     env.GetViewer().SetCamera(T)
   # Camera convex hull and triangulation
   points = criros.exploration.camera_fov_corners(camera_info, zdist=maxdist, Tcamera=Tcamera)
   hull = ConvexHull(points)
   triangulation = Delaunay(points[hull.vertices])
   Tideal = np.eye(4)
   Tideal[:3,0] = Tcamera[:3,1]
   Tideal[:3,1] = Tcamera[:3,0]
   Tideal[:3,2] = -Tcamera[:3,2]
   # TODO: Add a mesh showing the camera FOV
   # Generate random poses, plan, move and collect pattern
   starttime = rospy.Time.now()
   logger.loginfo('Starting to collect pattern observations')
   robot_poses = PoseArray()
   pattern_poses = PoseArray()
   pbar = progressbar.ProgressBar(widgets=['Pattern observations: ', progressbar.SimpleProgress()], maxval=samples).start()
   while not rospy.is_shutdown() and (len(robot_poses.poses) < samples):
     dynclient.update_configuration({'Images':False})
     # Random pose for the calibration pattern
     rpy = maxtilt * (2*np.random.random_sample(3) - 1.)
     Tlook = np.dot(Tideal, tr.euler_matrix(*rpy))
     Tlook[:3,3] = criros.exploration.random_point_inside_fov(camera_info, maxdist=maxdist, Tcamera=Tcamera)
     # Check that the plate will be inside the fov hull
     corners = criros.raveutils.compute_bounding_box_corners(plate, Tbody=Tlook)
     if not np.all( triangulation.find_simplex(corners)>=0 ):
       continue
     # Corresponding transform for the gripper
     Tgripper = np.dot( Tlook, criros.spalg.transform_inv(Tplate_wrt_gripper) )
     qstart = controller.get_joint_positions()
     qgoal = motion.find_closest_iksolution(Tgripper, qseed=qstart)
     if qgoal is None:
       continue
     # Plan trajectory
     traj = motion.generate_trajectory(qstart, qgoal)
     if traj is None:
       continue
     if debug:
       motion.draw_axes(Tlook)
     # Move the robot
     ros_traj = motion.openrave_to_ros_traj(traj, rate=125.)
     controller.set_trajectory(ros_traj)
     controller.start()
     motion.robot.GetController().SetPath(traj)
     controller.wait()
     # Start images streaming and collect pattern
     dynclient.update_configuration({'Images':True})
     rospy.sleep(pausetime)   # Wait for the camera to stabilize
     # Collect the pattern and robot poses
     try:
       # We discard previous patterns only for the 1st observation. This way we populate the buffer with the observations collected
       # by this script
       discard = len(robot_poses.poses) == 0
       res = pattern_srv.call(add_to_buffer=True, discard_patterns=discard, average=False, decode=decode, grid_spacing=grid_spacing)
     except rospy.ServiceException as e:
       logger.logwarn('estimate_pattern_pose service failed: ' + str(e))
       continue
     if res.success:
       pattern_poses.poses.append(res.pose)
       qrobot = controller.get_joint_positions()
       motion.robot.SetActiveDOFValues(qrobot)
       Tgripper = motion.get_transform()
       robot_poses.poses.append( criros.conversions.to_pose(Tgripper) )
       pbar.update(len(robot_poses.poses))
   pbar.finish()
   duration = (rospy.Time.now() - starttime).to_sec()
   logger.loginfo('Finished collecting observations in %.2f seconds.' % duration)
   if not ( res.pattern_count == len(robot_poses.poses) == len(pattern_poses.poses) ):
     logger.logwarn('Something went wrong. The collected poses do not match the buffered patterns')
     return
   # Save files
   rospack = rospkg.RosPack()
   filepath =  rospack.get_path('axxb_data_collection') 
   filename1 = filepath +"/config/pattern_poses"
   pickle.dump(pattern_poses, open(filename1, "wb" ) )
   filename2 = filepath+"/config/robot_poses" + str(read_parameter('~axis',0))
   pickle.dump(robot_poses, open(filename2, "wb" ) )
   rospy.loginfo('Result written to file: %s \n %s' %(filename1,filename2))
   # The ensenso SKD will return the transformation of the robot's origin wrt. the camera frame
   try:
     res = calibrate_srv.call( robot_poses=robot_poses, pattern_poses=pattern_poses, 
                               camera_seed=camera_seed, pattern_seed=pattern_seed, setup='Fixed' )
   except rospy.ServiceException as e:
     logger.logwarn('calibrate_handeye service failed: ' + str(e))
   if not res.success:
     return
   # Store calibration results
   Tcamera_seed = criros.conversions.from_pose(camera_seed)
   seed_rot = tr.quaternion_from_matrix(Tcamera_seed)
   seed_pos = Tcamera_seed[:3,3]
   # Estimated pattern calibration pose. pattern wrt end-effector
   Tpattern_wrt_gripper = criros.conversions.from_pose(res.estimated_pattern_pose)
   pattern_rot = tr.quaternion_from_matrix(Tpattern_wrt_gripper)
   pattern_pos = Tpattern_wrt_gripper[:3,3]
   # Estimated transformation. World wrt camera
   Tworld_wrt_camera = criros.conversions.from_pose(res.estimated_camera_pose)
   rotation = tr.quaternion_from_matrix(Tworld_wrt_camera)
   translation = Tworld_wrt_camera[:3,3]
   angle, axis, point = tr.rotation_from_matrix( Tworld_wrt_camera )
   yamldata = {'camera_%s_robot' % robot_name: {
                 'parent': 'camera_optical_frame',
                 'child': '{0}/base_link'.format(robot_name),
                 'angle_axis': [angle] + axis.flatten().tolist(),
                 'duration': duration,
                 'grid_spacing': grid_spacing,
                 'iterations': res.iterations, 
                 'samples': samples,
                 'rotation': rotation.flatten().tolist(), 
                 'translation': translation.flatten().tolist(),
                 'pattern_pose': {
                   'rotation': pattern_rot.flatten().tolist(), 
                   'translation': pattern_pos.flatten().tolist()},
                 'seed': {
                   'rotation': seed_rot.flatten().tolist(), 
                   'translation': seed_pos.flatten().tolist()}},
               'reprojection_error': {
                 'pixels': res.reprojection_error}}
   # Write the file
   rospack = rospkg.RosPack()
   yaml_path  = rospack.get_path('axxb_data_collection') 
   yaml_path += '/config/camera_%s_robot_calibration.yaml' % robot_name
   # Write YAML file
   scriptname = os.path.basename(__file__)
   header =  '# This document was autogenerated by the script %s\n' % scriptname
   header += '# EDITING THIS FILE BY HAND IS NOT RECOMMENDED\n'
   header += '# NOTE 1: This is the transformation between the camera coordinates and the robot reference system.\n'
   header += '# NOTE 2: The pattern pose is the transformation of the calibration pattern wrt to the end-effector.\n'
   header += '# More information: http://www.ensenso.com/manual/index.html?_cameras_byserialno_$stereoserial_link.htm\n\n'
   with open(yaml_path, 'w') as f:
     f.write(header)
     yaml.safe_dump(yamldata, f)
   rospy.loginfo('Result written to file: %s' % yaml_path)
Example #31

sub = rospy.Subscriber('/tf', TFMessage, set_stamp)

trans = rot = None  # ensure the publish check below is safe if the first lookup fails
while not rospy.is_shutdown():
    try:
        time = rospy.Time.now()
        # stamp = time - rospy.Duration(0.4)
        Q = listener.asMatrix(target_frame='camera_link_slam',
                              hdr=Header(frame_id='map',
                                         stamp=camera_link_slam_stamp))
        P_inv = listener.asMatrix(target_frame='odom',
                                  hdr=Header(frame_id='camera_link',
                                             stamp=camera_link_stamp))
        res = np.matmul(P_inv, Q)
        angle, direc, point = transformations.rotation_from_matrix(res)
        rot = transformations.rotation_matrix(angle, direc, point)

        trans = transformations.translation_from_matrix(res)
        rot = transformations.quaternion_from_matrix(rot)
    except Exception as err:
        print(err)
    if trans is not None and rot is not None:
        publisher.sendTransform(
            translation=trans,
            rotation=rot,
            time=time,
            child='odom',
            parent='map',
        )
    rate.sleep()
Example #32
def quaternion_to_yaw(quaternion):  # type: (Quaternion) -> float
    yaw, _, _ = transformations.rotation_from_matrix(transformations.quaternion_matrix(numpify(quaternion)))
    return yaw
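
A usage sketch: a pure-yaw quaternion should round-trip through quaternion_to_yaw (numpify is assumed to be ros_numpy's, as in the snippet):

from geometry_msgs.msg import Quaternion
from tf import transformations

yaw_in = 0.8
q = Quaternion(*transformations.quaternion_about_axis(yaw_in, (0, 0, 1)))
print(quaternion_to_yaw(q))  # roughly +/-0.8, sign follows the recovered axis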
Example #35
# euler = tf.euler_from_matrix(R0)
# quat = tf.quaternion_from_euler(euler[0], euler[1], euler[2])
# quat2 = tf.quaternion_from_matrix(R0)
# print("#### Test ####")
# print("Angle: ", angle)
# print("Direction: ", direc)
# print("Point: ", point)
# print("R0: ", R0)
# print("R1: ", R1)
# print("Euler: ", euler)
# print("Quaternion: ", quat)
# print("Quaternion2: ", quat2)
# print(tf.is_same_transform(R0, R1))
# print("#### End Test ####")

# Get Transformation between camera and gripper (from calibrate_camera.py)
with open('/home/labuser/ros_ws/src/odhe_ros/scripts/Transformation.npy',
          'rb') as f:
    T = np.load(f)

trans = tf.translation_from_matrix(T)
rot = tf.rotation_from_matrix(T)
euler = tf.euler_from_matrix(T)
quat = tf.quaternion_from_matrix(T)

print("T: ", T)
print("Translation: ", trans)
print("Rotation: ", rot)
print("Euler: ", euler)
print("Quaternion: ", quat)
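
A short recomposition check (assuming T is a rigid homogeneous transform, so the translation and quaternion printed above fully describe it):

T_rebuilt = np.dot(tf.translation_matrix(trans), tf.quaternion_matrix(quat))
print("Recomposed matches T: ", np.allclose(T_rebuilt, T))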