Example #1
File: test_Sim3.py  Project: pu-wei/gtsam
    def test_align_poses_along_straight_line_gauge(self):
        """Test if Align Pose3Pairs method can account for gauge ambiguity.

        Scenario:
           3 object poses
           with gauge ambiguity (2x scale)
           world frame has poses rotated about z-axis (90 degree yaw)
           world and egovehicle frames translated by 10 meters w.r.t. each other
        """
        Rz90 = Rot3.Rz(np.deg2rad(90))

        # Create source poses (three objects o1, o2, o3 living in the egovehicle "e" frame)
        # Suppose they are 3d cuboids detected by an onboard sensor in the egovehicle frame
        eTo0 = Pose3(Rot3(), np.array([1, 0, 0]))
        eTo1 = Pose3(Rot3(), np.array([2, 0, 0]))
        eTo2 = Pose3(Rot3(), np.array([4, 0, 0]))

        eToi_list = [eTo0, eTo1, eTo2]

        # Create destination poses
        # (same three objects, but instead living in the world/city "w" frame)
        wTo0 = Pose3(Rz90, np.array([0, 12, 0]))
        wTo1 = Pose3(Rz90, np.array([0, 14, 0]))
        wTo2 = Pose3(Rz90, np.array([0, 18, 0]))

        wToi_list = [wTo0, wTo1, wTo2]

        we_pairs = Pose3Pairs(list(zip(wToi_list, eToi_list)))

        # Recover the transformation wSe (i.e. world_S_egovehicle)
        wSe = Similarity3.Align(we_pairs)

        for wToi, eToi in zip(wToi_list, eToi_list):
            self.gtsamAssertEquals(wToi, wSe.transformFrom(eToi))
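For reference, the recovered similarity can also be inspected directly. Below is a minimal standalone sketch using the same three poses; it assumes the scale(), rotation(), and translation() accessors of Similarity3 are exposed in the Python wrapper:

import numpy as np
from gtsam import Pose3, Pose3Pairs, Rot3, Similarity3

# Same three object poses as in the test above
eToi_list = [Pose3(Rot3(), np.array([x, 0.0, 0.0])) for x in (1.0, 2.0, 4.0)]
wToi_list = [Pose3(Rot3.Rz(np.deg2rad(90)), np.array([0.0, y, 0.0]))
             for y in (12.0, 14.0, 18.0)]

wSe = Similarity3.Align(Pose3Pairs(list(zip(wToi_list, eToi_list))))

# scale()/rotation()/translation() accessors assumed available in the Python wrapper
print(wSe.scale())           # expect 2.0 (the gauge ambiguity)
print(wSe.rotation().rpy())  # expect a yaw of pi/2
print(wSe.translation())     # offset between the world and egovehicle frames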
Example #2
    def run(self):
        reached = 0

        r = rospy.Rate(10)
        while not rospy.is_shutdown():

            # Current goal [x, y, z, yaw], expressed in the static world frame "s"
            goal = self.goals[self.goalindex]
            self.sTg = Pose3(Rot3.Rz(goal[3]), Point3(goal[0], goal[1],
                                                      goal[2]))
            # Current vehicle pose from the ROS message, converted to a gtsam Pose3
            R = quaternion_matrix([
                self.sTcurr_ros.pose.orientation.x,
                self.sTcurr_ros.pose.orientation.y,
                self.sTcurr_ros.pose.orientation.z,
                self.sTcurr_ros.pose.orientation.w
            ])[0:3, 0:3]
            self.sTcurr = Pose3(
                Rot3(R),
                Point3(self.sTcurr_ros.pose.position.x,
                       self.sTcurr_ros.pose.position.y,
                       self.sTcurr_ros.pose.position.z))

            # Control error: the goal pose expressed in the current body frame
            currTg = self.sTcurr.between(self.sTg)

            # Per-axis PD control; the x-axis also adds an integral term clamped to [-0.2, 0.2]
            u_pitch = self.kp_x * currTg.x() + self.kd_x * (
                currTg.x() - self.lastT.x()) + min(
                    max(-0.2, self.ki_x * self.ix), 0.2)
            u_roll = self.kp_y * currTg.y() + self.kd_y * (currTg.y() -
                                                           self.lastT.y())
            currTg_yaw = currTg.rotation().rpy()[2]
            u_yaw = self.kp_yaw * currTg_yaw + self.kd_yaw * (currTg_yaw -
                                                              self.lastyaw)
            u_z = self.kp_z * currTg.z() + self.kd_z * (currTg.z() -
                                                        self.lastT.z())

            self.lastT = currTg
            self.lastyaw = currTg_yaw

            # Accumulate integral errors (only self.ix is used in the control law above)
            self.ix += currTg.x()
            self.iy += currTg.y()
            self.iz += currTg.z()
            self.iyaw += currTg_yaw

            # Saturate all commands to [-1, 1] before publishing
            self.move_cmd.linear.x = min(max(-1, u_pitch), 1)
            self.move_cmd.linear.y = min(max(-1, u_roll), 1)
            self.move_cmd.linear.z = min(max(-1, u_z), 1)
            self.move_cmd.angular.z = min(max(-1, u_yaw), 1)

            self.velPub.publish(self.move_cmd)
            reached = self.check(currTg)
            # Advance to the next goal once the current one is reached
            if reached == 1:
                reached = 0
                self.goalindex += 1

            r.sleep()
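Note that currTg = sTcurr.between(sTg) is the goal pose expressed in the vehicle's current body frame, i.e. sTcurr⁻¹ · sTg, which is what makes the per-axis gains meaningful. A minimal standalone sketch of that relationship, with hypothetical poses and outside the ROS node:

import numpy as np
from gtsam import Point3, Pose3, Rot3

# Hypothetical poses: vehicle at (1, 0, 0) with 90-degree yaw, goal at (1, 2, 0)
sTcurr = Pose3(Rot3.Rz(np.deg2rad(90)), Point3(1, 0, 0))
sTg = Pose3(Rot3.Rz(np.deg2rad(90)), Point3(1, 2, 0))

currTg = sTcurr.between(sTg)  # same as sTcurr.inverse().compose(sTg)
print(currTg.x(), currTg.y(), currTg.z())  # (2, 0, 0): goal is 2 m ahead along the body x-axis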
Example #3
File: test_Sim3.py  Project: pu-wei/gtsam
    def test_align_poses_scaled_squares(self):
        """Test if Align Pose3Pairs method can account for gauge ambiguity.

        Make sure a big and a small square can be aligned.
        The aTi poses trace a big square (10x10), and the bTi poses trace a small square (4x4).

        Scenario:
           4 object poses
           with gauge ambiguity (2.5x scale)
        """
        # 0, 90, 180, and 270 degrees yaw
        R0 = Rot3.Rz(np.deg2rad(0))
        R90 = Rot3.Rz(np.deg2rad(90))
        R180 = Rot3.Rz(np.deg2rad(180))
        R270 = Rot3.Rz(np.deg2rad(270))

        aTi0 = Pose3(R0, np.array([2, 3, 0]))
        aTi1 = Pose3(R90, np.array([12, 3, 0]))
        aTi2 = Pose3(R180, np.array([12, 13, 0]))
        aTi3 = Pose3(R270, np.array([2, 13, 0]))

        aTi_list = [aTi0, aTi1, aTi2, aTi3]

        bTi0 = Pose3(R0, np.array([4, 3, 0]))
        bTi1 = Pose3(R90, np.array([8, 3, 0]))
        bTi2 = Pose3(R180, np.array([8, 7, 0]))
        bTi3 = Pose3(R270, np.array([4, 7, 0]))

        bTi_list = [bTi0, bTi1, bTi2, bTi3]

        ab_pairs = Pose3Pairs(list(zip(aTi_list, bTi_list)))

        # Recover the transformation aSb (i.e. a_S_b), mapping b-frame poses into the a frame
        aSb = Similarity3.Align(ab_pairs)

        for aTi, bTi in zip(aTi_list, bTi_list):
            self.gtsamAssertEquals(aTi, aSb.transformFrom(bTi))
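The recovered scale should equal the ratio of the square side lengths, 10 / 4 = 2.5. A minimal standalone sketch of that check, assuming the scale() accessor of Similarity3 is exposed in the Python wrapper:

import numpy as np
from gtsam import Pose3, Pose3Pairs, Rot3, Similarity3

# Corners of the big (10x10) and small (4x4) squares from the test above
yaws = (0.0, 90.0, 180.0, 270.0)
a_xy = ((2.0, 3.0), (12.0, 3.0), (12.0, 13.0), (2.0, 13.0))
b_xy = ((4.0, 3.0), (8.0, 3.0), (8.0, 7.0), (4.0, 7.0))

aTi_list = [Pose3(Rot3.Rz(np.deg2rad(yaw)), np.array([x, y, 0.0]))
            for yaw, (x, y) in zip(yaws, a_xy)]
bTi_list = [Pose3(Rot3.Rz(np.deg2rad(yaw)), np.array([x, y, 0.0]))
            for yaw, (x, y) in zip(yaws, b_xy)]

aSb = Similarity3.Align(Pose3Pairs(list(zip(aTi_list, bTi_list))))
print(aSb.scale())  # expect 2.5; scale() assumed available in the Python wrapper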