Example #1
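The examples below are excerpts from a unittest suite and from example scripts, so their imports are not shown. A minimal sketch of what they appear to assume follows; the transformation and point-cloud types come from autolab_core, while the dex-net module paths and the NUM_POINTS / NUM_ITERS values are assumptions for illustration only.

import os

import numpy as np
from autolab_core import (RigidTransform, SimilarityTransform, Point,
                          Direction, PointCloud, NormalCloud,
                          PointToPlaneFeatureMatcher, PointToPlaneICPSolver,
                          YamlConfig)

# Used by Examples #10 and #11; these module paths are an assumption.
from dexnet import DexNet
from dexnet.grasping import RobotGripper

# Module-level constants used by the registration test (Example #6); the
# original test file defines its own values, these are placeholders.
NUM_POINTS = 100
NUM_ITERS = 10
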
    def test_composition(self):
        R_a_b = RigidTransform.random_rotation()
        t_a_b = RigidTransform.random_translation()
        R_b_c = RigidTransform.random_rotation()
        t_b_c = RigidTransform.random_translation()
        T_a_b = RigidTransform(R_a_b, t_a_b, "a", "b")
        T_b_c = RigidTransform(R_b_c, t_b_c, "b", "c")

        # multiply with numpy arrays
        M_a_b = np.r_[np.c_[R_a_b, t_a_b], [[0, 0, 0, 1]]]
        M_b_c = np.r_[np.c_[R_b_c, t_b_c], [[0, 0, 0, 1]]]
        M_a_c = M_b_c.dot(M_a_b)

        # use multiplication operator
        T_a_c = T_b_c * T_a_b

        self.assertTrue(
            np.sum(np.abs(T_a_c.matrix - M_a_c)) < 1e-5,
            msg="Composition gave incorrect transformation",
        )

        # check frames
        self.assertEqual(
            T_a_c.from_frame, "a", msg="Composition has incorrect input frame"
        )
        self.assertEqual(
            T_a_c.to_frame, "c", msg="Composition has incorrect output frame"
        )
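The composition above chains frames right to left: T_a_b maps coordinates from frame "a" into frame "b", so T_b_c * T_a_b is a transform from "a" straight to "c". A minimal sketch of the same convention applied to a single point, using the same autolab_core types as the tests:

# T_a_b: "a" -> "b", T_b_c: "b" -> "c"
T_a_b = RigidTransform(RigidTransform.random_rotation(),
                       RigidTransform.random_translation(), "a", "b")
T_b_c = RigidTransform(RigidTransform.random_rotation(),
                       RigidTransform.random_translation(), "b", "c")
p_a = Point(np.random.rand(3), "a")

# Evaluated right to left: p_a is mapped into "b", then into "c".
p_c = T_b_c * T_a_b * p_a
assert p_c.frame == "c"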
Example #2
    def test_similarity_transformation(self):
        R_a_b = RigidTransform.random_rotation()
        t_a_b = RigidTransform.random_translation()
        s_a_b = 2 * np.random.rand()
        R_b_c = RigidTransform.random_rotation()
        t_b_c = RigidTransform.random_translation()
        s_b_c = 2 * np.random.rand()
        T_a_b = SimilarityTransform(R_a_b, t_a_b, s_a_b, "a", "b")
        T_b_c = SimilarityTransform(R_b_c, t_b_c, s_b_c, "b", "c")

        T_b_a = T_a_b.inverse()

        x_a = np.random.rand(3)
        p_a = Point(x_a, "a")
        p_a2 = T_b_a * T_a_b * p_a
        self.assertTrue(np.allclose(p_a.data, p_a2.data))

        p_b = T_a_b * p_a
        p_b2 = s_a_b * (R_a_b.dot(p_a.data)) + t_a_b
        self.assertTrue(np.allclose(p_b.data, p_b2))

        p_c = T_b_c * T_a_b * p_a
        p_c2 = s_b_c * (R_b_c.dot(p_b2)) + t_b_c
        self.assertTrue(np.allclose(p_c.data, p_c2))

        v_a = np.random.rand(3)
        v_a = v_a / np.linalg.norm(v_a)
        v_a = Direction(v_a, "a")
        v_b = T_a_b * v_a
        v_b2 = R_a_b.dot(v_a.data)
        self.assertTrue(np.allclose(v_b.data, v_b2))
Example #3
    def test_linear_trajectory(self):
        R_a = RigidTransform.random_rotation()
        t_a = RigidTransform.random_translation()
        R_b = RigidTransform.random_rotation()
        t_b = RigidTransform.random_translation()
        T_a = RigidTransform(R_a, t_a, "w", "a")
        T_b = RigidTransform(R_b, t_b, "w", "b")

        for i in range(10):
            traj = T_a.linear_trajectory_to(T_b, i)
            self.assertEqual(len(traj), i, "Trajectory has incorrect length")
Example #4
    def test_point_transformation(self):
        R_a_b = RigidTransform.random_rotation()
        t_a_b = RigidTransform.random_translation()
        T_a_b = RigidTransform(R_a_b, t_a_b, "a", "b")

        x_a = np.random.rand(3)
        p_a = Point(x_a, "a")

        # multiply with numpy arrays
        x_b = R_a_b.dot(x_a) + t_a_b

        # use multiplication operator
        p_b = T_a_b * p_a

        self.assertTrue(
            np.sum(np.abs(p_b.vector - x_b)) < 1e-5,
            msg="Point transformation incorrect: Expected {}, Got {}".format(
                x_b, p_b.data
            ),
        )

        # check frames
        self.assertEqual(
            p_b.frame, "b", msg="Transformed point has incorrect frame"
        )
Example #5
    def test_init(self):
        R = RigidTransform.random_rotation()
        t = RigidTransform.random_translation()
        from_frame = "a"
        to_frame = "b"
        T_a_b = RigidTransform(R, t, from_frame, to_frame)
        self.assertTrue(np.sum(np.abs(R - T_a_b.rotation)) < 1e-5)
        self.assertTrue(np.sum(np.abs(t - T_a_b.translation)) < 1e-5)
Example #6
    def test_registration(self):
        np.random.seed(101)

        source_points = np.random.rand(3, NUM_POINTS).astype(np.float32)
        source_normals = np.random.rand(3, NUM_POINTS).astype(np.float32)
        source_normals = source_normals / np.tile(
            np.linalg.norm(source_normals, axis=0)[np.newaxis, :], [3, 1])

        source_point_cloud = PointCloud(source_points, frame='world')
        source_normal_cloud = NormalCloud(source_normals, frame='world')

        matcher = PointToPlaneFeatureMatcher()
        solver = PointToPlaneICPSolver(sample_size=NUM_POINTS)

        # 3d registration
        tf = RigidTransform(rotation=RigidTransform.random_rotation(),
                            translation=RigidTransform.random_translation(),
                            from_frame='world',
                            to_frame='world')
        # pull the random transform most of the way back toward identity so
        # the target cloud is only slightly perturbed and ICP can converge
        tf = RigidTransform(from_frame='world',
                            to_frame='world').interpolate_with(tf, 0.01)
        target_point_cloud = tf * source_point_cloud
        target_normal_cloud = tf * source_normal_cloud

        result = solver.register(source_point_cloud,
                                 target_point_cloud,
                                 source_normal_cloud,
                                 target_normal_cloud,
                                 matcher,
                                 num_iterations=NUM_ITERS)

        self.assertTrue(
            np.allclose(tf.matrix, result.T_source_target.matrix, atol=1e-3))

        # 2d registration
        theta = 0.1 * np.random.rand()
        t = 0.005 * np.random.rand(3, 1)
        t[2] = 0
        R = np.array([[np.cos(theta), -np.sin(theta), 0],
                      [np.sin(theta), np.cos(theta), 0], [0, 0, 1]])
        tf = RigidTransform(R, t, from_frame='world', to_frame='world')
        target_point_cloud = tf * source_point_cloud
        target_normal_cloud = tf * source_normal_cloud

        result = solver.register_2d(source_point_cloud,
                                    target_point_cloud,
                                    source_normal_cloud,
                                    target_normal_cloud,
                                    matcher,
                                    num_iterations=NUM_ITERS)

        self.assertTrue(
            np.allclose(tf.matrix, result.T_source_target.matrix, atol=1e-3))
Example #7
    def test_inverse(self):
        R_a_b = RigidTransform.random_rotation()
        t_a_b = RigidTransform.random_translation()
        T_a_b = RigidTransform(R_a_b, t_a_b, 'a', 'b')
        T_b_a = T_a_b.inverse()

        # multiply with numpy arrays
        M_a_b = np.r_[np.c_[R_a_b, t_a_b], [[0, 0, 0, 1]]]
        M_b_a = np.linalg.inv(M_a_b)

        self.assertTrue(np.sum(np.abs(T_b_a.matrix - M_b_a)) < 1e-5,
                        msg='Inverse gave incorrect transformation')

        # check frames
        self.assertEqual(T_b_a.from_frame,
                         'b',
                         msg='Inverse has incorrect input frame')
        self.assertEqual(T_b_a.to_frame,
                         'a',
                         msg='Inverse has incorrect output frame')
Example #8
    def test_bad_transformation(self, num_points=10):
        R_a_b = RigidTransform.random_rotation()
        t_a_b = RigidTransform.random_translation()
        T_a_b = RigidTransform(R_a_b, t_a_b, "a", "b")

        # bad point frame
        caught_bad_frame = False
        try:
            x_c = np.random.rand(3)
            p_c = Point(x_c, "c")
            T_a_b * p_c
        except ValueError:
            caught_bad_frame = True
        self.assertTrue(
            caught_bad_frame, msg="Failed to catch bad point frame"
        )

        # bad point cloud frame
        caught_bad_frame = False
        try:
            x_c = np.random.rand(3, num_points)
            pc_c = PointCloud(x_c, "c")
            T_a_b * pc_c
        except ValueError:
            caught_bad_frame = True
        self.assertTrue(
            caught_bad_frame, msg="Failed to catch bad point cloud frame"
        )

        # illegal input
        caught_bad_input = False
        try:
            x_a = np.random.rand(3, num_points)
            T_a_b * x_a
        except ValueError:
            caught_bad_input = True
        self.assertTrue(
            caught_bad_input, msg="Failed to catch numpy array input"
        )
Example #9
    def test_point_cloud_transformation(self, num_points=10):
        R_a_b = RigidTransform.random_rotation()
        t_a_b = RigidTransform.random_translation()
        T_a_b = RigidTransform(R_a_b, t_a_b, "a", "b")

        x_a = np.random.rand(3, num_points)
        pc_a = PointCloud(x_a, "a")

        # multiply with numpy arrays
        x_b = R_a_b.dot(x_a) + np.tile(t_a_b.reshape(3, 1), [1, num_points])

        # use multiplication operator
        pc_b = T_a_b * pc_a

        self.assertTrue(
            np.sum(np.abs(pc_b.data - x_b)) < 1e-5,
            msg="Point cloud transformation incorrect:\n"
            "Expected:\n{}\nGot:\n{}".format(x_b, pc_b.data),
        )

        # check frames
        self.assertEqual(
            pc_b.frame, "b", msg="Transformed point cloud has incorrect frame"
        )
Example #10
    # turn relative paths absolute
    if not os.path.isabs(config_filename):
        config_filename = os.path.join(os.getcwd(), config_filename)

    # parse config
    config = YamlConfig(config_filename)
    database_name = config['database']
    dataset_name = config['dataset']
    gripper_name = config['gripper']
    metric_name = config['metric']
    object_name = config['object']

    # fake transformation from the camera frame of reference to the robot frame of reference
    # in practice this could be computed by registering the camera to the robot base frame with a chessboard
    T_camera_robot = RigidTransform(
        rotation=RigidTransform.random_rotation(),
        translation=RigidTransform.random_translation(),
        from_frame='camera',
        to_frame='robot')

    # fake transformation from a known 3D object model frame of reference to the camera frame of reference
    # in practice this could be computed using point cloud registration
    T_obj_camera = RigidTransform(
        rotation=RigidTransform.random_rotation(),
        translation=RigidTransform.random_translation(),
        from_frame='obj',
        to_frame='camera')

    # load gripper
    gripper = RobotGripper.load(gripper_name)
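With the two fake transforms above, the object pose in the robot frame follows from the same composition rule demonstrated in Example #1. The chaining below is a sketch and not part of the original script:

# (camera -> robot) composed with (obj -> camera) gives (obj -> robot).
T_obj_robot = T_camera_robot * T_obj_camera
assert T_obj_robot.from_frame == 'obj' and T_obj_robot.to_frame == 'robot'

# A grasp pose defined in the object frame (e.g. one associated with `gripper`)
# could be mapped into the robot frame the same way:
# T_grasp_robot = T_obj_robot * T_grasp_obj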
Example #11
    # turn relative paths absolute
    if not os.path.isabs(config_filename):
        config_filename = os.path.join(os.getcwd(), config_filename)

    # parse config
    config = YamlConfig(config_filename)
    database_name = config['database']
    dataset_name = config['dataset']
    gripper_name = config['gripper']
    metric_name = config['metric']
    object_name = config['object']

    # fake transformation from the camera frame of reference to the robot frame of reference
    # in practice this could be computed by registering the camera to the robot base frame with a chessboard
    T_camera_robot = RigidTransform(rotation=RigidTransform.random_rotation(),
                                    translation=RigidTransform.random_translation(),
                                    from_frame='camera', to_frame='robot')

    # fake transformation from a known 3D object model frame of reference to the camera frame of reference
    # in practice this could be computed using point cloud registration
    T_obj_camera = RigidTransform(rotation=RigidTransform.random_rotation(),
                                  translation=RigidTransform.random_translation(),
                                  from_frame='obj', to_frame='camera')
    
    # load gripper
    gripper = RobotGripper.load(gripper_name)

    # open Dex-Net API
    dexnet_handle = DexNet()
    dexnet_handle.open_database(database_path)