Code Example #1
    def test_pose_error(self):
        """
        Test rotation and translation error metric.
        """

        self.ground_truth = ProjectScene.load(self.data_path,
                                              'bounding_box_sample2')
        self.submission = ProjectScene.load(self.data_path,
                                            'bounding_box_sample2')
        self.settings.thresholds = [0.5]

        # verify that correct pose is ok
        evaluator = BBEvaluator(self.submission, self.ground_truth,
                                self.settings)
        rotation_error, translation_error = evaluator.pose_error()
        self.assertAlmostEqual(rotation_error, 0)
        self.assertAlmostEqual(translation_error, 0)

        # verify that rotation by symmetry amount is ok
        pose_orig = self.submission.elements["1069"].pose
        new_pose = Pose3(R=pose_orig.R * Rot3.Ry(math.pi), t=pose_orig.t)
        self.submission.elements["1069"].pose = new_pose
        evaluator = BBEvaluator(self.submission, self.ground_truth,
                                self.settings)
        rotation_error, translation_error = evaluator.pose_error()
        self.assertAlmostEqual(rotation_error, 0)
        self.assertAlmostEqual(translation_error, 0)

        # verify that rotation by a non-symmetry amount gives the correct error
        new_pose = Pose3(R=pose_orig.R * Rot3.Ry(math.radians(10)),
                         t=pose_orig.t)
        self.submission.elements["1069"].pose = new_pose
        evaluator = BBEvaluator(self.submission, self.ground_truth,
                                self.settings)
        rotation_error, translation_error = evaluator.pose_error()
        self.assertAlmostEqual(rotation_error, math.radians(10))
        self.assertAlmostEqual(translation_error, 0)

        # verify that translation gives translation error
        new_pose = Pose3(R=pose_orig.R, t=pose_orig.t + [0.05, 0, 0])
        self.submission.elements["1069"].pose = new_pose
        evaluator = BBEvaluator(self.submission, self.ground_truth,
                                self.settings)
        rotation_error, translation_error = evaluator.pose_error()
        self.assertAlmostEqual(rotation_error, 0)
        self.assertAlmostEqual(translation_error, 0.05)

        # verify that a submission with no matches (translation beyond threshold) gives None, None
        new_pose = Pose3(R=pose_orig.R, t=pose_orig.t + [1, 0, 0])
        self.submission.elements["1069"].pose = new_pose
        evaluator = BBEvaluator(self.submission, self.ground_truth,
                                self.settings)
        rotation_error, translation_error = evaluator.pose_error()
        self.assertEqual(rotation_error, None)
        self.assertEqual(translation_error, None)
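Note that the Rot3.Ry(math.pi) case yields zero error, which shows the evaluator discounts rotations that map a symmetric object onto itself; the cylindrical_symmetric_rot helper in code example #11 shows one way such symmetry cancellation can be implemented.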
Code Example #2
    def test_to_rotation_matrix(self):
        rot1 = self.q1.to_rotation_matrix()
        np.testing.assert_array_almost_equal(rot1, np.identity(3))

        M2 = Rot3.Rx(self.theta1).matrix()
        rot2 = self.q2.to_rotation_matrix()
        self.assertTrue(np.allclose(rot2, M2))

        M3 = Rot3.Rz(self.theta2).matrix()
        rot3 = self.q3.to_rotation_matrix()
        np.testing.assert_array_almost_equal(rot3, M3)
Code Example #3
File: rot3_tests.py Project: qicny/sumo-challenge
    def test_ENU_camera_w_photosphere(self):
        # Make sure we agree with conventions at
        # https://developers.google.com/streetview/spherical-metadata
        pitch = 0.1
        roll = 0.1
        wRt = Rot3.ENU_camera(pitch=pitch, roll=roll)

        expected_wRr = Rot3.Rx(pitch) * Rot3.Ry(roll)  # from rotated to ENU
        # But our tilted frame uses the camera convention.
        # TODO: maybe it should not! Why do we do this?
        expected_wRt = Rot3(expected_wRr * ENU_R_CAMERA)
        np.testing.assert_array_almost_equal(
            wRt.matrix(), expected_wRt.matrix(), decimal=2
        )
Code Example #4
def euler_to_matrix(e):
    """
    Convert ZYX Euler angles to 3x3 rotation matrix.

    Inputs:
    e (numpy 3-vector of float) - ZYX Euler angles (radians)

    Return:
    matrix (3x3 numpy array of float) - rotation matrix

    TODO: This could be optimized somewhat by using the direct
    equations for the final matrix rather than multiplying out the 
    matrices.
    """
    return (Rot3.Rz(e[0]) * Rot3.Ry(e[1]) * Rot3.Rx(e[2])).R
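The TODO above can be resolved by expanding the product Rz(e[0]) * Ry(e[1]) * Rx(e[2]) into closed form. Below is a minimal sketch of such a hypothetical euler_to_matrix_direct helper (not part of the sumo API); only numpy is assumed.

import numpy as np

def euler_to_matrix_direct(e):
    # Closed-form ZYX rotation matrix: the expanded product
    # Rz(e[0]) * Ry(e[1]) * Rx(e[2]), avoiding two matrix multiplies.
    cz, sz = np.cos(e[0]), np.sin(e[0])
    cy, sy = np.cos(e[1]), np.sin(e[1])
    cx, sx = np.cos(e[2]), np.sin(e[2])
    return np.array([
        [cz * cy, cz * sy * sx - sz * cx, cz * sy * cx + sz * sx],
        [sz * cy, sz * sy * sx + cz * cx, sz * sy * cx - cz * sx],
        [-sy, cy * sx, cy * cx],
    ])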
Code Example #5
    def test_transform_from(self):
        '''Test transform_from.'''
        t = Vector3(1, 1, 1)
        R = Rot3()
        T = Pose3(R, t).inverse()
        cloud = PointCloud(np.zeros((3, 1)))
        new_cloud = cloud.transform_from(T)
        np.testing.assert_array_equal(new_cloud.points(),
                                      [[-1.0], [-1.0], [-1.0]])
Code Example #6
File: rot3_tests.py Project: qicny/sumo-challenge
    def test_from_xml(self):
        '''Conversion from xml'''

        # test common case
        s = '<rotation><c1>1, 4, 7</c1><c2>2, 5, 8</c2><c3>3, 6, 9</c3></rotation>'
        rot_xml = ET.fromstring(s)
        rot = Rot3.from_xml(rot_xml)
        expected_rot = Rot3(R=np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]))
        rot.assert_almost_equal(expected_rot)

        # test missing field
        s = '<rotation><c1>1, 4, 7</c1><c3>3, 6, 9</c3></rotation>'
        self.assertRaises(ValueError, Rot3.from_xml, ET.fromstring(s))

        # test incorrect field
        s = '<rotation><foobar>1, 4, 7</foobar><c2>2, 5, 8</c2><c3>3, 6, 9</c3>\
</rotation>'

        self.assertRaises(ValueError, Rot3.from_xml, ET.fromstring(s))
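Note that the <c1>, <c2>, <c3> tags hold the matrix columns, not rows: c1 = '1, 4, 7' is the first column of [[1, 2, 3], [4, 5, 6], [7, 8, 9]]. The to_xml test in code example #7 confirms the same layout in the other direction.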
Code Example #7
File: rot3_tests.py Project: qicny/sumo-challenge
    def test_to_xml(self):
        '''
        Test conversion to xml.
        '''
        rot = Rot3(R=np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]))
        rot_xml = rot.to_xml()
        expected_xml = '<rotation><c1>1, 4, 7</c1><c2>2, 5, 8</c2><c3>3, 6, 9</c3>\
</rotation>'

        self.assertEqual(ET.tostring(rot_xml, encoding='unicode'), expected_xml)
Code Example #8
    def test_register(self):
        '''Test registering point clouds in different frames.'''
        t = Vector3(1, 1, 1)
        R = Rot3()
        T = Pose3(R, t).inverse()
        cloud = PointCloud(np.zeros((3, 1)))
        registered_cloud = PointCloud.register([(Pose3(), cloud), (T, cloud)])
        np.testing.assert_array_equal(
            registered_cloud.points(),
            np.column_stack([Vector3(0, 0, 0),
                             Vector3(-1.0, -1.0, -1.0)]))
Code Example #9
    def test_quat_matrix(self):
        rx = 10  # degrees
        ry = 20
        rz = 30
        Rx = Rot3.Rx(math.radians(rx))
        Ry = Rot3.Ry(math.radians(ry))
        Rz = Rot3.Rz(math.radians(rz))

        # matrix -> quat
        R = Rz * Ry * Rx
        q = utils.matrix_to_quat(R.R)
        expected_q = np.array([0.9515485, 0.0381346, 0.1893079,
                               0.2392983])  # computed manually
        np.testing.assert_array_almost_equal(q, expected_q, 4)

        # quat -> matrix
        R2 = utils.quat_to_matrix(expected_q)
        np.testing.assert_array_almost_equal(R2, R.R, 4)

        # round trip
        R2 = utils.quat_to_matrix(q)
        np.testing.assert_array_almost_equal(R.R, R2, 4)
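The expected_q values above put the scalar part first, i.e. [w, x, y, z] ordering. Under that assumption, the conversion the test exercises corresponds to the standard formula sketched below; this is a stand-in, not the actual utils.quat_to_matrix.

import numpy as np

def quat_to_matrix_sketch(q):
    # Standard unit-quaternion to rotation-matrix conversion,
    # assuming [w, x, y, z] component ordering.
    w, x, y, z = q
    return np.array([
        [1 - 2 * (y * y + z * z), 2 * (x * y - w * z), 2 * (x * z + w * y)],
        [2 * (x * y + w * z), 1 - 2 * (x * x + z * z), 2 * (y * z - w * x)],
        [2 * (x * z - w * y), 2 * (y * z + w * x), 1 - 2 * (x * x + y * y)],
    ])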
Code Example #10
File: rot3_tests.py Project: qicny/sumo-challenge
    def test_RollPitchYaw(self):
        roll, pitch, yaw = 0.1, 0.2, 0.3
        np.testing.assert_array_equal(
            Rot3.Rx(roll).matrix(), Rot3.AxisAngle(UNIT_X, roll).matrix()
        )
        np.testing.assert_array_equal(
            Rot3.Ry(pitch).matrix(), Rot3.AxisAngle(UNIT_Y, pitch).matrix()
        )
        np.testing.assert_array_equal(
            Rot3.Rz(yaw).matrix(), Rot3.AxisAngle(UNIT_Z, yaw).matrix()
        )
Code Example #11
    def cylindrical_symmetric_rot(self, det_rot, gt_rot, axis):
        """
        Compute rot that zeroes out the rotation error between 
        <det_rot> and <gt_rot> along axis <axis>.

        Inputs
        det_rot (Rot3) - rotation for detection
        gt_rot (Rot3) - rotation for ground truth
        axis (int) - cylindrical rotation axis (0=x, 1=y, 2=z)

        Return:
        Rot3 - new rotation matrix with gt set to det rotation on target axis

        Algorithm: 
        1. convert to zyx Euler angles
        2. set gt to detection value for target axis
        3. convert back to matrix rep
        """
        det_euler = utils.matrix_to_euler(det_rot.R)
        gt_euler = utils.matrix_to_euler(gt_rot.R)
        # note: the Euler vector is ordered [z, y, x], so axis k maps to index 2 - k
        det_euler[2 - axis] = gt_euler[2 - axis]
        return Rot3(utils.euler_to_matrix(det_euler))
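cylindrical_symmetric_rot relies on utils.matrix_to_euler, the inverse of the euler_to_matrix function in code example #4. A minimal sketch of that inverse for ZYX angles follows; it is a hypothetical stand-in, not the actual utils.matrix_to_euler, and it ignores the gimbal-lock case cos(y) ≈ 0 that a real implementation must handle.

import numpy as np

def matrix_to_euler_sketch(R):
    # Recover ZYX Euler angles [z, y, x] from a rotation matrix
    # R = Rz(z) * Ry(y) * Rx(x).  Assumes cos(y) is not near zero.
    z = np.arctan2(R[1, 0], R[0, 0])
    y = np.arctan2(-R[2, 0], np.hypot(R[2, 1], R[2, 2]))
    x = np.arctan2(R[2, 1], R[2, 2])
    return np.array([z, y, x])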
Code Example #12
File: rot3_tests.py Project: qicny/sumo-challenge
    def test_ENU_camera(self):
        d = 0.1  # positive pitch
        wRt = Rot3.ENU_camera(pitch=d)
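        # Small-angle approximation: sin(d) ≈ d and cos(d) ≈ 1, so the
        # expected matrix below uses d directly; decimal=2 absorbs the
        # O(d^2) error.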
        expected = np.transpose(np.array([[1, 0, 0], [0, d, -1], [0, 1, d]]))
        np.testing.assert_array_almost_equal(wRt.matrix(), expected, decimal=2)
Code Example #13
File: rot3_tests.py Project: qicny/sumo-challenge
    def setUp(self):
        # Camera with z-axis looking at world y-axis
        wRc = np.transpose(
            np.array([[1, 0, 0], [0, 0, -1], [0, 1, 0]], dtype=float)
        )
        self.rot = Rot3(wRc)
Code Example #14
File: rot3_tests.py Project: qicny/sumo-challenge
    def test_json(self):
        '''Test conversion to/from json dict.'''
        json_dict = self.rot.to_json()
        decoded_rot = Rot3.from_json(json_dict)
        decoded_rot.assert_almost_equal(self.rot)
Code Example #15
File: sumo-api-example.py Project: kuixu/Linux
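# Note: this excerpt assumes earlier lines of the full script created
# `colored_category` and imported `parutil`.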
print('colored_category.category_id_to_rgb(133):\n',
      colored_category.category_id_to_rgb(133))
print('colored_category.category_name_to_rgb(\'shoes\'):\n',
      colored_category.category_name_to_rgb('shoes'))
print('colored_category.LUT:\n', colored_category.LUT)
lut = colored_category.LUT
print('lut.shape[0]: ', lut.shape[0], 'lut.shape[1]: ', lut.shape[1])

from sumo.base.vector import Vector2, Vector2f, Vector3, Vector3f, on_left, unitize
print('unitize(Vector3(100, 201, 50)): ', unitize(Vector3(100, 201, 50)))

# rot3_tests
from sumo.geometry.rot3 import Rot3, ENU_R_CAMERA
import numpy as np
wRc = np.transpose(np.array([[1, 0, 0], [0, 0, -1], [0, 1, 0]], dtype=float))
rot = Rot3(wRc)
print(rot.matrix())

# MultiImageTiff
from sumo.geometry.inverse_depth import depth_image_of_inverse_depth_map
from sumo.images.multi_image_tiff import MultiImageTiff, MultiImagePageType

tiff_path = parutil.get_file_path(
    '/mnt/lustre/sunjiankai/Dataset/sample_data/sumo-input/sumo-input.tif')
multi = MultiImageTiff.load(tiff_path)
print('multi.rgb.shape: ', multi.rgb.shape, 'multi.range.shape (Depth): ',
      multi.range.shape, 'multi.category.shape (Category): ',
      multi.category.shape, 'multi.instance.shape (Instance): ',
      multi.instance.shape)
# multi.rgb.shape:  (1024, 6144, 3) multi.range.shape:  (1024, 6144) multi.category.shape:  (1024, 6144) multi.instance.shape:  (1024, 6144)