def test_to_and_from_unreal_is_reflexive_for_points(self):
    """Converting a point to Unreal coordinates and back yields the original point, repeatedly."""
    original = (125, -73, 403)
    recovered = uetrans.transform_from_unreal(uetrans.transform_to_unreal(original))
    self.assert_array(original, recovered)
    # Keep round-tripping the recovered point; no drift should accumulate
    for _ in range(10):
        recovered = uetrans.transform_from_unreal(uetrans.transform_to_unreal(recovered))
        self.assert_array(original, recovered)
def test_to_and_from_unreal_is_reflexive_for_transforms(self):
    """Converting a transform to Unreal and back preserves location and rotation, repeatedly."""
    original = mytf.Transform((-19, -23, 300), (0.6, -0.7, -0.3, 1.4))
    recovered = uetrans.transform_from_unreal(uetrans.transform_to_unreal(original))
    self.assert_array(original.location, recovered.location)
    self.assert_close(original.rotation_quat(), recovered.rotation_quat())
    # Keep round-tripping the recovered pose; no drift should accumulate
    for _ in range(10):
        recovered = uetrans.transform_from_unreal(uetrans.transform_to_unreal(recovered))
        self.assert_array(original.location, recovered.location)
        self.assert_close(original.rotation_quat(), recovered.rotation_quat())
def test_to_unreal_preserves_find_relative_simple(self):
    """find_relative computed in Unreal space matches the same computation in our space."""
    pose1 = mytf.Transform((73, -47, -23), (0.92387953, 0, 0.38268343, 0), w_first=True)
    pose2 = mytf.Transform((-19, 83, -61), (0.5, -0.5, 0.5, -0.5), w_first=True)
    expected_relative = pose1.find_relative(pose2)
    # Do the same relative-pose computation after converting both poses to Unreal
    ue_relative = uetrans.transform_to_unreal(pose1).find_relative(
        uetrans.transform_to_unreal(pose2))
    actual_relative = uetrans.transform_from_unreal(ue_relative)
    self.assert_close(expected_relative.location, actual_relative.location)
    self.assert_close(expected_relative.rotation_quat(), actual_relative.rotation_quat())
def test_to_and_from_unreal_interchangable_point_and_pose(self):
    """A converted point can be used as a transform location, and vice versa."""
    point = (125, -73, 403)
    pose = mytf.Transform((-19, -23, 300), (0.6, -0.7, -0.3, 1.4))
    ue_point = uetrans.transform_to_unreal(point)
    ue_pose = uetrans.transform_to_unreal(pose)
    # Wrap the converted point in a full UnrealTransform before converting back,
    # and pull the bare location out of the converted pose and convert that back.
    point_prime = uetrans.transform_from_unreal(
        uetrans.UnrealTransform(location=ue_point)).location
    location_prime = uetrans.transform_from_unreal(ue_pose.location)
    self.assert_array(point, point_prime)
    self.assert_array(pose.location, location_prime)
def move_camera_to(self, pose):
    """
    Move the camera to a given pose, colliding with objects and stopping if blocked.

    The camera may not reach the requested pose, so after the move the actual
    location is read back from the simulator and stored as the current pose.

    :param pose: The desired camera pose, either an uetf.UnrealTransform (Unreal frame)
                 or a pose in our coordinate frame, relative to the simulator origin.
    :return: None
    """
    if self._client is not None:
        # Normalize: _current_pose is tracked in our coordinate frame, while the
        # simulator is driven with an UnrealTransform.
        if isinstance(pose, uetf.UnrealTransform):
            self._current_pose = uetf.transform_from_unreal(pose)
        else:
            self._current_pose = pose
            pose = uetf.transform_to_unreal(pose)
        # Convert from origin-relative coordinates to absolute simulator coordinates
        pose = self._origin.find_independent(pose)
        self._client.request("vset /camera/0/moveto {0} {1} {2}".format(
            pose.location[0], pose.location[1], pose.location[2]))
        self._client.request("vset /camera/0/rotation {0} {1} {2}".format(
            pose.pitch, pose.yaw, pose.roll))
        # Re-extract the location from the sim, because we might not have made it
        # to the desired location
        location = self._client.request("vget /camera/0/location")
        location = location.split(' ')
        if len(location) == 3:
            pose = uetf.UnrealTransform(
                location=(float(location[0]), float(location[1]), float(location[2])),
                rotation=pose.euler)
            # BUGFIX: the location read back is in absolute simulator coordinates,
            # but _current_pose is tracked relative to the simulator origin
            # (matching set_camera_pose, which applies find_independent before
            # sending). Convert back to origin-relative before storing.
            self._current_pose = uetf.transform_from_unreal(
                self._origin.find_relative(pose))
def export_data(self, db_client: database.client.DatabaseClient):
    """
    Allow experiments to export some data, usually to file.

    Currently this dumps the reference camera trajectory of every trajectory group
    to a CSV file (one file per group, coordinates in the Unreal frame), so that
    simulations can be built around them.

    :param db_client: The database client used to load each reference trajectory
    :return: None
    """
    for group in self._trajectory_groups.values():
        trajectory = traj_help.get_trajectory_for_image_source(
            db_client, group.reference_dataset)
        with open('trajectory_{0}.csv'.format(group.name), 'w') as output_file:
            output_file.write('Name,X,Y,Z,Roll,Pitch,Yaw\n')
            # Rows are written in timestamp order, named by their index
            for idx, stamp in enumerate(sorted(trajectory.keys())):
                ue_pose = uetf.transform_to_unreal(trajectory[stamp])
                output_file.write('{name},{x},{y},{z},{roll},{pitch},{yaw}\n'.format(
                    name=idx,
                    x=ue_pose.location[0],
                    y=ue_pose.location[1],
                    z=ue_pose.location[2],
                    roll=ue_pose.euler[0],
                    pitch=ue_pose.euler[1],
                    yaw=ue_pose.euler[2]))
def test_yaw_to_unreal(self):
    """Rotation around up (Z) maps to Unreal yaw with the sign flipped."""
    # 45 degrees clockwise around up becomes -45 degrees of Unreal yaw
    rotation = tf.quaternions.axangle2quat((0, 0, 1), np.pi / 4)
    ue_pose = uetrans.transform_to_unreal(
        mytf.Transform(location=(0, 0, 0), rotation=rotation, w_first=True))
    self.assert_close((0, 0, -45), ue_pose.euler)
    # 30 degrees anticlockwise around up becomes +30 degrees of Unreal yaw
    rotation = tf.quaternions.axangle2quat((0, 0, 1), -np.pi / 6)
    ue_pose = uetrans.transform_to_unreal(
        mytf.Transform(location=(0, 0, 0), rotation=rotation, w_first=True))
    self.assert_close((0, 0, 30), ue_pose.euler)
def test_pitch_to_unreal(self):
    """Rotation around right (negative Y) maps directly to Unreal pitch."""
    # 45 degrees clockwise around right becomes +45 degrees of Unreal pitch
    rotation = tf.quaternions.axangle2quat((0, -1, 0), np.pi / 4)
    ue_pose = uetrans.transform_to_unreal(
        mytf.Transform(location=(0, 0, 0), rotation=rotation, w_first=True))
    self.assert_close((0, 45, 0), ue_pose.euler)
    # 30 degrees anticlockwise around right becomes -30 degrees of Unreal pitch
    rotation = tf.quaternions.axangle2quat((0, -1, 0), -np.pi / 6)
    ue_pose = uetrans.transform_to_unreal(
        mytf.Transform(location=(0, 0, 0), rotation=rotation, w_first=True))
    self.assert_close((0, -30, 0), ue_pose.euler)
def test_roll_to_unreal(self):
    """Rotation around forward (X) maps directly to Unreal roll."""
    # 45 degrees clockwise around X becomes +45 degrees of Unreal roll
    rotation = tf.quaternions.axangle2quat((1, 0, 0), np.pi / 4)
    ue_pose = uetrans.transform_to_unreal(
        mytf.Transform(location=(0, 0, 0), rotation=rotation, w_first=True))
    self.assert_close((45, 0, 0), ue_pose.euler)
    # 30 degrees anticlockwise around X becomes -30 degrees of Unreal roll
    rotation = tf.quaternions.axangle2quat((1, 0, 0), -np.pi / 6)
    ue_pose = uetrans.transform_to_unreal(
        mytf.Transform(location=(0, 0, 0), rotation=rotation, w_first=True))
    self.assert_close((-30, 0, 0), ue_pose.euler)
def test_to_unreal_preserves_find_relative_complex(self):
    """find_relative is preserved through Unreal conversion for an arbitrary rotation."""
    pose1 = mytf.Transform(
        (73, -47, -23), (0.68400501, 0.37550612, 0.62456579, 0.03240183), w_first=True)
    pose2 = mytf.Transform((-19, 83, -61), (0.5, -0.5, 0.5, -0.5), w_first=True)
    expected_relative = pose1.find_relative(pose2)
    # Do the same relative-pose computation after converting both poses to Unreal
    ue_relative = uetrans.transform_to_unreal(pose1).find_relative(
        uetrans.transform_to_unreal(pose2))
    actual_relative = uetrans.transform_from_unreal(ue_relative)
    self.assert_close(expected_relative.location, actual_relative.location)
    self.assert_close(expected_relative.rotation_quat(), actual_relative.rotation_quat())
def test_pose_to_unreal(self):
    """A full pose converts to scaled, Y-negated location plus Unreal Euler angles."""
    rotation = tf.quaternions.axangle2quat((1, -3, 2), -np.pi / 4)
    ue_pose = uetrans.transform_to_unreal(
        mytf.Transform(location=(37, 113, -97), rotation=rotation, w_first=True))
    # Location is scaled by 100 with the Y axis negated
    self.assert_array((3700, -11300, -9700), ue_pose.location)
    # This rotation was generated by Unreal, when asked to rotate around the same axis
    # I used Rotator from Axis Angle given 1 Forward, 2 Up, and 3 Right, and 45 degrees
    self.assert_close((-21.688360, -31.675301, 31.189531), ue_pose.euler)
def test_set_camera_pose_handles_frame_conversion(self, mock_client, mock_cv2, *_):
    """set_camera_pose accepts poses in either coordinate frame.

    Given a pose in our frame, it is stored as-is; given an UnrealTransform,
    it is converted back to our frame for storage. In both cases the requests
    sent to the (mocked) unrealcv client must carry Unreal-frame coordinates.
    """
    mock_cv2.imread.side_effect = make_mock_image
    mock_client_instance = make_mock_unrealcv_client()
    mock_client.return_value = mock_client_instance
    subject = uecvsim.UnrealCVSimulator('temp/test_project/test.sh', 'sim-world')
    subject.begin()
    # Case 1: a pose in our coordinate frame
    pose = tf.Transform((17, -21, 3), (0.1, 0.7, -0.3, 0.5))
    ue_pose = uetf.transform_to_unreal(pose)
    subject.set_camera_pose(pose)
    # The stored pose is unchanged ...
    self.assertEqual(subject.current_pose, pose)
    # ... while the simulator received Unreal-frame location and rotation
    self.assertIn(
        mock.call(("vset /camera/0/location {0} {1} {2}".format(
            ue_pose.location[0], ue_pose.location[1],
            ue_pose.location[2]))),
        mock_client_instance.request.call_args_list)
    self.assertIn(
        mock.call(("vset /camera/0/rotation {0} {1} {2}".format(
            ue_pose.pitch, ue_pose.yaw, ue_pose.roll))),
        mock_client_instance.request.call_args_list)
    # Case 2: the equivalent UnrealTransform is passed instead
    pose = tf.Transform((-175, 29, -870), (0.3, -0.2, -0.8, 0.6))
    ue_pose = uetf.transform_to_unreal(pose)
    subject.set_camera_pose(ue_pose)
    # The stored pose is converted back to our frame (compared approximately,
    # since the quaternion round-trip introduces floating-point error)
    self.assertTrue(
        np.array_equal(subject.current_pose.location, pose.location))
    self.assertTrue(
        np.all(
            np.isclose(subject.current_pose.rotation_quat(True),
                       pose.rotation_quat(True))))
    # The simulator again received the Unreal-frame coordinates
    self.assertIn(
        mock.call(("vset /camera/0/location {0} {1} {2}".format(
            ue_pose.location[0], ue_pose.location[1],
            ue_pose.location[2]))),
        mock_client_instance.request.call_args_list)
    self.assertIn(
        mock.call(("vset /camera/0/rotation {0} {1} {2}".format(
            ue_pose.pitch, ue_pose.yaw, ue_pose.roll))),
        mock_client_instance.request.call_args_list)
def set_camera_pose(self, pose):
    """
    Set the camera pose in the simulator.

    :param pose: The desired camera pose, either an uetf.UnrealTransform
                 (Unreal coordinate frame) or a pose in our coordinate frame;
                 presumably relative to the simulator origin, since
                 find_independent is applied before sending — TODO confirm.
    :return: None
    """
    # No-op when the simulator client is not connected
    if self._client is not None:
        # Normalize: _current_pose is tracked in our coordinate frame, while
        # the simulator is driven with an UnrealTransform.
        if isinstance(pose, uetf.UnrealTransform):
            self._current_pose = uetf.transform_from_unreal(pose)
        else:
            self._current_pose = pose
            pose = uetf.transform_to_unreal(pose)
        pose = self._origin.find_independent(
            pose
        )  # Find world coordinates from relative to simulator origin
        self._client.request("vset /camera/0/location {0} {1} {2}".format(
            pose.location[0], pose.location[1], pose.location[2]))
        self._client.request("vset /camera/0/rotation {0} {1} {2}".format(
            pose.pitch, pose.yaw, pose.roll))
def get_obstacle_avoidance_force(self, radius=1, velocity=(1, 0, 0)):
    """
    Get a force for obstacle avoidance.
    The simulator should get all objects within the given radius,
    and provide a net force away from all the objects, scaled by the distance to the objects.

    :param radius: Distance to detect objects, in meters
    :param velocity: The current velocity of the camera, for predicting collisions.
    :return: A repulsive force vector, as a numpy array
    """
    if self._client is not None:
        # Convert the velocity vector into the Unreal coordinate frame
        velocity = uetf.transform_to_unreal(velocity)
        # radius * 100: the simulator works in centimetres, radius is in metres
        force = self._client.request(
            "vget /camera/0/avoid {0} {1} {2} {3}".format(
                radius * 100, velocity[0], velocity[1], velocity[2]))
        force = force.split(' ')
        # Only trust a well-formed three-component response
        if len(force) == 3:
            force = np.array(
                [float(force[0]), float(force[1]), float(force[2])])
            # Convert the Unreal-frame force back into our coordinate frame
            return uetf.transform_from_unreal(force)
    # No client, or a malformed response: no repulsive force
    return np.array([0, 0, 0])
def test_point_to_unreal(self):
    """Points convert to Unreal by scaling to centimetres and negating the Y axis."""
    point = (13, -7, 156)  # 13 forward, 7 right, 156 up
    self.assert_array((1300, 700, 15600), uetrans.transform_to_unreal(point))