def move_camera_to(self, pose):
    """
    Move the camera to a given pose, colliding with objects and stopping if blocked.

    Issues a ``vset /camera/0/moveto`` (collision-aware move) followed by a rotation
    set, then re-reads the camera location from the simulator because the camera may
    have been blocked before reaching the requested position.

    :param pose: The target pose, either a ``uetf.UnrealTransform`` (Unreal coordinates)
        or a conventional Transform (converted via ``uetf.transform_to_unreal``).
    :return: None
    """
    if self._client is not None:
        if isinstance(pose, uetf.UnrealTransform):
            # Already in Unreal coordinates; cache the conventional-frame equivalent
            self._current_pose = uetf.transform_from_unreal(pose)
        else:
            self._current_pose = pose
            pose = uetf.transform_to_unreal(pose)
        # Convert from origin-relative coordinates to simulator world coordinates
        pose = self._origin.find_independent(pose)
        self._client.request("vset /camera/0/moveto {0} {1} {2}".format(
            pose.location[0], pose.location[1], pose.location[2]))
        self._client.request("vset /camera/0/rotation {0} {1} {2}".format(
            pose.pitch, pose.yaw, pose.roll))
        # Re-extract the location from the sim, because we might not have made it to the desired location
        location = self._client.request("vget /camera/0/location")
        location = location.split(' ')
        if len(location) == 3:
            # Keep the requested rotation (pose.euler); only the location is re-read.
            pose = uetf.UnrealTransform(
                location=(float(location[0]), float(location[1]), float(location[2])),
                rotation=pose.euler)
            # NOTE(review): this re-read pose is in world coordinates but is not mapped
            # back through self._origin.find_relative before caching, unlike
            # get_object_pose — confirm whether _current_pose should be origin-relative.
            self._current_pose = uetf.transform_from_unreal(pose)
def test_to_and_from_unreal_is_reflexive_for_points(self):
    """A point must survive conversion to Unreal coordinates and back, repeatedly."""
    original = (125, -73, 403)
    # One round-trip must return the starting point
    roundtripped = uetrans.transform_from_unreal(uetrans.transform_to_unreal(original))
    self.assert_array(original, roundtripped)
    # Repeated round-trips must not accumulate drift
    for _ in range(10):
        roundtripped = uetrans.transform_from_unreal(uetrans.transform_to_unreal(roundtripped))
        self.assert_array(original, roundtripped)
def test_to_and_from_unreal_is_reflexive_for_transforms(self):
    """A full transform must survive repeated Unreal round-trips unchanged."""
    original = mytf.Transform((-19, -23, 300), (0.6, -0.7, -0.3, 1.4))
    roundtripped = uetrans.transform_from_unreal(uetrans.transform_to_unreal(original))
    self.assert_array(original.location, roundtripped.location)
    self.assert_close(original.rotation_quat(), roundtripped.rotation_quat())
    # Re-converting many times must not accumulate drift in location or rotation
    for _ in range(10):
        roundtripped = uetrans.transform_from_unreal(uetrans.transform_to_unreal(roundtripped))
        self.assert_array(original.location, roundtripped.location)
        self.assert_close(original.rotation_quat(), roundtripped.rotation_quat())
def test_roll_from_unreal(self):
    """Unreal roll maps to rotation around the X axis in the conventional frame."""
    # 45 degrees clockwise around X
    converted = uetrans.transform_from_unreal(
        uetrans.UnrealTransform(location=(0, 0, 0), rotation=(45, 0, 0)))
    expected = tf.quaternions.axangle2quat((1, 0, 0), np.pi / 4)
    self.assert_close(expected, converted.rotation_quat(w_first=True))
    # 30 degrees anticlockwise around X
    converted = uetrans.transform_from_unreal(
        uetrans.UnrealTransform(location=(0, 0, 0), rotation=(-30, 0, 0)))
    expected = tf.quaternions.axangle2quat((1, 0, 0), -np.pi / 6)
    self.assert_close(expected, converted.rotation_quat(w_first=True))
def test_yaw_from_unreal(self):
    """Unreal yaw maps to rotation around up (Z), with handedness flipped."""
    # 45 degrees clockwise around up becomes a negative rotation around Z
    converted = uetrans.transform_from_unreal(
        uetrans.UnrealTransform(location=(0, 0, 0), rotation=(0, 0, 45)))
    expected = tf.quaternions.axangle2quat((0, 0, 1), -np.pi / 4)
    self.assert_close(expected, converted.rotation_quat(w_first=True))
    # 30 degrees anticlockwise around up becomes a positive rotation around Z
    converted = uetrans.transform_from_unreal(
        uetrans.UnrealTransform(location=(0, 0, 0), rotation=(0, 0, -30)))
    expected = tf.quaternions.axangle2quat((0, 0, 1), np.pi / 6)
    self.assert_close(expected, converted.rotation_quat(w_first=True))
def test_pitch_from_unreal(self):
    """Unreal pitch maps to rotation around right (negative Y) in the conventional frame."""
    # 45 degrees clockwise around right (neg Y)
    converted = uetrans.transform_from_unreal(
        uetrans.UnrealTransform(location=(0, 0, 0), rotation=(0, 45, 0)))
    expected = tf.quaternions.axangle2quat((0, -1, 0), np.pi / 4)
    self.assert_close(expected, converted.rotation_quat(w_first=True))
    # 30 degrees anticlockwise around right (neg Y)
    converted = uetrans.transform_from_unreal(
        uetrans.UnrealTransform(location=(0, 0, 0), rotation=(0, -30, 0)))
    expected = tf.quaternions.axangle2quat((0, -1, 0), -np.pi / 6)
    self.assert_close(expected, converted.rotation_quat(w_first=True))
def test_to_and_from_unreal_interchangable_point_and_pose(self):
    """Converted points and pose locations can be used interchangeably."""
    point = (125, -73, 403)
    pose = mytf.Transform((-19, -23, 300), (0.6, -0.7, -0.3, 1.4))
    ue_point = uetrans.transform_to_unreal(point)
    ue_pose = uetrans.transform_to_unreal(pose)
    # A converted point wrapped in an UnrealTransform converts back to the same point
    wrapped_point = uetrans.UnrealTransform(location=ue_point)
    self.assert_array(point, uetrans.transform_from_unreal(wrapped_point).location)
    # A converted pose's bare location converts back to the pose's original location
    bare_location = ue_pose.location
    self.assert_array(pose.location, uetrans.transform_from_unreal(bare_location))
def get_object_pose(self, object_name):
    """
    Get the world pose of an object within the simulation.

    Reads the object's location and rotation from UnrealCV, re-expresses the
    resulting transform relative to the simulator origin, and converts it to
    the conventional coordinate frame.

    :param object_name: The name of the object
    :return: The object's pose as a conventional-frame transform, relative to
        the simulator origin; None if there is no client or either response
        cannot be parsed into three components.
    """
    if self._client is not None:
        location = self._client.request(
            "vget /object/{0}/location".format(object_name))
        location = location.split(' ')
        if len(location) == 3:
            rotation = self._client.request(
                "vget /object/{0}/rotation".format(object_name))
            rotation = rotation.split(' ')
            if len(rotation) == 3:
                ue_trans = uetf.UnrealTransform(
                    location=(float(location[0]), float(location[1]),
                              float(location[2])),
                    # Reorder to roll, pitch, yaw; UnrealCV order is pitch, yaw, roll.
                    # Fixed: the pitch slot previously read location[1] (a copy-paste
                    # bug), corrupting the rotation with the object's Y coordinate.
                    rotation=(float(rotation[2]), float(rotation[0]),
                              float(rotation[1])))
                # Express relative to the simulator origin
                ue_trans = self._origin.find_relative(ue_trans)
                return uetf.transform_from_unreal(ue_trans)
    return None
def test_pose_from_unreal(self):
    """A known Unreal pose converts to the expected location and quaternion."""
    ue_pose = uetrans.UnrealTransform(
        location=(37, 113, -97),
        rotation=(-21.688360, -31.675301, 31.189531))
    converted = uetrans.transform_from_unreal(ue_pose)
    # Location is axis-remapped and scaled from centimeters to meters
    self.assert_array((0.37, -1.13, -0.97), converted.location)
    expected_quat = tf.quaternions.axangle2quat((1, -3, 2), -np.pi / 4)
    self.assert_close(expected_quat, converted.rotation_quat(w_first=True))
def test_to_unreal_preserves_find_relative_simple(self):
    """find_relative gives the same answer computed in either coordinate frame (axis-aligned case)."""
    pose1 = mytf.Transform((73, -47, -23), (0.92387953, 0, 0.38268343, 0), w_first=True)
    pose2 = mytf.Transform((-19, 83, -61), (0.5, -0.5, 0.5, -0.5), w_first=True)
    expected = pose1.find_relative(pose2)
    # Compute the same relative pose entirely in Unreal coordinates, then map it back
    ue_relative = uetrans.transform_to_unreal(pose1).find_relative(
        uetrans.transform_to_unreal(pose2))
    actual = uetrans.transform_from_unreal(ue_relative)
    self.assert_close(expected.location, actual.location)
    self.assert_close(expected.rotation_quat(), actual.rotation_quat())
def test_to_unreal_preserves_find_relative_complex(self):
    """find_relative gives the same answer computed in either coordinate frame (arbitrary rotation)."""
    pose1 = mytf.Transform(
        (73, -47, -23),
        (0.68400501, 0.37550612, 0.62456579, 0.03240183),
        w_first=True)
    pose2 = mytf.Transform((-19, 83, -61), (0.5, -0.5, 0.5, -0.5), w_first=True)
    expected = pose1.find_relative(pose2)
    # Compute the same relative pose entirely in Unreal coordinates, then map it back
    ue_relative = uetrans.transform_to_unreal(pose1).find_relative(
        uetrans.transform_to_unreal(pose2))
    actual = uetrans.transform_from_unreal(ue_relative)
    self.assert_close(expected.location, actual.location)
    self.assert_close(expected.rotation_quat(), actual.rotation_quat())
def parse_transform(location, rotation):
    """
    Construct a transform from a stored location and rotation.
    Handles changing the coordinate frame from Unreal space to a conventional coordinate frame.

    :param location: dict containing X, Y, and Z
    :param rotation: dict containing W, X, Y, and Z (quaternion components;
        the docstring previously mis-listed these as "W, Z, Y, and Z")
    :return: a Transform object, in a sane coordinate frame and scaled to meters
    """
    ue_camera_pose = ue_tf.UnrealTransform(
        location=(location['X'], location['Y'], location['Z']),
        # Stored rotation is a quaternion; UnrealTransform wants Euler angles
        rotation=ue_tf.quat2euler(
            w=rotation['W'],
            x=rotation['X'],
            y=rotation['Y'],
            z=rotation['Z'],
        ))
    return ue_tf.transform_from_unreal(ue_camera_pose)
def set_camera_pose(self, pose):
    """
    Set the camera pose in the simulator.

    Accepts either an UnrealTransform (used as-is) or a conventional Transform
    (converted to Unreal coordinates first); caches the conventional-frame pose.

    :param pose: The desired camera pose
    :return: None
    """
    if self._client is None:
        return
    if isinstance(pose, uetf.UnrealTransform):
        # Already Unreal-space; cache the conventional-frame equivalent
        self._current_pose = uetf.transform_from_unreal(pose)
    else:
        self._current_pose = pose
        pose = uetf.transform_to_unreal(pose)
    # Map origin-relative coordinates to simulator world coordinates
    pose = self._origin.find_independent(pose)
    loc = pose.location
    self._client.request("vset /camera/0/location {0} {1} {2}".format(
        loc[0], loc[1], loc[2]))
    self._client.request("vset /camera/0/rotation {0} {1} {2}".format(
        pose.pitch, pose.yaw, pose.roll))
def get_obstacle_avoidance_force(self, radius=1, velocity=(1, 0, 0)):
    """
    Get a force for obstacle avoidance.
    The simulator should get all objects within the given radius, and provide a net force
    away from all the objects, scaled by the distance to the objects.

    :param radius: Distance to detect objects, in meters
    :param velocity: The current velocity of the camera, for predicting collisions.
    :return: A repulsive force vector, as a numpy array
    """
    if self._client is not None:
        # Convert the velocity into Unreal coordinates for the request
        ue_velocity = uetf.transform_to_unreal(velocity)
        response = self._client.request(
            "vget /camera/0/avoid {0} {1} {2} {3}".format(
                radius * 100,  # radius is meters; the simulator works in centimeters
                ue_velocity[0], ue_velocity[1], ue_velocity[2]))
        components = response.split(' ')
        if len(components) == 3:
            ue_force = np.array([float(components[0]),
                                 float(components[1]),
                                 float(components[2])])
            # Map the returned force back to the conventional frame
            return uetf.transform_from_unreal(ue_force)
    # No client, or an unparseable response: no repulsive force
    return np.array([0, 0, 0])
def begin(self):
    """
    Start producing images.
    This will trigger any necessary startup code, and will allow get_next_image to be called.
    Return False if there is a problem starting the source.

    :return: True iff the simulator has started correctly.
    """
    # Start the simulator process when it is expected to run on this machine
    if self._host == 'localhost':
        if self._client is not None:
            # Stop the client to free up the port
            self._client.disconnect()
            self._client = None
        self._store_config()
        start_simulator(self._executable)
        # Wait for the UnrealCV server to start, pulling lines from stdout to check
        time.sleep(2)  # Wait, we can't capture some of the output right now
        # while self._simulator_process.poll() is not None:
        #     line = self._simulator_process.stdout.readline()
        #     if 'listening on port {0}'.format(self._port) in line.lower():
        #         # Server has started, break
        #         break
    # Create and connect the client
    if self._client is None:
        self._client = unrealcv.Client((self._host, self._port))
    if not self._client.isconnected():
        # Try and connect to the server up to 10 times, waiting 2s between tries
        # (comment previously said 3 times, contradicting range(10))
        for _ in range(10):
            self._client.connect()
            if self._client.isconnected():
                break
            else:
                time.sleep(2)
        if not self._client.isconnected():
            # Cannot connect to the server, shutdown completely.
            self.shutdown()
    # Set camera state from configuration
    if self._client is not None and self._client.isconnected():
        # Set camera properties
        self.set_field_of_view(self._fov)
        self.set_fstop(self._aperture)
        self.set_enable_dof(self._use_dof)
        if self._use_dof:  # Don't bother setting dof properties if dof is disabled
            if self._focus_distance is None:
                self.set_autofocus(True)
            else:
                self.set_focus_distance(self._focus_distance)
        # Set quality settings
        self._client.request("vset /quality/texture-mipmap-bias {0}".format(
            self._texture_mipmap_bias))
        self._client.request("vset /quality/normal-maps-enabled {0}".format(
            int(self._normal_maps_enabled)))
        self._client.request("vset /quality/roughness-enabled {0}".format(
            int(self._roughness_enabled)))
        self._client.request("vset /quality/geometry-decimation {0}".format(
            self._geometry_decimation))
        # Get the initial camera pose, which will be set by playerstart in the level
        location = self._client.request("vget /camera/0/location")
        location = location.split(' ')
        if len(location) == 3:
            rotation = self._client.request("vget /camera/0/rotation")
            rotation = rotation.split(' ')
            if len(rotation) == 3:
                ue_trans = uetf.UnrealTransform(
                    location=(float(location[0]), float(location[1]),
                              float(location[2])),
                    # Reorder to roll, pitch, yaw; UnrealCV order is pitch, yaw, roll.
                    # Fixed: the pitch slot previously read location[1] (copy-paste
                    # bug), corrupting the initial rotation with the Y coordinate.
                    rotation=(float(rotation[2]), float(rotation[0]),
                              float(rotation[1])))
                self._current_pose = uetf.transform_from_unreal(ue_trans)
    return self._client is not None and self._client.isconnected()