def _apply_geometric_transformations(self, theta, symbolic):
    if symbolic:
        # theta is the symbolic angle parameterizing the rotation of the current joint
        symbolic_frame_matrix = np.eye(4)

        # Apply translation matrix
        symbolic_frame_matrix = symbolic_frame_matrix * sympy.Matrix(
            geometry.homogeneous_translation_matrix(*self.translation_vector))

        # Apply orientation matrix
        symbolic_frame_matrix = symbolic_frame_matrix * geometry.cartesian_to_homogeneous(
            geometry.rpy_matrix(*self.orientation))

        # Apply rotation matrix
        if self.rotation is not None and self.is_revolute:
            symbolic_frame_matrix = symbolic_frame_matrix * geometry.cartesian_to_homogeneous(
                geometry.symbolic_axis_rotation_matrix(self.rotation, theta),
                matrix_type="sympy")

        symbolic_frame_matrix = sympy.lambdify(theta, symbolic_frame_matrix, "numpy")

        return symbolic_frame_matrix
    else:
        # Init the transformation matrix
        frame_matrix = np.eye(4)

        # First, apply translation matrix
        frame_matrix = np.dot(
            frame_matrix,
            geometry.homogeneous_translation_matrix(*self.translation_vector))

        # Apply orientation
        frame_matrix = np.dot(
            frame_matrix,
            geometry.cartesian_to_homogeneous(geometry.rpy_matrix(*self.orientation)))

        # Apply rotation matrix
        if self.rotation is not None:
            if self.is_revolute:
                frame_matrix = np.dot(
                    frame_matrix,
                    geometry.cartesian_to_homogeneous(
                        geometry.axis_rotation_matrix(self.rotation, theta)))
            else:
                # Prismatic joint: translate along the axis by theta
                frame_matrix = np.dot(
                    frame_matrix,
                    geometry.homogeneous_translation_matrix(*self.rotation * theta))

        return frame_matrix
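
# A minimal, self-contained sketch of what the numeric branch above computes:
# frame = Translation @ Orientation(rpy) @ AxisRotation(axis, theta).
# The helpers below are local stand-ins written only for illustration; they are
# NOT the library's geometry.* functions, and the conventions (R = Rz @ Ry @ Rx
# for rpy, Rodrigues' formula for the axis rotation) are assumptions.
import numpy as np

def _translation_h(t):
    # 4x4 homogeneous translation matrix
    T = np.eye(4)
    T[:3, 3] = t
    return T

def _rpy_h(roll, pitch, yaw):
    # 4x4 homogeneous rotation from roll/pitch/yaw (assumed Z-Y-X convention)
    cr, sr = np.cos(roll), np.sin(roll)
    cp, sp = np.cos(pitch), np.sin(pitch)
    cy, sy = np.cos(yaw), np.sin(yaw)
    Rx = np.array([[1, 0, 0], [0, cr, -sr], [0, sr, cr]])
    Ry = np.array([[cp, 0, sp], [0, 1, 0], [-sp, 0, cp]])
    Rz = np.array([[cy, -sy, 0], [sy, cy, 0], [0, 0, 1]])
    H = np.eye(4)
    H[:3, :3] = Rz @ Ry @ Rx
    return H

def _axis_rotation_h(axis, theta):
    # 4x4 homogeneous rotation by theta around a unit axis (Rodrigues' formula)
    a = np.asarray(axis, dtype=float)
    a = a / np.linalg.norm(a)
    K = np.array([[0, -a[2], a[1]], [a[2], 0, -a[0]], [-a[1], a[0], 0]])
    H = np.eye(4)
    H[:3, :3] = np.eye(3) + np.sin(theta) * K + (1 - np.cos(theta)) * (K @ K)
    return H

# Example: a revolute joint offset by 0.1 m along x, no fixed orientation,
# rotating around z by theta = 0.5 rad.
frame = _translation_h([0.1, 0, 0]) @ _rpy_h(0, 0, 0) @ _axis_rotation_h([0, 0, 1], 0.5)
print(frame)
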
def test_target_frame(self):
    """Tests setting a target frame directly"""
    # Set target frame
    rpyM = geometry.rpy_matrix(0.79, 1.57, 3.14)
    target_frame = geometry.to_transformation_matrix([0.4, 0.3, 0.5], rpyM)
    self.visualizer.set_target_frame(target_frame)

    # Check if the frame of the target node matches the target frame
    frame = self.visualizer.robot.scene.get_pose(self.visualizer.robot.target_node)
    assert_array_equal(frame, target_frame)
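
# A hedged sketch of the 4x4 target frame the test above builds and compares.
# Assumption: geometry.to_transformation_matrix packs a rotation block R and a
# translation vector t as [[R, t], [0, 0, 0, 1]]; verify against the library.
import numpy as np

R = np.eye(3)                 # stand-in for geometry.rpy_matrix(0.79, 1.57, 3.14)
t = np.array([0.4, 0.3, 0.5])

expected = np.eye(4)
expected[:3, :3] = R          # rotation block
expected[:3, 3] = t           # translation column
print(expected)

# When poses go through floating-point transforms, a tolerance-based comparison
# such as numpy.testing.assert_allclose(frame, target_frame, atol=1e-8) can be
# a more robust alternative to assert_array_equal.
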
def get_rotation_axis(self):
    if self.rotation is not None:
        return np.dot(
            geometry.homogeneous_translation_matrix(*self.translation_vector),
            np.dot(
                geometry.cartesian_to_homogeneous(geometry.rpy_matrix(*self.orientation)),
                geometry.cartesian_to_homogeneous_vectors(self.rotation * self.axis_length)
            )
        )
    else:
        raise ValueError("This link doesn't provide a rotation")
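
# A minimal usage sketch for get_rotation_axis, under the assumption that it
# returns a homogeneous 4-vector [x, y, z, w] giving the tip of the joint's
# rotation axis (scaled by axis_length) expressed in the parent frame.
# `link` and `joint_origin` are hypothetical names used only for illustration.
import numpy as np

def axis_segment(link, joint_origin=np.zeros(3)):
    """Return the two 3D endpoints of the joint axis, e.g. for plotting."""
    tip_h = link.get_rotation_axis()
    tip = np.asarray(tip_h[:3]) / tip_h[3]   # drop the homogeneous coordinate
    return joint_origin, tip
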
def convert_relative(self, global_coord: np.ndarray) -> np.ndarray:
    """
    Converts a global coordinate to a coordinate relative to the robot's frame.

    :param global_coord: The coordinate in the global frame
    :return: The coordinate expressed in the robot's frame
    """
    arm_pos = np.array(self.getSelf().getPosition())
    relative_pos = global_coord - arm_pos
    rot_mat = geometry.rpy_matrix(*self.get_rpy())
    relative_pos = rot_mat @ relative_pos
    return relative_pos
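
# Convention note, as a small self-contained sketch: converting a world point
# into a body frame depends on which way the rotation matrix maps vectors. If
# R maps body-frame vectors into the world frame, the inverse (transpose) is
# applied: p_body = R.T @ (p_world - origin). The method above applies the
# matrix directly, which matches a convention where rpy_matrix(*self.get_rpy())
# already yields the world-to-body rotation; check the robot API's convention.
import numpy as np

def world_to_body(p_world, origin, R_body_to_world):
    # p_world, origin: 3-vectors; R_body_to_world: 3x3 rotation (body -> world)
    return R_body_to_world.T @ (np.asarray(p_world) - np.asarray(origin))

def body_to_world(p_body, origin, R_body_to_world):
    return np.asarray(origin) + R_body_to_world @ np.asarray(p_body)

# Round trip: the two conversions are inverses of each other.
R = np.array([[0.0, -1.0, 0.0],
              [1.0,  0.0, 0.0],
              [0.0,  0.0, 1.0]])          # 90 deg rotation about z
origin = np.array([1.0, 2.0, 0.0])
p_world = np.array([2.0, 2.0, 0.5])
p_body = world_to_body(p_world, origin, R)
assert np.allclose(body_to_world(p_body, origin, R), p_world)
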
def set_target_position(self, x, y, z, roll=0.0, pitch=0.0, yaw=0.0):
    """
    Visualize the given position and orientation

    :param x: position x (forward)
    :type x: float
    :param y: position y (horizontal)
    :type y: float
    :param z: position z (vertical)
    :type z: float
    :param roll: rotation around the x-axis in degrees
    :type roll: float
    :param pitch: rotation around the y-axis in degrees
    :type pitch: float
    :param yaw: rotation around the z-axis in degrees
    :type yaw: float
    """
    roll, pitch, yaw = map(math.radians, [roll, pitch, yaw])
    rpyM = geometry.rpy_matrix(roll, pitch, yaw)
    target_frame = geometry.to_transformation_matrix([x, y, z], rpyM)
    self.set_target_frame(target_frame)
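
# A hedged usage sketch: the argument values are hypothetical, and `visualizer`
# stands for an instance of the class this method belongs to. Angles are given
# in degrees, as documented above, and converted to radians inside the method.
visualizer.set_target_position(0.4, 0.3, 0.5, roll=45.0, pitch=0.0, yaw=90.0)
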