Example #1
    def test_rotate(self):
        self.assertApprox(rotate('x', pi / 2),
                          np.matrix('1. 0. 0.;0. 0. -1.;0. 1. 0.'))
        self.assertApprox(rotate('x', pi) * rotate('y', pi), rotate('z', pi))
        m = rotate('x', pi / 3) * rotate('y', pi / 5) * rotate('y', pi / 7)
        self.assertApprox(rpyToMatrix(matrixToRpy(m)), m)
        rpy = np.matrix(list(range(3))).T * pi / 2
        self.assertApprox(matrixToRpy(rpyToMatrix(rpy)), rpy)
Example #2
    def test_rotate(self):
        self.assertApprox(
            rotate('x', pi / 2),
            np.array([[1., 0., 0.], [0., 0., -1.], [0., 1., 0.]]))
        self.assertApprox(
            rotate('x', pi).dot(rotate('y', pi)), rotate('z', pi))
        m = rotate('x', pi / 3).dot(rotate('y',
                                           pi / 5)).dot(rotate('y', pi / 7))
        self.assertApprox(rpyToMatrix(matrixToRpy(m)), m)
        rpy = np.array(list(range(3))) * pi / 2
        self.assertApprox(matrixToRpy(rpyToMatrix(rpy)), rpy)
        self.assertApprox(
            rpyToMatrix(rpy),
            rpyToMatrix(float(rpy[0]), float(rpy[1]), float(rpy[2])))

        # Invalid axis names must raise a ValueError
        with self.assertRaises(ValueError):
            rotate('toto', 10.)
        with self.assertRaises(ValueError):
            rotate('w', 10.)
Example #3
    def test_rotate(self):
        self.assertApprox(rotate('x', pi / 2),
                          np.array([[1., 0., 0.], [0., 0., -1.], [0., 1., 0.]]))
        self.assertApprox(rotate('x', pi).dot(rotate('y', pi)), rotate('z', pi))
        m = rotate('x', pi / 3).dot(rotate('y', pi / 5)).dot(rotate('y', pi / 7))
        self.assertApprox(rpyToMatrix(matrixToRpy(m)), m)
        rpy = np.array(list(range(3))) * pi / 2
        self.assertApprox(matrixToRpy(rpyToMatrix(rpy)), rpy)
        self.assertApprox(rpyToMatrix(rpy),
                          rpyToMatrix(float(rpy[0]), float(rpy[1]), float(rpy[2])))
Example #5
    def test_matrixToRpy(self):
        n = 100
        for _ in range(n):
            quat = pin.Quaternion(np.random.rand(4, 1)).normalized()
            R = quat.toRotationMatrix()

            v = matrixToRpy(R)
            Rprime = rpyToMatrix(v)

            self.assertApprox(Rprime, R)
            self.assertTrue(-pi <= v[0] <= pi)
            self.assertTrue(-pi / 2 <= v[1] <= pi / 2)
            self.assertTrue(-pi <= v[2] <= pi)

        n2 = 100

        # Test singular case theta = pi/2
        Rp = np.array([[0.0, 0.0, 1.0], [0.0, 1.0, 0.0], [-1.0, 0.0, 0.0]])
        for _ in range(n2):
            r = random() * 2 * pi - pi
            y = random() * 2 * pi - pi

            R = AngleAxis(y, np.array([0., 0., 1.])).matrix() \
                .dot(Rp) \
                .dot(AngleAxis(r, np.array([1., 0., 0.])).matrix())

            v = matrixToRpy(R)
            Rprime = rpyToMatrix(v)

            self.assertApprox(Rprime, R)
            self.assertTrue(-pi <= v[0] <= pi)
            self.assertTrue(-pi / 2 <= v[1] <= pi / 2)
            self.assertTrue(-pi <= v[2] <= pi)

        # Test singular case theta = -pi/2
        Rp = np.array([[0.0, 0.0, -1.0], [0.0, 1.0, 0.0], [1.0, 0.0, 0.0]])
        for _ in range(n2):
            r = random() * 2 * pi - pi
            y = random() * 2 * pi - pi

            R = AngleAxis(y, np.array([0., 0., 1.])).matrix() \
                .dot(Rp) \
                .dot(AngleAxis(r, np.array([1., 0., 0.])).matrix())

            v = matrixToRpy(R)
            Rprime = rpyToMatrix(v)

            self.assertApprox(Rprime, R)
            self.assertTrue(-pi <= v[0] <= pi)
            self.assertTrue(-pi / 2 <= v[1] <= pi / 2)
            self.assertTrue(-pi <= v[2] <= pi)
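At the pitch singularity only a combination of roll and yaw remains observable; matrixToRpy resolves the ambiguity by convention, but the round trip still holds. A minimal standalone illustration (assuming the same pinocchio.rpy helpers as in the test above):

R_singular = rpyToMatrix(np.array([0.3, pi / 2, 0.1]))
v = matrixToRpy(R_singular)                   # pitch returned as pi / 2
assert np.allclose(rpyToMatrix(v), R_singular)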
Example #6
    def test_computeRpyJacobianTimeDerivative(self):
        # Check zero at zero velocity
        r = random() * 2 * pi - pi
        p = random() * pi - pi / 2
        y = random() * 2 * pi - pi
        rpy = np.array([r, p, y])
        rpydot = np.zeros(3)
        dj0 = computeRpyJacobianTimeDerivative(rpy, rpydot)
        self.assertTrue((dj0 == np.zeros((3, 3))).all())
        djL = computeRpyJacobianTimeDerivative(rpy, rpydot, pin.LOCAL)
        self.assertTrue((djL == np.zeros((3, 3))).all())
        djW = computeRpyJacobianTimeDerivative(rpy, rpydot, pin.WORLD)
        self.assertTrue((djW == np.zeros((3, 3))).all())
        djA = computeRpyJacobianTimeDerivative(rpy, rpydot,
                                               pin.LOCAL_WORLD_ALIGNED)
        self.assertTrue((djA == np.zeros((3, 3))).all())

        # Check correct identities between different versions
        rpydot = np.random.rand(3)
        dj0 = computeRpyJacobianTimeDerivative(rpy, rpydot)
        djL = computeRpyJacobianTimeDerivative(rpy, rpydot, pin.LOCAL)
        djW = computeRpyJacobianTimeDerivative(rpy, rpydot, pin.WORLD)
        djA = computeRpyJacobianTimeDerivative(rpy, rpydot,
                                               pin.LOCAL_WORLD_ALIGNED)
        self.assertTrue((dj0 == djL).all())
        self.assertTrue((djW == djA).all())

        R = rpyToMatrix(rpy)
        jL = computeRpyJacobian(rpy, pin.LOCAL)
        jW = computeRpyJacobian(rpy, pin.WORLD)
        omegaL = jL.dot(rpydot)
        omegaW = jW.dot(rpydot)
        self.assertApprox(omegaW, R.dot(omegaL))
        self.assertApprox(djW, pin.skew(omegaW).dot(R).dot(jL) + R.dot(djL))
        self.assertApprox(djW, R.dot(pin.skew(omegaL)).dot(jL) + R.dot(djL))
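These identities can also be sanity-checked numerically. A sketch (not part of the original test; assumes the helpers are importable from pinocchio.rpy):

import numpy as np
from pinocchio.rpy import (computeRpyJacobian,
                           computeRpyJacobianTimeDerivative)

rpy = np.random.uniform(-1.0, 1.0, 3)
rpydot = np.random.rand(3)
eps = 1e-8
# dJ/dt = sum_i (dJ/drpy_i) * rpydot_i, by forward finite differences
dj_fd = sum((computeRpyJacobian(rpy + eps * e) - computeRpyJacobian(rpy))
            / eps * rpydot[i] for i, e in enumerate(np.eye(3)))
assert np.allclose(computeRpyJacobianTimeDerivative(rpy, rpydot),
                   dj_fd, atol=1e-5)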
Example #7
    def setCameraTransform(self,
                           translation=np.zeros(3),
                           rotation=np.zeros(3),
                           relative=False):
        # translation : [Px, Py, Pz],
        # rotation : [Roll, Pitch, Yaw]

        R_pnc = rpyToMatrix(np.array(rotation))
        if Viewer.backend == 'gepetto-gui':
            H_abs = SE3(R_pnc, np.array(translation))
            if relative:
                H_orig = XYZQUATToSe3(
                    self._client.getCameraTransform(self._window_id))
                H_abs = H_abs * H_orig
            self._client.setCameraTransform(self._window_id,
                                            se3ToXYZQUAT(H_abs).tolist())
        else:
            if relative:
                raise RuntimeError(
                    "'relative'=True not available with meshcat.")
            import meshcat.transformations as tf
            # Transformation of the camera object
            T_meshcat = tf.translation_matrix(translation)
            self._client.viewer[
                "/Cameras/default/rotated/<object>"].set_transform(T_meshcat)
            # Orientation of the camera object
            # Eigen/Pinocchio stores quaternion coefficients as (x, y, z, w);
            # meshcat's tf.quaternion_matrix expects (w, x, y, z), hence the
            # roll by one.
            Q_pnc = Quaternion(R_pnc).coeffs()
            Q_meshcat = np.roll(Q_pnc, shift=1)
            R_meshcat = tf.quaternion_matrix(Q_meshcat)
            self._client.viewer["/Cameras/default"].set_transform(R_meshcat)
Example #8
def config_sot_to_urdf(q):
    # Convert a SoT configuration (free-flyer orientation given as RPY) to
    # the URDF/Pinocchio convention (free-flyer orientation as a quaternion).
    qUrdf = mat_zeros(39)
    qUrdf[:3, 0] = q[:3, 0]            # free-flyer translation
    quatMat = rpyToMatrix(q[3:6, 0])   # RPY -> rotation matrix
    quatVec = Quaternion(quatMat)      # rotation matrix -> quaternion
    qUrdf[3:7, 0] = quatVec.coeffs()   # (x, y, z, w)
    qUrdf[7:, 0] = q[6:, 0]            # actuated joints, order unchanged
    return qUrdf
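A hedged usage sketch (the 38-entry input layout, i.e. 3 translations, 3 RPY angles and 32 joints, is inferred from the slicing above; mat_zeros is assumed to return a zero column np.matrix):

q_sot = mat_zeros(38)
q_sot[5, 0] = pi / 2                  # free-flyer yaw, given as RPY
q_urdf = config_sot_to_urdf(q_sot)    # 39 entries, free-flyer as quaternion
assert q_urdf.shape[0] == 39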
Example #9
    def test_rpyToMatrix(self):
        r = random() * 2 * pi - pi
        p = random() * pi - pi / 2
        y = random() * 2 * pi - pi

        R = rpyToMatrix(r, p, y)

        Raa = AngleAxis(y, np.array([0., 0., 1.])).matrix() \
            .dot(AngleAxis(p, np.array([0., 1., 0.])).matrix()) \
            .dot(AngleAxis(r, np.array([1., 0., 0.])).matrix())

        self.assertApprox(R, Raa)

        v = np.array([r, p, y])

        Rv = rpyToMatrix(v)

        self.assertApprox(Rv, Raa)
        self.assertApprox(Rv, R)
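The test above pins down the angle convention used throughout these examples: rpyToMatrix(r, p, y) = Rz(yaw) · Ry(pitch) · Rx(roll), i.e. roll is applied first about the fixed x-axis. For reference, a plain-numpy sketch of that product (an illustration, not Pinocchio's actual implementation):

import numpy as np
from numpy import cos, sin

def rpy_to_matrix_ref(r, p, y):
    # R = Rz(y) @ Ry(p) @ Rx(r): extrinsic X-Y-Z, a.k.a. intrinsic Z-Y'-X''
    Rx = np.array([[1., 0., 0.], [0., cos(r), -sin(r)], [0., sin(r), cos(r)]])
    Ry = np.array([[cos(p), 0., sin(p)], [0., 1., 0.], [-sin(p), 0., cos(p)]])
    Rz = np.array([[cos(y), -sin(y), 0.], [sin(y), cos(y), 0.], [0., 0., 1.]])
    return Rz @ Ry @ Rx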
Example #10
def _origin_info_to_se3(origin_info: Optional[ET.Element]) -> pin.SE3:
    """Convert an XML element with str attribute 'xyz' [X, Y, Z] encoding the
    translation and 'rpy' encoding [Roll, Pitch, Yaw] into a Pinocchio.SE3
    object.
    """
    if origin_info is not None:
        origin_xyz = _string_to_array(origin_info.attrib['xyz'])
        origin_rpy = _string_to_array(origin_info.attrib['rpy'])
        return pin.SE3(rpyToMatrix(origin_rpy), origin_xyz)
    else:
        return pin.SE3.Identity()
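A minimal usage sketch (assuming _string_to_array parses the whitespace-separated values into an np.ndarray):

import xml.etree.ElementTree as ET

origin = ET.fromstring('<origin xyz="0.1 0.0 0.2" rpy="0.0 1.57 0.0"/>')
pose = _origin_info_to_se3(origin)   # SE3 with rotation rpyToMatrix([0., 1.57, 0.])
assert _origin_info_to_se3(None).isApprox(pin.SE3.Identity())  # missing tag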
Example #11
    def test_computeRpyJacobian(self):
        # Check identity at zero
        rpy = np.zeros(3)
        j0 = computeRpyJacobian(rpy)
        self.assertTrue((j0 == np.eye(3)).all())
        jL = computeRpyJacobian(rpy, pin.LOCAL)
        self.assertTrue((jL == np.eye(3)).all())
        jW = computeRpyJacobian(rpy, pin.WORLD)
        self.assertTrue((jW == np.eye(3)).all())
        jA = computeRpyJacobian(rpy, pin.LOCAL_WORLD_ALIGNED)
        self.assertTrue((jA == np.eye(3)).all())

        # Check correct identities between different versions
        r = random() * 2 * pi - pi
        p = random() * pi - pi / 2
        y = random() * 2 * pi - pi
        rpy = np.array([r, p, y])
        R = rpyToMatrix(rpy)
        j0 = computeRpyJacobian(rpy)
        jL = computeRpyJacobian(rpy, pin.LOCAL)
        jW = computeRpyJacobian(rpy, pin.WORLD)
        jA = computeRpyJacobian(rpy, pin.LOCAL_WORLD_ALIGNED)
        self.assertTrue((j0 == jL).all())
        self.assertTrue((jW == jA).all())
        self.assertApprox(jW, R.dot(jL))

        # Check against finite differences
        rpydot = np.random.rand(3)
        eps = 1e-7
        tol = 1e-4

        dRdr = (rpyToMatrix(r + eps, p, y) - R) / eps
        dRdp = (rpyToMatrix(r, p + eps, y) - R) / eps
        dRdy = (rpyToMatrix(r, p, y + eps) - R) / eps
        Rdot = dRdr * rpydot[0] + dRdp * rpydot[1] + dRdy * rpydot[2]

        omegaL = jL.dot(rpydot)
        self.assertTrue(np.allclose(Rdot, R.dot(pin.skew(omegaL)), atol=tol))

        omegaW = jW.dot(rpydot)
        self.assertTrue(np.allclose(Rdot, pin.skew(omegaW).dot(R), atol=tol))
Example #12
def velocityXYZRPYToXYZQuat(xyzrpy: np.ndarray,
                            dxyzrpy: np.ndarray) -> np.ndarray:
    """Convert the derivative of [X,Y,Z,Roll,Pitch,Yaw] to [X,Y,Z,Qx,Qy,Qz,Qw].

    .. warning::
        Linear velocity in XYZRPY must be expressed in the local-world-aligned
        frame, while the returned linear velocity in XYZQuat is in the local
    """
    rpy = xyzrpy[3:]
    R = rpyToMatrix(rpy)
    J_rpy = computeRpyJacobian(rpy)
    return np.concatenate((R.T @ dxyzrpy[:3], J_rpy @ dxyzrpy[3:]))
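A usage sketch (values are illustrative):

xyzrpy = np.array([0.0, 0.0, 1.0, 0.0, 0.2, 0.0])   # pose [X, Y, Z, R, P, Y]
dxyzrpy = np.array([1.0, 0.0, 0.0, 0.0, 0.1, 0.0])  # its time derivative
v = velocityXYZRPYToXYZQuat(xyzrpy, dxyzrpy)
# v[:3]: linear velocity in the local frame; v[3:]: local angular velocity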
Example #13
def config_sot_to_urdf(q):
    # GEPETTO VIEWER: free-flyer 0-6, CHEST HEAD 7-10, LARM 11-17,
    #                 RARM 18-24, LLEG 25-30, RLEG 31-36
    # ROBOT VIEWER:   free-flyer 0-5, RLEG 6-11, LLEG 12-17,
    #                 CHEST HEAD 18-21, RARM 22-28, LARM 29-35
    qUrdf = mat_zeros(37)
    qUrdf[:3, 0] = q[:3, 0]
    quatMat = rpyToMatrix(q[3:6, 0])
    quatVec = Quaternion(quatMat)
    qUrdf[3:7, 0] = quatVec.coeffs()
    qUrdf[7:11, 0] = q[18:22, 0]   # chest-head
    qUrdf[11:18, 0] = q[29:, 0]    # larm
    qUrdf[18:25, 0] = q[22:29, 0]  # rarm
    qUrdf[25:31, 0] = q[12:18, 0]  # lleg
    qUrdf[31:, 0] = q[6:12, 0]     # rleg
    return qUrdf
Example #15
    def setCameraTransform(self, translation, rotation):
        # translation : [Px, Py, Pz],
        # rotation : [Roll, Pitch, Yaw]

        R_pnc = rpyToMatrix(np.array(rotation))
        if Viewer.backend == 'gepetto-gui':
            T_pnc = np.array(translation)
            T_R = SE3(R_pnc, T_pnc)
            self._client.setCameraTransform(self._window_id,
                                            se3ToXYZQUAT(T_R).tolist())
        else:
            import meshcat.transformations as tf
            # Transformation of the camera object
            T_meshcat = tf.translation_matrix(translation)
            self._client.viewer[
                "/Cameras/default/rotated/<object>"].set_transform(T_meshcat)
            # Orientation of the camera object
            # Pinocchio stores quaternion coefficients as (x, y, z, w);
            # meshcat expects (w, x, y, z), hence the roll by one.
            Q_pnc = Quaternion(R_pnc).coeffs()
            Q_meshcat = np.roll(Q_pnc, shift=1)
            R_meshcat = tf.quaternion_matrix(Q_meshcat)
            self._client.viewer["/Cameras/default"].set_transform(R_meshcat)
Example #16
    def initialize(self,
                   urdf_path: str,
                   hardware_path: Optional[str] = None,
                   mesh_path: Optional[str] = None,
                   has_freeflyer: bool = True,
                   avoid_instable_collisions: bool = True,
                   verbose: bool = True) -> None:
        """Initialize the robot.

        :param urdf_path: Path of the URDF file of the robot.
        :param hardware_path: Path of the Jiminy hardware description toml
                              file. Optional: looks for a '*_hardware.toml'
                              file in the same folder with the same name. If
                              not found, no hardware is added to the robot,
                              which is still valid and can be used for
                              display.
        :param mesh_path: Path to the folder containing the URDF meshes. It
                          overrides any absolute mesh path.
                          Optional: the env variable 'JIMINY_DATA_PATH' will
                          be used if available.
        :param has_freeflyer: Whether the robot is fixed-base w.r.t. its root
                              link or can move freely in the world.
        :param avoid_instable_collisions: Prevent numerical instabilities by
                                          replacing each collision mesh by the
                                          vertices of its minimal-volume
                                          bounding box, and each primitive box
                                          by its own vertices.
        :param verbose: Whether or not to print warnings.
        """
        # Handle verbosity level
        if verbose:
            logger.setLevel(logging.DEBUG)
        else:
            logger.setLevel(logging.ERROR)

        # Backup the original URDF path
        self.urdf_path_orig = urdf_path

        # Fix the URDF mesh paths
        if mesh_path is not None:
            urdf_path = _fix_urdf_mesh_path(urdf_path, mesh_path)

        # Initialize the robot without motors nor sensors
        if mesh_path is not None:
            mesh_root_dirs = [mesh_path]
        else:
            mesh_root_dirs = [os.path.dirname(urdf_path)]
        mesh_env_path = os.environ.get('JIMINY_DATA_PATH', None)
        if mesh_env_path is not None:
            mesh_root_dirs += [mesh_env_path]
        return_code = super().initialize(urdf_path, has_freeflyer,
                                         mesh_root_dirs)

        if return_code != jiminy.hresult_t.SUCCESS:
            raise ValueError(
                "Impossible to load the URDF file. Either the file is "
                "corrupted or does not exist.")

        # Load the hardware description file if available
        if hardware_path is None:
            hardware_path = str(
                pathlib.Path(
                    self.urdf_path_orig).with_suffix('')) + '_hardware.toml'

        self.hardware_path = hardware_path
        if not os.path.exists(hardware_path):
            if hardware_path:
                logger.warning(
                    "Hardware configuration file not found. Not adding any "
                    "hardware to the robot.\nA default file can be generated "
                    "using the 'generate_hardware_description_file' method.")
            return
        hardware_info = toml.load(hardware_path)
        self.extra_info = hardware_info.pop('Global', {})
        motors_info = hardware_info.pop('Motor', {})
        sensors_info = hardware_info.pop('Sensor', {})

        # Parse the URDF
        tree = ET.parse(urdf_path)
        root = tree.getroot()

        # Extract the list of bodies having visual and collision meshes or
        # primitives.
        geometry_info = {
            'visual': {
                'primitive': None,
                'mesh': None
            },
            'collision': {
                'primitive': None,
                'mesh': None
            }
        }
        for geometry_type in geometry_info.keys():
            geometry_links = set(
                link.get('name') for link in root.findall(
                    f"./link/{geometry_type}/geometry/../.."))
            mesh_links = [
                name for name in geometry_links if 'mesh' in set(
                    link[0].tag
                    for link in root.find(f"./link[@name='{name}']").findall(
                        f'{geometry_type}/geometry'))
            ]
            primitive_links = [
                name for name in geometry_links if set(
                    link[0].tag
                    for link in root.find(f"./link[@name='{name}']").findall(
                        f'{geometry_type}/geometry')).difference({'mesh'})
            ]
            geometry_info[geometry_type]['mesh'] = mesh_links
            geometry_info[geometry_type]['primitive'] = primitive_links

        # Check the collision bodies to make sure they are associated with
        # supported collision geometries; if not, fix the issue after
        # emitting a warning.
        collision_bodies_names = self.extra_info.pop('collisionBodiesNames',
                                                     [])
        contact_frames_names = self.extra_info.pop('contactFramesNames', [])
        for body_name in collision_bodies_names.copy():
            # Filter out the different cases.
            # After this filter, we know that there is no collision geometry
            # associated with the body but there is a visual mesh, or that
            # there are only collision meshes.
            if body_name in geometry_info['collision']['mesh'] and \
                    body_name in geometry_info['collision']['primitive']:
                if not avoid_instable_collisions:
                    continue
                logger.warning(
                    "Collision body having both primitive and mesh geometries "
                    "is not supported. Enabling only primitive collision for "
                    "this body.")
                continue
            elif body_name in geometry_info['collision']['primitive']:
                pass
            elif body_name in geometry_info['collision']['mesh']:
                if not avoid_instable_collisions:
                    continue
                logger.warning(
                    "Collision body associated with mesh geometry is not "
                    "supported for now. Replacing it by contact points at the "
                    "vertices of the minimal volume bounding box.")
            elif body_name not in geometry_info['visual']['mesh']:
                logger.warning(
                    "No visual mesh nor collision geometry associated with "
                    "the collision body. Fallback to adding a single contact "
                    "point at body frame.")
                contact_frames_names.append(body_name)
                continue
            else:
                logger.warning(
                    "No collision geometry associated with the collision "
                    "body. Fallback to replacing it by contact points at "
                    "the vertices of the minimal volume bounding box of the "
                    "available visual meshes.")
            # Check whether collision box primitives are available
            body_link = root.find(f"./link[@name='{body_name}']")
            collision_box_sizes_info, collision_box_origin_info = [], []
            for link in body_link.findall('collision'):
                box_link = link.find('geometry/box')
                if box_link is not None:
                    collision_box_sizes_info.append(box_link.get('size'))
                    collision_box_origin_info.append(link.find('origin'))

            # Replace the collision boxes by contact points, if any
            if collision_box_sizes_info:
                if not avoid_instable_collisions:
                    continue
                logger.warning(
                    "Collision body associated with box geometry is not "
                    "numerically stable for now. Replacing it by contact "
                    "points at the vertices.")

                for i, (box_size_info, box_origin_info) in enumerate(
                        zip(collision_box_sizes_info,
                            collision_box_origin_info)):
                    box_size = list(map(float, box_size_info.split()))
                    box_origin = _origin_info_to_se3(box_origin_info)
                    vertices = [
                        e.flatten() for e in np.meshgrid(*[
                            0.5 * v * np.array([-1.0, 1.0]) for v in box_size
                        ])
                    ]
                    for j, (x, y, z) in enumerate(zip(*vertices)):
                        frame_name = "_".join(
                            (body_name, "CollisionBox", str(i), str(j)))
                        vertex_pos_rel = pin.SE3(np.eye(3), np.array([x, y,
                                                                      z]))
                        frame_transform = box_origin.act(vertex_pos_rel)
                        self.add_frame(frame_name, body_name, frame_transform)
                        contact_frames_names.append(frame_name)
            elif body_name in geometry_info['collision']['primitive']:
                # Do nothing if the primitive is not a box. It should be fine.
                continue

            # Remove the body from the collision detection set
            collision_bodies_names.remove(body_name)

            # Skip to the next body if its collision boxes were replaced by
            # contact points
            if collision_box_sizes_info:
                continue

            # Extract meshes paths and origins.
            if body_name in geometry_info['collision']['mesh']:
                mesh_links = body_link.findall('collision')
            else:
                mesh_links = body_link.findall('visual')
            mesh_paths = [
                link.find('geometry/mesh').get('filename')
                for link in mesh_links
            ]
            mesh_scales = [
                _string_to_array(
                    link.find('geometry/mesh').get('scale', '1.0 1.0 1.0'))
                for link in mesh_links
            ]
            mesh_origins = []
            for link in mesh_links:
                mesh_origin_info = link.find('origin')
                mesh_origin_transform = \
                    _origin_info_to_se3(mesh_origin_info)
                mesh_origins.append(mesh_origin_transform)

            # Replace the collision body by contact points
            for mesh_path, mesh_scale, mesh_origin in zip(
                    mesh_paths, mesh_scales, mesh_origins):
                # Replace relative mesh path by absolute one
                if mesh_path.startswith("package://"):
                    mesh_path_orig = mesh_path
                    for root_dir in mesh_root_dirs:
                        mesh_path = mesh_path_orig.replace(
                            "package:/", root_dir)
                        if os.path.exists(mesh_path):
                            break

                # Compute the minimal volume bounding box, then add new frames
                # to the robot model at its vertices and register contact
                # points at their location.
                try:
                    mesh = trimesh.load(mesh_path)
                except ValueError:  # Mesh file is not available
                    continue
                box = mesh.bounding_box_oriented
                for i in range(8):
                    frame_name = "_".join((body_name, "BoundingBox", str(i)))
                    frame_transform_rel = pin.SE3(
                        np.eye(3), mesh_scale * np.asarray(box.vertices[i]))
                    frame_transform = mesh_origin.act(frame_transform_rel)
                    self.add_frame(frame_name, body_name, frame_transform)
                    contact_frames_names.append(frame_name)

        # Add the collision bodies and contact points.
        # Note that this must be done before adding the sensors because
        # contact sensors require contact points to be defined.
        # Mesh collision is not numerically stable for now, so it is disabled.
        self.add_collision_bodies(collision_bodies_names,
                                  ignore_meshes=avoid_instable_collisions)
        self.add_contact_points(list(set(contact_frames_names)))

        # Add the motors to the robot
        for motor_type, motors_descr in motors_info.items():
            for motor_name, motor_descr in motors_descr.items():
                # Create the motor and attach it
                motor = getattr(jiminy, motor_type)(motor_name)
                self.attach_motor(motor)

                # Initialize the motor
                joint_name = motor_descr.pop('joint_name')
                motor.initialize(joint_name)

                # Set the motor options
                options = motor.get_options()
                option_fields = options.keys()
                for name, value in motor_descr.items():
                    if name not in option_fields:
                        logger.warning(
                            f"'{name}' is not a valid option for the motor "
                            f"{motor_name} of type {motor_type}.")
                    options[name] = value
                options['enableArmature'] = True
                motor.set_options(options)

        # Add the sensors to the robot
        for sensor_type, sensors_descr in sensors_info.items():
            for sensor_name, sensor_descr in sensors_descr.items():
                # Create the sensor and attach it
                sensor = getattr(jiminy, sensor_type)(sensor_name)
                self.attach_sensor(sensor)

                # Initialize the sensor
                if sensor_type == encoder.type:
                    joint_name = sensor_descr.pop('joint_name')
                    sensor.initialize(joint_name)
                elif sensor_type == effort.type:
                    motor_name = sensor_descr.pop('motor_name')
                    sensor.initialize(motor_name)
                elif sensor_type == contact.type:
                    frame_name = sensor_descr.pop('frame_name')
                    sensor.initialize(frame_name)
                elif sensor_type in [force.type, imu.type]:
                    # Create the frame and add it to the robot model
                    frame_name = sensor_descr.pop('frame_name', None)

                    # Create a new frame if no frame name has been specified.
                    # In that case, the body name must be provided instead.
                    if frame_name is None:
                        # Get the body name
                        body_name = sensor_descr.pop('body_name')

                        # Generate a frame name both intelligible and available
                        i = 0
                        frame_name = "_".join(
                            (sensor_name, sensor_type, "Frame"))
                        while self.pinocchio_model.existFrame(frame_name):
                            frame_name = "_".join(
                                (sensor_name, sensor_type, "Frame", str(i)))
                            i += 1

                        # Compute SE3 object representing the frame placement
                        frame_pose_xyzrpy = np.array(
                            sensor_descr.pop('frame_pose'))
                        frame_trans = frame_pose_xyzrpy[:3]
                        frame_rot = rpyToMatrix(frame_pose_xyzrpy[3:])
                        frame_placement = pin.SE3(frame_rot, frame_trans)

                        # Add the frame to the robot model
                        self.add_frame(frame_name, body_name, frame_placement)

                    # Initialize the sensor
                    sensor.initialize(frame_name)
                else:
                    raise ValueError(f"Unsupported sensor type {sensor_type}.")

                # Set the sensor options
                options = sensor.get_options()
                option_fields = options.keys()
                for name, value in sensor_descr.items():
                    if name not in option_fields:
                        logger.warning(
                            f"'{name}' is not a valid option for the sensor "
                            f"{sensor_name} of type {sensor_type}.")
                    options[name] = value
                sensor.set_options(options)
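A minimal usage sketch (the file names are hypothetical, and BaseJiminyRobot is assumed to be the concrete class exposing this method, as in jiminy_py):

robot = BaseJiminyRobot()
robot.initialize('my_robot.urdf',       # hypothetical URDF path
                 hardware_path=None,    # falls back to 'my_robot_hardware.toml'
                 mesh_path=None,        # falls back to the URDF folder
                 has_freeflyer=True,
                 verbose=False)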
Example #17
def XYZRPYToSe3(xyzrpy):
    return SE3(rpyToMatrix(xyzrpy[3:]), xyzrpy[:3])
Example #18
def XYZRPYToSE3(xyzrpy):
    """Convert [X,Y,Z,Roll,Pitch,Yaw] vector to Pinocchio SE3 object.
    """
    return pin.SE3(rpyToMatrix(xyzrpy[3:]), xyzrpy[:3])
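A usage sketch showing the round trip (assumes matrixToRpy is imported from pinocchio.rpy alongside rpyToMatrix):

xyzrpy = np.array([0.5, 0.0, 1.0, 0.0, pi / 4, 0.0])
M = XYZRPYToSE3(xyzrpy)
assert np.allclose(M.translation, xyzrpy[:3])
assert np.allclose(matrixToRpy(M.rotation), xyzrpy[3:])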
Example #19
    def test_imu_sensor(self):
        """
        @brief   Test IMU sensor on pendulum motion.

        @details Note that the actual expected solution of the pendulum motion
                 is used to compute the expected IMU data, instead of the
                 result of the simulation done by jiminy itself. So this test
                 is checking at the same time that the result of the simulation
                 matches the solution, and that the sensor IMU data are valid.
                 Though it is redundant, it validates that an IMU mounted on a
                 pendulum gives the signal one would expect from an IMU on a
                 pendulum, which is what a user would expect. Moreover, the
                 Jiminy output log does not feature the acceleration, so this
                 test also indirectly checks that the acceleration computed by
                 Jiminy is valid.

        @remark  Since we don't have a simple analytical expression for the
                 solution of a (nonlinear) pendulum motion, we perform the
                 simulation in python, with the same integrator.
        """
        # Create an engine: no controller and no internal dynamics
        engine = jiminy.Engine()
        setup_controller_and_engine(engine, self.robot)

        # Run simulation and extract log data
        x0 = np.array([0.1, 0.1])
        tf = 2.0
        time, quat_jiminy, gyro_jiminy, accel_jiminy = \
            SimulateSimplePendulum._simulate_and_get_imu_data_evolution(
                engine, tf, x0, split=True)

        # Pendulum dynamics
        def dynamics(t, x):
            return np.stack([x[..., 1], self.g / self.l * np.sin(x[..., 0])],
                            axis=-1)

        # Integrate this non-linear dynamics
        x_rk_python = integrate_dynamics(time, x0, dynamics)

        # Compute sensor acceleration, i.e. acceleration in polar coordinates
        theta = x_rk_python[:, 0]
        dtheta = x_rk_python[:, 1]

        # Acceleration: to resolve the algebraic loop (the current
        # acceleration is a function of the input, which is itself a function
        # of the sensor signal), sensor data is computed using q_t, v_t, a_t
        ddtheta = dynamics(0.0, x_rk_python)[:, 1]

        expected_accel = np.stack([
            -self.l * ddtheta + self.g * np.sin(theta),
            np.zeros_like(theta),
            self.l * dtheta**2 - self.g * np.cos(theta)], axis=-1)
        expected_gyro = np.stack(
            [np.zeros_like(theta), dtheta,
             np.zeros_like(theta)], axis=-1)

        expected_quat = np.stack([
            Quaternion(rpyToMatrix(np.array([0., t, 0.]))).coeffs()
            for t in theta], axis=0)

        # Compare sensor signal, ignoring first iterations that correspond to
        # system initialization
        self.assertTrue(
            np.allclose(expected_quat[2:, :],
                        quat_jiminy[2:, :],
                        atol=TOLERANCE))
        self.assertTrue(
            np.allclose(expected_gyro[2:, :],
                        gyro_jiminy[2:, :],
                        atol=TOLERANCE))
        self.assertTrue(
            np.allclose(expected_accel[2:, :],
                        accel_jiminy[2:, :],
                        atol=TOLERANCE))
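integrate_dynamics is a test utility whose implementation is not shown here; a plausible stand-in (an assumption, not the actual helper) is a fixed-step RK4 integrator over the logged timestamps:

import numpy as np

def integrate_dynamics_sketch(time, x0, dynamics):
    # Classic RK4 between consecutive log samples
    x = np.empty((len(time), len(x0)))
    x[0] = x0
    for k in range(len(time) - 1):
        h = time[k + 1] - time[k]
        k1 = dynamics(time[k], x[k])
        k2 = dynamics(time[k] + h / 2, x[k] + h / 2 * k1)
        k3 = dynamics(time[k] + h / 2, x[k] + h / 2 * k2)
        k4 = dynamics(time[k] + h, x[k] + h * k3)
        x[k + 1] = x[k] + h / 6 * (k1 + 2 * k2 + 2 * k3 + k4)
    return x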
Example #20
    def test_imu_sensor(self):
        """
        @brief   Test IMU sensor on pendulum motion.

        @details Note that the actual expected solution of the pendulum motion is
                 used to compute the expected IMU data, instead of the result of
                 the simulation done by jiminy itself. So this test is checking at
                 the same time that the result of the simulation matches the
                 solution, and that the sensor IMU data are valid. Though it is
                 redundant, it validates that an IMU mounted on a pendulum gives
                 the signal one would expect from an IMU on a pendulum, which is
                 what a user would expect. Moreover, the Jiminy output log does
                 not feature the acceleration, so this test also indirectly
                 checks that the acceleration computed by Jiminy is valid.

        @remark  Since we don't have a simple analytical expression for the
                 solution of a (nonlinear) pendulum motion, we perform the
                 simulation in python, with the same integrator.
        """
        # Add IMU
        imu_sensor = jiminy.ImuSensor("PendulumLink")
        self.robot.attach_sensor(imu_sensor)
        imu_sensor.initialize("PendulumLink")

        # Create an engine: no controller and no internal dynamics
        engine = jiminy.Engine()
        engine.initialize(self.robot)

        x0 = np.array([0.1, 0.1])
        tf = 2.0

        # Run simulation
        engine.simulate(tf, x0)
        log_data, _ = engine.get_log()
        time = log_data['Global.Time']
        quat_jiminy = np.stack(
            [log_data['PendulumLink.Quat' + s] for s in ['x', 'y', 'z', 'w']],
            axis=-1)
        gyro_jiminy = np.stack(
            [log_data['PendulumLink.Gyro' + s] for s in ['x', 'y', 'z']],
            axis=-1)
        accel_jiminy = np.stack(
            [log_data['PendulumLink.Accel' + s] for s in ['x', 'y', 'z']],
            axis=-1)

        # System dynamics: get length and inertia
        l = -self.robot.pinocchio_model_th.inertias[1].lever[2]
        g = self.robot.pinocchio_model.gravity.linear[2]

        # Pendulum dynamics
        def dynamics(t, x):
            return np.stack([x[..., 1], g / l * np.sin(x[..., 0])], axis=-1)

        # Integrate this non-linear dynamics
        x_rk_python = integrate_dynamics(time, x0, dynamics)

        # Compute sensor acceleration, i.e. acceleration in polar coordinates
        theta = x_rk_python[:, 0]
        dtheta = x_rk_python[:, 1]

        # Acceleration: to resolve the algebraic loop (the current
        # acceleration is a function of the input, which is itself a function
        # of the sensor signal), sensor data is computed using q_t, v_t,
        # a_(t-1)
        ddtheta = np.concatenate((
            np.zeros(1), dynamics(0.0, x_rk_python)[:-1, 1]))

        expected_accel = np.stack([
            -l * ddtheta + g * np.sin(theta),
            np.zeros_like(theta),
            l * dtheta**2 - g * np.cos(theta)], axis=-1)
        expected_gyro = np.stack(
            [np.zeros_like(theta), dtheta,
             np.zeros_like(theta)], axis=-1)

        expected_quat = np.stack([
            Quaternion(rpyToMatrix(np.array([0., t, 0.]))).coeffs()
            for t in theta], axis=0)

        # Compare sensor signal, ignoring first iterations that correspond to
        # system initialization
        self.assertTrue(
            np.allclose(expected_quat[2:, :],
                        quat_jiminy[2:, :],
                        atol=TOLERANCE))
        self.assertTrue(
            np.allclose(expected_gyro[2:, :],
                        gyro_jiminy[2:, :],
                        atol=TOLERANCE))
        self.assertTrue(
            np.allclose(expected_accel[2:, :],
                        accel_jiminy[2:, :],
                        atol=TOLERANCE))
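Example #21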
def get_transform(origin):
    # Parse a URDF <origin> element into an SE3 transform
    import numpy as np
    from pinocchio import SE3, rpy
    _xyz = [float(v) for v in origin.attrib.get('xyz', '0 0 0').split(' ')]
    _rpy = [float(v) for v in origin.attrib.get('rpy', '0 0 0').split(' ')]
    return SE3(rpy.rpyToMatrix(*_rpy), np.array(_xyz))