Example #1
def write_quaterniondata_to_bvh(bvh_filename, quaternion_data):
    out_seq = []
    for one_frame_quat in quaternion_data:
        # hip position
        out_frame = [one_frame_quat[0], one_frame_quat[1], one_frame_quat[2]]
        # hip Euler rotation
        hip_x, hip_y, hip_z = euler.quat2euler(one_frame_quat[3:7], 'sxyz')
        # note: in CMU BVH files the hip rotation channel order is Z, Y, X
        out_frame += [np.degrees(hip_z), np.degrees(hip_y), np.degrees(hip_x)]
        for joint in skeleton:
            if ("hip" not in joint) and (len(skeleton[joint]["channels"]) == 3):
                index = joint_index[joint]
                y, x, z = euler.quat2euler(
                    one_frame_quat[3 + index * 4:3 + index * 4 + 4], 'syxz')
                # note: in CMU BVH files the joint rotation channel order is Z, X, Y
                out_frame += [np.degrees(z), np.degrees(x), np.degrees(y)]
        out_seq.append(out_frame)
    # out_seq now has shape seq_len x (3 + 3 * joint_num)
    out_seq = np.round(np.array(out_seq), 6)
    write_frames(standard_bvh_file, bvh_filename, out_seq)
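A self-contained sketch of the per-joint conversion above (assuming only numpy and transforms3d; skeleton, joint_index, write_frames, and standard_bvh_file come from the surrounding project):

import numpy as np
from transforms3d import euler

# One joint quaternion (w, x, y, z) -> degrees in the CMU BVH channel order Z, X, Y.
y, x, z = euler.quat2euler([1.0, 0.0, 0.0, 0.0], 'syxz')
channels_zxy = np.degrees([z, x, y])
print(channels_zxy)  # -> [0. 0. 0.]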
Example #2
def get_pose(pose):
    pos, quat = pose_4x4_to_pos_quat(pose)
    # quat2euler returns radians; convert to degrees
    alpha, beta, gamma = np.degrees(quat2euler(quat))
    x, y, z = pos[0], pos[1], pos[2]
    return x, y, z, alpha, beta, gamma
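A quick usage sketch, assuming pose_4x4_to_pos_quat decomposes a homogeneous 4x4 matrix into a position vector and a w-first quaternion:

import numpy as np

x, y, z, alpha, beta, gamma = get_pose(np.eye(4))  # identity pose
print(x, y, z, alpha, beta, gamma)  # -> all approximately 0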
Example #3
def poses_m_to_px(as_pose,
                  img_size_px,
                  world_size_px,
                  world_size_m,
                  batch_dim=False):
    world_size_px = np.asarray(world_size_px)
    pos = as_pose.position
    rot = as_pose.orientation

    # Convert torch tensors to numpy arrays if needed
    if hasattr(pos, "is_cuda"):
        pos = pos.data.cpu().numpy()
        rot = rot.data.cpu().numpy()

    if len(pos.shape) == 1:
        pos = pos[np.newaxis, :]

    pos_img = pos_m_to_px(pos[:, 0:2], img_size_px, world_size_m,
                          world_size_px)

    yaws = []
    if batch_dim:
        for i in range(rot.shape[0]):
            roll, pitch, yaw = euler.quat2euler(rot[i])
            yaws.append(yaw)
    else:
        roll, pitch, yaw = euler.quat2euler(rot)
        yaws.append(yaw)
        pos_img = pos_img[0]

    if batch_dim:
        # Add an axis so that orientation becomes Bx1 instead of just B
        out = Pose(pos_img, np.asarray(yaws)[:, np.newaxis])
    else:
        out = Pose(pos_img, yaws[0])

    return out
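When only yaw is needed, the per-element quat2euler loop can be replaced by a vectorized extraction. A hedged sketch for w-x-y-z quaternions (batch_yaw is a hypothetical helper, not part of the original module):

import numpy as np

def batch_yaw(quats: np.ndarray) -> np.ndarray:
    """Yaw (rotation about z) for an array of w-x-y-z quaternions, shape (B, 4)."""
    w, x, y, z = quats[:, 0], quats[:, 1], quats[:, 2], quats[:, 3]
    return np.arctan2(2.0 * (w * z + x * y), 1.0 - 2.0 * (y * y + z * z))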
Example #4
    def assertRobotPose(self,
                        pose: geometry_msgs.msg.Pose,
                        robot_name: str = "amy",
                        *,
                        lin_threshold: float = 0.5,
                        x_threshold: float = None,
                        y_threshold: float = None,
                        z_threshold: float = None,
                        ang_threshold: float = math.tau / 8,
                        roll_threshold: float = None,
                        pitch_threshold: float = None,
                        yaw_threshold: float = None):
        """
        Assert that a robot is in a specified pose or at least close to it.

        Use :func:`assertRobotPosition` if only a position assertion is needed, since it is much simpler.

        :param pose: Absolute pose in Webots coordinates in which the robot should be.
        :param robot_name: The robot whose pose should be verified.
            Defaults to amy, which is the only robot in single-robot simulations.
        :param lin_threshold: Maximum allowed linear (position) deviation from the specified position.
            By default this allows equal deviation on all three axes; each axis can be overridden
            by its axis-specific argument.
        :param x_threshold: Maximum allowed deviation from the position on the x axis
        :param y_threshold: Maximum allowed deviation from the position on the y axis
        :param z_threshold: Maximum allowed deviation from the position on the z axis
        :param ang_threshold: Maximum allowed angular (rotation) deviation in radians from the
            specified rotation.
            By default this allows equal deviation on all three rotations; each rotation can be
            overridden by its specific argument.
        :param roll_threshold: Maximum allowed roll deviation
        :param pitch_threshold: Maximum allowed pitch deviation
        :param yaw_threshold: Maximum allowed yaw deviation
        """
        # set defaults (explicit None checks so a threshold of 0 is respected)
        roll_threshold = roll_threshold if roll_threshold is not None else ang_threshold
        pitch_threshold = pitch_threshold if pitch_threshold is not None else ang_threshold
        yaw_threshold = yaw_threshold if yaw_threshold is not None else ang_threshold

        # calculate rpy values from quaternions
        real_orientation = self.get_robot_pose().orientation
        real_rpy = quat2euler([real_orientation.w, real_orientation.x, real_orientation.y, real_orientation.z])
        goal_rpy = quat2euler([pose.orientation.w, pose.orientation.x, pose.orientation.y, pose.orientation.z])

        # assert pose
        self.assertRobotPosition(position=pose.position, robot_name=robot_name, threshold=lin_threshold,
                                 x_threshold=x_threshold, y_threshold=y_threshold, z_threshold=z_threshold)
        try:
            self.assertInRange(goal_rpy[0], (real_rpy[0] - roll_threshold, real_rpy[0] + roll_threshold))
            self.assertInRange(goal_rpy[1], (real_rpy[1] - pitch_threshold, real_rpy[1] + pitch_threshold))
            self.assertInRange(goal_rpy[2], (real_rpy[2] - yaw_threshold, real_rpy[2] + yaw_threshold))
        except AssertionError:
            raise AssertionError(
                f"Robot orientation is not at (or close to) the target orientation:\n{goal_rpy}\nActual orientation:\n{real_rpy}")
Example #5
    def on_imu_received(self, msg):
        """
        Process incoming IMU messages.
        :param sensor_msgs.msg.Imu msg: incoming IMU message
        """
        q = msg.orientation
        _, _, new_heading = quat2euler([q.w, q.x, q.y, q.z])
        if self.init_heading is not None:
            self.int += to_angle(new_heading - self.old_heading)
            self.pub.publish(Float32(self.int / (2 * pi)))
        else:
            self.init_heading = new_heading
        self.old_heading = new_heading
Example #6
def calculate_imu_angle_quaternion(angle_velocity, delta_time, imud):
    quaternion = Quaternion([1, 0, 0, 0])
    imu_angle = np.array(quat2euler(quaternion.vector))
    imu_time = []
    imu_time.append(imud['ts'][0][0])
    for i in range(len(angle_velocity)):
        temp = angle_velocity[i][:] * delta_time[i]
        quaternion = quaternion.multiply(
            Quaternion(euler2quat(temp[0], temp[1], temp[2])))

        imu_angle = np.vstack((imu_angle, quat2euler(quaternion.vector)))
        imu_time.append(imud['ts'][0][i + 1])
    return imu_angle, imu_time
Example #7
def imu_state_update(state: np.ndarray,
                     imu_input: np.ndarray,
                     time_interval=0.01) -> np.ndarray:
    '''
    One state-transition step based on the transforms3d library.
    :param state: [position (3), velocity (3), roll/pitch/yaw (3)]
    :param imu_input: [acceleration (3), angular velocity (3)] in the body frame
    :param time_interval: integration step in seconds
    :return: the updated state
    '''
    q = euler.euler2quat(state[6], state[7], state[8])
    w = imu_input[3:6] * time_interval
    r_q = euler.euler2quat(w[0], w[1], w[2])
    # Compose quaternions properly; element-wise `q * r_q` on numpy arrays
    # is not quaternion multiplication.
    q = quaternions.qmult(q, r_q)

    acc = quaternions.rotate_vector(imu_input[0:3], q) + np.asarray((0, 0, -9.8))
    state[0:3] = state[0:3] + state[3:6] * time_interval
    state[3:6] = state[3:6] + acc * time_interval
    state[6:9] = euler.quat2euler(q)

    return state
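A hedged usage sketch (the state layout is inferred from the assignments above; the sample values are illustrative only):

import numpy as np

state = np.zeros(9)  # at rest at the origin, level orientation
imu_sample = np.array([0.0, 0.0, 9.8, 0.0, 0.0, 0.1])  # gravity plus a slow yaw rate
for _ in range(100):  # integrate one second at 100 Hz
    state = imu_state_update(state, imu_sample, time_interval=0.01)
print(state[8])  # yaw has accumulated roughly 0.1 rad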
Example #8
    def generate_string(self, data: TeamData):
        lines = []
        lines.append(f"Robot {data.robot_id}")
        time = min(100, round((rospy.Time.now() - data.header.stamp).to_sec()))
        lines.append(f"Time since message: {time:<3}")
        lines.append(f"State: {self.states[data.state]:<11}")
        lines.append("Position")
        lines.append(f"  x: {round(data.robot_position.pose.position.x, 3):<4}")
        lines.append(f"  y: {round(data.robot_position.pose.position.y, 3):<4}")
        # geometry_msgs quaternions store x, y, z, w; quat2euler expects w first
        quat_xyzw = data.robot_position.pose.orientation
        rpy = quat2euler((quat_xyzw.w, quat_xyzw.x, quat_xyzw.y, quat_xyzw.z))
        lines.append(f"  yaw: {round(rpy[2], 3):>4}")
        lines.append(f"  x_cov: {round(data.robot_position.covariance[0], 3):<4}")
        lines.append(f"  y_cov: {round(data.robot_position.covariance[7], 3):<4}")
        lines.append(f"  yaw_cov: {round(data.robot_position.covariance[35], 3):<4}")
        lines.append("Ball absolute")
        lines.append(f"  x: {round(data.ball_absolute.pose.position.x, 3):<4}")
        lines.append(f"  y: {round(data.ball_absolute.pose.position.y, 3):<4}")
        lines.append(f"  x_cov: {round(data.ball_absolute.covariance[0], 3):<4}")
        lines.append(f"  y_cov: {round(data.ball_absolute.covariance[7], 3):<4}")
        lines.append(f"Obstacles found: {len(data.obstacles.obstacles)}")
        lines.append("Strategy")
        lines.append(f"  Role: {self.roles[data.strategy.role]:<9}")
        lines.append(f"  Action: {self.actions[data.strategy.action]:<15}")
        lines.append(f"  Side: {self.sides[data.strategy.offensive_side]:<9}")
        return lines
Example #9
def test_euler_axes():
    # Test there and back with all axis specs
    aba_perms = [(v[0], v[1], v[0]) for v in permutations('xyz', 2)]
    axis_perms = list(permutations('xyz', 3)) + aba_perms
    for (a, b, c), mat in zip(euler_tuples, euler_mats):
        for rs, axes in product('rs', axis_perms):
            ax_spec = rs + ''.join(axes)
            conventioned = [EulerFuncs(ax_spec)]
            if ax_spec in euler.__dict__:
                conventioned.append(euler.__dict__[ax_spec])
            mat = euler2mat(a, b, c, ax_spec)
            a1, b1, c1 = mat2euler(mat, ax_spec)
            mat_again = euler2mat(a1, b1, c1, ax_spec)
            assert_almost_equal(mat, mat_again)
            quat = euler2quat(a, b, c, ax_spec)
            a1, b1, c1 = quat2euler(quat, ax_spec)
            mat_again = euler2mat(a1, b1, c1, ax_spec)
            assert_almost_equal(mat, mat_again)
            ax, angle = euler2axangle(a, b, c, ax_spec)
            a1, b1, c1 = axangle2euler(ax, angle, ax_spec)
            mat_again = euler2mat(a1, b1, c1, ax_spec)
            assert_almost_equal(mat, mat_again)
            for obj in conventioned:
                mat = obj.euler2mat(a, b, c)
                a1, b1, c1 = obj.mat2euler(mat)
                mat_again = obj.euler2mat(a1, b1, c1)
                assert_almost_equal(mat, mat_again)
                quat = obj.euler2quat(a, b, c)
                a1, b1, c1 = obj.quat2euler(quat)
                mat_again = obj.euler2mat(a1, b1, c1)
                assert_almost_equal(mat, mat_again)
                ax, angle = obj.euler2axangle(a, b, c)
                a1, b1, c1 = obj.axangle2euler(ax, angle)
                mat_again = obj.euler2mat(a1, b1, c1)
                assert_almost_equal(mat, mat_again)
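For reference, a minimal round trip showing the axis-spec convention the test exercises: the first character selects a static ('s', extrinsic) or rotating ('r', intrinsic) frame, followed by the order of the rotation axes:

from numpy.testing import assert_almost_equal
from transforms3d.euler import euler2quat, quat2euler

q = euler2quat(0.1, 0.2, 0.3, 'sxyz')  # extrinsic x-y-z rotation
assert_almost_equal(quat2euler(q, 'sxyz'), (0.1, 0.2, 0.3))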
Example #10
    def _update_coordinate_system(self,
                                  origin_device_id: Optional[DeviceId],
                                  device_positions: Dict[DeviceId, np.ndarray],
                                  device_rotations: Dict[DeviceId, np.ndarray],
                                  num_samples: int = 10):
        """Updates the coordinate system origin."""
        if origin_device_id:
            # Collect samples for the devices.
            pos_samples = []
            quat_samples = []
            for _ in range(num_samples):
                self._refresh_poses()
                state = self._get_tracker_state(origin_device_id,
                                                ignore_device_transform=True)
                pos_samples.append(state.pos)
                quat_samples.append(state.rot_quat)
            global_translation = -np.mean(pos_samples, axis=0)
            global_rotation = qconjugate(average_quaternions(quat_samples))
            origin_rx, origin_ry, origin_rz = quat2euler(global_rotation,
                                                         axes='rxyz')
            logging.info('Have origin rotation: %1.2f %1.2f %1.2f', origin_rx,
                         origin_ry, origin_rz)
            self._coord_system.set_global_transform(global_translation,
                                                    euler2mat(0, 0, origin_rz))

        for device_id, position in device_positions.items():
            self._coord_system.set_local_transform(device_id,
                                                   translation=position)
        for device_id, rotation in device_rotations.items():
            self._coord_system.set_local_transform(device_id,
                                                   rotation=rotation)
Example #11
    def forward(self, image, cam_pos, cam_rot):

        # First build the 2D affine matrices representing the rotation around the Z axis
        batch_size = image.size(0)
        affines_np = np.zeros((batch_size, 3, 3))
        for batch in range(batch_size):
            b_rot = cam_rot.data[batch].cpu().numpy()
            b_pos = cam_pos.data[batch].cpu().numpy()
            roll, pitch, yaw = euler.quat2euler(b_rot)
            c = math.cos(-yaw)
            s = math.sin(-yaw)

            pos_rel = b_pos / self.world_size
            pos_rel = pos_rel - 0.5
            pos_rel *= 2

            # Affine matrix to center the map around the drone
            affine_displacement = np.asarray([[1, 0, pos_rel[0]], [0, 1, pos_rel[1]], [0, 0, 1]])
            # Affine matrix to rotate the map to the drone's frame
            affine_rotation = np.asarray([[c, s, 0], [-s, c, 0], [0, 0, 1]])

            # Translate first, then rotate
            affines_np[batch] = np.matmul(affine_displacement, affine_rotation)

        affine_in = torch.from_numpy(affines_np[:, 0:2, :])
        affine_in = affine_in.to(image.device)

        # Build the affine grid
        grid = F.affine_grid(affine_in, torch.Size((batch_size, 1, self.map_size, self.map_size))).float()

        # Rotate the input image
        rot_img = F.grid_sample(image, grid)
        return rot_img
Example #12
    def test_average_two(self):
        """Averaging two different quaternions."""
        quat1 = euler2quat(np.pi / 4, 0, 0)
        quat2 = euler2quat(-np.pi / 4, 0, 0)
        avg_quat = average_quaternions([quat1, quat2])
        result = quat2euler(avg_quat)
        np.testing.assert_array_almost_equal(result, [0, 0, 0])
Example #13
    def draw_box(self, marker, lifetime, color):
        """
        draw box from ros marker
        """
        box = carla.BoundingBox()
        box.location.x = marker.pose.position.x
        box.location.y = -marker.pose.position.y
        box.location.z = marker.pose.position.z
        box.extent.x = marker.scale.x / 2
        box.extent.y = marker.scale.y / 2
        box.extent.z = marker.scale.z / 2

        roll, pitch, yaw = quat2euler([
            marker.pose.orientation.w, marker.pose.orientation.x,
            marker.pose.orientation.y, marker.pose.orientation.z
        ])
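        # CARLA uses a left-handed coordinate frame while ROS is right-handed,
        # hence the sign flips on y (above) and on yaw (below).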
        rotation = carla.Rotation()
        rotation.roll = math.degrees(roll)
        rotation.pitch = math.degrees(pitch)
        rotation.yaw = -math.degrees(yaw)
        self.node.loginfo(
            "Draw Box {} (rotation: {}, color: {}, lifetime: {})".format(
                box, rotation, color, lifetime))
        self.debug.draw_box(box,
                            rotation,
                            thickness=0.1,
                            color=color,
                            life_time=lifetime)
Example #14
def integrate(imu_data, train=True):
    dt = [(imu_data['ts'][0, i + 1] - imu_data['ts'][0, i])
          for i in range(imu_data['ts'].shape[1] - 1)]
    angular_velocities = imu_data['vals'][3:, :]
    quaternion_estimates = [Quaternion(1, [0, 0, 0])]
    for t in range(angular_velocities.shape[1] - 1):
        w = angular_velocities[:, t][[1, 2, 0]]

        quaternion_estimates.append((quaternion_estimates[t].multiply(
            Quaternion(0, 0.5 * w * dt[t]).exp())).unit())
    euler_estimates = []
    for quaternion_estimate in quaternion_estimates:
        euler_estimates.append(e3d.quat2euler(quaternion_estimate.to_numpy()))

    vicon_data = read_dataset('trainset', 'vicon', '1')
    vicon_euler = []
    for i in range(vicon_data['rots'].shape[2]):
        vicon_euler.append(e3d.mat2euler(vicon_data['rots'][:, :, i]))
    if train:
        for i in range(3):
            pl.plot(imu_data['ts'][0], np.array(euler_estimates)[:, i])
            pl.plot(vicon_data['ts'][0], np.array(vicon_euler)[:, i])
            pl.show()
    else:
        for i in range(3):
            pl.plot(imu_data['ts'][0], np.array(euler_estimates)[:, i])
            pl.show()
Example #15
def __pose_2d_from_pose_stamped(ps):
    quat = ps.pose.orientation
    quat_array = [quat.w, quat.x, quat.y, quat.z]
    euler = t3de.quat2euler(quat_array)
    pose_2d = gms.Pose2D(x=ps.pose.position.x,
                         y=ps.pose.position.y,
                         theta=euler[2])
    return pose_2d
Example #16
    def set_rot_quat(self, sim_scene: SimScene, quat: np.ndarray):
        """Sets the rotation of the element from a quaternion."""
        if self.qpos_indices is not None:
            qpos = quat
            if self._is_euler:
                qpos = quat2euler(quat, axes='rxyz')
            sim_scene.data.qpos[self.qpos_indices[3:]] = qpos
            return
        self.element_attr(sim_scene.model, 'quat')[self.element_id, :] = quat
Example #17
    def test_quat2euler(self):
        s = (N, N, 4)
        quats = uniform(-1, 1, size=s) * randint(2, size=s)
        eulers = quat2euler(quats)
        self.assertEqual(eulers.shape, (N, N, 3))
        for i in range(N):
            for j in range(N):
                res = euler.quat2euler(quats[i, j], axes="rxyz")
                np.testing.assert_almost_equal(eulers[i, j], res)
Example #18
def compute_imu_orientation_from_world(robot_quat_in_world):
    # The IMU's roll and pitch are relative to the gravity vector, while yaw is in the world frame.
    # Get the global yaw first ('szxy' returns angles in yaw, roll, pitch order).
    yrp_world_frame = quat2euler(robot_quat_in_world, axes='szxy')
    # Remove the global yaw rotation from roll and pitch.
    yaw_quat = euler2quat(yrp_world_frame[0], 0, 0, axes='szxy')
    rp = rotate_vector((yrp_world_frame[1], yrp_world_frame[2], 0), qinverse(yaw_quat))
    # Return (roll, pitch, 0) and the yaw as a separate quaternion.
    return [rp[0], rp[1], 0], yaw_quat
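A minimal sanity check (a sketch): an identity world orientation should yield zero roll and pitch and an identity yaw quaternion.

rp, yaw_quat = compute_imu_orientation_from_world([1.0, 0.0, 0.0, 0.0])
# rp -> [0.0, 0.0, 0], yaw_quat -> [1., 0., 0., 0.]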
Example #19
    def snap_cv(self, pos, quat):
        self.drone.reset_environment()
        pos_birdseye_as = pos
        rpy_birdseye_as = euler.quat2euler(quat)
        self.drone.teleport_3d(pos_birdseye_as, rpy_birdseye_as, pos_in_airsim=True, fast=True)
        time.sleep(0.3)
        self.drone.teleport_3d(pos_birdseye_as, rpy_birdseye_as, pos_in_airsim=True, fast=True)
        time.sleep(0.3)
        _, image = self.drone.get_state(depth=False)
        return image
Example #20
    def imu_callback(self, msg):
        q = [
            msg.orientation.w, msg.orientation.x, msg.orientation.y,
            msg.orientation.z
        ]
        euler_vec = euler.quat2euler(q, 'sxyz')

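        # 57.2958 is approximately 180 / pi, converting radians to degrees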
        self.roll = euler_vec[0] * 57.2958
        self.pitch = euler_vec[1] * 57.2958
        self.yaw = euler_vec[2] * 57.2958
Example #21
    def angles(self, sample: GloveSample) -> Dict[str, Vec3f]:
        """Convert sensor data to euler angles in joints.

        Arguments:
            sample {GloveSample} -- data from the glove

        Returns:
            Dict[str, Vec3f] -- euler angles (in degrees) by link name
        """
        # pylint: disable=no-self-use
        w_roll, w_pitch, w_yaw = np.rad2deg(
            euler.quat2euler(sample.wrist_quat, 'sxyz'))
        _, h_pitch, _ = np.rad2deg(euler.quat2euler(sample.hand_quat, 'sxyz'))
        return {
            'wrist': (-w_roll, -w_pitch, w_yaw),
            'hand': (0.0, w_pitch - h_pitch, 0.0),
            'thumb0':
            (0.0, 57.5 * (sample.palm_arch + sample.thumb_cross_over), 0.0),
            'thumb1':
            (0.0, 20.0 * sample.pip_joints[0], -25.0 * sample.abductions[0]),
            'thumb2': (0.0, 30.0 * sample.pip_joints[0], 0.0),
            'thumb3': (0.0, 85.0 * sample.dip_joints[0], 0.0),
            'index0': (0.0, 0.0, 0.0),
            'index1':
            (0.0, 70.0 * sample.pip_joints[1], -25.0 * sample.abductions[1]),
            'index2': (0.0, 100.0 * sample.dip_joints[1], 0.0),
            'index3': (0.0, 30.0 * sample.dip_joints[1], 0.0),
            'middle0': (0.0, 0.0, 0.0),
            'middle1': (0.0, 70.0 * sample.pip_joints[2], 0.0),
            'middle2': (0.0, 100.0 * sample.dip_joints[2], 0.0),
            'middle3': (0.0, 30.0 * sample.dip_joints[2], 0.0),
            'ring0': (0.0, 0.0, 0.0),
            'ring1':
            (0.0, 70.0 * sample.pip_joints[3], 25.0 * sample.abductions[2]),
            'ring2': (0.0, 100.0 * sample.dip_joints[3], 0.0),
            'ring3': (0.0, 30.0 * sample.dip_joints[3], 0.0),
            'little0': (0.0, 0.0, 0.0),
            'little1': (0.0, 70.0 * sample.pip_joints[4],
                        25.0 * sample.abductions[3]),
            'little2': (0.0, 100.0 * sample.dip_joints[4], 0.0),
            'little3': (0.0, 30.0 * sample.dip_joints[4], 0.0),
        }
Example #22
    def imu_callback(self, msg):
        quat = [
            msg.orientation.w, msg.orientation.x, msg.orientation.y,
            msg.orientation.z
        ]
        _, _, yaw = quat2euler(quat)
        self.yawReading = yaw
        self.sense_cov = np.copy(assumedSenseCov)
        self.sense_cov[2][2] = msg.orientation_covariance[8]
        self.sense_cov *= 1.5
        self.update(ignoreIndices=[0, 1])
Example #23
def ik(foot_locations, quat_orientation):
    # Construct foot rotation matrix to compensate for body tilt
    (roll, pitch, yaw) = quat2euler(quat_orientation)
    roll_compensation = CORRECTION_FACTOR * np.clip(roll, -MAX_TILT, MAX_TILT)
    pitch_compensation = CORRECTION_FACTOR * np.clip(pitch, -MAX_TILT,
                                                     MAX_TILT)
    rmat = euler2mat(roll_compensation, pitch_compensation, 0)
    rotated_foot_locations = rmat.T @ foot_locations
    joint_angles = four_legs_inverse_kinematics(rotated_foot_locations)

    return joint_angles
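CORRECTION_FACTOR and MAX_TILT are module-level constants and four_legs_inverse_kinematics a helper from the snippet's project. Plausible placeholder values for experimenting with the tilt compensation (assumptions, not taken from the source):

CORRECTION_FACTOR = 0.8  # hypothetical gain on the measured body tilt
MAX_TILT = 0.4           # hypothetical roll/pitch clamp, in radians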
Example #24
    def assertRobotStanding(self, robot_name: str = "amy"):
        """
        Assert that the robot is standing and has not fallen down.

        :param robot_name: The robot whose pose should be verified.
            Defaults to amy, which is the only robot in single-robot simulations.
        """
        pose = self.get_robot_pose(robot_name)
        rpy = quat2euler((pose.orientation.w, pose.orientation.x, pose.orientation.y, pose.orientation.z))
        if abs(rpy[0]) > math.tau / 8 or abs(rpy[1]) > math.tau / 8:
            raise AssertionError(f"Robot has fallen down")