Example #1
 def make_obs(self):
     '''
     State vector of length 15: two poses (position + rotation vector)
     and the length, width and height of the object's bounding box.
     '''
     state = None
     if self.state_rep == "state":
         objs = self.machine.get_objects(True)
         gripper_pose = list(
             self.machine.env._scene.get_observation().gripper_pose)
         gripper_quat = gripper_pose[3:]
         # reorder from scalar-last (x, y, z, w) to scalar-first (w, x, y, z)
         gripper_quat = gripper_quat[-1:] + gripper_quat[:-1]
         quat = quaternion(*gripper_quat)
         g_vec = list(as_rotation_vector(quat))
         obj_pose = list(objs[OBJECT].get_pose())
         obj_quat = obj_pose[3:]
         # reorder from scalar-last (x, y, z, w) to scalar-first (w, x, y, z)
         obj_quat = obj_quat[-1:] + obj_quat[:-1]
         quat = quaternion(*obj_quat)
         r_vec = list(as_rotation_vector(quat))
         obj_bb = objs[OBJECT].get_bounding_box()
         x_diff = obj_bb[0] - obj_bb[3]
         y_diff = obj_bb[1] - obj_bb[4]
         z_diff = obj_bb[2] - obj_bb[5]
         state = np.array(gripper_pose[:3] + g_vec + obj_pose[:3] + r_vec +
                          [x_diff, y_diff, z_diff]).reshape(1, -1)
     elif self.state_rep == "vision":
         obs = self.machine.env._scene.get_observation()
         state = obs.wrist_rgb
         # plt.imsave("image.png",state)
     return state
Example #2
def st_induced_axial_rot_swing_twist(st: PoseTrajectory,
                                     ht: PoseTrajectory) -> np.ndarray:
    """Compute ST-induced HT axial rotation using swing-twist decomposition."""
    num_frames = st.rot_matrix.shape[0]
    # rotational difference between frames expressed in torso coordinate system
    st_diff = st.quat[1:] * np.conjugate(st.quat[:-1])

    induced_axial_rot_delta = np.empty((num_frames, 4), dtype=float)
    induced_axial_rot_delta[0, :] = 0
    for i in range(num_frames - 1):
        hum_axis = ht.rot_matrix[i, :, 1]

        # this computes the induced axial rotation from the ST rotation in its entirety
        rot_vec = q.as_rotation_vector(quat_project(st_diff[i], hum_axis))
        rot_vec_theta = np.linalg.norm(rot_vec)
        rot_vec_axis = rot_vec / rot_vec_theta
        # Note that rot_vec_theta will always be + because of np.linalg.norm. But a rotation about an axis v by an angle
        # theta is the same as a rotation about -v by an angle -theta. So here the humeral axis sets our direction. That
        # is, we always rotate around hum_axis (and not -hum_axis) and adjust the sign of rot_vec_theta accordingly
        induced_axial_rot_delta[i + 1, 3] = rot_vec_theta * (
            1 if np.dot(rot_vec_axis, hum_axis) > 0 else -1)

        # this computes it for each individual axis of the scapula
        for j in range(3):
            # first project the scapula rotation onto one of its axis
            st_axis_proj = quat_project(st_diff[i], st.rot_matrix[i, :, j])
            # then proceed as above
            rot_vec = q.as_rotation_vector(quat_project(
                st_axis_proj, hum_axis))
            rot_vec_theta = np.linalg.norm(rot_vec)
            rot_vec_axis = rot_vec / rot_vec_theta
            induced_axial_rot_delta[i + 1, j] = rot_vec_theta * (
                1 if np.dot(rot_vec_axis, hum_axis) > 0 else -1)

    return np.add.accumulate(induced_axial_rot_delta)
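
`quat_project` is a project helper not shown in this listing. A minimal sketch of what it plausibly does, assuming it extracts the twist component of a unit quaternion about a unit axis (the standard swing-twist decomposition):

import numpy as np
import quaternion  # registers the np.quaternion dtype

def quat_project(quat, axis):
    # Hypothetical helper: extract the "twist" of `quat` about the unit
    # 3-vector `axis`. Keep the scalar part and the vector part projected
    # onto the axis, then renormalize to a unit quaternion.
    vec = np.array([quat.x, quat.y, quat.z])
    proj = np.dot(vec, axis) * axis
    twist = np.quaternion(quat.w, proj[0], proj[1], proj[2])
    return twist.normalized()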
Example #3
    def get_obs(self, get_space=False):
        """
            Get the state observation for RL. If get_space is True, return the space description instead.
            Space is defined as:
                0: time [1]
                1:7 hand pos + rotation vector [6]
                7: gripper state [1]
                8:14 obj pos + rotation vector [6],
                14: obj height
                15: obj radius
            In case of two objects:
                16:22 obj pos + rotation vector [6],
                22: obj height
                23: obj radius
        """
        if get_space:
            if self.two_objects:
                return FloatBox(low=[0.] + [-1.] * 6 + [0.] + [-1.] * 6 +
                                [0.] * 2 + [-1.] * 6 + [0.] * 2,
                                high=[1.] + [1.] * 6 + [self.FINGER_OPEN_POS] +
                                [1.] * 6 + [0.2] * 2 + [1.] * 6 + [0.2] * 2)
            return FloatBox(low=[0.] + [-1.] * 6 + [0.] + [-1.] * 6 + [0.] * 2,
                            high=[1.] + [1.] * 6 + [self.FINGER_OPEN_POS] +
                            [1.] * 6 + [0.2] * 2)

        t = self.scene.simulation_time
        hand_pose = self.hand_actors[0].get_global_pose()
        grip = self.get_grip()
        obj_pose = self.obj.get_global_pose()
        if self.two_objects:
            obj2_pose = self.obj2.get_global_pose()
            return np.concatenate([
                [t / 10.],
                hand_pose[0],
                npq.as_rotation_vector(hand_pose[1]),
                [grip],
                obj_pose[0],
                npq.as_rotation_vector(obj_pose[1]),
                [self.obj_height, self.obj_radius],
                obj2_pose[0],
                npq.as_rotation_vector(obj2_pose[1]),
                [self.obj2_height, self.obj2_radius],
            ]).astype(np.float32)

        return np.concatenate([
            [t / 10.],
            hand_pose[0],
            npq.as_rotation_vector(hand_pose[1]),
            [grip],
            obj_pose[0],
            npq.as_rotation_vector(obj_pose[1]),
            [self.obj_height, self.obj_radius],
        ]).astype(np.float32)
Example #4
File: data.py Project: ehu-ai/domrand
def load_eval_data(eval_data_path, data_shape='asus'):
    """Read the data from the real world stored in jpgs"""
    imgs = []
    labels = []
    pickle_path = os.path.join(eval_data_path, "ep_data.pkl")
    with open(pickle_path, "rb") as f:
        context = pickle.load(f)
    files = sorted(os.listdir(eval_data_path))
    obj_world_pose = context["obj_world_pose"]
    obj_world_pos = obj_world_pose[:3]
    obj_world_quat = quaternion.as_quat_array(obj_world_pose[3:])
    zrot = quaternion.as_rotation_vector(obj_world_quat)[-1]
    pose = np.zeros((3,))
    pose[:2] = obj_world_pos[:2]
    pose[2] = zrot
    for f in files:
        if not f.endswith('.png'):
            continue

        labels.append(pose.copy())
        filename = os.path.join(eval_data_path, f)
        img = plt.imread(filename)
        if data_shape == 'asus':
            img = preproc_image(img, dtype=np.float32)
        # plt.imshow(img); plt.show()
        # img = (img / 127.5) - 1.0
        imgs.append(img)
    return np.array(imgs, dtype=np.float32), np.array(labels, dtype=np.float32)
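
`preproc_image` comes from elsewhere in the project. A minimal stand-in consistent with how it is called here (cast and scale only; the real helper presumably also resizes to the network's input shape):

import numpy as np

def preproc_image(img, dtype=np.float32):
    # Hypothetical stand-in: cast, then scale pixel values to [0, 1]
    img = img.astype(dtype)
    if img.max() > 1.0:
        img /= 255.0
    return img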
Example #5
File: app.py Project: ww1251256/Somatic
    def queue_handler(self):
        try:
            command = self.queue.get(block=False)

            if command['type'] == 'ack':
                if self.state is self.State.connecting:
                    self.state = self.State.connected
                    logging.info('Got ack - connected')

            if command['type'] == 'rx':
                orientation = command['quat']
                if self.state is not self.State.disconnected and self.state is not self.State.quitting:
                    self.update_status(command['fingers'], orientation.w,
                                       orientation.x, orientation.y,
                                       orientation.z, command['freq'])

                    fingers = command['fingers']
                    hand_id = (fingers[0] * 0b1000 + fingers[1] * 0b0100 +
                               fingers[2] * 0b0010 + fingers[3] * 0b0001)
                    if hand_id != self.last_hand_id:
                        self.hand_display.create_image(
                            (0, 0),
                            image=self.hand_icons[hand_id],
                            anchor=N + W)
                        self.last_hand_id = hand_id

                if self.state is self.State.recording:
                    roll, yaw, pitch = quaternion.as_rotation_vector(
                        orientation)

                    x_coord = np.tan(pitch) * 125 + 125
                    y_coord = np.tan(yaw) * -125 + 125

                    logging.info('Rendering ({}, {})'.format(x_coord, y_coord))

                    if self.last_coordinate_visualized:
                        logging.info(self.last_coordinate_visualized)

                        self.path_display.create_line(
                            self.last_coordinate_visualized[0],
                            self.last_coordinate_visualized[1],
                            x_coord,
                            y_coord,
                            width=2,
                            fill='blue')

                    self.last_coordinate_visualized = [x_coord, y_coord]

                    self.path_display.create_oval(
                        (x_coord - 2, y_coord - 2, x_coord + 2, y_coord + 2),
                        fill='SeaGreen1')

            if command['type'] == 'quit':
                self.master.destroy()
                return

        except Empty:
            pass

        self.master.after(10, self.queue_handler)
Example #6
    def load_sample_sequence(self, filename):
        if filename.endswith('.pkl'):
            # This is a pickle file that contains a motion sequence stored in angle-axis format
            sample_sequence = pkl.load(open(filename, 'rb'))
            poses = np.array(sample_sequence)

        elif filename.endswith('.npz'):
            # This is a numpy file that was produced using the evaluation code
            preds = np.load(filename)['prediction']

            # Display one sample from the file (here the sample at index 1)
            pred = preds[1]

            # This is in rotation matrix format, so convert to angle-axis
            poses = np.reshape(pred, [pred.shape[0], -1, 3, 3])
            poses = quaternion.as_rotation_vector(
                quaternion.from_rotation_matrix(poses))
            poses = np.reshape(poses, [poses.shape[0], -1])

        else:
            raise ValueError(
                "do not recognize file format of file {}".format(filename))

        # Unity can also display orientations and accelerations, but they are not available here
        oris = np.zeros([poses.shape[0], 6 * 3])
        accs = np.zeros([poses.shape[0], 6 * 3])

        self.sample_sequence = {'gt': poses, 'ori': oris, 'acc': accs}
        self.next_frame = 0
Example #7
 def test_avg(self):
     # Distribute a bunch of rotations around the identity, such that the
     # average should be the identity: random axes, one constant angle per batch.
     nb_rotations = 1000
     angles = np.linspace(0.0, np.deg2rad(45), 100)
     averages_q = []
     np.random.seed(1)
     for angle in angles:
         rotation_axis = np.random.uniform(-1., 1., size=(nb_rotations, 3))
         rotation_axis /= np.linalg.norm(rotation_axis,
                                         axis=1,
                                         keepdims=True)
         rotation_axis *= angle
         rotation_qs = quaternion.from_rotation_vector(rotation_axis)
         rotation_qs = quaternion.as_float_array(rotation_qs)
         # print(rotation_qs)
         r = average_quaternion(rotation_qs)
         averages_q.append(r.tolist())
     averages_q = np.array(averages_q)
     # normalizes rotation (resolve the opposite ambiguity)
     averages_q *= np.sign(averages_q[:, 0]).reshape((-1, 1))
     # convert quaternions to angle-axis representation
     averages_q = quaternion.from_float_array(averages_q)
     averages_v = quaternion.as_rotation_vector(averages_q)
     # retrieve angle amplitudes.
     errors_rad = np.linalg.norm(averages_v, axis=1)
     # # plot for debug
     # import matplotlib.pyplot as plt
     # plt.plot(np.rad2deg(errors_rad), 'x', label='Errors (in deg.) as a function of angle amplitude (in deg.)')
     # plt.show()
     maximum_error_deg = np.rad2deg(np.max(errors_rad))
     self.assertLess(maximum_error_deg, 3.6)
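
`average_quaternion` is defined elsewhere; the test is consistent with the usual eigenvector method (Markley et al.). A minimal sketch, assuming an (N, 4) scalar-first float array as input:

import numpy as np

def average_quaternion(qs):
    # Hypothetical sketch (Markley's method): the average orientation is the
    # eigenvector of the 4x4 matrix sum(q_i q_i^T) with the largest eigenvalue
    A = qs.T @ qs
    eigvals, eigvecs = np.linalg.eigh(A)
    return eigvecs[:, -1]  # column for the largest eigenvalue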
Example #8
def st_induced_axial_rot_fha(st: PoseTrajectory,
                             ht: PoseTrajectory) -> np.ndarray:
    """Compute ST-induced HT axial rotation using finite helical axes."""
    num_frames = st.rot_matrix.shape[0]
    st_diff = st.quat[1:] * np.conjugate(st.quat[:-1])
    st_fha = q.as_rotation_vector(st_diff)
    st_fha_x = (
        extended_dot(st_fha, st.rot_matrix[:-1, :, 0])[..., np.newaxis] *
        st.rot_matrix[:-1, :, 0])
    st_fha_y = (
        extended_dot(st_fha, st.rot_matrix[:-1, :, 1])[..., np.newaxis] *
        st.rot_matrix[:-1, :, 1])
    st_fha_z = (
        extended_dot(st_fha, st.rot_matrix[:-1, :, 2])[..., np.newaxis] *
        st.rot_matrix[:-1, :, 2])

    st_fha_proj_x = extended_dot(st_fha_x, ht.rot_matrix[:-1, :, 1])
    st_fha_proj_y = extended_dot(st_fha_y, ht.rot_matrix[:-1, :, 1])
    st_fha_proj_z = extended_dot(st_fha_z, ht.rot_matrix[:-1, :, 1])
    st_fha_proj = extended_dot(st_fha, ht.rot_matrix[:-1, :, 1])

    induced_axial_rot = np.empty((num_frames, 4), dtype=float)
    induced_axial_rot[0, :] = 0
    induced_axial_rot[1:, 0] = np.add.accumulate(st_fha_proj_x)
    induced_axial_rot[1:, 1] = np.add.accumulate(st_fha_proj_y)
    induced_axial_rot[1:, 2] = np.add.accumulate(st_fha_proj_z)
    induced_axial_rot[1:, 3] = np.add.accumulate(st_fha_proj)

    return induced_axial_rot
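
`extended_dot` is another project helper not shown above. Judging from its use on (N, 3) arrays, it is presumably a row-wise dot product; a sketch:

import numpy as np

def extended_dot(a, b):
    # Hypothetical helper: row-wise dot product of two (N, 3) arrays -> (N,)
    return np.einsum('ij,ij->i', a, b)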
Example #9
def arr_quat_to_rots(quats):
    ret = q.as_rotation_vector(q.as_quat_array(quats))
    ret = ret.reshape(-1, 3)
    # shift the y-axis angle by pi and wrap it back into (-pi, pi]
    ret = ret[:, 1] - np.pi
    ret[ret > np.pi] -= 2 * np.pi
    ret[ret < -np.pi] += 2 * np.pi
    return ret
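
A quick demo with made-up quaternions (identity and a half-turn about y), showing the shift-and-wrap of the y component:

import numpy as np
import quaternion as q

quats = np.array([[1.0, 0.0, 0.0, 0.0],    # identity
                  [0.0, 0.0, 1.0, 0.0]])   # half-turn about y
print(arr_quat_to_rots(quats))             # approximately [-pi, 0.]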
Example #10
def mySlerp(t, q0, qf):
    # theta: relative rotation angle; theta/2 is the angle between the unit
    # quaternions, which is what the slerp formula below interpolates over
    theta = np.linalg.norm(quaternion.as_rotation_vector(q0.conjugate() * qf))
    t0 = 0
    tf = 10
    dt = (t - t0) / (tf - t0)
    quat_slerp = (sin(theta / 2 * (1 - dt)) * q0 +
                  sin(theta / 2 * dt) * qf) / sin(theta / 2)
    return quat_slerp
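
A usage sketch over the function's hard-coded t0=0..tf=10 window (assumes unit quaternion inputs), cross-checked against the library's own slerp:

import numpy as np
import quaternion
from math import sin  # mySlerp uses the scalar sin

q0 = quaternion.from_rotation_vector([0.0, 0.0, 0.0])
qf = quaternion.from_rotation_vector([0.0, 0.0, np.pi / 2])
print(mySlerp(5.0, q0, qf))                    # halfway along the arc
print(quaternion.slerp_evaluate(q0, qf, 0.5))  # library reference value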
Example #11
File: utils.py Project: majunfu/ROMP
def rot_matrix_to_axan(data):
    """
    Converts rotation matrices to axis angles
    :param data: Rotation matrices. Shape: (Persons, Seq, 24, 3, 3)
    :return: Axis angle representation of input matrices. Shape: (Persons, Seq, 24, 3)
    """
    aa = quaternion.as_rotation_vector(quaternion.from_rotation_matrix(data))
    return aa
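
A quick shape check with identity rotations (made-up dimensions):

import numpy as np
import quaternion

data = np.tile(np.eye(3), (2, 5, 24, 1, 1))  # (Persons, Seq, 24, 3, 3)
aa = rot_matrix_to_axan(data)
print(aa.shape)  # (2, 5, 24, 3); identities map to zero rotation vectors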
Example #12
 def __mul__(self, q: _np.quaternion) -> Frame:
     if not _np.all(
             _np.isclose(_quaternion.as_rotation_vector(q)[1:],
                         _np.array([0.0, 0.0]))):
         raise FrameException(
             "Frenet frames cannot be multiplied (rotated) by arbitrary quaternions."
         )
     return super().__mul__(q)
Example #13
def test_rotation_vector():
    quat = quaternion.from_float_array(
        np.array([np.cos(np.pi / 3), 0,
                  np.sin(np.pi / 3), 0]))
    print("quaterion angle pi*2/3 about y-axis", quat)
    rvec = quaternion.as_rotation_vector(quat)
    print("rotation vector:", rvec)
    assert (np.isclose(np.linalg.norm(rvec), np.pi * 2 / 3))
    print("!!! test_rotation_vector passed")
Example #14
    def move(self,
             pose: Pose,
             move_type: MoveType,
             velocity: float = 50.0,
             acceleration: float = 50.0) -> None:
        """Moves the robot's end-effector to a specific pose.

        :param pose: Target pose.
        :param move_type: Move type.
        :param velocity: Speed of move (percent).
        :param acceleration: Acceleration of move (percent).
        :return:
        """

        if not (0.0 <= velocity <= 100.0):
            raise DobotException("Invalid velocity.")

        if not (0.0 <= acceleration <= 100.0):
            raise DobotException("Invalid acceleration.")

        with self._move_lock:

            rp = tr.make_pose_rel(self.pose, pose)

            # prevent Dobot from moving when given an unreachable goal;
            # jv stays None while M1 lacks IK support
            jv = None
            try:
                jv = self._inverse_kinematics(rp)
            except Arcor2NotImplemented:  # TODO remove this once M1 has IK
                pass

            if self.simulator:
                if jv is not None:
                    self._joint_values = jv
                time.sleep((100.0 - velocity) * 0.05)
                return

            self._handle_pose_in(rp)

            try:
                self._dobot.clear_alarms()
                unrotated = self.UNROTATE_EEF * rp.orientation
                rotation = math.degrees(
                    quaternion.as_rotation_vector(
                        unrotated.as_quaternion())[2])
                self._dobot.speed(velocity, acceleration)

                self._dobot.wait_for_cmd(
                    self._dobot.move_to(
                        rp.position.x * 1000.0,
                        rp.position.y * 1000.0,
                        rp.position.z * 1000.0,
                        rotation,
                        MOVE_TYPE_MAPPING[move_type],
                    ))
            except DobotApiException as e:
                raise DobotException("Move failed.") from e

        self.alarms_to_exception()
Example #15
    def get_rotation_vector(self, ref: Optional[Frame] = None) -> _np.ndarray:
        """

        Args:
            ref:

        Returns:

        """
        return _quaternion.as_rotation_vector(self.get_quaternion(ref))
Example #16
 def frame_diff(df_row):
     pos_diff = df_row['v3d'][0:3, 3] - df_row['isb'][0:3, 3]
     matrix_diff = df_row['isb'][0:3, 0:3].T @ df_row['v3d'][0:3, 0:3]
     pose_diff = np.full((6, ), np.nan)
     pose_diff[0:3] = pos_diff
     pose_diff[3:] = np.rad2deg(
         quaternion.as_rotation_vector(
             quaternion.from_rotation_matrix(matrix_diff,
                                             nonorthogonal=False)))
     return pose_diff
Example #17
def quat_pw(q, rho):
    # half-angle of the rotation (equivalent to np.arccos(q.w) for unit q);
    # as_rotation_vector returns the full angle, hence the division by two
    theta = np.nan_to_num(
        np.linalg.norm(quat.as_rotation_vector(q.normalized()))) / 2
    n = normalize(q.vec)
    scale = (np.sqrt(q.norm())) ** rho
    sc_part = scale * np.cos(theta * rho)
    vec_part = scale * n * np.sin(theta * rho)
    q_pw = np.quaternion(sc_part, vec_part[0], vec_part[1], vec_part[2])
    return q_pw
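
`normalize` is an external helper; a plausible stand-in, plus a sanity check against the built-in quaternion power (for unit inputs the two should agree):

import numpy as np
import quaternion as quat

def normalize(v):
    # Hypothetical helper: unit vector, leaving the zero vector unchanged
    n = np.linalg.norm(v)
    return v / n if n > 0 else v

q0 = quat.from_rotation_vector([0.0, 0.0, np.pi / 3])
print(quat_pw(q0, 0.5), q0 ** 0.5)  # both give the 30-degree rotation about z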
Example #18
def test_as_rotation_vector():
    np.random.seed(1234)
    n_tests = 1000
    vecs = np.random.uniform(high=math.pi/math.sqrt(3), size=n_tests*3).reshape((n_tests, 3))
    quats = np.zeros(vecs.shape[:-1]+(4,))
    quats[..., 1:] = vecs[...]
    quats = quaternion.as_quat_array(quats)
    quats = np.exp(quats/2)
    quat_vecs = quaternion.as_rotation_vector(quats)
    assert allclose(quat_vecs, vecs)
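
The identity this test exercises: a rotation vector v corresponds to the unit quaternion exp(v/2), so `as_rotation_vector` recovers 2·log(q). A tiny standalone check in the same style:

import numpy as np
import quaternion

v = np.array([[0.1, 0.2, 0.3]])
qv = np.exp(quaternion.as_quat_array(
    np.concatenate((np.zeros((1, 1)), v), axis=1)) / 2)
print(quaternion.as_rotation_vector(qv))  # recovers v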
Example #19
def load_image_data(image_filename, table_file):
    cols = [
        "OBSERVATION_END_MET", "IMAGE_FILENAME", "OBSERVATION_END_TIME",
        "SPC_X", "SPC_Y", "SPC_Z", "AST_J2_X", "AST_J2_Y", "AST_J2_Z",
        "SPC_J2_X", "SPC_J2_Y", "SPC_J2_Z", "BODY_SURFACE_DISTANCE",
        "CENTER_LON", "CENTER_LAT", "CENTER_PIXEL_RES",
        "CELESTIAL_N_CLOCK_ANGLE", "BODY_POLE_CLOCK_ANGLE",
        "BODY_POLE_ASPECT_ANGLE", "SUN_DIR_CLOCK_ANGLE", "RIGHT_ASCENSION",
        "DECLINATION", "SUBSOLAR_LON", "SUBSOLAR_LAT", "INCIDENCE_ANGLE",
        "EMISSION_ANGLE", "PHASE_ANGLE", "SOLAR_ELONGATION", "SUB_SC_LON",
        "SUB_SC_LAT", "BODY_CENTER_DISTANCE", "PIXEL_OFFSET_X",
        "PIXEL_OFFSET_Y", "AST_SUN_ROT_ANGLE"
    ]
    idx = dict(zip(cols, range(len(cols))))

    with open(table_file, 'r') as fh:
        alldata = [re.split(r'\s+', row)[1:] for row in fh]

    d = None
    for row in alldata:
        if row[idx['IMAGE_FILENAME']] == image_filename:
            d = row
            break

    assert d is not None, 'data for image %s not found' % image_filename

    # spacecraft orientation, equatorial J2000
    sc_rot_ra = float(d[idx['RIGHT_ASCENSION']])
    sc_rot_dec = float(d[idx['DECLINATION']])
    sc_rot_cnca = float(d[idx['CELESTIAL_N_CLOCK_ANGLE']])
    sc_igrf_q = tools.ypr_to_q(
        rads(sc_rot_dec), rads(sc_rot_ra),
        -rads(sc_rot_cnca))  # same with rosetta lbls also

    sun_ast_igrf_r = np.array(
        [d[idx['AST_J2_X']], d[idx['AST_J2_Y']],
         d[idx['AST_J2_Z']]]).astype(float)
    sun_sc_igrf_r = np.array(
        [d[idx['SPC_J2_X']], d[idx['SPC_J2_Y']],
         d[idx['SPC_J2_Z']]]).astype(float)

    arr2str = lambda arr: '[%s]' % ', '.join(['%f' % v for v in arr])
    print('sun_ast_igrf_r: %s' % arr2str(sun_ast_igrf_r * 1e3))
    print('sun_sc_igrf_r: %s' % arr2str(sun_sc_igrf_r * 1e3))
    print('ast_sc_igrf_r: %s' % arr2str(
        (sun_sc_igrf_r - sun_ast_igrf_r) * 1e3))
    #     print('ast_axis_ra: %f' % degs(ast_axis_ra))
    #     print('ast_axis_dec: %f' % degs(ast_axis_dec))
    #     print('ast_zlon_ra: %f' % degs(ast_zlon_ra))

    aa = quaternion.as_rotation_vector(sc_igrf_q)
    angle = np.linalg.norm(aa)
    sc_angleaxis = [angle] + list(aa / angle)
    print('sc_angleaxis [rad]: %s' % arr2str(sc_angleaxis))
Example #20
    def orientationControl(self, target):
        r = quater.from_float_array(target)
        r_vect = quater.as_rotation_vector(r)

        # convert axis conventions: swap the x and y components, flip z
        cop_vect = copy.copy(r_vect)
        r_vect[0] = cop_vect[1]
        r_vect[1] = cop_vect[0]
        r_vect[2] *= -1

        r = quater.from_rotation_vector(r_vect)
        out = quater.as_float_array(r)
        return out
Example #21
File: dataset.py Project: xianyubai/fpl
def accumulate_egomotion(rots, vels):
    # Accumulate translation and rotation
    egos = []
    qa = np.quaternion(1, 0, 0, 0)
    va = np.array([0., 0., 0.])
    for rot, vel in zip(rots, vels):
        vel_rot = quaternion.rotate_vectors(qa, vel)
        va += vel_rot
        qa = qa * quaternion.from_rotation_vector(rot)
        egos.append(
            np.concatenate((quaternion.as_rotation_vector(qa), va), axis=0))
    return egos
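
A minimal usage sketch with made-up per-frame motions:

import numpy as np
import quaternion

rots = [np.array([0.0, 0.1, 0.0])] * 3   # small yaw increment per frame
vels = [np.array([1.0, 0.0, 0.0])] * 3   # unit forward step per frame
egos = accumulate_egomotion(rots, vels)
print(egos[-1])  # accumulated rotation vector (3) + translation (3)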
Example #22
def rot_to_aa_representation(sample_list):
    """
    Args:
        sample_list (list): of motion samples with shape of (seq_len, SMPL_NR_JOINTS*9).

    Returns:

    """
    out = [np.reshape(s, [-1, SMPL_NR_JOINTS, 3, 3]) for s in sample_list]
    aa = [quaternion.as_rotation_vector(quaternion.from_rotation_matrix(s)) for s in out]
    aa = [np.reshape(s, (-1, 72)) for s in aa]

    return aa
Example #23
    def transform(self, R, H, S, numbers, positions=None, forces=None):
        q = quaternion.from_rotation_matrix(R)
        vR = quaternion.as_rotation_vector(q) * self.phase
        q = quaternion.from_rotation_vector(vR)

        UDUs = self._calc_UDUs(q)

        M = chain(*[UDUs[self.lvec[z]] for z in numbers])
        M = la.block_diag(*M)

        H = M.T @ H @ M
        S = M.T @ S @ M

        pos_rot = positions @ R.T
        if forces is not None:
            force_rot = forces @ R.T
            return H, S, pos_rot, force_rot
        return H, S, pos_rot
Example #24
 def _get_ground_truth(self):
     """
     Return x, y, and z rotation
     3 dim total
     """
     obj_gid = self.sim.model.geom_name2id('object')
     # only x and y pos needed
     # obj_world_pos = self.sim.model.geom_pos[obj_gid]
     # obj_world_quat = quaternion.as_quat_array(self.sim.model.geom_quat[obj_gid].copy())
     obj_world_pose = self.sim.data.get_joint_qpos('object:joint')
     obj_world_pos = obj_world_pose[:3]
     obj_world_quat = quaternion.as_quat_array(obj_world_pose[3:])
     zrot = quaternion.as_rotation_vector(obj_world_quat)[-1]
     pose = np.zeros((3, ))
     pose[:2] = obj_world_pos[:2]
     pose[2] = zrot
     print(pose)
     return pose
Example #25
def update_tf_log(time_event):
    # type:(rospy.timer.TimerEvent)->None
    global diffs, tf_child, tf_parent

    try:
        (trans, rot) = get_tf_transform(tf_parent, tf_child,
                                        time_event.current_expected,
                                        rospy.Duration(0.1))
        rot_axis = qt.as_rotation_vector(rot)
        # squared rotation angle; the square root is taken below
        angle = rot_axis[0] * rot_axis[0] + \
                rot_axis[1] * rot_axis[1] + \
                rot_axis[2] * rot_axis[2]
    except Exception:
        rospy.logwarn("No tf found!")
        trans = (0, 0, 0)
        angle = 0

    added = np.array([[trans[0], trans[1], trans[2], math.sqrt(angle)]]).T

    diffs = np.append(diffs, added, axis=1)
Example #26
def find_final_angle_axis(pulse_sequence, n):
    '''
    Finds the final angle-axis representation of the rotation on each of the first n subspaces
    after the sequence of pulses given by pulse_sequence
    
    Inputs:
        pulse_sequence (list): a list of angle-axis tuples specifying the pulse sequence to be performed
        n (int): the number of subspaces to evaluate

    Returns:
        list of angle-axis tuples representing the composite rotations on the first n subspaces
    '''
    final_quaternions = find_final_quaternions(pulse_sequence, n)

    final_rotations = [
        quaternion.as_rotation_vector(q) for q in final_quaternions
    ]

    return [(np.linalg.norm(vec),
             vec / np.linalg.norm(vec)) if np.linalg.norm(vec) > 1e-12 else
            (0, np.array([0.0, 0.0, 0.0])) for vec in final_rotations]
Example #27
def quat_to_angle_axis(quat: qt.quaternion) -> Tuple[float, np.ndarray]:
    r"""Converts a quaternion to angle axis format

    :param quat: The quaternion
    :return:
        -   `float` --- The angle to rotate about the axis by
        -   `numpy.ndarray` --- The axis to rotate about. If :math:`\theta = 0`,
            then this is hard-coded to be the +x axis
    """

    rot_vec = qt.as_rotation_vector(quat)

    theta = np.linalg.norm(rot_vec)
    if np.abs(theta) < 1e-5:
        w = np.array([1, 0, 0])
        theta = 0.0
    else:
        w = rot_vec / theta

    return (theta, w)
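
Quick usage check (made-up input):

import numpy as np
import quaternion as qt

theta, w = quat_to_angle_axis(qt.from_rotation_vector([0.0, np.pi / 2, 0.0]))
print(theta, w)  # ~pi/2 about the +y axis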
Example #28
def quat_to_angle_axis(quat: np.quaternion) -> Tuple[float, np.ndarray]:
    r"""Converts a quaternion to angle axis format

    Args:
        quat (np.quaternion): The quaternion

    Returns:
        float: The angle to rotate about the axis by
        np.array: The axis to rotate about. If theta = 0, then this is hard-coded to be the +x axis
    """

    rot_vec = quaternion.as_rotation_vector(quat)

    theta = np.linalg.norm(rot_vec)
    if np.abs(theta) < 1e-5:
        w = np.array([1, 0, 0])
        theta = 0.0
    else:
        w = rot_vec / theta

    return (theta, w)
Example #29
 def rudders(self):
     if self.event['start_movement'] is not None:
         orientation = self.Λ
         angular_velocity = self.angular_velocity('SSK')
         a0 = np.array([B0, A0(self.movement_time), A0(self.movement_time)])
         a1 = np.array([B1, A1(self.movement_time), A1(self.movement_time)])
         program_orientation, program_angular_velocity = program_trajectory(
             self.movement_time)
         delta_angle = qn.as_rotation_vector(program_orientation.conj() *
                                             orientation)
         delta_angular_velocity = angular_velocity - program_angular_velocity
         signal = a0 * (delta_angle + a1 * delta_angular_velocity)
         signal = np.array([[1, +np.sqrt(2), +np.sqrt(2)],
                            [1, -np.sqrt(2), +np.sqrt(2)],
                            [1, -np.sqrt(2), -np.sqrt(2)],
                            [1, +np.sqrt(2), -np.sqrt(2)]]) @ signal
         res = np.concatenate(
             [self.dδ, (signal - TAU_2 * self.dδ - self.δ) / TAU_1])
     else:
         res = np.zeros(8)
     return res
Example #30
File: app.py Project: ww1251256/Somatic
def _path_to_line_points(path, height, width, xpad=0, ypad=0):
    dots = []

    for quat in path:
        roll, yaw, pitch = quaternion.as_rotation_vector(quat)

        x_coord = np.tan(pitch) * 120 + 122
        y_coord = np.tan(yaw) * -120 + 122
        dots.append([x_coord, y_coord])

    min_x = min([coord[0] for coord in dots])
    max_x = max([coord[0] for coord in dots])
    min_y = min([coord[1] for coord in dots])
    max_y = max([coord[1] for coord in dots])

    output = []

    for x, y in dots:
        output.append((_scale(x, min_x, max_x, xpad, width - xpad * 2),
                       _scale(y, min_y, max_y, ypad, height - ypad * 2)))

    return output
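
`_scale` is defined elsewhere in app.py; a sketch consistent with the call signature above (value, source range, destination offset and size):

def _scale(value, src_min, src_max, dst_offset, dst_size):
    # Hypothetical linear rescale: map value from [src_min, src_max]
    # into [dst_offset, dst_offset + dst_size]
    if src_max == src_min:
        return dst_offset
    return dst_offset + (value - src_min) / (src_max - src_min) * dst_size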
Example #31
    def sim_map_to_sim_continuous(self, coords):
        """Converts ground-truth 2D Map coordinates to absolute Habitat
        simulator position and rotation.
        """
        agent_state = self._env.sim.get_agent_state(0)
        y, x = coords
        min_x, min_y = self.map_obj_origin / 100.0

        cont_x = x / 20. + min_x
        cont_y = y / 20. + min_y
        agent_state.position[0] = cont_y
        agent_state.position[2] = cont_x

        rotation = agent_state.rotation
        rvec = quaternion.as_rotation_vector(rotation)

        if self.args.train_single_eps:
            rvec[1] = 0.0
        else:
            rvec[1] = np.random.rand() * 2 * np.pi
        rot = quaternion.from_rotation_vector(rvec)

        return agent_state.position, rot