Example #1
def get_corrected_orientation(E, target_orientation, evs):
    # Correct the coordinate-system (cs) orientation so it aligns with the standard defined at foot touchdown

    E_quats = quaternion.from_rotation_matrix(E)
    target_quats = np.repeat(
        quaternion.from_rotation_matrix(target_orientation), len(evs))
    # Get relative rotation between marker-cs and ideal-cs during touchdowns
    rel_quats = E_quats[evs].conj() * target_quats
    rel_quat_array = quaternion.as_float_array(rel_quats)
    # Find the average relative orientation between the ideal cs and all touchdown cs
    eigvals, eigvecs = np.linalg.eig(rel_quat_array.T @ rel_quat_array)
    av_rel_quat_array = eigvecs[:, np.argmax(eigvals)]
    # Fix the eigenvector's arbitrary sign if it disagrees with the samples
    av_signs = np.sign(av_rel_quat_array)
    ar_signs = np.sign(rel_quat_array)
    if (av_signs != ar_signs).all():
        av_rel_quat_array = -av_rel_quat_array

    av_rel_quats = np.repeat(quaternion.as_quat_array(av_rel_quat_array),
                             len(E_quats))
    corrected_quats = E_quats * av_rel_quats
    corrected_orientation = quaternion.as_rotation_matrix(corrected_quats)

    return corrected_orientation
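The eigendecomposition above is the standard quaternion-averaging trick: the mean rotation is the principal eigenvector of the 4x4 accumulator matrix Q^T Q. A minimal self-contained sketch of just that step, assuming the numpy-quaternion package; the toy angles are made up for illustration:

import numpy as np
import quaternion

# Toy data: three small rotations about z (hypothetical values)
angles = np.deg2rad([9.0, 10.0, 11.0])
rotvecs = np.zeros((3, 3))
rotvecs[:, 2] = angles
Q = quaternion.as_float_array(quaternion.from_rotation_vector(rotvecs))

# Average: principal eigenvector of Q^T Q (symmetric, so eigh suffices)
eigvals, eigvecs = np.linalg.eigh(Q.T @ Q)
v = eigvecs[:, np.argmax(eigvals)]
v = v if v[0] >= 0 else -v  # fix the eigenvector's arbitrary sign
avg = quaternion.as_quat_array(v)
print(quaternion.as_rotation_vector(avg))  # ~[0, 0, 0.1745], i.e. 10 degrees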
Example #2
def move_and_rotate_agent(my_agent, my_rotation, move=[0.9539339, 0.1917877, 12.163067]):
    """
    The agent has a translation and rotation; when these change, the sensor orientation and
    position with respect to the habitat frame change as well. The sensors themselves remain
    unchanged in orientation and position with respect to the agent frame.
    :param my_agent: agent object
    :param my_rotation: quaternion with respect to habitat frame
    :param move: in habitat frame [x,y,z]
    :return:
    """

    my_agent_state = my_agent.get_state()
    r1 = my_agent_state.rotation  # rotation from Habitat to Agent wrt Habitat frame
    t1 = my_agent_state.position  # translation from Habitat to Agent wrt Habitat frame

    # h1 is the current Habitat-to-Agent homogeneous transform (HT); h1new is the new one.
    # h3 is the Habitat-to-sensor HT, i.e. h1 (or h1new) multiplied by the Agent-to-sensor HT.
    # Example: h1new(left_rgb) * h1_inv * h3, where h1_inv * h3 gives the sensor pose wrt the agent.
    # Important to remember: get_state() provides position and orientation wrt the Habitat frame.

    h1 = homogenous_transform(quaternion.as_rotation_matrix(r1), list(t1))
    h1_inv = inverse_homogenous_transform(h1)
    h1new = homogenous_transform(quaternion.as_rotation_matrix(my_rotation), move)
    h3_left_rgb = homogenous_transform(quaternion.as_rotation_matrix(
        my_agent_state.sensor_states["left_rgb_sensor"].rotation),
        list(my_agent_state.sensor_states["left_rgb_sensor"].position))
    h3_right_rgb = homogenous_transform(quaternion.as_rotation_matrix(
        my_agent_state.sensor_states["right_rgb_sensor"].rotation),
        list(my_agent_state.sensor_states["right_rgb_sensor"].position))
    h3_depth = homogenous_transform(quaternion.as_rotation_matrix(
        my_agent_state.sensor_states["depth_sensor"].rotation),
        list(my_agent_state.sensor_states["depth_sensor"].position))

    h = h1new.dot(h1_inv)
    newh3_left_rgb = h.dot(h3_left_rgb)
    newh3_right_rgb = h.dot(h3_right_rgb)
    newh3_depth = h.dot(h3_depth)
    # compute new sensor states for each sensor
    my_agent_state.sensor_states["left_rgb_sensor"].rotation = \
        quaternion.from_rotation_matrix(newh3_left_rgb[0:3, 0:3])
    D = newh3_left_rgb[0:3, 3]
    my_agent_state.sensor_states["left_rgb_sensor"].position = \
        D.T
    my_agent_state.sensor_states["right_rgb_sensor"].rotation = \
        quaternion.from_rotation_matrix(newh3_right_rgb[0:3, 0:3])
    D = newh3_right_rgb[0:3, 3]
    my_agent_state.sensor_states["right_rgb_sensor"].position = \
        D.T
    my_agent_state.sensor_states["depth_sensor"].rotation = \
        quaternion.from_rotation_matrix(newh3_depth[0:3, 0:3])
    D = newh3_depth[0:3, 3]
    my_agent_state.sensor_states["depth_sensor"].position = \
        D.T
    # agent state
    my_agent_state.rotation = my_rotation
    my_agent_state.position = move

    my_agent.set_state(my_agent_state, infer_sensor_states=False)
    return
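The snippet assumes homogenous_transform and inverse_homogenous_transform helpers defined elsewhere in its project; a minimal sketch of what they presumably look like:

import numpy as np

def homogenous_transform(R, t):
    # Build a 4x4 homogeneous transform from a 3x3 rotation and a translation
    H = np.eye(4)
    H[0:3, 0:3] = R
    H[0:3, 3] = t
    return H

def inverse_homogenous_transform(H):
    # Closed-form inverse of a rigid transform: inv([R t]) = [R.T, -R.T @ t]
    R = H[0:3, 0:3]
    t = H[0:3, 3]
    return homogenous_transform(R.T, -R.T @ t)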
Example #3
def decompose_svd_mat4(M):
    # np.linalg.svd returns V transposed, so Rr here is actually V^T; the factors are
    # proper rotations (valid inputs to from_rotation_matrix) only when det(Rl) = det(Rr) = +1.
    Rl, s, Rr = np.linalg.svd(M[0:3, 0:3])

    ql = quaternion.from_rotation_matrix(Rl)
    qr = quaternion.from_rotation_matrix(Rr)

    t = M[0:3, 3]
    return t, ql, s, qr
Example #4
def calOrientationError(R):
    R1 = np.array(R)
    R2 = np.identity(3)
    q1 = quaternion.from_rotation_matrix(R1)
    q2 = quaternion.from_rotation_matrix(R2)
    q = q1.inverse() * q2
    theta, v = __cal_axis_angle_from_q__(q)
    oError = np.degrees(theta)
    if oError > 180.0:
        oError = 360.0 - oError
    return oError
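calOrientationError depends on an external __cal_axis_angle_from_q__ helper; a hedged sketch of it, plus a sanity check that a 10-degree rotation about z yields an error of 10:

import numpy as np
import quaternion

def __cal_axis_angle_from_q__(q):
    # Recover (angle, axis) from a unit quaternion q = (cos(a/2), sin(a/2)*v)
    q = q.normalized()
    theta = 2.0 * np.arccos(np.clip(q.w, -1.0, 1.0))
    s = np.sqrt(max(1.0 - q.w * q.w, 0.0))
    if s < 1e-12:  # no rotation: the axis is arbitrary
        return theta, np.array([0.0, 0.0, 1.0])
    return theta, np.array([q.x, q.y, q.z]) / s

R = quaternion.as_rotation_matrix(
    quaternion.from_rotation_vector([0.0, 0.0, np.deg2rad(10.0)]))
print(calOrientationError(R))  # ~10.0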
Example #5
    def get_world_coordinates(self,
                              filename,
                              axes=['z'],
                              times=None,
                              averagedur=0.1):
        axinddict = {'x': 0, 'y': 1, 'z': 2}

        with h5py.File(filename, 'r') as h5calib:
            acc = np.array(h5calib['/data/Accel'])
            t = np.array(h5calib['/data/t'])

        gax = np.eye(3)

        if len(axes) == 1:
            times = [0]
            averagedur = 4 * t[-1]

        d2 = averagedur / 2

        axord = []
        for axis1, time1 in zip(axes, times):
            ist = np.logical_and(t >= time1 - d2, t <= time1 + d2)
            ax = np.mean(acc[ist, :], axis=0)

            m = re.match('([+-]?)([XYZxyz])', axis1)
            if m is None:
                raise ValueError('Unrecognized axis specification: {}'.format(axis1))

            if m.group(1) == '-':
                axsign = -1.0
            else:
                axsign = 1.0

            axind = axinddict[m.group(2).lower()]

            gax[:, axind] = ax * axsign
            axord.append(axind)

        axord = np.concatenate((axord, np.setdiff1d(range(3), axord)))
        axrevord = np.argsort(np.arange(3)[axord])

        basis = self._gramschmidt(gax[:, axord])
        basis = basis[:, axrevord]

        # check for right handed-ness
        # the Z axis should be equal to the cross product of the X and Y axes
        # because of small numerical issues, it's sometimes not exactly equal,
        # so we check that they're in the same direction
        assert (np.dot(np.cross(basis[:, 0], basis[:, 1]), basis[:, 2]) > 0.9)

        self.chip2world_rot = basis
        self.qchip2world = quaternion.from_rotation_matrix(basis)
        self.qworld2chip = quaternion.from_rotation_matrix(basis.T)
Example #6
def calPositionAndOrientationError(H, H_exact):
    H1 = np.array(H)
    H2 = np.array(H_exact)
    # note: only the x and y translation components enter the position error here
    pError = np.linalg.norm(H1[0:2, 3] - H2[0:2, 3])
    q1 = quaternion.from_rotation_matrix(H1[0:3, 0:3])
    q2 = quaternion.from_rotation_matrix(H2[0:3, 0:3])
    q = q1.inverse()*q2
    theta, v = __cal_axis_angle_from_q__(q)
    oError = np.degrees(theta) 
    if oError > 180.0:
        oError = 360.0 - oError 
    return pError, oError
Example #7
def setAgentRotation(agent, nextBestCameraRotation):
    # R_s_c is a module-level constant, presumably the sensor-to-camera rotation
    R_w_s = nextBestCameraRotation.dot(R_s_c.T)

    agent_state = agent.get_state()
    agent_state.rotation = quaternion.from_rotation_matrix(R_w_s)
    agent_state.sensor_states["left_sensor"].rotation = quaternion.from_rotation_matrix(R_w_s)
    agent_state.sensor_states["right_sensor"].rotation = quaternion.from_rotation_matrix(R_w_s)
    agent_state.sensor_states["left_sensor_depth"].rotation = quaternion.from_rotation_matrix(R_w_s)
    agent_state.sensor_states["right_sensor_depth"].rotation = quaternion.from_rotation_matrix(R_w_s)

    agent.set_state(agent_state)
    return agent.state  # agent.scene_node.transformation_matrix()
Example #8
def bislerp(Qa, Qb, t):
    r"""
    Linear interpolation of two orthogonal matrices.
    Assume that :math:`Q_a` and :math:`Q_b` refer to
    timepoints :math:`0` and :math:`1` respectively.
    Using spherical linear interpolation (slerp), find the
    orthogonal matrix at timepoint :math:`t`.
    """

    if Qa is None and Qb is None:
        return None
    if Qa is None:
        return Qb
    if Qb is None:
        return Qa

    tol = 1e-12
    qa = quaternion.from_rotation_matrix(Qa)
    qb = quaternion.from_rotation_matrix(Qb)

    quat_i = quaternion.quaternion(0, 1, 0, 0)
    quat_j = quaternion.quaternion(0, 0, 1, 0)
    quat_k = quaternion.quaternion(0, 0, 0, 1)

    quat_array = [
        qa,
        -qa,
        qa * quat_i,
        -qa * quat_i,
        qa * quat_j,
        -qa * quat_j,
        qa * quat_k,
        -qa * quat_k,
    ]

    def dot(qi, qj):
        return np.sum(
            [getattr(qi, s) * getattr(qj, s) for s in ["x", "y", "z", "w"]])

    dot_arr = [abs(dot(qi, qb)) for qi in quat_array]
    max_idx = np.argmax(dot_arr)
    max_dot = dot_arr[max_idx]
    qm = quat_array[max_idx]

    if max_dot > 1 - tol:
        return Qb
    else:
        qm_slerp = quaternion.slerp(qm, qb, 0, 1, t)
        qm_norm = qm_slerp.normalized()
        Qab = quaternion.as_rotation_matrix(qm_norm)
        return Qab
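A quick usage sketch for the bislerp above (assuming numpy-quaternion): halfway between the identity and a 90-degree rotation about z should land on a 45-degree rotation.

import numpy as np
import quaternion

Qa = np.eye(3)
Qb = quaternion.as_rotation_matrix(
    quaternion.from_rotation_vector([0.0, 0.0, np.pi / 2]))

Qm = bislerp(Qa, Qb, 0.5)
rotvec = quaternion.as_rotation_vector(quaternion.from_rotation_matrix(Qm))
print(np.degrees(rotvec[2]))  # ~45.0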
Example #9
def bislerp(
    Qa: np.ndarray,
    Qb: np.ndarray,
    t: float,
) -> np.ndarray:
    r"""
    Linear interpolation of two orthogonal matrices.
    Assume that :math:`Q_a` and :math:`Q_b` refer to
    timepoints :math:`0` and :math:`1` respectively.
    Using spherical linear interpolation (slerp), find the
    orthogonal matrix at timepoint :math:`t`.
    """

    if not Qa.any() and not Qb.any():
        return np.zeros((3, 3))
    if not Qa.any():
        return Qb
    if not Qb.any():
        return Qa

    tol = 1e-12
    qa = quaternion.from_rotation_matrix(Qa)
    qb = quaternion.from_rotation_matrix(Qb)

    quat_i = quaternion.quaternion(0, 1, 0, 0)
    quat_j = quaternion.quaternion(0, 0, 1, 0)
    quat_k = quaternion.quaternion(0, 0, 0, 1)

    quat_array = [
        qa,
        -qa,
        qa * quat_i,
        -qa * quat_i,
        qa * quat_j,
        -qa * quat_j,
        qa * quat_k,
        -qa * quat_k,
    ]

    dot_arr = [abs((qi.components * qb.components).sum()) for qi in quat_array]
    max_idx = int(np.argmax(dot_arr))
    max_dot = dot_arr[max_idx]
    qm = quat_array[max_idx]

    if max_dot > 1 - tol:
        return Qb

    qm_slerp = quaternion.slerp(qm, qb, 0, 1, t)
    qm_norm = qm_slerp.normalized()
    Qab = quaternion.as_rotation_matrix(qm_norm)
    return Qab
Example #10
def interpolate_poses(m1, m2, times):
    r1, t1 = matrix.split(m1)
    r2, t2 = matrix.split(m2)

    t = lerp(t1, t2, times)

    q1 = quaternion.from_rotation_matrix(r1)
    q2 = quaternion.from_rotation_matrix(r2)

    # q = np.slerp_vectorized(q1, q2, times)
    q = nlerp(q1, q2, times)

    r = quaternion.as_rotation_matrix(q)
    return matrix.join(r, t)
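interpolate_poses assumes lerp, nlerp, and a matrix helper module from its project; a minimal sketch of nlerp (normalized linear interpolation, a cheap approximation to slerp) under those assumptions:

import numpy as np
import quaternion

def nlerp(q1, q2, times):
    # Blend the float components linearly, then renormalize
    a = quaternion.as_float_array(q1)
    b = quaternion.as_float_array(q2)
    if np.dot(a, b) < 0:  # take the shorter arc
        b = -b
    out = a + np.asarray(times)[..., None] * (b - a)
    out /= np.linalg.norm(out, axis=-1, keepdims=True)
    return quaternion.as_quat_array(out)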
Example #11
    def _get_transform(self, c_idx, n_idx):
        c_pose = self.poses[c_idx]
        n_pose = self.poses[n_idx]

        local_dtrans = np.linalg.inv(c_pose) @ n_pose[:, 3]

        # TODO - remove quaternions for manual methods
        quat_c = quat.from_rotation_matrix(c_pose[:3,:3])
        quat_n = quat.from_rotation_matrix(n_pose[:3,:3])
        quat_t = quat_c.inverse() * quat_n

        gt_quat_t_ar = quat.as_float_array(quat_t).astype(np.float32)
        gt_trans = local_dtrans[:3].astype(np.float32)
        return gt_quat_t_ar, gt_trans
Example #13
    def from_tr_matrix(matrix: np.ndarray) -> Pose:

        tvec = matrix[:3, 3]
        return Pose(
            Position(tvec[0], tvec[1], tvec[2]),
            Orientation.from_quaternion(quaternion.from_rotation_matrix(matrix[:3, :3])),
        )
Example #14
def test_from_rotation_matrix(Rs):
    rot_mat_eps = 10*eps
    try:
        from scipy import linalg
    except ImportError:
        rot_mat_eps = 400*eps

    for i, R1 in enumerate(Rs):
        R2 = quaternion.from_rotation_matrix(quaternion.as_rotation_matrix(R1))
        d = quaternion.rotation_intrinsic_distance(R1, R2)
        assert d < rot_mat_eps, (i, R1, R2, d)  # Can't use allclose here; we don't care about rotor sign

    Rs2 = quaternion.from_rotation_matrix(quaternion.as_rotation_matrix(Rs))
    for R1, R2 in zip(Rs, Rs2):
        d = quaternion.rotation_intrinsic_distance(R1, R2)
        assert d < rot_mat_eps, (R1, R2, d)  # Can't use allclose here; we don't care about rotor sign
Example #15
def read_robotcar_v2_train(
        robotcar_path: str) -> Dict[str, kapture.PoseTransform]:
    """
    Read the robot car v2 train data file

    :param robotcar_path: path to the data file
    :return: train data
    """
    with (open(path.join(robotcar_path, 'robotcar_v2_train.txt'), 'r')) as f:
        lines = f.readlines()
        # remove empty lines
        lines = (l2 for l2 in lines if l2.strip())
        # trim trailing EOL
        lines = (l3.rstrip("\n\r") for l3 in lines)
        # split on whitespace
        lines = (l4.split() for l4 in lines)
        lines = list(lines)
        train_data = {
            table[0].replace(".png", ".jpg"): kapture.PoseTransform(
                r=quaternion.from_rotation_matrix(
                    [[float(v) for v in table[1:4]],
                     [float(v) for v in table[5:8]],
                     [float(v) for v in table[9:12]]]),
                t=[float(v)
                   for v in [table[4], table[8], table[12]]]).inverse()
            for table in lines
        }
    return train_data
Example #16
def as_ROSgoal(Homo_mat):
    '''
    ROS goal: goal = (x, y, z, qx, qy, qz, qw)
    '''
    q = quaternion.from_rotation_matrix(Homo_mat[0:3, 0:3])
    return np.array(
        [Homo_mat[0, 3], Homo_mat[1, 3], Homo_mat[2, 3], q.x, q.y, q.z, q.w])
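For reference, the inverse direction; from_ROSgoal below is a hypothetical helper sketched here, not part of the original code:

import numpy as np
import quaternion

def from_ROSgoal(goal):
    # goal = (x, y, z, qx, qy, qz, qw); note np.quaternion takes (w, x, y, z)
    H = np.eye(4)
    H[0:3, 0:3] = quaternion.as_rotation_matrix(
        np.quaternion(goal[6], goal[3], goal[4], goal[5]))
    H[0:3, 3] = goal[0:3]
    return H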
Example #17
    def get_state(self):
        self.states = np.array([])
        dim = len(self.frame_ids) * (8 + len(self.long_track_features))
        self.states.resize((dim, 1))
        self.frameid2state = {}

        index = 0
        frame_count = 0
        for frame_id in self.frame_ids:
            # position 3x1
            self.states[index:(index + 3)] = self.pred_poses[frame_id][:, [3]]
            self.frameid2state[frame_id] = index
            index += 3
            # orientation as quaternion 4x1
            q = quaternion.from_rotation_matrix(
                self.pred_poses[frame_id][:, :3])
            self.states[index:(index + 4)] = \
                quaternion.as_float_array(q).reshape(4, 1)
            index += 4
            # scale 1x1
            self.states[index] = 1.0
            index += 1
            for feature_pts in self.long_track_features:
                # depth value at feature coordinates 1x1
                pt_x = feature_pts[frame_count][0]
                pt_y = feature_pts[frame_count][1]
                depth = get_depth(self.pred_depths[frame_id], pt_x, pt_y)
                self.states[index] = depth
                index += 1
            frame_count += 1
Example #18
 def set_goal(self, goallengths=None):
    """ Sets the goal to a random point of the robot's workspace [x, y, z] in [m]
    and sets the tangent vector accordingly."""
    if goallengths is None:
       l11goal = np.random.uniform(self.l1min, self.l1max)
       l12goal = np.random.uniform(self.l1min, self.l1max)
       l13goal = np.random.uniform(self.l1min, self.l1max)
       l21goal = np.random.uniform(self.l2min, self.l2max)
       l22goal = np.random.uniform(self.l2min, self.l2max)
       l23goal = np.random.uniform(self.l2min, self.l2max)
    else:
       l11goal, l12goal, l13goal = goallengths[0:3]
       l21goal, l22goal, l23goal = goallengths[3:6]
    self.goal_lengths = np.array([l11goal, l12goal, l13goal, l21goal, l22goal, l23goal])
    dl21goal = l21goal - l11goal
    dl22goal = l22goal - l12goal
    dl23goal = l23goal - l13goal
    kappa1, phi1, seg_len1 = self.arc_params(l11goal, l12goal, l13goal)
    kappa2, phi2, seg_len2 = self.arc_params(dl21goal, dl22goal, dl23goal)
    T01 = self.transformation_matrix(kappa1, phi1, seg_len1)
    T12 = self.transformation_matrix(kappa2, phi2, seg_len2)
    T02 = np.matmul(T01, T12)
    self.goal = np.matmul(T02, self.base)[0:3]
    self.normal_vec_goal = T02[0:3, 0]
    self.binormal_vec_goal = T02[0:3, 1]
    self.tangent_vec_goal = T02[0:3, 2]
    # quaternions
    self.qgoal = quaternion.as_float_array(quaternion.from_rotation_matrix(T02[:3, :3]))
    if np.sign(self.qgoal[0]) != 0.0:
       self.qgoal = np.sign(self.qgoal[0]) * self.qgoal # make sure that quaternions all have the same sign for w
    # calculate RPY angles of goal here or quaternion
    self.Rgoal, self.Pgoal, self.Ygoal = self.get_orientation(T02)
Example #19
def load_extrinsics(folder, filename="t265_left_extrinsics.csv"):
    """ Load camera extrinsics.

    Parameters
    ----------
    folder: str
        Path to the export folder.

    filename: str, default 't265_left_extrinsics.csv'
        Filename of the extrinsics file.

    Returns
    -------
    t: numpy.ndarray, shape (3,)
        Translation between the two cameras.

    q: numpy.ndarray, shape (4,)
        Rotation between the two cameras in quaternions.
    """
    with open(os.path.join(folder, filename)) as f:
        reader = csv.reader(f)
        t = np.array([float(row[1]) for row in reader if row[0].startswith("T")])
        f.seek(0)  # rewind so the reader scans the file again for rotation entries
        R = np.array([float(row[1]) for row in reader if row[0].startswith("R")])

    q = as_float_array(from_rotation_matrix(R.reshape(3, 3)))

    return t, q
Example #20
def convert_center_ROSgoal2(dq, Init_GOAL):
    '''
    ROS goal: goal = (x, y, z, qx, qy, qz, qw)
    '''
    with open(Init_GOAL) as f:
        init_goal = np.array(yaml.safe_load(f), dtype='float')

    q0 = np.quaternion(init_goal[6], init_goal[3], init_goal[4], init_goal[5])
    x = np.matrix([[1.0, 0.0, -0.035], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
    # x = np.matrix([[1.0,0.0,-0.0],[0.0,1.0,0.0],[0.0,0.0,1.0]])
    y0 = np.matrix([[cos(dq[2]), -sin(dq[2]), dq[0]],
                    [sin(dq[2]), cos(dq[2]), dq[1]], [0.0, 0.0, 1.0]])
    y = x * y0 * np.linalg.inv(x)
    H = np.matlib.identity(4)
    H[0:2, 0:2] = y[0:2, 0:2]
    H[0:2, 3] = y[0:2, 2]

    H0 = np.matlib.identity(4)
    H0[0, 3] = init_goal[0]
    H0[1, 3] = init_goal[1]
    H0[2, 3] = init_goal[2]
    H0[0:3, 0:3] = quaternion.as_rotation_matrix(q0)
    q = q0 * quaternion.from_rotation_matrix(H[0:3, 0:3])
    H1 = H0 * H
    goal = np.array([H1[0, 3], H1[1, 3], H1[2, 3], q.x, q.y, q.z, q.w])
    return goal
Example #21
    def convert_habitat_to_replica(self, pos, rot):
        import quaternion
        # Define the transforms needed
        _tmp = np.eye(3, dtype=np.float32)
        UnitX = _tmp[0]
        UnitY = _tmp[1]
        UnitZ = _tmp[2]

        R_hsim_replica = habitat_sim.utils.quat_from_two_vectors(
            -UnitZ, -UnitY)
        T_hsim_replica = np.eye(4, dtype=np.float32)
        T_hsim_replica[0:3,
                       0:3] = quaternion.as_rotation_matrix(R_hsim_replica)

        R_forward_back = habitat_sim.utils.quat_from_two_vectors(-UnitZ, UnitZ)
        T_replicac_hsimc = np.eye(4, dtype=np.float32)
        T_replicac_hsimc[0:3,
                         0:3] = quaternion.as_rotation_matrix(R_forward_back)

        # Transforming the state happens here
        T_hsim_c = np.eye(4, dtype=np.float32)
        T_hsim_c[0:3, 0:3] = quaternion.as_rotation_matrix(rot)
        T_hsim_c[0:3, 3] = pos

        T_c_hsim = np.linalg.inv(T_hsim_c)
        T_c_replica = T_replicac_hsimc @ T_c_hsim @ T_hsim_replica

        T_replica_c = np.linalg.inv(T_c_replica)

        replica_pos = T_replica_c[0:3, 3]
        replica_rot = quaternion.from_rotation_matrix(T_replica_c[0:3, 0:3])

        return replica_pos, replica_rot
Example #22
    def _on_endpoint_state(self, msg):

        cart_pose_trans_mat = np.asarray(msg.O_T_EE).reshape(4,4,order='F')

        self._cartesian_pose = {
            'position': cart_pose_trans_mat[:3,3],
            'orientation': quaternion.from_rotation_matrix(cart_pose_trans_mat[:3,:3]) }

        self._cartesian_effort = {
            'force': np.asarray([ msg.O_F_ext_hat_K.wrench.force.x,
                                  msg.O_F_ext_hat_K.wrench.force.y,
                                  msg.O_F_ext_hat_K.wrench.force.z]),

            'torque': np.asarray([ msg.O_F_ext_hat_K.wrench.torque.x,
                                   msg.O_F_ext_hat_K.wrench.torque.y,
                                   msg.O_F_ext_hat_K.wrench.torque.z])
        }

        self._stiffness_frame_effort = {
            'force': np.asarray([ msg.K_F_ext_hat_K.wrench.force.x,
                                  msg.K_F_ext_hat_K.wrench.force.y,
                                  msg.K_F_ext_hat_K.wrench.force.z]),

            'torque': np.asarray([ msg.K_F_ext_hat_K.wrench.torque.x,
                                   msg.K_F_ext_hat_K.wrench.torque.y,
                                   msg.K_F_ext_hat_K.wrench.torque.z])
        }

        self._tip_states = TipState(msg.header.stamp, deepcopy(self._cartesian_pose), deepcopy(self._cartesian_velocity), deepcopy(self._cartesian_effort), deepcopy(self._stiffness_frame_effort))
Example #23
 def __init__(self,
              particle,
              timestep,
              f_ext,
              viscosity,
              kT,
              pos0=np.zeros(3),
              orient0=np.identity(3),
              seed=None):
     self.particle = particle
     self.timestep = timestep
     self.f_ext = f_ext
     self.rng_seed = seed
     self.viscosity = viscosity
     self.kT = kT
     # Set the particle's initial position; accept any array-like object
     particle._pos = np.asarray(pos0)
     if particle._pos.shape != (3, ):
         raise ValueError(
             'Initial position must be array-like with length 3')
     # Be able to handle both a quaternion or a rotation matrix
     if isinstance(orient0, quaternion.quaternion):
         particle._orient = orient0
     elif isinstance(orient0, np.ndarray) and orient0.shape == (3, 3):
         particle._orient = quaternion.from_rotation_matrix(orient0)
     else:
         raise TypeError(
             'orient0 must be a quaternion or a 3 x 3 ndarray representing a rotation matrix'
         )
     self.rng = np.random.RandomState(seed)
Example #24
    def load_sample_sequence(self, filename):
        if filename.endswith('.pkl'):
            # This is a pickle file that contains a motion sequence stored in angle-axis format
            sample_sequence = pkl.load(open(filename, 'rb'))
            poses = np.array(sample_sequence)

        elif filename.endswith('.npz'):
            # This is a numpy file that was produced using the evaluation code
            preds = np.load(filename)['prediction']

            # Choose one sample from the file (here index 1) to be displayed
            pred = preds[1]

            # This is in rotation matrix format, so convert to angle-axis
            poses = np.reshape(pred, [pred.shape[0], -1, 3, 3])
            poses = quaternion.as_rotation_vector(
                quaternion.from_rotation_matrix(poses))
            poses = np.reshape(poses, [poses.shape[0], -1])

        else:
            raise ValueError(
                "do not recognize file format of file {}".format(filename))

        # Unity can also display orientations and accelerations, but they are not available here
        oris = np.zeros([poses.shape[0], 6 * 3])
        accs = np.zeros([poses.shape[0], 6 * 3])

        self.sample_sequence = {'gt': poses, 'ori': oris, 'acc': accs}
        self.next_frame = 0
Example #25
def OrbitalQuaternion(r, V):
    """
    Input:
    r, V - radius vector and velocity of sc mass center in inertial frame
    
    Returns:
    np.quaternion that determines orbital frame orientation with respect to inertial frame in inertial frame
    
    P.S. to convert a quternion M_i from inertial basis to bounded basis M_e one needs to have
    a quaternion determines orientation of bounded basis relative to intertial basis
    then M_e = A.conj() * M_i * A; A can be in either inertial and bounded basis
    """
    o1 = r / np.linalg.norm(r)
    o3 = np.cross(r, V)
    o3 /= np.linalg.norm(o3)
    o2 = np.cross(o3, o1)

    xi = np.array([1, 0, 0])
    yi = np.array([0, 1, 0])
    zi = np.array([0, 0, 1])

    buf1 = [np.dot(o1, xi), np.dot(o1, yi), np.dot(o1, zi)]
    buf2 = [np.dot(o2, xi), np.dot(o2, yi), np.dot(o2, zi)]
    buf3 = [np.dot(o3, xi), np.dot(o3, yi), np.dot(o3, zi)]
    B = np.vstack((buf1, buf2, buf3)).T
    A = qt.from_rotation_matrix(B)

    return A
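A sanity check under the same assumptions (numpy-quaternion imported as qt): for a circular equatorial orbit the orbital frame coincides with the inertial frame, so the result is the identity quaternion.

import numpy as np
import quaternion as qt

r = np.array([7000.0, 0.0, 0.0])  # position along inertial x
V = np.array([0.0, 7.5, 0.0])     # velocity along inertial y
print(OrbitalQuaternion(r, V))    # quaternion(1, 0, 0, 0)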
Example #26
def convert_testing_extrinsics(offset: int,
                               testing_extrinsics: Iterable[VirtualGalleryTestingExtrinsic],
                               images: kapture.RecordsCamera,
                               trajectories: kapture.Trajectories) -> None:
    """
    Import all testing extrinsics into the images and trajectories.

    :param offset: offset added to the generated frame timestamps
    :param testing_extrinsics: testing extrinsics to import
    :param images: image list to add to
    :param trajectories: trajectories to add to
    """
    # Map (light_id, loop_id, frame_id) to a unique timestamp
    testing_frames_tuples = ((extrinsic.light_id, extrinsic.occlusion_id, extrinsic.frame_id)
                             for extrinsic in testing_extrinsics)
    testing_frames_tuples = OrderedDict.fromkeys(testing_frames_tuples).keys()
    testing_frame_mapping = {v: n + offset for n, v in enumerate(testing_frames_tuples)}

    # Export images and trajectories
    logger.info("Converting testing images and trajectories...")
    for extrinsic in testing_extrinsics:
        rotation_matrix = [extrinsic.extrinsics[0:3], extrinsic.extrinsics[4:7], extrinsic.extrinsics[8:11]]
        rotation = quaternion.from_rotation_matrix(rotation_matrix)
        timestamp = testing_frame_mapping[(extrinsic.light_id, extrinsic.occlusion_id, extrinsic.frame_id)]
        camera_device_id = _get_testing_camera_name(extrinsic.light_id, extrinsic.occlusion_id, extrinsic.frame_id)
        translation_vector = [extrinsic.extrinsics[3], extrinsic.extrinsics[7], extrinsic.extrinsics[11]]
        images[(timestamp, camera_device_id)] = (f'testing/gallery_light{extrinsic.light_id}_occlusion'
                                                 f'{extrinsic.occlusion_id}/frames/rgb/'
                                                 f'camera_0/rgb_{extrinsic.frame_id:05}.jpg')
        trajectories[(timestamp, camera_device_id)] = kapture.PoseTransform(rotation, translation_vector)
Example #27
 def matrix_world(self, mx44):
     if mx44 is None:
         self.q = None
     else:
         mx33 = np.array(mx44)[:3, :3]  # copy so the caller's matrix is not mutated
         mx33 /= np.linalg.norm(mx33, axis=0)  # strip any scale from the basis columns
         self.q = quaternion.from_rotation_matrix(mx33).conj()
Example #28
    def __init__(self, limb, ee_frame_name="panda_link8", additional_segment_config=None, description=None):

        self._kdl_tree = None
        if description is None:
            self._franka = URDF.from_parameter_server(key='robot_description')
        else:
            self._franka = URDF.from_xml_file(description)

        self._kdl_tree = kdl_tree_from_urdf_model(self._franka)

        if additional_segment_config is not None:
            # add additional segments to account for NE_T_EE and F_T_NE transformations
            # ---- this may cause issues in eg. inertia computations maybe? TODO: test inertia behaviour
            for c in additional_segment_config:
                q = quaternion.from_rotation_matrix(c["origin_ori"])
                kdl_origin_frame = PyKDL.Frame(PyKDL.Rotation.Quaternion(q.x, q.y, q.z, q.w),
                                             PyKDL.Vector(*(c["origin_pos"].tolist())))
                kdl_sgm = PyKDL.Segment(c["child_name"], PyKDL.Joint(c["joint_name"]),
                                      kdl_origin_frame, PyKDL.RigidBodyInertia())
                self._kdl_tree.addSegment(
                    kdl_sgm, c["parent_name"])

        self._base_link = self._franka.get_root()
        # self._tip_frame = PyKDL.Frame()
        self._limb_interface = limb
        self.create_chain(ee_frame_name)
Example #29
    def __init__(
            self,
            transform: TransformType = None,
            *,
            translation: Iterable[float] = (0, 0, 0),
            rotation: RotationType = (1, 0, 0, 0),
    ) -> None:
        if transform is not None:
            if isinstance(transform, Transform3D):
                self._translation = transform.translation
                self._rotation = transform.rotation
                return

            if isinstance(transform, Sequence):  # pylint: disable=W1116
                transform = np.array(transform)
            if transform.shape != (3, 4) and transform.shape != (4, 4):
                raise ValueError(
                    "The shape of input transform matrix must be 3x4 or 4x4.")

            self._translation = Vector3D(transform[0, 3], transform[1, 3],
                                         transform[2, 3])
            self._rotation = from_rotation_matrix(transform[:3, :3])
            return

        self._translation = Vector3D(*translation)
        if isinstance(rotation, quaternion):
            self._rotation = quaternion(rotation)
        else:
            self._rotation = quaternion(*rotation)
Example #30
def import_robotcar_rig(extrinsics_dir_path: str) -> kapture.Rigs:
    """
    Read extrinsics files and convert to kapture rigs
    :param extrinsics_dir_path: path to directory with extrinsics
    :return: kapture.Rigs object
    """
    # From dataset documentation:
    #   The extrinsic parameters of the three cameras on the car with respect to the car itself.
    #   The extrinsics are provided as a 4x4 matrix
    #     [R c]
    #     [0 1]
    #   where R is a rotation matrix that maps from camera to world coordinates and c is the position of the camera in
    #   world coordinates. The entries of the matrix are separated by ,.

    rigs = kapture.Rigs()
    for root, dirs, files in os.walk(extrinsics_dir_path):
        for extrinsics_filepath in files:
            (camera_id, _) = extrinsics_filepath.split('_')
            with open(path.join(extrinsics_dir_path, extrinsics_filepath), 'r') as f:
                m = np.array([[float(num) for num in line.split(',')]
                              for line in f])
            rotation_matrix = m[0:3, 0:3]
            position = m[0:3, 3:4]
            pose_transform = kapture.PoseTransform(
                t=position, r=quaternion.from_rotation_matrix(rotation_matrix))
            # kapture rig format is "rig to sensor"
            rigs['car', camera_id] = pose_transform.inverse()

    return rigs
Example #31
    def act(self, habitat_observation, goal_position):
        # Update internal state
        cc = 0

        update_is_ok = self.update_internal_state(habitat_observation)

        while not update_is_ok:
            update_is_ok = self.update_internal_state(habitat_observation)
            cc += 1
            if cc > 5:
                break

        current_pose = self.pose6D.detach().cpu().numpy().reshape(4, 4)
        self.position_history.append(current_pose)

        current_position = current_pose[0:3, -1]

        current_rotation = current_pose[0:3, 0:3]
        current_quat = quaternion.from_rotation_matrix(current_rotation)
        current_heading = quaternion.as_euler_angles(current_quat)[1]

        current_map = self.map2DObstacles.detach().cpu().numpy()

        best_action = self.follower.get_next_action(goal_pos=goal_position)

        return (current_position, current_heading, current_map, best_action)
Example #32
    def get_flange_pose(self, pos=None, ori=None):
        """
        Get the pose of flange (panda_link8) given the pose of the end-effector frame.

        .. note:: In sim, this method does nothing.

        :param pos: position of the end-effector frame in the robot's base frame, defaults to current end-effector position
        :type pos: np.ndarray, optional
        :param ori: orientation of the end-effector frame, defaults to current end-effector orientation
        :type ori: quaternion.quaternion, optional
        :return: corresponding flange frame pose in the robot's base frame
        :rtype: np.ndarray, quaternion.quaternion
        """
        if pos is None:
            pos = self._cartesian_pose['position']

        if ori is None:
            ori = self._cartesian_pose['orientation']

        if self._params._in_sim:
            return pos, ori

        # get corresponding flange frame pose using transformation matrix
        F_T_EE = np.asarray(self._F_T_EE).reshape(4, 4, order="F")
        mat = quaternion.as_rotation_matrix(ori)

        new_ori = mat.dot(F_T_EE[:3, :3].T)
        new_pos = pos - new_ori.dot(F_T_EE[:3, 3])

        return new_pos, quaternion.from_rotation_matrix(new_ori)
Example #33
#!/usr/bin/env python

from __future__ import print_function, division, absolute_import
import sys
import math
import numpy as np
import quaternion


eps = np.finfo(float).eps


rot_mat_eps = 200*eps


R1 = np.array([quaternion.quaternion(0, -math.sqrt(0.5), 0, -math.sqrt(0.5))])
print(R1)
print(quaternion.as_rotation_matrix(R1))
R2 = quaternion.from_rotation_matrix(quaternion.as_rotation_matrix(R1))
print(R2)
d = quaternion.rotation_intrinsic_distance(R1[0], R2[0])
print(d)
print()
sys.stdout.flush()
sys.stderr.flush()
assert d < rot_mat_eps, (R1, R2, d)  # Can't use allclose here; we don't care about rotor sign