Example 1
def cal():
    # One damped Gauss-Newton step: align axis2's rotation and translation to
    # axis1 by minimizing the residual between the transformed basis points.
    rotation = rotateScene.axis2.transform.rotation
    trans = rotateScene.axis2.transform.pos
    R2 = SO3.from_matrix(rotation, normalize=True)
    P = np.vstack(([1., 0., 0.], [0., 1., 0.], [0., 0., 1.]))
    R2P = np.dot(R2.mat, P.transpose()).transpose() + trans
    P_ = np.dot(rotateScene.axis1.transform.rotation,
                P.transpose()).transpose() + rotateScene.axis1.transform.pos
    fR = (R2P - P_).reshape(-1)

    J1 = -SO3.wedge(R2P[0, :])
    J2 = -SO3.wedge(R2P[1, :])
    J3 = -SO3.wedge(R2P[2, :])

    J = np.zeros((9, 6))
    J[:, :3] = np.concatenate((J1, J2, J3), axis=0)
    J[:, 3:6] = np.concatenate(
        (np.identity(3), np.identity(3), np.identity(3)), axis=0)
    Jt = np.transpose(J)
    JtJ = np.dot(Jt, J) + np.identity(6) * 1e-8
    res = -np.dot(Jt, fR)
    r_ = np.dot(np.linalg.inv(JtJ), res)

    r_rotate = r_[:3]
    r_translate = r_[3:6]
    R2 = box_plus(R2, r_rotate)

    rotateScene.axis2.transform.rotation = R2.mat
    rotateScene.axis2.transform.pos += r_translate
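The snippet above depends on a rotateScene object and a box_plus helper defined elsewhere. Below is a self-contained sketch of the same damped Gauss-Newton step (rotation only, box_plus as in Example 25; R_target and the iteration count are made up for illustration):

import numpy as np
from liegroups.numpy import SO3

def box_plus(a, b):
    # left-multiplicative retraction: apply the exponential of b to a
    return SO3.exp(b).dot(a)

R_target = SO3.exp(np.array([0.2, -0.1, 0.3]))   # rotation to recover
R = SO3.identity()
P = np.eye(3)                                    # three unit points, one per row
for _ in range(10):
    RP = R.dot(P)                                # current rotated points
    fR = (RP - R_target.dot(P)).reshape(-1)      # 9-vector residual
    # Jacobian of each rotated point w.r.t. a left perturbation of R
    J = np.concatenate([-SO3.wedge(RP[i]) for i in range(3)], axis=0)
    r = np.linalg.solve(J.T.dot(J) + 1e-8 * np.eye(3), -J.T.dot(fR))
    R = box_plus(R, r)
print(np.allclose(R.as_matrix(), R_target.as_matrix(), atol=1e-6))  # True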
Example 2
def test_wedge_vee():
    phi = [1, 2, 3]
    Phi = SO3.wedge(phi)
    phis = np.array([[1, 2, 3], [4, 5, 6]])
    Phis = SO3.wedge(phis)
    assert np.array_equal(phi, SO3.vee(Phi))
    assert np.array_equal(phis, SO3.vee(Phis))
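SO3.wedge maps a 3-vector to the corresponding skew-symmetric matrix, so wedge(a) applied to b equals the cross product a x b; a quick numeric check (sketch, assuming numpy and liegroups are importable):

import numpy as np
from liegroups.numpy import SO3

a = np.array([1., 2., 3.])
b = np.array([-0.5, 0.4, 2.0])
print(np.allclose(SO3.wedge(a).dot(b), np.cross(a, b)))  # True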
Example 3
def test_rpy():
    r = np.pi / 12
    p = np.pi / 6
    y = np.pi / 3
    C_got = SO3.from_rpy(r, p, y)
    C_expected = SO3.rotz(y).dot(SO3.roty(p).dot(SO3.rotx(r)))
    assert np.allclose(C_got.mat, C_expected.mat)
Example 4
def QuaternionsToTangents(qs):
    N = qs.shape[0]
    tangents = np.empty([N, 3])
    for i in range(N):
        q = qs[i]
        tangents[i] = SO3.log(SO3.from_quaternion(q))
    return tangents
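A minimal usage sketch for QuaternionsToTangents (assuming liegroups' default 'wxyz' quaternion ordering): each row of qs becomes a 3-vector in the tangent space, and SO3.exp maps it back to the same rotation.

import numpy as np
from liegroups.numpy import SO3

qs = np.array([[1., 0., 0., 0.],         # identity
               [0.5, 0.5, 0.5, 0.5]])    # 120 deg about (1, 1, 1) / sqrt(3)
tangents = QuaternionsToTangents(qs)
print(np.allclose(SO3.exp(tangents[1]).as_matrix(),
                  SO3.from_quaternion(qs[1]).as_matrix()))  # True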
Example 5
def cal():
    R1 = rotateScene.axis2_1.transform.rotation
    R2 = rotateScene.axis2_2.transform.rotation

    RR_ = np.dot(rotateScene.axis1_1.transform.rotation,
                 rotateScene.axis1_2.transform.rotation)

    P = np.array([1., 0., 0.])
    RRP = np.dot(np.dot(R1, R2), P)
    R2P = np.dot(R2, P)
    P_ = np.dot(RR_, P)
    fR = (RRP - P_)

    J1 = -SO3.wedge(RRP)
    Jt1 = J1.transpose()
    JtJ1 = np.dot(Jt1, J1) + np.identity(3) * 1e-8
    res1 = -np.dot(Jt1, fR)
    r1_ = np.dot(np.linalg.inv(JtJ1), res1)

    J2 = np.dot(R1, -SO3.wedge(R2P))
    Jt2 = J2.transpose()
    JtJ2 = np.dot(Jt2, J2) + np.identity(3) * 1e-8
    res2 = -np.dot(Jt2, fR)
    r2_ = np.dot(np.linalg.inv(JtJ2), res2)

    R1 = box_plus(R1, r1_)
    R2 = box_plus(R2, r2_)

    rotateScene.axis2_1.transform.rotation = R1
    rotateScene.axis2_2.transform.rotation = R2
Example 6
def so3_diff(C_1, C_2, unit='deg'):
    A = SO3.from_matrix(C_1)
    B = SO3.from_matrix(C_2)
    err = A.dot(B.inv()).log()
    if unit == 'deg':
        return norm(err) * 180. / np.pi
    else:
        return norm(err)
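Usage sketch for so3_diff (it assumes norm refers to numpy.linalg.norm): the return value is the geodesic angle between the two rotation matrices.

import numpy as np
from numpy.linalg import norm
from liegroups.numpy import SO3

C_1 = SO3.rotz(np.pi / 6).as_matrix()
C_2 = SO3.rotz(np.pi / 4).as_matrix()
print(so3_diff(C_1, C_2))  # ~15 degrees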
Example 7
def test_wedge():
    phi = [1, 2, 3]
    Phi = SO3.wedge(phi)
    phis = np.array([[1, 2, 3], [4, 5, 6]])
    Phis = SO3.wedge(phis)
    assert np.array_equal(Phi, -Phi.T)
    assert np.array_equal(Phis[0, :, :], SO3.wedge(phis[0]))
    assert np.array_equal(Phis[1, :, :], SO3.wedge(phis[1]))
Example 8
def test_left_jacobians():
    phi_small = [0., 0., 0.]
    phi_big = [np.pi / 2, np.pi / 3, np.pi / 4]

    left_jacobian_small = SO3.left_jacobian(phi_small)
    inv_left_jacobian_small = SO3.inv_left_jacobian(phi_small)
    assert np.allclose(left_jacobian_small.dot(inv_left_jacobian_small),
                       np.identity(3))

    left_jacobian_big = SO3.left_jacobian(phi_big)
    inv_left_jacobian_big = SO3.inv_left_jacobian(phi_big)
    assert np.allclose(left_jacobian_big.dot(inv_left_jacobian_big),
                       np.identity(3))
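The left Jacobian tested above ties a perturbation of the rotation vector to a perturbation of the group element; a small numeric sketch of the first-order relation exp(phi + dphi) ~ exp(J_l(phi) dphi) * exp(phi):

import numpy as np
from liegroups.numpy import SO3

phi = np.array([0.3, -0.2, 0.5])
dphi = 1e-6 * np.array([1., -2., 0.5])   # small perturbation
lhs = SO3.exp(phi + dphi).as_matrix()
rhs = SO3.exp(SO3.left_jacobian(phi).dot(dphi)).dot(SO3.exp(phi)).as_matrix()
print(np.allclose(lhs, rhs, atol=1e-9))  # True to first order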
Example 9
def test_rotmat_wahba():
    print('Checking accuracy of QCQP rotmat solver')
    N = 1000
    sigma = 0.
    C = SO3.exp(np.random.randn(3)).as_matrix()
    #Create two sets of vectors (normalized to unit l2 norm)
    x_1 = normalized(np.random.randn(N, 3), axis=1)
    #Rotate and add noise
    noise = np.random.randn(N, 3)
    noise = (noise.T * sigma).T
    x_2 = C.dot(x_1.T).T + noise

    A = np.zeros((10, 10))
    for i in range(N):
        mat = np.zeros((3, 10))
        mat[:, :9] = np.kron(x_1[i], np.eye(3))
        mat[:, 9] = -x_2[i]
        A += mat.T.dot(mat)

    constraint_matrices, c_vec = rotation_matrix_constraints()
    _, C_solve = solve_equality_QCQP_dual(A, constraint_matrices, c_vec)

    print('Ground truth:')
    print(C)
    print('Solved:')
    print(C_solve)
    print('Angle difference: {:.3E} deg'.format(
        rotmat_angle_diff(torch.from_numpy(C),
                          torch.from_numpy(C_solve),
                          units='deg').item()))
Example 10
def cal():
    rotation = rotateScene.axis2.transform.rotation
    R = SO3.from_matrix(rotation, normalize=True)
    p = [0, 1, 0]
    p_ = np.dot(rotateScene.axis1.transform.rotation, p)

    We_Rp = SO3.wedge(R.dot(p))
    J = SO3.left_jacobian(R.log())
    dRp_p = SO3.from_matrix(- np.dot(We_Rp, J), normalize=True)
    inv_dRp_p = dRp_p.inv()
    fx = np.dot(R.mat, p) - p_
    # neg_Rp = - np.dot(R.mat, p)
    R_mat = box_plus(R.mat, np.dot(inv_dRp_p.mat, -fx))
    R = SO3.from_matrix(R_mat, normalize=True)

    rotateScene.axis2.transform.rotation = R.mat
Example 11
def Load_cam_extrinsincs(path_to_file, camera_id):
    fd = open(path_to_file, 'r')

    T_camera_to_body = None
    cur_dev_name = None
    scalingFactor = None
    for line in fd:
        if '#' in line:
            continue

        line = line.replace('\n', '')
        line_ls = line.split(' ')

        if line_ls[0] == 'devName':
            cur_dev_name = line_ls[1]

        if line_ls[0] == 'scalingFactor':
            scalingFactor = float(line_ls[1])

        if line_ls[0] == 'T':
            qw = float(line_ls[1])
            qx = float(line_ls[2])
            qy = float(line_ls[3])
            qz = float(line_ls[4])
            x = float(line_ls[5])  #* scalingFactor
            y = float(line_ls[6])  #* scalingFactor
            z = float(line_ls[7])  #* scalingFactor

            if cur_dev_name == 'cam' + str(camera_id):
                R = SO3.from_quaternion(np.array([qw, qx, qy, qz])).as_matrix()
                t = np.expand_dims(np.array([x, y, z]), 1)
                T_camera_to_body = np.concatenate((R, t), axis=1)
                break
    fd.close()
    return T_camera_to_body, scalingFactor
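Hypothetical usage sketch for Load_cam_extrinsincs; the file layout below (devName / scalingFactor / T lines) is inferred from the parser above, and the file contents and values are made up:

import os
import tempfile

content = "devName cam0\nscalingFactor 100.0\nT 1 0 0 0 0.1 0.2 0.3\n"
with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as f:
    f.write(content)
T_cam_to_body, scale = Load_cam_extrinsincs(f.name, camera_id=0)
print(T_cam_to_body.shape, scale)  # (3, 4) 100.0
os.remove(f.name)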
Example 12
def cal():
    rotation = rotateScene.axis2_2.transform.rotation
    R = SO3.from_matrix(rotation, normalize=True)
    p = [0, 1, 0]
    p_ = np.dot(rotateScene.axis1.transform.rotation, p)
    Rp = R.dot(p)
    fR = Rp - p_

    J = -SO3.wedge(Rp)
    Jt = np.transpose(J)
    JtJ = np.dot(Jt, J) + np.identity(3) * 1e-8
    res = -np.dot(Jt, fR)
    r_ = np.dot(np.linalg.inv(JtJ), res)

    R = box_plus(R, r_)

    rotateScene.axis2_2.transform.rotation = R.mat
Example 13
def test_transform_vectorized():
    C = SO3.exp(np.pi * np.ones(3) / 4)
    pt1 = np.array([1, 2, 3])
    pt2 = np.array([4, 5, 3])
    pts = np.array([pt1, pt2])  # 2x3
    Cpt1 = C.dot(pt1)
    Cpt2 = C.dot(pt2)
    Cpts = C.dot(pts)
    assert (np.allclose(Cpt1, Cpts[0]) and np.allclose(Cpt2, Cpts[1]))
Example 14
def cal():
    rotation1 = rotateScene.axis2_1.transform.rotation
    rotation2 = rotateScene.axis2_2.transform.rotation
    R1 = SO3.from_matrix(rotation1, normalize=True)
    R2 = SO3.from_matrix(rotation2, normalize=True)

    rotation1_ = rotateScene.axis1_1.transform.rotation
    rotation2_ = rotateScene.axis1_2.transform.rotation

    R1_ = SO3.from_matrix(rotation1_, normalize=True)
    R2_ = SO3.from_matrix(rotation2_, normalize=True)

    P = np.vstack(([1., 0., 0.], [0., 1., 0.], [0., 0., 1.]))
    RRP = np.dot(np.dot(R1.mat, R2.mat), P.transpose()).transpose()
    # R2P = np.dot(R2.mat, P.transpose()).transpose()
    P_ = np.dot(np.dot(R1_.mat, R2_.mat), P.transpose()).transpose()
    fR = (RRP - P_).reshape(-1)

    J11 = -SO3.wedge(RRP[0, :])
    J12 = -SO3.wedge(RRP[1, :])
    J13 = -SO3.wedge(RRP[2, :])

    J21 = np.dot(-SO3.wedge(RRP[0, :]), R1.mat)
    J22 = np.dot(-SO3.wedge(RRP[1, :]), R1.mat)
    J23 = np.dot(-SO3.wedge(RRP[2, :]), R1.mat)

    J = np.zeros((9, 6))
    J[:, 0:3] = np.concatenate((J11, J12, J13), axis=0)
    J[:, 3:6] = np.concatenate((J21, J22, J23), axis=0)

    Jt = np.transpose(J)
    JtJ = np.dot(Jt, J) + np.identity(6) * 1e-8
    res = -np.dot(Jt, fR)
    r = np.dot(np.linalg.inv(JtJ), res)

    r1 = r[:3]
    r2 = r[3:6]

    R1 = box_plus(R1, r1)
    R2 = box_plus(R2, r2)

    rotateScene.axis2_1.transform.rotation = R1.mat
    rotateScene.axis2_2.transform.rotation = R2.mat
Example 15
def cal():
    rotation = rotateScene.axis2.transform.rotation

    R = SO3.from_matrix(rotation, normalize=True)
    P = np.vstack(([1, 0, 0], [0, 1, 0], [0, 0, 1]))
    P_ = np.dot(rotateScene.axis1.transform.rotation,
                P.transpose()).transpose()  # [9]
    RP = np.dot(R.mat, P.transpose()).transpose()  # [9]
    fR = (RP - P_).reshape(-1)  # [9]-[9] = [9]

    J1 = -SO3.wedge(RP[0, :])
    J2 = -SO3.wedge(RP[1, :])
    J3 = -SO3.wedge(RP[2, :])

    J = np.concatenate((J1, J2, J3), axis=0)
    Jt = np.transpose(J)
    res = -np.dot(Jt, fR)

    R = box_plus(R, 0.1 * res)
    rotateScene.axis2.transform.rotation = R.mat
Example 16
def cal():
    rotation1 = rotateScene.axis2_1.transform.rotation
    rotation2 = rotateScene.axis2_2.transform.rotation
    R1 = SO3.from_matrix(rotation1, normalize=True)
    R2 = SO3.from_matrix(rotation2, normalize=True)

    rotation1_ = rotateScene.axis1_1.transform.rotation
    rotation2_ = rotateScene.axis1_2.transform.rotation

    R1_ = SO3.from_matrix(rotation1_, normalize=True)
    R2_ = SO3.from_matrix(rotation2_, normalize=True)

    P = np.array([0., 1., 0.])
    RRP = np.dot(np.dot(R1.mat, R2.mat), P)
    P_ = np.dot(np.dot(R1_.mat, R2_.mat), P)
    fR = (RRP - P_)

    J1 = -SO3.wedge(RRP)
    J2 = np.dot(-SO3.wedge(RRP), R1.mat)

    J = np.hstack([J1, J2])

    Jt = np.transpose(J)
    res = -np.dot(Jt, fR)
    r = 0.01 * res

    r1 = r[:3]
    r2 = r[3:6]

    R1 = box_plus(R1, r1)
    R2 = box_plus(R2, r2)

    rotateScene.axis2_1.transform.rotation = R1.mat
    rotateScene.axis2_2.transform.rotation = R2.mat
Example 17
def cal():
    rotation = rotateScene.axis2.transform.rotation
    R = SO3.from_matrix(rotation, normalize=True)
    P = [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]
    manifold = np.array([0., 0., 0.])

    for i in range(3):
        p_ = np.dot(rotateScene.axis1.transform.rotation, P[i])

        We_Rp = SO3.wedge(R.dot(p_))
        dRp_p = SO3.from_matrix(-We_Rp, normalize=True)
        inv_dRp_p = dRp_p.inv()
        fx = R.dot(P[i]) - p_

        manifold += np.dot(inv_dRp_p.mat, -fx)

    x0 = R.mat
    manifold = manifold / 3
    x = box_plus(x0, manifold)
    R = SO3.from_matrix(x, normalize=True)

    rotateScene.axis2.transform.rotation = R.mat
Example 18
def cal():
    rotation = rotateScene.axis2_2.transform.rotation
    R2 = SO3.from_matrix(rotation, normalize=True)
    P = np.vstack(([1., 0., 0.], [0., 1., 0.], [0., 0., 1.]))
    R2P = np.dot(R2.mat, P.transpose()).transpose()
    P_ = np.dot(rotateScene.axis1.transform.rotation,
                P.transpose()).transpose()
    fR = (R2P - P_).reshape(-1)

    J21 = -SO3.wedge(R2P[0, :])
    J22 = -SO3.wedge(R2P[1, :])
    J23 = -SO3.wedge(R2P[2, :])

    J2 = np.concatenate((J21, J22, J23), axis=0)
    Jt2 = np.transpose(J2)
    JtJ2 = np.dot(Jt2, J2) + np.identity(3) * 1e-8
    res = -np.dot(Jt2, fR)
    r_ = np.dot(np.linalg.inv(JtJ2), res)

    R2 = box_plus(R2, r_)

    rotateScene.axis2_2.transform.rotation = R2.mat
Example 19
def cal():
    R1 = rotateScene.axis2_1.transform.rotation
    R2 = rotateScene.axis2_2.transform.rotation

    RR_ = np.dot(rotateScene.axis1_1.transform.rotation,
                 rotateScene.axis1_2.transform.rotation)

    P = np.vstack(([1., 0., 0.], [0., 1., 0.], [0., 0., 1.]))
    RRP = np.dot(np.dot(R1, R2), P.transpose()).transpose()
    R2P = np.dot(R2, P.transpose()).transpose()
    P_ = np.dot(RR_, P.transpose()).transpose()
    fR = (R2P - P_).reshape(-1)

    # J11 = -SO3.wedge(RRP[0, :])
    # J12 = -SO3.wedge(RRP[1, :])
    # J13 = -SO3.wedge(RRP[2, :])
    # J1 = np.concatenate((J11, J12, J13), axis=0)
    # Jt1 = J1.transpose()
    # JtJ1 = np.dot(Jt1, J1) + np.identity(3) * 1e-8
    # res1 = - np.dot(Jt1, fR)
    # r1_ = np.dot(np.linalg.inv(JtJ1), res1)

    J21 = -SO3.wedge(R2P[0, :])
    J22 = -SO3.wedge(R2P[1, :])
    J23 = -SO3.wedge(R2P[2, :])
    J2 = np.concatenate((J21, J22, J23), axis=0)
    Jt2 = J2.transpose()
    JtJ2 = np.dot(Jt2, J2) + np.identity(3) * 1e-8
    res2 = -np.dot(Jt2, fR)
    r2_ = np.dot(np.linalg.inv(JtJ2), res2)

    # R1 = box_plus(R1, r1_)
    R2 = box_plus(R2, r2_)

    # rotateScene.axis2_1.transform.rotation = R1
    rotateScene.axis2_2.transform.rotation = R2
Example 20
def test_quaternion():
    q1 = np.array([1, 0, 0, 0])
    q2 = np.array([0, 1, 0, 0])
    q3 = np.array([0, 0, 1, 0])
    q4 = np.array([0, 0, 0, 1])
    q5 = 0.5 * np.ones(4)
    q6 = -q5

    assert np.allclose(SO3.from_quaternion(q1).to_quaternion(), q1)
    assert np.allclose(SO3.from_quaternion(q2).to_quaternion(), q2)
    assert np.allclose(SO3.from_quaternion(q3).to_quaternion(), q3)
    assert np.allclose(SO3.from_quaternion(q4).to_quaternion(), q4)
    assert np.allclose(SO3.from_quaternion(q5).to_quaternion(), q5)
    assert np.allclose(
        SO3.from_quaternion(q5).mat,
        SO3.from_quaternion(q6).mat)
Example 21
def gen_sim_data(N, sigma, torch_vars=False, shuffle_points=False):
    ##Simulation
    #Create a random rotation
    C = SO3.exp(np.random.randn(3)).as_matrix()
    #Create two sets of vectors (normalized to unit l2 norm)
    x_1 = normalized(np.random.randn(N, 3), axis=1)
    #Rotate and add noise
    noise = np.random.randn(N, 3)
    noise = (noise.T * sigma).T
    x_2 = C.dot(x_1.T).T + noise

    if shuffle_points:
        x_1, x_2 = unison_shuffled_copies(x_1, x_2)

    if torch_vars:
        C = torch.from_numpy(C)
        x_1 = torch.from_numpy(x_1)
        x_2 = torch.from_numpy(x_2)

    return C, x_1, x_2
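A minimal usage sketch for gen_sim_data (it relies on the normalized helper being in scope): with sigma = 0 the generated correspondences satisfy x_2 = C x_1 exactly.

import numpy as np

C, x_1, x_2 = gen_sim_data(N=50, sigma=0.)
print(np.allclose(x_2, C.dot(x_1.T).T))  # True for noise-free data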
Example 22
def create_wahba_As(N=10):
    A = torch.zeros(N, 10, 10, dtype=torch.double)
    C = torch.empty(N, 3, 3, dtype=torch.double)
    N_points = 100
    for n in range(N):
        sigma = 0.
        C_n = SO3.exp(np.random.rand(3)).as_matrix()
        C[n] = torch.from_numpy(C_n)
        #Create two sets of vectors (normalized to unit l2 norm)
        x_1 = normalized(np.random.randn(N_points, 3), axis=1)
        #Rotate and add noise
        noise = np.random.randn(N_points, 3)
        noise = (noise.T * sigma).T
        x_2 = C_n.dot(x_1.T).T + noise

        for i in range(N_points):
            mat = np.zeros((3, 10))
            mat[:, :9] = np.kron(x_1[i], np.eye(3))
            mat[:, 9] = -x_2[i]
            A[n] += torch.from_numpy(mat.T.dot(mat))
    return A, C
Example 23
def test_numpy_solver(N=100, sigma=0.01, tol=1.):
    #Tolerance in terms of degrees

    C, x_1, x_2 = gen_sim_data(N, sigma)
    redundant_constraints = True

    ## Solver
    print('Checking single solve with synthetic data...')
    A = build_A(x_1, x_2, sigma * sigma * np.ones(N))
    #A = np.random.randn(4,4)
    q_opt, _, t_solve, gap = solve_wahba(
        A, redundant_constraints=redundant_constraints)
    C_opt = SO3.from_quaternion(q_opt, ordering='xyzw').as_matrix()

    ## Output
    print('Done. Solved in {:.3f} seconds.'.format(t_solve))
    print('Duality gap: {:.3E}.'.format(gap))
    #Compare to known rotation
    print('Convex rotation error: {:.3f} deg'.format(so3_diff(C_opt, C)))
    print('Horn rotation error: {:.3f} deg'.format(
        so3_diff(solve_horn(x_1, x_2), C)))

    assert (so3_diff(C_opt, C) < tol)
Example 24
def gen_sim_data_grid(N, sigma, torch_vars=False, shuffle_points=False):
    ##Simulation
    #Create a random rotation
    C = SO3.exp(np.random.randn(3)).as_matrix()

    #Grid is fixed
    grid_dim = 50
    xlims = np.linspace(-1., 1., grid_dim)
    ylims = np.linspace(-1., 1., grid_dim)
    x, y = np.meshgrid(xlims, ylims)
    z = np.sin(x) * np.cos(y)
    x_1 = normalized(np.hstack(
        (x.reshape(grid_dim**2, 1), y.reshape(grid_dim**2,
                                              1), z.reshape(grid_dim**2, 1))),
                     axis=1)

    #Sample N points
    ids = np.random.permutation(x_1.shape[0])
    x_1 = x_1[ids[:N]]

    #Sort into canonical order
    #x_1 = x_1[x_1[:,0].argsort()]

    #Rotate and add noise
    noise = np.random.randn(N, 3)
    noise = (noise.T * sigma).T
    x_2 = C.dot(x_1.T).T + noise

    if shuffle_points:
        x_1, x_2 = unison_shuffled_copies(x_1, x_2)

    if torch_vars:
        C = torch.from_numpy(C)
        x_1 = torch.from_numpy(x_1)
        x_2 = torch.from_numpy(x_2)

    return C, x_1, x_2
Example 25
def box_plus(a, b):
    return SO3.exp(b).dot(a)
Example 26
def box_minus(a, b):
    return a.dot(SO3.inv(b)).log()
Example 27
from liegroups.numpy import SO3
import numpy as np

A = SO3.from_rpy(0.1, 0.2, -0.3)
B = SO3.from_rpy(-1.2, 3.4, -0.9)
c = [0.5, -0.3, 1.2]


def box_plus(a, b):
    return SO3.exp(b).dot(a)


def box_minus(a, b):
    return a.dot(SO3.inv(b)).log()


C1 = box_minus(A, B)
C2 = -box_minus(B, A)

print(C1)
print(C2)

print(A)
print(A.inv())
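The two operators are inverse to each other: box_plus(Y, box_minus(X, Y)) recovers X, since exp(log(X * Y^-1)) * Y = X. A quick numeric check, reusing the imports and helpers defined above (X and Y are fresh rotations just for this sketch):

X = SO3.from_rpy(0.3, -0.2, 0.5)
Y = SO3.from_rpy(-0.4, 0.1, 0.2)
print(np.allclose(box_plus(Y, box_minus(X, Y)).as_matrix(), X.as_matrix()))  # True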
Example 28
    drive = seq_names[seq]
    data = sio.loadmat('{}/{}.mat'.format(gt_dir, drive))
    kitti_ts = np.loadtxt('{}/{}/times.txt'.format(data_dir, seq))

    f = '/home/brandon/Desktop/Projects/VO-implementations/ORB_SLAM2/saved_trajectories/{}.txt'.format(
        seq)
    img_idx = []
    dT_list = []
    T = []
    l = np.loadtxt(f)
    ts = l[:, 0]
    pos = l[:, 1:4]
    quat = l[:, 4:8]

    for i in range(0, pos.shape[0]):
        R = SO3.from_quaternion(quat[i], ordering='xyzw')
        T.append(SE3(R, pos[i]))
        img_idx.append(np.argmin(np.abs(ts[i] - kitti_ts)))

    for i in range(0, len(T) - 1):
        dT = (T[i].inv()).dot(T[i + 1])
        dT_list.append(dT.as_matrix())

    gt_traj = data['poses_gt'].transpose(2, 0, 1)  #[0:4541]
    est_traj = []
    est_traj.append(gt_traj[0])

    for i in range(0, len(dT_list)):

        dT = SE3.from_matrix(dT_list[i], normalize=True)
Example 29
    vo_poses = np.array(vo_poses)
       
    r_w_imu_w = gt_poses[:,0:3]
    rpy_w_imu_w = gt_poses[:,3:6]

    plt.plot(gt_poses[:,0], gt_poses[:,1])
    plt.plot(vo_poses[:,0], -vo_poses[:,1])
    plt.savefig('{}_gt_traj'.format(seq))
    plt.figure()
    plt.plot(rpy_w_imu_w[:,2])
    plt.plot(vo_poses[:,5])
    plt.savefig('{}_gt_rpy'.format(seq))
    plt.figure()
    
    ''' GT '''
    C_c_imu = SO3(np.array([[1, 0, 0], [0, 0, 1], [0, 1, 0]]))
    gt_traj = [np.array([[1., 0., 0., 0.],
                         [0., 0., 1., 0.],
                         [0., -1., 0., 0.],
                         [0., 0., 0., 1.]])]
    gt_T_w_c_list = []
    for i in range(0, r_w_imu_w.shape[0]):
        C_imu_w = SO3.from_rpy(rpy_w_imu_w[i,0], rpy_w_imu_w[i,1], rpy_w_imu_w[i,2]).inv()
        C_c_w = C_c_imu.dot(C_imu_w)
        C_w_c = SO3(np.copy(C_c_w.inv().as_matrix()))
        
        T_w_c = SE3(C_w_c, r_w_imu_w[i])
        gt_T_w_c_list.append(T_w_c)
    
    gt_pose_vec_list, gt_dT_list = [], []    
    for i in range(0,len(gt_T_w_c_list)-1):
Example 30
def capture_rs_image_seq(seq_id, _T_actor, T_cam0_to_body, T_body_to_cam0,
                         scalingFactor):
    # create folders
    seq_path = os.path.join(DATASET_FOLDER, 'seq_' + str(seq_id))
    if not os.path.exists(seq_path):
        os.makedirs(seq_path)

    if not USE_EXISTING_VEL:
        # cam_vel: wx, wy, wz, tx, ty, tz
        # cam_vel: row0_to_row_others
        wx = np.random.uniform(VEL_MIN_w, VEL_MAX_w)
        wy = np.random.uniform(VEL_MIN_w, VEL_MAX_w)
        wz = np.random.uniform(VEL_MIN_w, VEL_MAX_w)
        tx = np.random.uniform(VEL_MIN_t, VEL_MAX_t)
        ty = np.random.uniform(VEL_MIN_t, VEL_MAX_t) * 0.1
        tz = np.random.uniform(VEL_MIN_t, VEL_MAX_t)

        wx = wx if np.random.rand() > 0.5 else -wx
        wy = wy if np.random.rand() > 0.5 else -wy
        wz = wz if np.random.rand() > 0.5 else -wz
        tx = tx if np.random.rand() > 0.5 else -tx
        ty = ty if np.random.rand() > 0.5 else -ty
        tz = tz if np.random.rand() > 0.5 else -tz

        cam_vel = np.array([wx, wy, wz, tx, ty, tz])

        # save ground truth vel to file
        fid = open(os.path.join(seq_path, 'gt_vel.log'), 'w')
        fid.write('# Ground truth velocity in camera frame\n')
        fid.write('# wx, wy, wz, x, y, z\n')
        fid.write('%f %f %f %f %f %f\n' % (cam_vel[0], cam_vel[1], cam_vel[2],
                                           cam_vel[3], cam_vel[4], cam_vel[5]))
        fid.close()
    else:
        _vel = open(os.path.join(seq_path, 'gt_vel.log'), 'r').readlines()
        for v in _vel:
            if '#' in v:
                continue
            v = v.replace('\n', '')
            v = v.split(' ')
            cam_vel = [float(x) for x in v]
            cam_vel = np.array(cam_vel)

    # get T_actor matrix: from body to world
    _T_actor = _T_actor.split(',')
    q = np.array([
        float(_T_actor[0]),
        float(_T_actor[1]),
        float(_T_actor[2]),
        float(_T_actor[3])
    ])
    t = np.array([float(_T_actor[4]), float(_T_actor[5]), float(_T_actor[6])])
    t = np.expand_dims(t, 1)
    R_actor = SO3.from_quaternion(q).as_matrix()
    T_actor = np.concatenate((R_actor, t), axis=1)

    for frame_id in range(SEQ_LEN):
        # render rolling shutter image
        t_offset = frame_id / float(FPS)
        if FRAME_INDEX > 0 and FRAME_INDEX != frame_id:
            continue

        # render all global shutter images
        for r in range(RESOLUTION_H):
            t_r = r * READOUT_TIME + t_offset
            _T_cam = cam_vel * t_r
            R_cam = SO3.exp(_T_cam[:3]).as_matrix()
            t_cam = _T_cam[3:6] / scalingFactor
            t_cam = np.expand_dims(t_cam, 1)

            # T_cam: row0_to_row_r
            T_cam = np.concatenate((R_cam, t_cam), axis=1)
            # T_cam: row_r_to_row0
            #T_cam = T_inv(T_cam)
            # T_actor_r: from body to world
            T_actor_r = T_mul(
                T_actor, T_mul(T_cam0_to_body, T_mul(T_cam, T_body_to_cam0)))

            # convert to quaternion
            q_actor_r = SO3.from_matrix(T_actor_r[:, :3],
                                        normalize=True).to_quaternion()
            norm = math.sqrt(q_actor_r[0]**2 + q_actor_r[1]**2 +
                             q_actor_r[2]**2 + q_actor_r[3]**2)
            q_actor_r = q_actor_r / norm
            t_actor_r = T_actor_r[:, 3:4].squeeze()
            pose_actor_r = np.array([
                q_actor_r[0], q_actor_r[1], q_actor_r[2], q_actor_r[3],
                t_actor_r[0], t_actor_r[1], t_actor_r[2]
            ])

            # Move actor and render images
            MoveToPose(pose_actor_r)
            RenderImage(0, 'lit',
                        os.path.join(DATASET_FOLDER, 'tmp/' + str(r) + '.png'),
                        r)

            if RENDER_DEPTH:
                RenderImage(
                    0, 'depth',
                    os.path.join(DATASET_FOLDER, 'tmp/' + str(r) + '.exr'), r)

            # render global shutter image & depth
            if r == 0:
                RenderImage(
                    0, 'lit',
                    seq_path + '/' + str(frame_id).zfill(4) + '_gs_f.png')
                if RENDER_DEPTH:
                    RenderImage(
                        0, 'depth',
                        seq_path + '/' + str(frame_id).zfill(4) + '_gs_f.exr')
                    os.rename(
                        seq_path + '/' + str(frame_id).zfill(4) + '_gs_f.exr',
                        seq_path + '/' + str(frame_id).zfill(4) +
                        '_gs_f.depth')

            if r == RESOLUTION_H // 2:
                RenderImage(
                    0, 'lit',
                    seq_path + '/' + str(frame_id).zfill(4) + '_gs_m.png')
                if RENDER_DEPTH:
                    RenderImage(
                        0, 'depth',
                        seq_path + '/' + str(frame_id).zfill(4) + '_gs_m.exr')
                    os.rename(
                        seq_path + '/' + str(frame_id).zfill(4) + '_gs_m.exr',
                        seq_path + '/' + str(frame_id).zfill(4) +
                        '_gs_m.depth')

        # synthesize rolling shutter image
        GenerateRollingShutterImage(DATASET_FOLDER + '/tmp', RESOLUTION_H,
                                    seq_path,
                                    str(frame_id).zfill(4) + '_rs.png')
        if RENDER_DEPTH:
            GenerateRollingShutterDepth(DATASET_FOLDER + '/tmp', RESOLUTION_H,
                                        seq_path,
                                        str(frame_id).zfill(4) + '_rs.depth')