Example #1
    def test_R2A(self):
        R = test_utils.get_random_R()
        A = conversions.R2A(np.array([R]))

        np.testing.assert_almost_equal(
            conversions.R2A(np.array([R]))[0], conversions.R2A(R)
        )
        np.testing.assert_almost_equal(
            conversions.A2R(np.array([A]))[0], conversions.A2R(A)
        )

        R_test = conversions.A2R(A[0])
        for r, r_test in zip(R.flatten(), R_test.flatten()):
            self.assertAlmostEqual(r, r_test)
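
The test above relies on R2A and A2R being exact inverses and on both accepting either a single rotation or a batch. A minimal sketch of that round trip, assuming the conversions module is fairmotion.ops.conversions and using Ax2R (as in the examples below) to build a known rotation:

import numpy as np
from fairmotion.ops import conversions

R = conversions.Ax2R(np.pi / 4)   # rotation of 45 degrees about the x-axis
A = conversions.R2A(R)            # the corresponding axis-angle vector
np.testing.assert_almost_equal(conversions.A2R(A), R)                  # round trip
np.testing.assert_almost_equal(conversions.R2A(np.array([R]))[0], A)   # batched call
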
Example #2
def slerp(R1, R2, t):
    """
    Spherical linear interpolation (https://en.wikipedia.org/wiki/Slerp)
    between R1 and R2 with parameter t, 0 ≤ t ≤ 1
    """
    return np.dot(
        R1, conversions.A2R(t * conversions.R2A(np.dot(R1.transpose(), R2)))
    )
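
A short usage sketch for the slerp above, assuming the same conversions module (fairmotion.ops.conversions): at t=0 it returns R1, at t=1 it returns R2, and t=0.5 yields the rotation halfway along the geodesic between them.

import numpy as np
from fairmotion.ops import conversions

R1 = conversions.Ax2R(0.0)            # identity
R2 = conversions.Ax2R(np.pi / 2)      # 90 degrees about the x-axis
R_half = slerp(R1, R2, 0.5)
np.testing.assert_almost_equal(R_half, conversions.Ax2R(np.pi / 4))
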
Example #3
    def compute(cls, pose1, pose2, dt):
        assert pose1.skel == pose2.skel
        data_local = []
        data_global = []
        assert dt > constants.EPSILON
        for joint in pose1.skel.joints:
            T1 = pose1.get_transform(joint, local=True)
            T2 = pose2.get_transform(joint, local=True)
            dR, dp = conversions.T2Rp(np.dot(math.invertT(T1), T2))
            w, v = conversions.R2A(dR) / dt, dp / dt
            data_local.append(np.hstack((w, v)))
            T1 = pose1.get_transform(joint, local=False)
            T2 = pose2.get_transform(joint, local=False)
            dR, dp = conversions.T2Rp(np.dot(math.invertT(T1), T2))
            w, v = conversions.R2A(dR) / dt, dp / dt
            data_global.append(np.hstack((w, v)))
        return np.array(data_local), np.array(data_global)
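
The compute method above estimates per-joint velocities by finite differences: the angular part is the axis-angle of the relative rotation R1^T R2 divided by dt. A minimal single-joint sketch of that idea, under the same assumption that conversions is fairmotion.ops.conversions:

import numpy as np
from fairmotion.ops import conversions

dt = 1.0 / 30.0                        # e.g. two consecutive frames at 30 fps
R_t0 = conversions.Ax2R(0.0)           # joint rotation at frame t
R_t1 = conversions.Ax2R(0.1)           # joint rotation at frame t + 1
w = conversions.R2A(np.dot(R_t0.transpose(), R_t1)) / dt
# w is approximately [3.0, 0.0, 0.0] rad/s (0.1 rad about x over 1/30 s)
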
Example #4
def pose_similarity(
    pose1,
    pose2,
    vel1=None,
    vel2=None,
    w_joint_pos=0.9,
    w_joint_vel=0.1,
    w_joints=None,
    apply_root_correction=True,
):
    """
    This only measure joint angle difference (i.e. root translation will
    not be considered).
    If 'apply_root_correction' is True, then pose2 will be rotated
    automatically
    in a way that its root rotation is closest to the root rotation of pose1,
    where'root_correction_axis' defines the geodesic curve.
    """
    assert pose1.skel.num_joints() == pose2.skel.num_joints()
    skel = pose1.skel
    if vel1 is not None:
        assert vel2 is not None
        assert vel1.skel.num_joints() == vel2.skel.num_joints()

    if w_joints is None:
        w_joints = np.ones(skel.num_joints())
    """ joint angle difference """
    diff_pos = 0.0
    if w_joint_pos > 0.0:
        root_idx = skel.get_index_joint(skel.root_joint)
        v_up_env = pose1.skel.v_up_env
        for j in range(skel.num_joints()):
            R1, p1 = conversions.T2Rp(pose1.get_transform(j, local=True))
            R2, p2 = conversions.T2Rp(pose2.get_transform(j, local=True))
            if apply_root_correction and j == root_idx:
                Q1 = conversions.R2Q(R1)
                Q2 = conversions.R2Q(R2)
                Q2, _ = quaternion.Q_closest(Q1, Q2, v_up_env)
                R2 = conversions.Q2R(Q2)
            # TODO: Verify if logSO3 is same as R2A
            dR = conversions.R2A(np.dot(np.transpose(R1), R2))
            diff_pos += w_joints[j] * np.dot(dR, dR)
    """ joint angular velocity difference """
    diff_vel = 0.0
    if vel1 is not None and w_joint_vel > 0.0:
        skel = vel1.skel
        for j in range(skel.num_joints()):
            dw = (vel2.get_angular(j, local=True) -
                  vel1.get_angular(j, local=True))
            diff_vel += w_joints[j] * np.dot(dw, dw)
    return (w_joint_pos * diff_pos +
            w_joint_vel * diff_vel) / skel.num_joints()
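
Each term accumulated in diff_pos above is the squared geodesic distance between the two joint rotations, i.e. the squared norm of R2A(R1^T R2). A small sketch of that per-joint term, again assuming conversions is fairmotion.ops.conversions:

import numpy as np
from fairmotion.ops import conversions

R1 = conversions.Ax2R(np.deg2rad(10.0))
R2 = conversions.Ax2R(np.deg2rad(40.0))
dR = conversions.R2A(np.dot(np.transpose(R1), R2))
print(np.dot(dR, dR))   # squared geodesic distance: (30 deg in rad) ** 2, ~0.274
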
Example #5
def project_rotation_3D(R):
    """
    Project a 3D rotation matrix to the 3D rotation.
    It will just returns corresponding axis-angle.
    """
    return conversions.R2A(R)
def main(args):
    comp_device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    bm = BodyModel(model_type="smplh",
                   bm_path=args.body_model_file,
                   num_betas=10).to(comp_device)
    faces = c2c(bm.f)

    img_shape = (1600, 1600)
    motion = bvh.load(
        file=args.input_file,
        scale=0.5,
        v_up_skel=np.array([0.0, 1.0, 0.0]),
        v_face_skel=np.array([0.0, 0.0, 1.0]),
        v_up_env=np.array([0.0, 0.0, 1.0]),
    )
    motion = motion_ops.rotate(
        motion,
        conversions.Ax2R(conversions.deg2rad(-90)),
    )
    mv = prepare_mesh_viewer(img_shape)

    out = cv2.VideoWriter(args.video_output_file,
                          cv2.VideoWriter_fourcc(*"XVID"), 30, img_shape)

    parents = bm.kintree_table[0].long()[:21 + 1]
    parents = parents.cpu().numpy()
    dfs_order = get_dfs_order(parents)
    for frame in tqdm.tqdm(range(motion.num_frames())):
        pose = motion.get_pose_by_frame(frame)
        R, p = conversions.T2Rp(pose.data[0])
        root_orient = conversions.R2A(R)
        trans = p

        num_joints = len(pose.data) - 1
        body_model_pose_data = np.zeros(num_joints * 3)
        for motion_joint, amass_joint in enumerate(dfs_order):
            # motion_joint is idx of joint in Motion class order
            # amass_joint is idx of joint in AMASS skeleton
            if amass_joint == 0:
                continue
            pose_idx = amass_joint - 1
            # Convert rotation matrix to axis angle
            axis_angles = conversions.R2A(
                conversions.T2R(pose.data[motion_joint]))
            body_model_pose_data[pose_idx * 3:pose_idx * 3 + 3] = axis_angles

        pose_data_t = (
            torch.Tensor(body_model_pose_data).to(comp_device).unsqueeze(0))
        root_orient_t = torch.Tensor(root_orient).to(comp_device).unsqueeze(0)
        trans_t = torch.Tensor(trans).to(comp_device).unsqueeze(0)
        body = bm(pose_body=pose_data_t,
                  root_orient=root_orient_t,
                  trans=trans_t)

        body_mesh = trimesh.Trimesh(
            vertices=c2c(body.v[0]),
            faces=faces,
            vertex_colors=np.tile(colors["grey"], (6890, 1)),
        )
        # TODO: Add floor trimesh to the scene to display the ground plane
        mv.set_static_meshes([body_mesh])
        body_image = mv.render()
        img = body_image.astype(np.uint8)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        out.write(img)
    out.release()
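
The frame loop above converts every local joint rotation to axis-angle with R2A and packs them into the flat pose vector that the SMPL-H body model expects. A minimal sketch of that packing, using a hypothetical list joint_Ts of 4x4 transforms in AMASS joint order (joint 0 is the root, which is handled separately) and assuming conversions is fairmotion.ops.conversions:

import numpy as np
from fairmotion.ops import conversions

def make_T(R):
    # Embed a 3x3 rotation in a 4x4 homogeneous transform (zero translation).
    T = np.eye(4)
    T[:3, :3] = R
    return T

joint_Ts = [make_T(conversions.Ax2R(0.1 * i)) for i in range(22)]  # root + 21 joints
num_joints = len(joint_Ts) - 1          # the root orientation is passed separately
pose_vec = np.zeros(num_joints * 3)
for amass_joint in range(1, len(joint_Ts)):
    pose_idx = amass_joint - 1
    axis_angle = conversions.R2A(conversions.T2R(joint_Ts[amass_joint]))
    pose_vec[pose_idx * 3:pose_idx * 3 + 3] = axis_angle
# pose_vec is the flat (21 * 3,) vector passed as pose_body to the body model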