Example #1
    @classmethod
    def normalize_single(cls, poses):
        batch_size = poses.shape[0]
        device = poses.device

        # Shift data such that the weighted mean of all joints is at (0, 0, 0).
        # The weights are heuristically defined as follows.
        weights = torch.zeros(poses.shape[1], 1, device=device)
        weights[[0, 2, 3, 4]] = 0.2  # W, IMCP, MMCP, RMCP
        weights[[1, 5]] = 0.1  # TMCP, PMCP

        weighted_means = (weights * poses).sum(dim=1)
        shifts = -weighted_means
        shifted_poses = poses + shifts.view(batch_size, 1, -1)

        # Scale data such that the average bone length of the pose is 1.0.
        bone_lengths = pose_features.lengths_of_all_bones(poses)
        mean_bone_length = bone_lengths.view(batch_size, -1).mean(dim=1)
        scalings = 1.0 / mean_bone_length.view(batch_size, 1, 1)
        scaled_poses = shifted_poses * scalings

        # Rotate the pose such that the normal of the hand palm points in the negative z-direction.
        # The normal is approximated by averaging all pairwise cross products of the
        # vectors from the origin to the palm joints. The x-axis direction equals the average
        # vector from the origin to IMCP, MMCP and RMCP (in the previously defined plane).
        # Remember that after the above shifting operation, the origin lies in the palm plane.
        z_directions = cls._compute_z_direction(scaled_poses)
        plane_alignment_rot_mats = cls._compute_plane_alignment_rot_mat(z_directions)
        rotated_poses_t = torch.bmm(plane_alignment_rot_mats, scaled_poses.transpose(1, 2))

        x_directions_2d = rotated_poses_t[:, :2, 2:5].mean(dim=2)
        inplane_rot_mats = cls._compute_inplane_rot_mat(x_directions_2d)
        rotated_poses = torch.bmm(inplane_rot_mats, rotated_poses_t).transpose(1, 2)
        rotations = torch.bmm(inplane_rot_mats, plane_alignment_rot_mats)

        return rotated_poses, {'shift': shifts, 'scaling': scalings, 'rotation': rotations}
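
Note (editor's sketch): the rotation comment above describes approximating the palm normal by averaging pairwise cross products of the origin-to-palm-joint vectors. Below is a minimal, self-contained illustration of that idea; the joint indices (1..5 for the MCP joints) and the function name are assumptions for the example, not the repository's actual _compute_z_direction.

import itertools

import torch


def approximate_palm_normal(poses):
    """Average of pairwise cross products of origin-to-palm-joint vectors.

    Assumes poses has shape (batch, num_joints, 3) and that joints 1..5 are
    the MCP joints spanning the palm (after shifting, the origin lies in the
    palm plane).
    """
    palm = poses[:, 1:6]  # (batch, 5, 3) vectors from origin to palm joints
    normals = []
    for i, j in itertools.combinations(range(palm.shape[1]), 2):
        normals.append(torch.cross(palm[:, i], palm[:, j], dim=1))
    normal = torch.stack(normals, dim=1).mean(dim=1)
    return normal / normal.norm(dim=1, keepdim=True)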
Example #2
def test_bone_lengths_of_all_fingers():
    # Expected bone lengths of the middle finger in the synthetic test pose.
    true_middle_finger_bone_lengths = torch.tensor([1.0, 1.0, 1.0, 3.0])

    # Shape (batch, 5 fingers, 4 bones); batch_size and poses_a are defined
    # elsewhere in the test module.
    true_lengths_batch = torch.zeros(batch_size, 5, 4)
    true_lengths_batch[:, 2] = true_middle_finger_bone_lengths

    lengths_batch = pose_features.lengths_of_all_bones(poses_a)

    assert true_lengths_batch.is_same_size(lengths_batch)
    assert torch.allclose(true_lengths_batch, lengths_batch)
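
Note (editor's sketch): lengths_of_all_bones returns a (batch, 5, 4) tensor, one length per bone of each finger, as the expected tensor above shows. A generic, self-contained illustration of computing per-bone lengths from (parent, child) joint pairs; the bone topology here is invented for the example and is not the repository's actual joint layout.

import torch


def bone_lengths(poses, bones):
    # poses: (batch, num_joints, 3); bones: list of (parent, child) indices.
    parents = poses[:, [p for p, _ in bones]]   # (batch, num_bones, 3)
    children = poses[:, [c for _, c in bones]]  # (batch, num_bones, 3)
    return (children - parents).norm(dim=2)     # (batch, num_bones)


# Example: two bones of a toy 3-joint chain.
poses = torch.tensor([[[0.0, 0.0, 0.0],
                       [1.0, 0.0, 0.0],
                       [1.0, 2.0, 0.0]]])
print(bone_lengths(poses, [(0, 1), (1, 2)]))  # tensor([[1., 2.]])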
Example #3
def bone_length_error(poses, labels):
    # Per-bone length differences between predicted poses and ground-truth
    # labels, flattened to shape (batch, num_bones).
    bone_lengths = pose_features.lengths_of_all_bones(poses)
    true_bone_lengths = pose_features.lengths_of_all_bones(labels)
    return (bone_lengths - true_bone_lengths).reshape(poses.shape[0], -1)
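
Note: the reshape flattens the (batch, 5, 4) per-finger differences into one 20-dimensional error vector per sample, which is the layout the concatenation in Example #4 below expects. A toy illustration of the flattening:

import torch

# (batch, 5 fingers, 4 bones) -> (batch, 20): one error value per bone.
diff = torch.arange(20.0).reshape(1, 5, 4)
flat = diff.reshape(diff.shape[0], -1)
print(flat.shape)  # torch.Size([1, 20])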
Example #4
                # The snippet starts inside an if/elif chain over error_name;
                # this first branch computes the per-joint distance error.
                data = errors.distance_error(all_data.poses, all_data.labels)
            elif error_name == 'bone_length':
                data = errors.bone_length_error(all_data.poses,
                                                all_data.labels)
            elif error_name == 'dist_bone_cat':
                distance_errors = errors.distance_error(
                    all_data.poses, all_data.labels)
                bone_length_errors = errors.bone_length_error(
                    all_data.poses, all_data.labels)
                data = torch.cat((distance_errors, bone_length_errors), dim=1)
            elif error_name == 'poses_only':
                data = all_data.poses.reshape(-1, 63)
            elif error_name == 'combined':
                data = combined_errors(all_data.poses, all_data.labels)
            elif error_name == 'shape':
                data = pose_features.lengths_of_all_bones(
                    all_data.labels).reshape(-1, 20)
            elif error_name == 'dist_disp':
                distance_errors = errors.distance_error(
                    all_data.poses, all_data.labels)
                disparities = np.load(
                    os.path.join('results', dataset_name + '_disparities.npy'))
                disparities = disparities.reshape(-1, 1)
                data = torch.cat(
                    (distance_errors, torch.from_numpy(disparities).type(
                        torch.float32)),
                    dim=1)
            else:
                raise ValueError(
                    'Unknown error function name: {}'.format(error_name))

            # Normalize each error feature by its mean over all samples.
            data = data / torch.mean(data, dim=0)
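
Note: the last line rescales each error feature by its mean over all samples, so every column of data ends up with mean 1.0 regardless of its original scale. A toy illustration:

import torch

# Per-column mean normalization: each feature (column) is divided by its
# mean across samples (rows).
data = torch.tensor([[2.0, 10.0],
                     [4.0, 30.0]])
normalized = data / torch.mean(data, dim=0)  # column means are [3.0, 20.0]
print(normalized)              # tensor([[0.6667, 0.5000], [1.3333, 1.5000]])
print(normalized.mean(dim=0))  # tensor([1., 1.])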
Example #5
def cross_proportion_matrix(poses):
    # Matrix of pairwise bone-length ratios: entry (i, j) holds the length of
    # bone i divided by the length of bone j.
    bone_lengths = pose_features.lengths_of_all_bones(poses).reshape(
        poses.shape[0], -1)
    inv_bone_lengths = 1.0 / bone_lengths
    proportion_matrix = batch_outer(bone_lengths, inv_bone_lengths)
    return proportion_matrix
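
Note (editor's sketch): batch_outer comes from the same repository and is not shown here; assuming it is a batched outer product, entry (i, j) of the proportion matrix is bone length i divided by bone length j, with ones on the diagonal. A minimal sketch of that behaviour:

import torch

# Sketch only: a batched outer product via einsum, as batch_outer is assumed
# to behave. proportions[b, i, j] == lengths[b, i] / lengths[b, j].
def batch_outer_sketch(a, b):
    return torch.einsum('bi,bj->bij', a, b)

lengths = torch.tensor([[1.0, 2.0, 4.0]])
proportions = batch_outer_sketch(lengths, 1.0 / lengths)
print(proportions)
# tensor([[[1.0000, 0.5000, 0.2500],
#          [2.0000, 1.0000, 0.5000],
#          [4.0000, 2.0000, 1.0000]]])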