Example #1
def genDataset(process_id):
    global dataset_folder
    print("process ID {:d}".format(process_id))
    if process_id < 6:
        model_list = np.loadtxt(os.path.join(dataset_folder,
                                             'train_final.txt'),
                                dtype=int)
        model_list = model_list[365 * process_id:365 * (process_id + 1)]
        split_name = 'train'
    elif process_id == 6:
        model_list = np.loadtxt(os.path.join(dataset_folder, 'val_final.txt'),
                                dtype=int)
        split_name = 'val'
    elif process_id == 7:
        model_list = np.loadtxt(os.path.join(dataset_folder, 'test_final.txt'),
                                dtype=int)
        split_name = 'test'

    mkdir_p(os.path.join(dataset_folder, split_name))
    for model_id in model_list:
        remeshed_obj_filename = os.path.join(
            dataset_folder, 'obj_remesh/{:d}.obj'.format(model_id))
        info_filename = os.path.join(
            dataset_folder, 'rig_info_remesh/{:d}.txt'.format(model_id))
        remeshed_obj = o3d.io.read_triangle_mesh(remeshed_obj_filename)
        remesh_obj_v = np.asarray(remeshed_obj.vertices)
        remesh_obj_vn = np.asarray(remeshed_obj.vertex_normals)
        remesh_obj_f = np.asarray(remeshed_obj.triangles)
        rig_info = Info(info_filename)

        # vertices
        vert_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_v.txt'.format(split_name, model_id))
        input_feature = np.concatenate((remesh_obj_v, remesh_obj_vn), axis=1)
        np.savetxt(vert_filename, input_feature, fmt='%.6f')

        # topology edges
        edge_index = get_tpl_edges(remesh_obj_v, remesh_obj_f)
        graph_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_tpl_e.txt'.format(split_name, model_id))
        np.savetxt(graph_filename, edge_index, fmt='%d')

        # geodesic_edges
        surface_geodesic = calc_surface_geodesic(remeshed_obj)
        edge_index = get_geo_edges(surface_geodesic, remesh_obj_v)
        graph_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_geo_e.txt'.format(split_name, model_id))
        np.savetxt(graph_filename, edge_index, fmt='%d')

        # joints
        joint_pos = rig_info.get_joint_dict()
        joint_name_list = list(joint_pos.keys())
        joint_pos_list = list(joint_pos.values())
        joint_pos_list = [np.array(i) for i in joint_pos_list]
        adjacent_matrix = rig_info.adjacent_matrix()
        joint_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_j.txt'.format(split_name, model_id))
        adj_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_adj.txt'.format(split_name, model_id))
        np.savetxt(adj_filename, adjacent_matrix, fmt='%d')
        np.savetxt(joint_filename, np.array(joint_pos_list), fmt='%.6f')

        # pre-trained attention
        shutil.copyfile(
            os.path.join(dataset_folder,
                         'pretrain_attention/{:d}.txt'.format(model_id)),
            os.path.join(dataset_folder,
                         '{:s}/{:d}_attn.txt'.format(split_name, model_id)))

        # voxel
        shutil.copyfile(
            os.path.join(dataset_folder, 'vox/{:d}.binvox'.format(model_id)),
            os.path.join(dataset_folder,
                         '{:s}/{:d}.binvox'.format(split_name, model_id)))

        # skinning information
        num_nearest_bone = 5
        geo_dist = np.load(
            os.path.join(
                dataset_folder,
                "volumetric_geodesic/{:d}_volumetric_geo.npy".format(
                    model_id)))
        bone_pos, bone_names, bone_isleaf = get_bones(rig_info)

        input_samples = []  # mesh_vertex_id, (bone_id, 1 / D_g, is_leaf) * N
        ground_truth_labels = []  # w_1, w_2, ..., w_N
        for vert_remesh_id in range(len(remesh_obj_v)):
            this_sample = [vert_remesh_id]
            this_label = []
            skin = rig_info.joint_skin[vert_remesh_id]
            skin_w = {}
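            # skin[1:], taken in pairs, holds (joint name, weight) for this vertex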
            for i in np.arange(1, len(skin), 2):
                skin_w[skin[i]] = float(skin[i + 1])
            bone_id_near_to_far = np.argsort(geo_dist[vert_remesh_id, :])
            for i in range(num_nearest_bone):
                if i >= len(bone_id_near_to_far):
                    this_sample += [-1, 0, 0]
                    this_label.append(0.0)
                    continue
                bone_id = bone_id_near_to_far[i]
                this_sample.append(bone_id)
                this_sample.append(1.0 /
                                   (geo_dist[vert_remesh_id, bone_id] + 1e-10))
                this_sample.append(bone_isleaf[bone_id])
                start_joint_name = bone_names[bone_id][0]
                if start_joint_name in skin_w:
                    this_label.append(skin_w[start_joint_name])
                    del skin_w[start_joint_name]
                else:
                    this_label.append(0.0)

            input_samples.append(this_sample)
            ground_truth_labels.append(this_label)

        with open(
                os.path.join(dataset_folder, '{:s}/{:d}_skin.txt').format(
                    split_name, model_id), 'w') as fout:
            for i in range(len(bone_pos)):
                fout.write('bones {:s} {:s} {:.6f} {:.6f} {:.6f} '
                           '{:.6f} {:.6f} {:.6f}\n'.format(
                               bone_names[i][0], bone_names[i][1],
                               bone_pos[i, 0], bone_pos[i, 1], bone_pos[i, 2],
                               bone_pos[i, 3], bone_pos[i, 4], bone_pos[i, 5]))
            for i in range(len(input_samples)):
                fout.write('bind {:d} '.format(input_samples[i][0]))
                for j in np.arange(1, len(input_samples[i]), 3):
                    fout.write('{:d} {:.6f} {:d} '.format(
                        input_samples[i][j], input_samples[i][j + 1],
                        input_samples[i][j + 2]))
                fout.write('\n')
            for i in range(len(ground_truth_labels)):
                fout.write('influence ')
                for j in range(len(ground_truth_labels[i])):
                    fout.write('{:.3f} '.format(ground_truth_labels[i][j]))
                fout.write('\n')
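
The split IDs map to fixed chunks of the data: IDs 0-5 cover the training list in blocks of 365 models, 6 the validation list, and 7 the test list, so the whole dataset can be generated with one worker per ID. Below is a minimal, hypothetical driver sketch; it assumes genDataset and the module-level dataset_folder are defined in the current module (so forked workers inherit them).

import multiprocessing

if __name__ == '__main__':
    # one worker per split chunk: IDs 0-5 train, 6 val, 7 test
    workers = [multiprocessing.Process(target=genDataset, args=(pid,))
               for pid in range(8)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
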
Example #2
def predict_skinning(input_data,
                     pred_skel,
                     skin_pred_net,
                     surface_geodesic,
                     mesh_filename,
                     subsampling=False):
    """
    predict skinning
    :param input_data: wrapped input data
    :param pred_skel: predicted skeleton
    :param skin_pred_net: network to predict skinning weights
    :param surface_geodesic: geodesic distance matrix of all vertices
    :param mesh_filename: mesh filename
    :return: predicted rig with skinning weights information
    """
    global device, output_folder
    num_nearest_bone = 5
    bones, bone_names, bone_isleaf = get_bones(pred_skel)
    mesh_v = input_data.pos.data.cpu().numpy()
    print(
        "     calculating volumetric geodesic distance from vertices to bone. This step takes some time..."
    )
    geo_dist = calc_geodesic_matrix(bones,
                                    mesh_v,
                                    surface_geodesic,
                                    mesh_filename,
                                    subsampling=subsampling)
    input_samples = []  # per nearest bone: endpoints (6 values), 1/D, is_leaf
    loss_mask = []
    skin_nn = []
    for v_id in range(len(mesh_v)):
        geo_dist_v = geo_dist[v_id]
        bone_id_near_to_far = np.argsort(geo_dist_v)
        this_sample = []
        this_nn = []
        this_mask = []
        for i in range(num_nearest_bone):
            if i >= len(bones):
                this_sample += bones[bone_id_near_to_far[0]].tolist()
                this_sample.append(
                    1.0 / (geo_dist_v[bone_id_near_to_far[0]] + 1e-10))
                this_sample.append(bone_isleaf[bone_id_near_to_far[0]])
                this_nn.append(0)
                this_mask.append(0)
            else:
                skel_bone_id = bone_id_near_to_far[i]
                this_sample += bones[skel_bone_id].tolist()
                this_sample.append(1.0 / (geo_dist_v[skel_bone_id] + 1e-10))
                this_sample.append(bone_isleaf[skel_bone_id])
                this_nn.append(skel_bone_id)
                this_mask.append(1)
        input_samples.append(np.array(this_sample)[np.newaxis, :])
        skin_nn.append(np.array(this_nn)[np.newaxis, :])
        loss_mask.append(np.array(this_mask)[np.newaxis, :])

    skin_input = np.concatenate(input_samples, axis=0)
    loss_mask = np.concatenate(loss_mask, axis=0)
    skin_nn = np.concatenate(skin_nn, axis=0)
    skin_input = torch.from_numpy(skin_input).float()
    input_data.skin_input = skin_input
    input_data.to(device)

    skin_pred = skin_pred_net(input_data)
    skin_pred = torch.softmax(skin_pred, dim=1)
    skin_pred = skin_pred.data.cpu().numpy()
    skin_pred = skin_pred * loss_mask

    skin_nn = skin_nn[:, 0:num_nearest_bone]
    skin_pred_full = np.zeros((len(skin_pred), len(bone_names)))
    for v in range(len(skin_pred)):
        for nn_id in range(len(skin_nn[v, :])):
            skin_pred_full[v, skin_nn[v, nn_id]] = skin_pred[v, nn_id]
    print("     filtering skinning prediction")
    tpl_e = input_data.tpl_edge_index.data.cpu().numpy()
    skin_pred_full = post_filter(skin_pred_full, tpl_e, num_ring=1)
    skin_pred_full[skin_pred_full <
                   np.max(skin_pred_full, axis=1, keepdims=True) * 0.35] = 0.0
    skin_pred_full = skin_pred_full / (
        skin_pred_full.sum(axis=1, keepdims=True) + 1e-10)
    skel_res = assemble_skel_skin(pred_skel, skin_pred_full)
    return skel_res
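
A hypothetical call site is sketched below. The helper name rig_one_mesh and its arguments are placeholders for objects produced by earlier pipeline stages: wrapped mesh data, a predicted skeleton, a trained skinning network, and the surface geodesic matrix from calc_surface_geodesic on the input mesh.

# Hypothetical usage sketch; all inputs come from earlier pipeline stages.
def rig_one_mesh(data, pred_skeleton, skin_net, surface_geodesic, mesh_path):
    pred_rig = predict_skinning(data,
                                pred_skeleton,
                                skin_net,
                                surface_geodesic,
                                mesh_path,
                                subsampling=True)
    return pred_rig

The returned object bundles the predicted skeleton with the filtered, renormalized skinning weights assembled by assemble_skel_skin.
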
Example #3
def one_process(dataset_folder, start_id, end_id):
    model_list = np.loadtxt(os.path.join(dataset_folder, 'model_list.txt'),
                            dtype=int)
    model_list = model_list[start_id:end_id]
    remesh_obj_folder = os.path.join(dataset_folder, "obj_remesh")
    mkdir_p(os.path.join(dataset_folder, "volumetric_geodesic/"))

    for model_id in model_list:
        print(model_id)
        if os.path.exists(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_volumetric_geo.npy".format(
                        model_id))):
            continue
        remeshed_obj_filename = os.path.join(
            dataset_folder, 'obj_remesh/{:d}.obj'.format(model_id))
        ori_obj_filename = os.path.join(dataset_folder,
                                        'obj/{:d}.obj'.format(model_id))
        info_filename = os.path.join(dataset_folder,
                                     'rig_info/{:d}.txt'.format(model_id))

        pts = np.array(
            o3d.io.read_triangle_mesh(
                os.path.join(remesh_obj_folder,
                             '{:d}.obj'.format(model_id))).vertices)

        mesh_remesh = trimesh.load(remeshed_obj_filename)
        mesh_ori = trimesh.load(ori_obj_filename)
        rig_info = Info(info_filename)
        bones, bone_name, _ = get_bones(rig_info)
        origins, ends, pts_bone_dist = pts2line(pts, bones)

        if os.path.exists(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_visibility_raw.npy".format(
                        model_id))):
            pts_bone_visibility = np.load(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_visibility_raw.npy".format(
                        model_id)))
        else:
            # pick one mesh with fewer faces to speed up
            if len(mesh_remesh.faces) < len(mesh_ori.faces):
                trimesh.repair.fix_normals(mesh_remesh)
                pts_bone_visibility = calc_pts2bone_visible_mat(
                    mesh_remesh, origins, ends)
            else:
                trimesh.repair.fix_normals(mesh_ori)
                pts_bone_visibility = calc_pts2bone_visible_mat(
                    mesh_ori, origins, ends)
            pts_bone_visibility = pts_bone_visibility.reshape(
                len(bones), len(pts)).transpose()
            #np.save(os.path.join(dataset_folder, "volumetric_geodesic/{:d}_visibility_raw.npy".format(model_id)), pts_bone_visibility)
        pts_bone_dist = pts_bone_dist.reshape(len(bones), len(pts)).transpose()

        # remove visible points which are too far
        if os.path.exists(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_visibility_filtered.npy".format(
                        model_id))):
            pts_bone_visibility = np.load(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_visibility_filtered.npy".format(
                        model_id)))
        else:
            for b in range(pts_bone_visibility.shape[1]):
                visible_pts = np.argwhere(
                    pts_bone_visibility[:, b] == 1).squeeze(1)
                if len(visible_pts) == 0:
                    continue
                threshold_b = np.percentile(pts_bone_dist[visible_pts, b], 15)
                pts_bone_visibility[pts_bone_dist[:, b] > 1.3 * threshold_b,
                                    b] = False
            #np.save(os.path.join(dataset_folder, "volumetric_geodesic/{:d}_visibility_filtered.npy".format(model_id)), pts_bone_visibility)

        mesh = o3d.io.read_triangle_mesh(
            os.path.join(remesh_obj_folder, '{:d}.obj'.format(model_id)))
        surface_geodesic = calc_surface_geodesic(mesh)

        visible_matrix = np.zeros(pts_bone_visibility.shape)
        visible_matrix[np.where(
            pts_bone_visibility == 1)] = pts_bone_dist[np.where(
                pts_bone_visibility == 1)]
        euc_dist = np.sqrt(
            np.sum((pts[np.newaxis, ...] - pts[:, np.newaxis, :])**2, axis=2))
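        # for vertices with no line of sight to a bone, route the distance through
        # the nearest visible vertex: surface geodesic to it plus its distance to the bone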
        for c in range(visible_matrix.shape[1]):
            unvisible_pts = np.argwhere(pts_bone_visibility[:,
                                                            c] == 0).squeeze(1)
            visible_pts = np.argwhere(pts_bone_visibility[:,
                                                          c] == 1).squeeze(1)
            if len(visible_pts) == 0:
                visible_matrix[:, c] = pts_bone_dist[:, c]
                continue
            for r in unvisible_pts:
                dist1 = np.min(surface_geodesic[r, visible_pts])
                nn_visible = visible_pts[np.argmin(
                    surface_geodesic[r, visible_pts])]
                if np.isinf(dist1):
                    visible_matrix[r, c] = 8.0 + pts_bone_dist[r, c]
                else:
                    visible_matrix[r,
                                   c] = dist1 + visible_matrix[nn_visible, c]
        np.save(
            os.path.join(
                dataset_folder,
                "volumetric_geodesic/{:d}_volumetric_geo.npy".format(
                    model_id)), visible_matrix)
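
Because each model is handled independently and already-finished models are skipped, the precomputation parallelizes naturally over ranges of the model list. Below is a minimal driver sketch; run_all is a hypothetical name, and it assumes one_process is defined in the current module.

from multiprocessing import Pool

def run_all(dataset_folder, num_models, num_workers=8):
    # split [0, num_models) into contiguous chunks, one per worker
    step = (num_models + num_workers - 1) // num_workers
    ranges = [(dataset_folder, s, min(s + step, num_models))
              for s in range(0, num_models, step)]
    with Pool(num_workers) as pool:
        pool.starmap(one_process, ranges)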