Example #1
def predict_skeleton(input_data, vox, root_pred_net, bone_pred_net, mesh_filename):
    """
    Predict skeleton structure based on joints
    :param input_data: wrapped data
    :param vox: voxelized mesh
    :param root_pred_net: network to predict root
    :param bone_pred_net: network to predict pairwise connectivity cost
    :param mesh_filename: mesh filename for debugging
    :return: predicted skeleton structure
    """
    root_id = getInitId(input_data, root_pred_net)
    pred_joints = input_data.y[:input_data.num_joint[0]].data.cpu().numpy()

    with torch.no_grad():
        connect_prob, _ = bone_pred_net(input_data)
        connect_prob = torch.sigmoid(connect_prob)
    pair_idx = input_data.pairs.long().data.cpu().numpy()
    prob_matrix = np.zeros((input_data.num_joint[0], input_data.num_joint[0]))
    prob_matrix[pair_idx[:, 0], pair_idx[:, 1]] = connect_prob.data.cpu().numpy().squeeze()
    prob_matrix = prob_matrix + prob_matrix.transpose()
    cost_matrix = -np.log(prob_matrix + 1e-10)
    cost_matrix = increase_cost_for_outside_bone(cost_matrix, pred_joints, vox)

    pred_skel = Info()
    parent, key = primMST_symmetry(cost_matrix, root_id, pred_joints)
    for i in range(len(parent)):
        if parent[i] == -1:
            pred_skel.root = TreeNode('root', tuple(pred_joints[i]))
            break
    loadSkel_recur(pred_skel.root, i, None, pred_joints, parent)
    pred_skel.joint_pos = pred_skel.get_joint_dict()
    #show_mesh_vox(mesh_filename, vox, pred_skel.root)
    img = show_obj_skel(mesh_filename, pred_skel.root)
    return pred_skel
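The skeleton above is assembled by running a minimum spanning tree over the pairwise connectivity cost matrix. The sketch below is a plain Prim's algorithm on a toy cost matrix, as a simplified stand-in for primMST_symmetry (which additionally enforces left/right symmetry of the predicted joints); prim_mst is a hypothetical helper, not part of the original code.

# Simplified MST sketch: Prim's algorithm over a dense, symmetric cost matrix.
import numpy as np

def prim_mst(cost_matrix, root_id):
    n = cost_matrix.shape[0]
    parent = -np.ones(n, dtype=int)          # -1 marks the root
    in_tree = np.zeros(n, dtype=bool)
    key = np.full(n, np.inf)
    key[root_id] = 0.0
    for _ in range(n):
        # pick the cheapest node not yet in the tree
        u = np.argmin(np.where(in_tree, np.inf, key))
        in_tree[u] = True
        # relax edges from u to all nodes still outside the tree
        for v in range(n):
            if not in_tree[v] and cost_matrix[u, v] < key[v]:
                key[v] = cost_matrix[u, v]
                parent[v] = u
    return parent

cost = np.array([[0.0, 1.0, 4.0],
                 [1.0, 0.0, 2.0],
                 [4.0, 2.0, 0.0]])
print(prim_mst(cost, root_id=0))  # -> [-1  0  1]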
Example #2
def tranfer_to_ori_mesh(filename_ori, filename_remesh, pred_rig):
    """
    Convert the predicted rig of the remeshed model into a rig for the original model.
    Skinning weights are simply assigned based on the nearest-neighbor remeshed vertex.
    :param filename_ori: original mesh filename
    :param filename_remesh: remeshed mesh filename
    :param pred_rig: predicted rig
    :return: predicted rig for original mesh
    """
    mesh_remesh = o3d.io.read_triangle_mesh(filename_remesh)
    mesh_ori = o3d.io.read_triangle_mesh(filename_ori)
    tranfer_rig = Info()

    vert_remesh = np.asarray(mesh_remesh.vertices)
    vert_ori = np.asarray(mesh_ori.vertices)

    vertice_distance = np.sqrt(
        np.sum((vert_ori[np.newaxis, ...] - vert_remesh[:, np.newaxis, :])**2,
               axis=2))
    vertice_raw_id = np.argmin(
        vertice_distance, axis=0
    )  # nearest vertex id on the remeshed mesh for each vertex of the original mesh

    tranfer_rig.root = pred_rig.root
    tranfer_rig.joint_pos = pred_rig.joint_pos
    new_skin = []
    for v in range(len(vert_ori)):
        skin_v = [v]
        v_nn = vertice_raw_id[v]
        skin_v += pred_rig.joint_skin[v_nn][1:]
        new_skin.append(skin_v)
    tranfer_rig.joint_skin = new_skin
    return tranfer_rig
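The dense (N_remesh x N_ori) distance matrix above can become memory-heavy for large meshes. A possible alternative is a KD-tree for the same nearest-neighbor lookup; this is only a sketch assuming SciPy is available, not something the original code does.

# KD-tree nearest-neighbor sketch (toy random point sets stand in for the meshes).
import numpy as np
from scipy.spatial import cKDTree

vert_remesh = np.random.rand(1000, 3)   # remeshed vertices
vert_ori = np.random.rand(5000, 3)      # original vertices

tree = cKDTree(vert_remesh)
_, vertice_raw_id = tree.query(vert_ori)  # nearest remeshed vertex per original vertex
print(vertice_raw_id.shape)               # (5000,)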
Example #3
    def execute(self, context):
        wm = context.window_manager
        if not os.path.isfile(wm.brignet_skel_path):
            return {'CANCELLED'}

        from utils.rig_parser import Info
        skel_info = Info(filename=wm.brignet_skel_path)

        if os.path.isfile(wm.brignet_obj_path):
            bpy.ops.import_scene.obj(filepath=wm.brignet_obj_path, use_edges=True, use_smooth_groups=True,
                                     use_groups_as_vgroups=False, use_image_search=True, split_mode='OFF',
                                     global_clamp_size=0.0, axis_forward='-Z', axis_up='Y')

            mesh_obj = context.selected_objects[0]
        else:
            mesh_obj = None

        ArmatureGenerator(skel_info, mesh_obj).generate()
        return {'FINISHED'}
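The snippet shows only the execute() method. For context, a minimal sketch of the Blender operator class such a method would live in; the class name and bl_idname/bl_label values are illustrative, not the add-on's actual ones.

# Minimal Blender operator skeleton around an execute() method like the one above.
import bpy

class BRIGNET_OT_load_skeleton(bpy.types.Operator):
    """Load a predicted skeleton and generate an armature"""
    bl_idname = "object.brignet_load_skeleton"   # illustrative id
    bl_label = "Load Predicted Skeleton"

    def execute(self, context):
        # ... body as in the example above ...
        return {'FINISHED'}

def register():
    bpy.utils.register_class(BRIGNET_OT_load_skeleton)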
Example #4
def shoot_rays(mesh, origins, ray_dir, debug=False, model_id=None):
    '''
    Shoot rays from each origin and record the closest hit of each ray; origins with no hit fall back to nearby faces.
    :param mesh: input mesh (trimesh)
    :param origins: origins of the rays
    :param ray_dir: directions of the rays
    :param debug: if True, visualize the mesh, skeleton and rays with open3d
    :param model_id: model id used to locate the mesh and rig-info files when debugging
    :return: hit positions, the origin (sample) id of each hit, and the origin positions
    '''
    global ray_per_sample
    RayMeshIntersector = trimesh.ray.ray_triangle.RayMeshIntersector(mesh)
    locations, index_ray, index_tri = RayMeshIntersector.intersects_location(origins, ray_dir + 1e-15)
    locations_per_ray = []
    index_tri_per_ray = []
    for i in range(len(ray_dir)):
        locations_per_ray.append(locations[index_ray == i])
        index_tri_per_ray.append(index_tri[index_ray == i])
    all_hit_pos = []
    all_hit_tri = []
    all_hit_ori = []
    all_hit_ori_id = []
    for ori_id in np.arange(0, len(ray_dir), ray_per_sample):
        hit_pos = []
        hit_tri = []
        hit_dist = []
        hit_ori_id = []
        for i in range(ray_per_sample):
            ray_id = int(ori_id + i)
            if len(locations_per_ray[ray_id]) > 1:
                closest_hit_id = np.argmin(np.linalg.norm(locations_per_ray[ray_id] - origins[ray_id], axis=1))
                hit_pos.append(locations_per_ray[ray_id][closest_hit_id][np.newaxis, :])
                hit_dist.append(np.linalg.norm(locations_per_ray[ray_id][closest_hit_id] - origins[ray_id]))
                hit_tri.append(index_tri_per_ray[ray_id][closest_hit_id])
                hit_ori_id.append(int(ori_id/ray_per_sample))
            elif len(locations_per_ray[ray_id]) == 1:
                hit_pos.append(locations_per_ray[ray_id])
                hit_dist.append(np.linalg.norm(locations_per_ray[ray_id][0] - origins[ray_id]))
                hit_tri.append(index_tri_per_ray[ray_id][0])
                hit_ori_id.append(int(ori_id/ray_per_sample))

        if len(hit_pos) == 0: # no hit, pick nearby faces
            hit_tri = trimesh.proximity.nearby_faces(mesh, origins[int(ori_id + 0)][np.newaxis, :])[0]
            hit_vertices = mesh.faces[hit_tri].flatten()
            hit_pos = [np.array(mesh.vertices[i])[np.newaxis, :] for i in hit_vertices]
            hit_dist = [np.linalg.norm(hit_pos[i].squeeze() - origins[int(ori_id + 0)]) for i in range(len(hit_pos))]
            hit_tri = np.repeat(hit_tri, 3)
            hit_ori_id = np.repeat(int(ori_id / ray_per_sample), len(hit_tri))

        hit_pos = np.concatenate(hit_pos, axis=0)
        hit_dist = np.array(hit_dist)
        hit_tri = np.array(hit_tri)
        hit_ori_id = np.array(hit_ori_id)
        valid_ids = np.argwhere(hit_dist < np.percentile(hit_dist, 20) * 2).squeeze(1)
        hit_pos = hit_pos[valid_ids]
        hit_dist = hit_dist[valid_ids]
        hit_tri = hit_tri[valid_ids]
        hit_ori_id = hit_ori_id[valid_ids]

        all_hit_pos.append(hit_pos)
        all_hit_tri.append(hit_tri)
        all_hit_ori_id.append(hit_ori_id)
        all_hit_ori.append(origins[int(ori_id + 0)][np.newaxis, :])

    all_hit_pos = np.concatenate(all_hit_pos, axis=0)
    all_hit_tri = np.concatenate(all_hit_tri)
    all_hit_ori_id = np.concatenate(all_hit_ori_id)
    all_hit_ori = np.concatenate(all_hit_ori, axis=0)

    if debug:
        import open3d as o3d
        from utils.vis_utils import find_lines_from_tree, drawSphere
        mesh_filename = 'dataset_folder/obj/{:d}.obj'.format(model_id)
        skel = Info('dataset_folder/rig_info/{:d}.txt'.format(model_id))
        # show mesh
        mesh_o3d = o3d.io.read_triangle_mesh(mesh_filename)
        mesh_ls = o3d.geometry.LineSet.create_from_triangle_mesh(mesh_o3d)
        mesh_ls.colors = o3d.utility.Vector3dVector([[0.8, 0.8, 0.8] for i in range(len(mesh_ls.lines))])
        # show skeleton
        line_list_skel = []
        joint_pos_list = []
        find_lines_from_tree(skel.root, line_list_skel, joint_pos_list)
        line_set_skel = o3d.geometry.LineSet()
        line_set_skel.points = o3d.utility.Vector3dVector(joint_pos_list)
        line_set_skel.lines = o3d.utility.Vector2iVector(line_list_skel)
        colors = [[1.0, 0.0, 0.0] for i in range(len(line_list_skel))]
        line_set_skel.colors = o3d.utility.Vector3dVector(colors)
        # show ray
        dpts = np.concatenate((all_hit_ori, all_hit_pos), axis=0)
        dlines = o3d.geometry.LineSet()
        dlines.points = o3d.utility.Vector3dVector(dpts)
        dlines.lines = o3d.utility.Vector2iVector([[all_hit_ori_id[i], len(all_hit_ori) + i] for i in range(len(all_hit_ori_id))])
        colors = [[0.0, 0.0, 1.0] for i in range(len(all_hit_ori_id))]
        dlines.colors = o3d.utility.Vector3dVector(colors)
        vis = o3d.visualization.Visualizer()
        vis.create_window()
        vis.add_geometry(dlines)
        vis.add_geometry(mesh_ls)
        vis.add_geometry(line_set_skel)
        this_level = skel.root.children
        while this_level:
            next_level = []
            for p_node in this_level:
                vis.add_geometry(drawSphere(p_node.pos, 0.007, color=[1.0, 0.0, 0.0]))  # [0.3, 0.1, 0.1]
                next_level += p_node.children
            this_level = next_level
        vis.run()
        vis.destroy_window()

    return all_hit_pos, all_hit_ori_id, all_hit_ori
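For reference, a small self-contained sketch of the trimesh ray-casting call used above, run on a unit box instead of a character mesh; all values are illustrative.

# trimesh ray casting: intersects_location returns hit points plus, for each hit,
# the index of the ray and of the triangle that was hit.
import numpy as np
import trimesh

mesh = trimesh.creation.box(extents=(1.0, 1.0, 1.0))
intersector = trimesh.ray.ray_triangle.RayMeshIntersector(mesh)

origins = np.array([[0.0, 0.0, -2.0]])
directions = np.array([[0.0, 0.0, 1.0]])
locations, index_ray, index_tri = intersector.intersects_location(origins, directions)
print(locations)   # hit points on the box (both the z = -0.5 and z = +0.5 faces)
print(index_ray)   # which input ray produced each hit
print(index_tri)   # which triangle was hit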
Example #5
    start_id = int(sys.argv[1])
    end_id = int(sys.argv[2])

    ray_per_sample = 14  # number of rays shot from each joint
    dataset_folder = "/media/zhanxu/4T1/ModelResource_Dataset_rt/"

    obj_folder = os.path.join(dataset_folder, "obj/")
    remesh_obj_folder = os.path.join(dataset_folder, "obj_remesh/")
    info_folder = os.path.join(dataset_folder, "rig_info/")
    res_folder = os.path.join(dataset_folder, "pretrain_attention/")
    model_list = np.loadtxt(os.path.join(dataset_folder, "model_list.txt"), dtype=int)

    for model_id in model_list[start_id:end_id]:
        print(model_id)
        mesh_remesh = trimesh.load(os.path.join(remesh_obj_folder, '{:d}.obj'.format(model_id)))
        mesh_ori = trimesh.load(os.path.join(obj_folder, '{:d}.obj'.format(model_id)))
        rig_info = Info(os.path.join(info_folder, '{:d}.txt'.format(model_id)))

        # pick the mesh with fewer faces to speed up
        if len(mesh_remesh.faces) < len(mesh_ori.faces):
            mesh = mesh_remesh
        else:
            mesh = mesh_ori
        trimesh.repair.fix_normals(mesh)

        origins, dirs = form_rays(rig_info)
        hit_pos, all_hit_ori_id, all_hit_ori = shoot_rays(mesh, origins, dirs, debug=False, model_id=model_id)

        # One issue with trimesh is that its vertex order differs from the obj file, so we re-read the obj with open3d.
        mesh = o3d.io.read_triangle_mesh(os.path.join(remesh_obj_folder, '{:d}.obj'.format(model_id)))
        pts = np.array(mesh.vertices)
        dist = np.sqrt(np.sum((pts[np.newaxis, ...] - hit_pos[:, np.newaxis, :])**2, axis=2))
Example #6
def genDataset(process_id):
    global dataset_folder
    print("process ID {:d}".format(process_id))
    if process_id < 6:
        model_list = np.loadtxt(os.path.join(dataset_folder,
                                             'train_final.txt'),
                                dtype=int)
        model_list = model_list[365 * process_id:365 * (process_id + 1)]
        split_name = 'train'
    elif process_id == 6:
        model_list = np.loadtxt(os.path.join(dataset_folder, 'val_final.txt'),
                                dtype=int)
        split_name = 'val'
    elif process_id == 7:
        model_list = np.loadtxt(os.path.join(dataset_folder, 'test_final.txt'),
                                dtype=int)
        split_name = 'test'

    mkdir_p(os.path.join(dataset_folder, split_name))
    for model_id in model_list:
        remeshed_obj_filename = os.path.join(
            dataset_folder, 'obj_remesh/{:d}.obj'.format(model_id))
        info_filename = os.path.join(
            dataset_folder, 'rig_info_remesh/{:d}.txt'.format(model_id))
        remeshed_obj = o3d.io.read_triangle_mesh(remeshed_obj_filename)
        remesh_obj_v = np.asarray(remeshed_obj.vertices)
        remesh_obj_vn = np.asarray(remeshed_obj.vertex_normals)
        remesh_obj_f = np.asarray(remeshed_obj.triangles)
        rig_info = Info(info_filename)

        #vertices
        vert_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_v.txt'.format(split_name, model_id))
        input_feature = np.concatenate((remesh_obj_v, remesh_obj_vn), axis=1)
        np.savetxt(vert_filename, input_feature, fmt='%.6f')

        #topology edges
        edge_index = get_tpl_edges(remesh_obj_v, remesh_obj_f)
        graph_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_tpl_e.txt'.format(split_name, model_id))
        np.savetxt(graph_filename, edge_index, fmt='%d')

        # geodesic_edges
        surface_geodesic = calc_surface_geodesic(remeshed_obj)
        edge_index = get_geo_edges(surface_geodesic, remesh_obj_v)
        graph_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_geo_e.txt'.format(split_name, model_id))
        np.savetxt(graph_filename, edge_index, fmt='%d')

        # joints
        joint_pos = rig_info.get_joint_dict()
        joint_name_list = list(joint_pos.keys())
        joint_pos_list = list(joint_pos.values())
        joint_pos_list = [np.array(i) for i in joint_pos_list]
        adjacent_matrix = rig_info.adjacent_matrix()
        joint_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_j.txt'.format(split_name, model_id))
        adj_filename = os.path.join(
            dataset_folder, '{:s}/{:d}_adj.txt'.format(split_name, model_id))
        np.savetxt(adj_filename, adjacent_matrix, fmt='%d')
        np.savetxt(joint_filename, np.array(joint_pos_list), fmt='%.6f')

        # pre_trained attn
        shutil.copyfile(
            os.path.join(dataset_folder,
                         'pretrain_attention/{:d}.txt'.format(model_id)),
            os.path.join(dataset_folder,
                         '{:s}/{:d}_attn.txt'.format(split_name, model_id)))

        # voxel
        shutil.copyfile(
            os.path.join(dataset_folder, 'vox/{:d}.binvox'.format(model_id)),
            os.path.join(dataset_folder,
                         '{:s}/{:d}.binvox'.format(split_name, model_id)))

        #skinning information
        num_nearest_bone = 5
        geo_dist = np.load(
            os.path.join(
                dataset_folder,
                "volumetric_geodesic/{:d}_volumetric_geo.npy".format(
                    model_id)))
        bone_pos, bone_names, bone_isleaf = get_bones(rig_info)

        input_samples = []  # mesh_vertex_id, (bone_id, 1 / D_g, is_leaf) * N
        ground_truth_labels = []  # w_1, w_2, ..., w_N
        for vert_remesh_id in range(len(remesh_obj_v)):
            this_sample = [vert_remesh_id]
            this_label = []
            skin = rig_info.joint_skin[vert_remesh_id]
            skin_w = {}
            for i in np.arange(1, len(skin), 2):
                skin_w[skin[i]] = float(skin[i + 1])
            bone_id_near_to_far = np.argsort(geo_dist[vert_remesh_id, :])
            for i in range(num_nearest_bone):
                if i >= len(bone_id_near_to_far):
                    this_sample += [-1, 0, 0]
                    this_label.append(0.0)
                    continue
                bone_id = bone_id_near_to_far[i]
                this_sample.append(bone_id)
                this_sample.append(1.0 /
                                   (geo_dist[vert_remesh_id, bone_id] + 1e-10))
                this_sample.append(bone_isleaf[bone_id])
                start_joint_name = bone_names[bone_id][0]
                if start_joint_name in skin_w:
                    this_label.append(skin_w[start_joint_name])
                    del skin_w[start_joint_name]
                else:
                    this_label.append(0.0)

            input_samples.append(this_sample)
            ground_truth_labels.append(this_label)

        with open(
                os.path.join(dataset_folder, '{:s}/{:d}_skin.txt').format(
                    split_name, model_id), 'w') as fout:
            for i in range(len(bone_pos)):
                fout.write('bones {:s} {:s} {:.6f} {:.6f} {:.6f} '
                           '{:.6f} {:.6f} {:.6f}\n'.format(
                               bone_names[i][0], bone_names[i][1],
                               bone_pos[i, 0], bone_pos[i, 1], bone_pos[i, 2],
                               bone_pos[i, 3], bone_pos[i, 4], bone_pos[i, 5]))
            for i in range(len(input_samples)):
                fout.write('bind {:d} '.format(input_samples[i][0]))
                for j in np.arange(1, len(input_samples[i]), 3):
                    fout.write('{:d} {:.6f} {:d} '.format(
                        input_samples[i][j], input_samples[i][j + 1],
                        input_samples[i][j + 2]))
                fout.write('\n')
            for i in range(len(ground_truth_labels)):
                fout.write('influence ')
                for j in range(len(ground_truth_labels[i])):
                    fout.write('{:.3f} '.format(ground_truth_labels[i][j]))
                fout.write('\n')
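The skinning file written by the loop above mixes three record types in one text file. A minimal sketch of parsing it back, assuming exactly the line formats produced above ('bones', 'bind', 'influence'); read_skin_file is a hypothetical helper, not part of the dataset code.

# Parse the *_skin.txt format written by genDataset.
def read_skin_file(path):
    bones, binds, influences = [], [], []
    with open(path) as fin:
        for line in fin:
            tokens = line.split()
            if not tokens:
                continue
            if tokens[0] == 'bones':
                # start joint name, end joint name, 6 floats for the bone endpoints
                bones.append((tokens[1], tokens[2], [float(t) for t in tokens[3:]]))
            elif tokens[0] == 'bind':
                # vertex id, then (bone_id, 1 / geodesic_dist, is_leaf) triples
                binds.append((int(tokens[1]), [float(t) for t in tokens[2:]]))
            elif tokens[0] == 'influence':
                # per-vertex skinning weights for the nearest bones
                influences.append([float(t) for t in tokens[1:]])
    return bones, binds, influences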
Example #7
def one_process(dataset_folder, start_id, end_id):
    model_list = np.loadtxt(os.path.join(dataset_folder, 'model_list.txt'),
                            dtype=int)
    model_list = model_list[start_id:end_id]
    remesh_obj_folder = os.path.join(dataset_folder, "obj_remesh")
    mkdir_p(os.path.join(dataset_folder, "volumetric_geodesic/"))

    for model_id in model_list:
        print(model_id)
        if os.path.exists(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_volumetric_geo.npy".format(
                        model_id))):
            continue
        remeshed_obj_filename = os.path.join(
            dataset_folder, 'obj_remesh/{:d}.obj'.format(model_id))
        ori_obj_filename = os.path.join(dataset_folder,
                                        'obj/{:d}.obj'.format(model_id))
        info_filename = os.path.join(dataset_folder,
                                     'rig_info/{:d}.txt'.format(model_id))

        pts = np.array(
            o3d.io.read_triangle_mesh(
                os.path.join(remesh_obj_folder,
                             '{:d}.obj'.format(model_id))).vertices)

        mesh_remesh = trimesh.load(remeshed_obj_filename)
        mesh_ori = trimesh.load(ori_obj_filename)
        rig_info = Info(info_filename)
        bones, bone_name, _ = get_bones(rig_info)
        origins, ends, pts_bone_dist = pts2line(pts, bones)

        if os.path.exists(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_visibility_raw.npy".format(
                        model_id))):
            pts_bone_visibility = np.load(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_visibility_raw.npy".format(
                        model_id)))
        else:
            # pick the mesh with fewer faces to speed up
            if len(mesh_remesh.faces) < len(mesh_ori.faces):
                trimesh.repair.fix_normals(mesh_remesh)
                pts_bone_visibility = calc_pts2bone_visible_mat(
                    mesh_remesh, origins, ends)
            else:
                trimesh.repair.fix_normals(mesh_ori)
                pts_bone_visibility = calc_pts2bone_visible_mat(
                    mesh_ori, origins, ends)
            pts_bone_visibility = pts_bone_visibility.reshape(
                len(bones), len(pts)).transpose()
            #np.save(os.path.join(dataset_folder, "volumetric_geodesic/{:d}_visibility_raw.npy".format(model_id)), pts_bone_visibility)
        pts_bone_dist = pts_bone_dist.reshape(len(bones), len(pts)).transpose()

        # remove visible points which are too far
        if os.path.exists(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_visibility_filtered.npy".format(
                        model_id))):
            pts_bone_visibility = np.load(
                os.path.join(
                    dataset_folder,
                    "volumetric_geodesic/{:d}_visibility_filtered.npy".format(
                        model_id)))
        else:
            for b in range(pts_bone_visibility.shape[1]):
                visible_pts = np.argwhere(
                    pts_bone_visibility[:, b] == 1).squeeze(1)
                if len(visible_pts) == 0:
                    continue
                threshold_b = np.percentile(pts_bone_dist[visible_pts, b], 15)
                pts_bone_visibility[pts_bone_dist[:, b] > 1.3 * threshold_b,
                                    b] = False
            #np.save(os.path.join(dataset_folder, "volumetric_geodesic/{:d}_visibility_filtered.npy".format(model_id)), pts_bone_visibility)

        mesh = o3d.io.read_triangle_mesh(
            os.path.join(remesh_obj_folder, '{:d}.obj'.format(model_id)))
        surface_geodesic = calc_surface_geodesic(mesh)

        visible_matrix = np.zeros(pts_bone_visibility.shape)
        visible_matrix[np.where(
            pts_bone_visibility == 1)] = pts_bone_dist[np.where(
                pts_bone_visibility == 1)]
        euc_dist = np.sqrt(
            np.sum((pts[np.newaxis, ...] - pts[:, np.newaxis, :])**2, axis=2))
        for c in range(visible_matrix.shape[1]):
            unvisible_pts = np.argwhere(pts_bone_visibility[:,
                                                            c] == 0).squeeze(1)
            visible_pts = np.argwhere(pts_bone_visibility[:,
                                                          c] == 1).squeeze(1)
            if len(visible_pts) == 0:
                visible_matrix[:, c] = pts_bone_dist[:, c]
                continue
            for r in unvisible_pts:
                dist1 = np.min(surface_geodesic[r, visible_pts])
                nn_visible = visible_pts[np.argmin(
                    surface_geodesic[r, visible_pts])]
                if np.isinf(dist1):
                    visible_matrix[r, c] = 8.0 + pts_bone_dist[r, c]
                else:
                    visible_matrix[r,
                                   c] = dist1 + visible_matrix[nn_visible, c]
        np.save(
            os.path.join(
                dataset_folder,
                "volumetric_geodesic/{:d}_volumetric_geo.npy".format(
                    model_id)), visible_matrix)
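The fill rule above (an invisible point gets the surface-geodesic distance to its nearest visible point plus that point's direct distance to the bone) can be illustrated on toy data. The arrays below are made up for the example and only mirror the loop's logic for a single bone.

# Toy illustration of the volumetric-geodesic fill for points that cannot see a bone.
import numpy as np

pts_bone_dist = np.array([[0.2], [0.5], [0.9]])      # 3 points, 1 bone
visibility = np.array([[1], [1], [0]])                # point 2 cannot see the bone
surface_geodesic = np.array([[0.0, 0.3, 1.0],
                             [0.3, 0.0, 0.4],
                             [1.0, 0.4, 0.0]])

visible_matrix = np.zeros_like(pts_bone_dist)
visible_matrix[visibility == 1] = pts_bone_dist[visibility == 1]
for c in range(visible_matrix.shape[1]):
    visible = np.argwhere(visibility[:, c] == 1).squeeze(1)
    hidden = np.argwhere(visibility[:, c] == 0).squeeze(1)
    for r in hidden:
        nn = visible[np.argmin(surface_geodesic[r, visible])]
        visible_matrix[r, c] = surface_geodesic[r, nn] + visible_matrix[nn, c]

print(visible_matrix)  # point 2: 0.4 (geodesic to point 1) + 0.5 = 0.9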
Example #8
def output_rigging(skel_name, attachment, output_folder, name):
    skel = Info(skel_name)
    skel_new = assemble_skel_skin(skel, attachment)
    skel_new.save(os.path.join(output_folder, str(name) + '_rig.txt'))
    '''joint_pos = skel_new.get_joint_pos()
Example #9
    info.split('_info')[0] for info in info_list if info.endswith('_info.npy')
])
prediction_method = 'pred_heatmap_joint_pos_mask'

for i, character in enumerate(characters_list):
    # if i != args.character_idx:
    #     continue

    for motion in motions_list[args.start:args.last]:
        joint_pos_file = os.path.join(joint_log, 'test', character,
                                      '%s_joint.npy' % (motion))
        joint_pos = np.load(joint_pos_file, allow_pickle=True).item()
        joint_result = joint_pos[prediction_method]

        # save skeleton
        pred_skel = Info()
        nodes = []
        for joint_index, joint_pos in enumerate(joint_result):
            nodes.append(TreeNode(name=joint_name[joint_index], pos=joint_pos))

        pred_skel.root = nodes[0]
        for parent, children in enumerate(tree):
            for child in children:
                nodes[parent].children.append(nodes[child])
                nodes[child].parent = nodes[parent]

        # calculate volumetric geodesic distance
        bones, _, _ = get_bones(pred_skel)
        mesh_filename = os.path.join(data_dir, 'objs',
                                     character + '/' + motion + '.obj')
        # mesh_filename = os.path.join(data_dir, 'test_objs', character + '_' + motion + '.obj')