Example #1
def generate_transform_matrices(mesh, factors):
    """Generates len(factors) meshes, each of them is scaled by factors[i] and
       computes the transformations between them.
    
    Returns:
       M: a set of meshes downsampled from mesh by a factor specified in factors.
       A: Adjacency matrix for each of the meshes
       D: Downsampling transforms between each of the meshes
       U: Upsampling transforms between each of the meshes
    """

    factors = map(lambda x: 1.0 / x, factors)
    M, A, D, U = [], [], [], []
    A.append(get_vert_connectivity(mesh.v, mesh.f))
    M.append(mesh)

    for factor in factors:
        ds_f, ds_D = qslim_decimator_transformer(M[-1], factor=factor)
        D.append(ds_D)
        new_mesh_v = ds_D.dot(M[-1].v)
        new_mesh = Mesh(v=new_mesh_v, f=ds_f)
        M.append(new_mesh)
        A.append(get_vert_connectivity(new_mesh.v, new_mesh.f))
        U.append(setup_deformation_transfer(M[-1], M[-2]))

    return M, A, D, U
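For orientation, a minimal usage sketch of the function above. It assumes the psbody.mesh Mesh class and that the helpers (qslim_decimator_transformer, setup_deformation_transfer, get_vert_connectivity) are importable in the current scope; the file name and the decimation factors are illustrative.

# A minimal usage sketch; file name and factors are illustrative.
from psbody.mesh import Mesh

template = Mesh(filename='template.obj')
M, A, D, U = generate_transform_matrices(template, factors=[4, 4, 4, 4])

# D[i] and U[i] are scipy sparse matrices: D[i] maps vertices of M[i] down to
# M[i + 1], and U[i] maps them back up.
coarse_v = D[0].dot(template.v)      # vertices of the first downsampled mesh
restored_v = U[0].dot(coarse_v)      # approximate reconstruction of template.v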
Example #2
def SecondFundamentalForm(v, f):
    from chumpy import hstack, vstack
    from chumpy.linalg import Pinv
    nbrs = MatVecMult(FirstEdgesMtx(v, f, want_big=True), v.ravel()).reshape(
        (-1, 3))

    b0 = VertNormals(f=f, v=v)
    b1 = NormalizedNx3(CrossProduct(b0, nbrs - v)).reshape((-1, 3))
    b2 = NormalizedNx3(CrossProduct(b0, b1)).reshape((-1, 3))

    cnct = get_vert_connectivity(np.asarray(v), f)
    ffs = []
    for i in range(v.size // 3):
        nbrs = v[np.nonzero(np.asarray(cnct[i].todense()).ravel())[0]] - row(
            v[i])
        us = nbrs.dot(b2[i])
        vs = nbrs.dot(b1[i])
        hs = nbrs.dot(b0[i])
        coeffs = Pinv(
            hstack((col((us * .5)**2), col(us * vs), col(
                (vs * .5)**2)))).dot(hs)
        ffs.append(row(coeffs))
        # if i == 3586:
        #     import pdb; pdb.set_trace()

    ffs = vstack(ffs)
    return ffs
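The loop above fits, for each vertex, a quadratic height field over its neighbour offsets expressed in the local frame (tangents b2, b1 and normal b0). A plain-numpy sketch of that per-vertex least-squares step (names are illustrative; the original uses chumpy's Pinv, presumably so the fit stays differentiable end to end):

import numpy as np

def fit_second_fundamental_form(us, vs, hs):
    """Least-squares fit of h ~= a*(u/2)**2 + b*(u*v) + c*(v/2)**2 for one vertex."""
    design = np.column_stack(((us * 0.5) ** 2, us * vs, (vs * 0.5) ** 2))
    coeffs, *_ = np.linalg.lstsq(design, hs, rcond=None)
    return coeffs   # (a, b, c)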
Example #3
def generate_transform_matrices(mesh, factors):
    factors = map(lambda x: 1.0 / x, factors)
    M, A, D, U = [], [], [], []
    A.append(get_vert_connectivity(mesh.v, mesh.f))
    M.append(mesh)

    for factor in factors:
        ds_f, ds_D = qslim_decimator_transformer(M[-1], factor=factor)
        D.append(ds_D)
        new_mesh_v = ds_D.dot(M[-1].v)
        new_mesh = Mesh(v=new_mesh_v, f=ds_f)
        M.append(new_mesh)
        A.append(get_vert_connectivity(new_mesh.v, new_mesh.f))
        U.append(setup_deformation_transfer(M[-1], M[-2]))

    return M, A, D, U
Example #4
def generate_transform_matrices(mesh_path, factors):
    """Generates len(factors) meshes, each of them is scaled by factors[i] and
       computes the transformations between them.
    
    Returns:
       M: a set of meshes downsampled from mesh by a factor specified in factors.
       A: Adjacency matrix for each of the meshes
       D: Downsampling transforms between each of the meshes
       U: Upsampling transforms between each of the meshes
    """

    assert len(factors) == 3
    M, A, D, U = [], [], [], []

    # for mesh up up
    mesh = Mesh(filename=os.path.join(mesh_path, "smpl_mesh_up_up.obj"))
    A.append(get_vert_connectivity(mesh.v, mesh.f))
    M.append(mesh)

    # for mesh up
    mesh = Mesh(filename=os.path.join(mesh_path, "smpl_mesh_up.obj"))
    A.append(get_vert_connectivity(mesh.v, mesh.f))
    M.append(mesh)
    D.append(sp.eye(19019, 61718))
    U.append(sp.load_npz(os.path.join(
        mesh_path, "up_sampling_stage_two.npz")))  #saprse shape=(61718, 19019)

    # for smpl
    mesh = Mesh(filename=os.path.join(mesh_path, "smpl_mesh.obj"))
    A.append(get_vert_connectivity(mesh.v, mesh.f))
    M.append(mesh)
    D.append(sp.eye(6890, 19019))
    U.append(sp.load_npz(os.path.join(
        mesh_path, "up_sampling_stage_one.npz")))  #sparse shape=(19019, 6890)

    # for smpl down
    mesh = Mesh(filename=os.path.join(mesh_path, "smpl_mesh_down.obj"))
    A.append(get_vert_connectivity(mesh.v, mesh.f))
    M.append(mesh)
    D.append(
        sp.load_npz(os.path.join(
            mesh_path,
            "down_sampling_1723_6890.npz")))  #sparse shape=1723x6890
    U.append(sp.load_npz(os.path.join(
        mesh_path, "up_sampling_6890_1723.npz")))  #sparse shape=6890x1723

    return M, A, D, U
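A small illustration of what the identity-based downsampling transforms above do (assuming scipy): sp.eye(m, n) with m < n is a rectangular identity, so applying it simply keeps the first m vertices.

import numpy as np
import scipy.sparse as sp

verts = np.random.rand(61718, 3)   # illustrative vertex array at the finest level
D = sp.eye(19019, 61718)           # rectangular identity: ones on the main diagonal
coarse = D.dot(verts)              # identical to verts[:19019]
assert np.allclose(coarse, verts[:19019])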
Example #5
def compute_transforms_from_downsampled_meshes(down_meshes,
                                               template,
                                               sampling_factors=None):
    # A list of adjacency matrices for each downsampled mesh.
    adj_matrices = [get_vert_connectivity(template.v, template.f)]

    # Downsampling and upsampling transforms for each sampling layer.
    down_transforms = []
    up_transforms = []

    for ii in range(1, len(down_meshes)):
        down_mesh = down_meshes[ii]

        # Get an adjacency matrix.
        adj_matrix = get_vert_connectivity(down_mesh.v, down_mesh.f)
        adj_matrices.append(adj_matrix)

        # Find correspondence between downsampled meshes.
        kdtree = KDTree(down_meshes[ii - 1].v)
        idxs = kdtree.query(down_mesh.v, k=1)[1]

        # Create a downsampling matrix.
        down_transform = np.zeros(
            (down_mesh.v.shape[0], down_meshes[ii - 1].v.shape[0]))
        down_transform[np.arange(down_mesh.v.shape[0]), idxs] = 1
        down_transform = sp.csr_matrix(down_transform)
        down_transforms.append(down_transform)

        # Create an upsampling matrix.
        upsampling = mesh_sampling.setup_deformation_transfer(
            down_meshes[ii], down_meshes[ii - 1])
        up_transforms.append(upsampling)

    pooling_data = {
        'factors': sampling_factors,
        'down_meshes': down_meshes[1:],
        'adj_matrices': adj_matrices,
        'down_transforms': down_transforms,
        'up_transforms': up_transforms
    }

    return pooling_data
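The downsampling matrix above is assembled densely and only then converted to CSR. A sketch of the same nearest-neighbour construction built directly in sparse form (assumes scipy; argument names are illustrative), which avoids allocating the dense (n_coarse, n_fine) array:

import numpy as np
import scipy.sparse as sp
from scipy.spatial import cKDTree

def nearest_neighbour_down_transform(fine_v, coarse_v):
    """One-hot rows: row i selects the fine vertex nearest to coarse vertex i."""
    idxs = cKDTree(fine_v).query(coarse_v, k=1)[1]
    rows = np.arange(coarse_v.shape[0])
    data = np.ones(rows.shape[0])
    return sp.csr_matrix((data, (rows, idxs)),
                         shape=(coarse_v.shape[0], fine_v.shape[0]))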
Example #6
def generate_transform_matrices_given_downsamples(mesh, downsample_directory, num_downsamples=4):
    from opendr.topology import get_vert_connectivity
    M, A, D, U, F = [], [], [], [], []
    A.append(get_vert_connectivity(mesh.v, mesh.f))
    M.append(mesh)

    for i in range(1, num_downsamples + 1):
        #         import trimesh
        #         cur_M = trimesh.load(os.path.join(downsample_directory,'template_d{0}.obj'.format(i)))
        cur_M = Mesh(filename=os.path.join(downsample_directory, 'template_d{0}.obj'.format(i)))
        cur_D = np.zeros((cur_M.v.shape[0], M[-1].v.shape[0]))
        kd = spatial.KDTree(np.array(M[-1].v))
        for vi in range(cur_M.v.shape[0]):
            _, u = kd.query(cur_M.v[vi])
            cur_D[vi, u] = 1.0
        M.append(cur_M)
        D.append(sp.csr_matrix(cur_D))
        F.append(cur_M.f)
        A.append(get_vert_connectivity(cur_M.v, cur_M.f))
        U.append(setup_deformation_transfer(M[-1], M[-2]))

    return M, A, D, U, F
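A minimal usage sketch of the variant above; it assumes the psbody.mesh Mesh class and that the downsample directory already contains meshes named template_d1.obj through template_d4.obj, as the loop expects. Paths are illustrative.

from psbody.mesh import Mesh

template = Mesh(filename='template.obj')
M, A, D, U, F = generate_transform_matrices_given_downsamples(
    template, downsample_directory='downsamples/', num_downsamples=4)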
Example #7
def generate_transform_matrices(mesh, factors):
    """Generates len(factors) meshes, each decimated from the previous one by
       factors[i], and computes the transformations between them.

    Returns:
       M: a set of meshes downsampled from mesh by a factor specified in factors.
       A: Adjacency matrix for each of the meshes
       D: Downsampling transforms between each of the meshes
       U: Upsampling transforms between each of the meshes
       F: Face arrays of the downsampled meshes
    """
    from opendr.topology import get_vert_connectivity

    factors = map(lambda x: 1.0 / x, factors)
    M, A, D, U = [], [], [], []
    ## sergey code
    F = []
    ##
    A.append(get_vert_connectivity(mesh.v, mesh.f))
    M.append(mesh)

    for i, factor in enumerate(factors):
        ds_f, ds_D = qslim_decimator_transformer(M[-1], factor=factor)
        D.append(ds_D)
        ##
        F.append(ds_f)
        ##
        new_mesh_v = ds_D.dot(M[-1].v)
        new_mesh = Mesh(v=new_mesh_v, f=ds_f)
        M.append(new_mesh)
        A.append(get_vert_connectivity(new_mesh.v, new_mesh.f))
        U.append(setup_deformation_transfer(M[-1], M[-2]))
        print('decimation %d by factor %.2f finished' % (i, factor))

    return M, A, D, U, F
Example #8
def compute_downsampling_transforms(mesh, sampling_factors):
    # A list of downsampled meshes.
    down_meshes = [mesh]

    # A list of adjacency matrices for each downsampled mesh.
    adj_matrices = [get_vert_connectivity(mesh.v, mesh.f)]

    # Downsampling and upsampling transforms for each sampling layer.
    down_transforms = []
    up_transforms = []

    for factor in sampling_factors:
        # Compute a downsampled mesh.
        down_faces, down_transform = mesh_sampling.qslim_decimator_transformer(
            down_meshes[-1], factor=factor)
        down_mesh_v = down_transform.dot(down_meshes[-1].v)
        down_mesh = Mesh(down_mesh_v, down_faces)

        # Append a mesh and its adjacency matrix.
        down_meshes.append(down_mesh)
        adj_matrices.append(get_vert_connectivity(down_mesh_v, down_faces))

        # Append pooling transforms.
        down_transforms.append(down_transform)
        up_transforms.append(
            mesh_sampling.setup_deformation_transfer(down_meshes[-1],
                                                     down_meshes[-2]))

    pooling_data = {
        'factors': sampling_factors,
        'down_meshes': down_meshes[1:],
        'adj_matrices': adj_matrices,
        'down_transforms': down_transforms,
        'up_transforms': up_transforms
    }

    return pooling_data
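A minimal usage sketch of the function above. It assumes the psbody.mesh Mesh class, an illustrative template path, and that qslim_decimator_transformer interprets factor as the fraction of the mesh to keep (as in the COMA reference implementation), which is why fractional factors are passed here.

from psbody.mesh import Mesh

template = Mesh(filename='template.obj')
pooling = compute_downsampling_transforms(template, sampling_factors=[0.25, 0.25, 0.25])

# Each pooling level pairs one downsampling transform with one upsampling transform.
for D, U in zip(pooling['down_transforms'], pooling['up_transforms']):
    print(D.shape, U.shape)   # (n_coarse, n_fine) and (n_fine, n_coarse)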
Example #9
def FirstEdgesMtx(v, f, want_big=True):
    cnct = get_vert_connectivity((v.r if hasattr(v, 'r') else v), f)
    nbrs = [
        np.nonzero(np.array(cnct[:, i].todense()))[0][0]
        for i in range(cnct.shape[1])
    ]
    JS = np.array(nbrs)
    IS = np.arange(len(JS))
    data = np.ones(IS.size)

    if want_big:
        IS = np.concatenate((IS * 3, IS * 3 + 1, IS * 3 + 2))
        JS = np.concatenate((JS * 3, JS * 3 + 1, JS * 3 + 2))
        data = np.concatenate((data, data, data))

    return sp.csc_matrix((data, (IS, JS)), shape=(JS.size, JS.size))
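A toy illustration of what the matrix does (it assumes get_vert_connectivity is importable, as above): multiplied with the flattened vertex array, it returns the position of each vertex's first neighbour, exactly as used in SecondFundamentalForm.

import numpy as np

# A tetrahedron: every vertex is connected to every other vertex.
v = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
f = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])

mtx = FirstEdgesMtx(v, f, want_big=True)          # (12, 12) sparse selector
first_nbrs = mtx.dot(v.ravel()).reshape(-1, 3)    # first neighbour of each vertex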
Example #10
def removeSmallIsolatedComponents(vertices, faces, normals):
	cnct = get_vert_connectivity(vertices, faces)
	A = cnct.toarray()
	A[A > 0] = 1

	# Keep only the largest connected component of the vertex graph.
	G = nx.from_numpy_array(A)
	connectedComponents = sorted(nx.connected_components(G), key=len, reverse=True)
	if len(connectedComponents) > 1:
		indicesToDelete = np.array([j for c in connectedComponents[1:] for j in c])
		vertices = np.delete(vertices, indicesToDelete, axis=0)
		normals = np.delete(normals, indicesToDelete, axis=0)

		# Drop faces that reference a deleted vertex, then re-index the rest.
		faceHasOnlyGoodVs = np.all(np.in1d(faces.ravel(), indicesToDelete, invert=True).reshape(faces.shape), axis=1)
		faces = faces[np.where(faceHasOnlyGoodVs)]
		faces = np.array([f - len(indicesToDelete[f > indicesToDelete]) for f in faces.ravel()]).reshape(faces.shape)

	return vertices, faces, normals
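A toy usage sketch of the function above (it assumes numpy, networkx, and get_vert_connectivity are available at module level, as the function expects): two triangles sharing an edge form the larger component, the isolated third triangle is dropped, and the surviving faces are re-indexed.

import numpy as np

# Component 1: a two-triangle strip over vertices 0-3.
# Component 2: an isolated triangle over vertices 4-6 (should be removed).
vertices = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.],
                     [5., 5., 5.], [6., 5., 5.], [5., 6., 5.]])
faces = np.array([[0, 1, 2], [1, 3, 2], [4, 5, 6]])
normals = np.zeros_like(vertices)

v2, f2, n2 = removeSmallIsolatedComponents(vertices, faces, normals)
# v2 now holds 4 vertices, and f2 the two surviving faces re-indexed to 0..3.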