Example #1
    def test_mesh_vector_heat(self):

        V, F = pp3d.read_mesh(os.path.join(asset_path, "bunny_small.ply"))

        solver = pp3d.MeshVectorHeatSolver(V, F)

        # Scalar extension
        ext = solver.extend_scalar([1, 22], [0., 6.])
        self.assertEqual(ext.shape[0], V.shape[0])
        self.assertGreaterEqual(np.amin(ext), 0.)

        # Get frames
        basisX, basisY, basisN = solver.get_tangent_frames()
        self.assertEqual(basisX.shape[0], V.shape[0])
        self.assertEqual(basisY.shape[0], V.shape[0])
        self.assertEqual(basisN.shape[0], V.shape[0])
        # TODO could check orthogonal

        # Vector heat (transport vector)
        ext = solver.transport_tangent_vector(1, [6., 6.])
        self.assertEqual(ext.shape[0], V.shape[0])
        self.assertEqual(ext.shape[1], 2)
        ext = solver.transport_tangent_vectors([1, 22], [[6., 6.], [3., 4.]])
        self.assertEqual(ext.shape[0], V.shape[0])
        self.assertEqual(ext.shape[1], 2)

        # Vector heat (log map)
        logmap = solver.compute_log_map(1)
        self.assertEqual(logmap.shape[0], V.shape[0])
        self.assertEqual(logmap.shape[1], 2)
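
The transported vectors are returned as 2D coefficients in each vertex's tangent basis; to visualize or compare them in 3D they can be lifted with the frames from get_tangent_frames(). A minimal sketch, reusing solver, basisX, and basisY from the example above:

# Lift intrinsic 2D tangent vectors to 3D world space
ext = solver.transport_tangent_vector(1, [6., 6.])           # (V, 2) tangent coefficients
ext3D = ext[:, 0, None] * basisX + ext[:, 1, None] * basisY  # (V, 3) world-space vectors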
Example #2
    def test_mesh_areas(self):

        V, F = pp3d.read_mesh(os.path.join(asset_path, "bunny_small.ply"))

        face_area = pp3d.face_areas(V, F)
        self.assertEqual(face_area.shape[0], F.shape[0])
        self.assertTrue(np.all(face_area >= 0))

        vert_area = pp3d.vertex_areas(V, F)
        self.assertLess(np.abs(np.sum(face_area) - np.sum(vert_area)), 1e-6)
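
The final assertion checks that the per-vertex areas partition the same total surface area as the per-face areas. A small numpy reference for the face-area computation, which could be used to cross-check pp3d.face_areas (a sketch via the standard cross-product formula, not the library's own code):

import numpy as np

def face_areas_ref(V, F):
    # Triangle area: 0.5 * ||(v1 - v0) x (v2 - v0)||
    e1 = V[F[:, 1]] - V[F[:, 0]]
    e2 = V[F[:, 2]] - V[F[:, 0]]
    return 0.5 * np.linalg.norm(np.cross(e1, e2), axis=1)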
Example #3
    def test_mesh_cotan_laplace(self):

        V, F = pp3d.read_mesh(os.path.join(asset_path, "bunny_small.ply"))

        L = pp3d.cotan_laplacian(V, F)

        self.assertEqual(L.shape[0], V.shape[0])
        self.assertEqual(L.shape[1], V.shape[0])

        self.assertLess(np.abs(np.sum(L)), 1e-6)
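
The last assertion holds because each row of a cotan Laplacian sums to zero, so constant functions lie in its kernel. That stronger row-wise property can be checked directly (a sketch, assuming L is returned as a scipy sparse matrix, consistent with the shape checks above):

import numpy as np

ones = np.ones(V.shape[0])
residual = L @ ones                      # apply the Laplacian to a constant function
assert np.max(np.abs(residual)) < 1e-6   # every row sums to ~0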
Example #4
    def test_write_read_mesh(self):

        for ext in ['obj']:

            V = generate_verts()
            F = generate_faces()

            fname = "test." + ext

            # write
            pp3d.write_mesh(V, F, fname)

            Vnew, Fnew = pp3d.read_mesh(fname)

            self.assertLess(np.amax(np.abs(V - Vnew)), 1e-6)
            self.assertTrue((F == Fnew).all())
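
The round-trip test writes test.obj into the current directory; a variant using a temporary directory avoids leaving files behind (a sketch using the same pp3d calls):

import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    fname = os.path.join(tmp, "test.obj")
    pp3d.write_mesh(V, F, fname)        # write, then read back
    Vnew, Fnew = pp3d.read_mesh(fname)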
Example #5
    def test_mesh_flip_geodesic(self):

        V, F = pp3d.read_mesh(os.path.join(asset_path, "bunny_small.ply"))

        # Test stateful version
        path_solver = pp3d.EdgeFlipGeodesicSolver(V, F)

        # Do a first path
        path_pts = path_solver.find_geodesic_path(v_start=14, v_end=22)
        self.assertEqual(len(path_pts.shape), 2)
        self.assertEqual(path_pts.shape[1], 3)

        # Do some more
        for i in range(5):
            path_pts = path_solver.find_geodesic_path(v_start=14, v_end=22 + i)
            self.assertEqual(len(path_pts.shape), 2)
            self.assertEqual(path_pts.shape[1], 3)
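
find_geodesic_path returns an (N, 3) array of points on the surface. The polyline can be rendered with polyscope, as used in Example #7 below (a sketch; the quantity name is arbitrary):

import polyscope as ps

ps.init()
ps.register_surface_mesh("mesh", V, F)
ps.register_curve_network("geodesic", path_pts, edges='line')  # consecutive points are connected
ps.show()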
    def __init__(self, root_dir, name="faust", train=True, k_eig=128, n_fmap=30, use_cache=True, op_cache_dir=None):

        # NOTE: These datasets are set up such that each dataset object always loads
        # the entire dataset, regardless of train/test mode. The correspondence pair
        # combinations are then set such that the train dataset only returns train
        # pairs and the test dataset only returns test pairs. Be aware of this if you
        # try to adapt the code for any other purpose!

        self.train = train  # bool
        self.k_eig = k_eig
        self.n_fmap = n_fmap
        self.root_dir = root_dir
        self.cache_dir = os.path.join(root_dir, name, "cache")
        self.op_cache_dir = op_cache_dir
        self.name = name

        # store in memory
        self.verts_list = []
        self.faces_list = []
        self.vts_list = []
        self.names_list = []

        # Set combinations: train uses all ordered pairs among the first n_train
        # shapes; test uses unordered pairs among the 20 held-out shapes.
        n_train = {'faust': 80, 'scape': 51}[self.name]
        if self.train:
            self.combinations = list(permutations(range(n_train), 2))
        else:
            self.combinations = list(combinations(range(n_train, n_train + 20), 2))

        # check the cache
        if use_cache:
            train_cache = os.path.join(self.cache_dir, "train.pt")
            test_cache = os.path.join(self.cache_dir, "test.pt")
            load_cache = train_cache if self.train else test_cache
            print("using dataset cache path: " + str(load_cache))
            if os.path.exists(load_cache):
                print("  --> loading dataset from cache")
                (
                    self.verts_list,
                    self.faces_list,
                    self.frames_list,
                    self.massvec_list,
                    self.L_list,
                    self.evals_list,
                    self.evecs_list,
                    self.gradX_list,
                    self.gradY_list,
                    self.hks_list,
                    self.vts_list,
                    self.names_list
                ) = torch.load(load_cache)
                return
            print("  --> dataset not in cache, repopulating")

        # Load the meshes & labels

        # Get all the files
        mesh_files = []
        vts_files = []

        # load faust data
        mesh_dirpath = os.path.join(self.root_dir, name, "off_2")
        vts_dirpath = os.path.join(self.root_dir, name, "corres")
        for fname in os.listdir(mesh_dirpath):
            mesh_fullpath = os.path.join(mesh_dirpath, fname)
            vts_fullpath = os.path.join(vts_dirpath, fname[:-4] + ".vts")
            mesh_files.append(mesh_fullpath)
            vts_files.append(vts_fullpath)

        print("loading {} meshes".format(len(mesh_files)))

        mesh_files, vts_files = sorted(mesh_files), sorted(vts_files)

        # Load the actual files
        for iFile in range(len(mesh_files)):

            print("loading mesh " + str(mesh_files[iFile]))

            verts, faces = pp3d.read_mesh(mesh_files[iFile])
            vts_file = np.loadtxt(vts_files[iFile]).astype(int) - 1 # convert from 1-based indexing

            # to torch
            verts = torch.tensor(np.ascontiguousarray(verts)).float()
            faces = torch.tensor(np.ascontiguousarray(faces))
            vts_file = torch.tensor(np.ascontiguousarray(vts_file))

            # center and unit-area scale
            verts = diffusion_net.geometry.normalize_positions(verts, faces=faces, scale_method='area')

            self.verts_list.append(verts)
            self.faces_list.append(faces)
            self.vts_list.append(vts_file)
            self.names_list.append(os.path.basename(mesh_files[iFile]).split(".")[0])

        # Precompute operators
        (
            self.frames_list,
            self.massvec_list,
            self.L_list,
            self.evals_list,
            self.evecs_list,
            self.gradX_list,
            self.gradY_list,
        ) = diffusion_net.geometry.get_all_operators(
            self.verts_list,
            self.faces_list,
            k_eig=self.k_eig,
            op_cache_dir=self.op_cache_dir,
        )

        self.hks_list = [diffusion_net.geometry.compute_hks_autoscale(self.evals_list[i], self.evecs_list[i], 16)
                         for i in range(len(self.L_list))]

        # save to cache
        if use_cache:
            diffusion_net.utils.ensure_dir_exists(self.cache_dir)
            torch.save(
                (
                    self.verts_list,
                    self.faces_list,
                    self.frames_list,
                    self.massvec_list,
                    self.L_list,
                    self.evals_list,
                    self.evecs_list,
                    self.gradX_list,
                    self.gradY_list,
                    self.hks_list,
                    self.vts_list,
                    self.names_list,
                ),
                load_cache,
            )
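
The combinations list built in the constructor is what turns one fully-loaded dataset into a train or test split: an accompanying __len__/__getitem__ pair indexes into it. The methods below are a hypothetical sketch of that pairing logic, not code from the source:

    def __len__(self):
        # one dataset item per correspondence pair
        return len(self.combinations)

    def __getitem__(self, idx):
        # hypothetical: look up the two shapes forming this pair
        idx1, idx2 = self.combinations[idx]
        return (self.verts_list[idx1], self.faces_list[idx1], self.vts_list[idx1],
                self.verts_list[idx2], self.faces_list[idx2], self.vts_list[idx2])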
Example #7
import os
import sys

import numpy as np
import polyscope as ps
# import scipy.sparse.linalg as sla

# Path to where the bindings live
sys.path.append(os.path.join(os.path.dirname(__file__), "../build/"))
sys.path.append(os.path.join(os.path.dirname(__file__), "../src/"))

import potpourri3d as pp3d

ps.init()

# Read input

## = Mesh test
# V, F = pp3d.read_mesh("/Users/nick/mesh/spot.obj")
V, F = pp3d.read_mesh("bunny_small.ply")
ps_mesh = ps.register_surface_mesh("mesh", V, F)

# Distance
dists = pp3d.compute_distance(V, F, 4)
ps_mesh.add_scalar_quantity("dist", dists)

# Vector heat
solver = pp3d.MeshVectorHeatSolver(V, F)

# Vector heat (extend scalar)
ext = solver.extend_scalar([1, 22], [0., 6.])
ps_mesh.add_scalar_quantity("ext", ext)

# Vector heat (tangent frames)
basisX, basisY, basisN = solver.get_tangent_frames()
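
The frames returned above are per-vertex 3D vectors, so they can be inspected directly in polyscope (a continuation sketch; quantity names are arbitrary):

# Visualize the tangent frames as vector fields on the mesh
ps_mesh.add_vector_quantity("basisX", basisX)
ps_mesh.add_vector_quantity("basisY", basisY)
ps_mesh.add_vector_quantity("basisN", basisN)

ps.show()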
Example #8
    def __init__(self, root_dir, split_size, k_eig, exclude_dict=None, op_cache_dir=None):
        
        self.root_dir = root_dir
        self.n_class = 30 
        self.split_size = split_size # pass None to take all entries (except those in exclude_dict)
        self.k_eig = k_eig
        self.op_cache_dir = op_cache_dir

        self.class_names = []
        self.entries = {}

        # store in memory
        self.verts_list = []
        self.faces_list = []
        self.labels_list = []

        raw_path = os.path.join(self.root_dir, 'raw')

        
        ## Parse the categories file
        cat_path = os.path.join(self.root_dir, 'categories.txt')
        with open(cat_path) as cat_file:
            cat_file.readline() # skip the first two lines
            cat_file.readline() 
            for i_class in range(30):
                cat_file.readline() 
                class_name, _, count = cat_file.readline().strip().split()
                count = int(count)
                self.class_names.append(class_name)
                mesh_list = []
                for j in range(20):
                    mesh_list.append(cat_file.readline().strip())


                # Randomly grab samples for this split. If given, disallow any samples
                # in common with exclude_dict (i.e. making sure the train set is
                # distinct from the test set).
                order = np.random.permutation(len(mesh_list))
                added = 0
                self.entries[class_name] = set()
                for ind in order:
                    if split_size is not None and added == split_size:
                        continue

                    name = mesh_list[ind]
                    if exclude_dict is not None and name in exclude_dict[class_name]:
                        continue

                    path = os.path.join(raw_path, "T{}.off".format(name))

                    verts, faces = pp3d.read_mesh(path)
                    verts = torch.tensor(verts).float()
                    faces = torch.tensor(faces)

                    # center and unit scale
                    verts = diffusion_net.geometry.normalize_positions(verts)

                    self.verts_list.append(verts)
                    self.faces_list.append(faces)
                    self.labels_list.append(i_class)
                    self.entries[class_name].add(name)

                    added += 1

                print(class_name + " -- " + " ".join(self.entries[class_name]))

                if(split_size is not None and added < split_size):
                    raise ValueError("could not find enough entries to generate requested split")
                
        for ind, label in enumerate(self.labels_list):
            self.labels_list[ind] = torch.tensor(label)
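
As the comment above notes, exclude_dict exists so the train and test splits stay disjoint: the entries dict of one dataset is passed when constructing the other. A hypothetical usage sketch (the class name Shrec11Dataset is an assumption, not from the source):

train_dataset = Shrec11Dataset(root_dir, split_size=10, k_eig=128)
test_dataset = Shrec11Dataset(root_dir, split_size=None, k_eig=128,
                              exclude_dict=train_dataset.entries)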
Example #9
    def __init__(self, root_dir, split_size, k_eig, exclude_dict=None, op_cache_dir=None):
        
        self.root_dir = root_dir
        self.n_class = 30 
        self.split_size = split_size # pass None to take all entries (except those in exclude_dict)
        self.k_eig = k_eig
        self.op_cache_dir = op_cache_dir

        self.class_names = [ 'alien', 'ants', 'armadillo', 'bird1', 'bird2', 'camel', 'cat', 'centaur', 'dinosaur', 'dino_ske', 'dog1', 'dog2', 'flamingo', 'glasses', 'gorilla', 'hand', 'horse', 'lamp', 'laptop', 'man', 'myScissor', 'octopus', 'pliers', 'rabbit', 'santa', 'shark', 'snake', 'spiders', 'two_balls', 'woman']
        
        self.entries = {}

        # store in memory
        self.verts_list = []
        self.faces_list = []
        self.labels_list = []

        raw_path = os.path.join(self.root_dir, 'raw', "shrec_16")

        for class_idx, class_name in enumerate(self.class_names):
            
            # load both train and test subdirectories; we are manually regenerating random splits to do multiple trials
            mesh_files = []
            for t in ['test', 'train']:
                files = os.listdir(os.path.join(raw_path, class_name, t))
                for f in files:
                    full_f = os.path.join(raw_path, class_name, t, f)
                    mesh_files.append(full_f)


            # Randomly grab samples for this split. If given, disallow any samples
            # in common with exclude_dict (i.e. making sure the train set is
            # distinct from the test set).
            order = np.random.permutation(len(mesh_files))
            added = 0
            self.entries[class_name] = set()
            for ind in order:
                if split_size is not None and added == split_size:
                    continue

                path = mesh_files[ind]
                if exclude_dict is not None and path in exclude_dict[class_name]:
                    continue

                verts, faces = pp3d.read_mesh(path)
                verts = torch.tensor(verts).float()
                faces = torch.tensor(faces)

                # center and unit scale
                verts = diffusion_net.geometry.normalize_positions(verts)

                self.verts_list.append(verts)
                self.faces_list.append(faces)
                self.labels_list.append(class_idx)
                self.entries[class_name].add(path)

                added += 1

            print(class_name + " -- " + " ".join([os.path.basename(p) for p in self.entries[class_name]]))

            if(split_size is not None and added < split_size):
                raise ValueError("could not find enough entries to generate requested split")
            
        for ind, label in enumerate(self.labels_list):
            self.labels_list[ind] = torch.tensor(label)

        # Precompute operators
        (
            self.frames_list,
            self.massvec_list,
            self.L_list,
            self.evals_list,
            self.evecs_list,
            self.gradX_list,
            self.gradY_list,
        ) = diffusion_net.geometry.get_all_operators(
            self.verts_list,
            self.faces_list,
            k_eig=self.k_eig,
            op_cache_dir=self.op_cache_dir,
        )