Example #1
 def test(self):
     """tests model
     returns: number correct and total number
     """
     with torch.no_grad():
         out = self.forward()
         dist1, dist2, _, _ = self.criterion(self.labels, out)
         test_loss = 0.5 * (dist1.mean() + dist2.mean())
         
         labels = torch.Tensor.cpu(self.labels).numpy()
         out = torch.Tensor.cpu(out).numpy()
         
         for i in range(out.shape[0]) :
             normals = pcu.estimate_normals(out[i], k=16)
             #print(n.shape, n)
             fs, vs = poisson_reconstruction(out[i], normals, depth=5, full_depth=3)
             print(vs.shape, fs.shape)
             filename, file_extension = os.path.splitext(self.filename[i])
             file = '%s/%s_%s%s' % (self.export_folder[i], filename, 'out', file_extension)
             print(file)
             self.output_export(vs, fs, file)
         
         #print(labels.shape, out.shape)
         #for i in range(len(labels)) :
         #    mean_error, R, indices = icp(labels[i], out[i], tolerance=0.0001)
         #    print(out[i].shape, out[i])
         #    print(indices.shape, np.unique(indices).shape, indices, np.unique(indices))
         #print(out[indices[:len(indices)-self.pad_iter]].shape, self.init_faces)
         #self.output_export(vertices, faces)
         print(test_loss, len(self.labels))
     return test_loss, len(self.labels), out, self.labels
Example #2
def upsample_surface(patch_uvs, patch_tx, patch_models, devices, scale=1.0, num_samples=8, normal_samples=64,
                     compute_normals=True):
    vertices = []
    normals = []
    with torch.no_grad():
        for i in range(len(patch_models)):
            if (i + 1) % 10 == 0:
                print("Upsamling %d/%d" % (i+1, len(patch_models)))

            device = devices[i % len(devices)]

            n = num_samples
            translate_i, scale_i, rotate_i = (patch_tx[i][j].to(device) for j in range(len(patch_tx[i])))
            uv_i = utils.meshgrid_from_lloyd_ts(patch_uvs[i].cpu().numpy(), n, scale=scale).astype(np.float32)
            uv_i = torch.from_numpy(uv_i).to(patch_uvs[i])
            y_i = patch_models[i](uv_i)

            mesh_v = ((y_i.squeeze() @ rotate_i.transpose(0, 1)) / scale_i - translate_i).cpu().numpy()

            if compute_normals:
                mesh_f = utils.meshgrid_face_indices(n)
                mesh_n = pcu.per_vertex_normals(mesh_v, mesh_f)
                normals.append(mesh_n)

            vertices.append(mesh_v)

    vertices = np.concatenate(vertices, axis=0).astype(np.float32)
    if compute_normals:
        normals = np.concatenate(normals, axis=0).astype(np.float32)
    else:
        print("Fixing normals...")
        normals = pcu.estimate_normals(vertices, k=normal_samples)

    return vertices, normals
Example #3
    def test_estimate_normals(self):
        import point_cloud_utils as pcu
        import numpy as np

        # v is an nv by 3 NumPy array of vertices
        # f is an nf by 3 NumPy array of face indices into v
        # n is an nv by 3 NumPy array of vertex normals if they are specified, otherwise an empty array
        v, f, n = pcu.read_obj(os.path.join(self.test_path, "cube_twist.obj"))

        # Estimate normals for the point set, v using 12 nearest neighbors per point
        n = pcu.estimate_normals(v, k=12)
        self.assertEqual(n.shape, v.shape)
Example #4
def datalist_generation(fn, nb_samples, sqrt_nb, max_nb, normals):
    datalist = []

    for _ in range(nb_samples):
        points = fn(sqrt_nb)
        perm = torch.randperm(len(points))
        points = torch.tensor(points)[perm]
        points = points[:max_nb]
        if normals:
            # points is already truncated, so the normals match it row for row
            normal = torch.tensor(
                pcu.estimate_normals(np.array(points), k=16))
            datalist.append(Data(pos=points, norm=normal))
        else:
            datalist.append(Data(pos=points))

    return datalist
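
For context, a minimal sketch of how this generator might be driven, assuming the snippet's own imports (torch, numpy as np, point_cloud_utils as pcu, and torch_geometric's Data) plus a hypothetical sampler sample_plane:

def sample_plane(sqrt_nb):
    # Hypothetical sampler: sqrt_nb**2 points on a noisy unit plane
    xs, ys = np.meshgrid(np.linspace(0, 1, sqrt_nb), np.linspace(0, 1, sqrt_nb))
    zs = 0.01 * np.random.randn(sqrt_nb, sqrt_nb)
    return np.stack([xs, ys, zs], axis=-1).reshape(-1, 3)

datalist = datalist_generation(sample_plane, nb_samples=10, sqrt_nb=32,
                               max_nb=512, normals=True)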
Example #5
def main():
    with open('../Models/Cloud_ToyScrew-Yellow.json') as fin:
        cloud = []
        screwCloud = np.array(json.load(fin))
        for p in screwCloud:
            if not np.any(np.isnan(np.array(p))):
                #if not np.any(np.isnan(np.array(p))) and np.linalg.norm(np.array(p) - np.array((0, 0, 0.4))) < 0.3:
                cloud.append(p)
        cloud = np.array(cloud)
        fullCloud = cloud  # [np.random.choice(range(len(cloud)), len(cloud))]

    cloudNormals = pcu.estimate_normals(fullCloud,
                                        k=10,
                                        smoothing_iterations=3)
    mask = ModelFinder.voxelFilter(fullCloud, size=0.005)
    cloud, cloudNormals = fullCloud[mask], cloudNormals[mask]
    #cloud, cloudNormals = ModelFinder.meanPlanarCloudSampling(fullCloud, cloudNormals, 0.01, 0.2, 0.005)
    flipNormals(cloudNormals)

    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')

    Q = Queue()
    meshPoints = ax.scatter([], [], [], color='red')
    ani = FuncAnimation(fig,
                        functools.partial(showHypotheses, ax, cloud, Q),
                        range(1),
                        repeat_delay=1000)
    ax.scatter(cloud[:, 0], cloud[:, 1], cloud[:, 2], color='blue')
    ax.quiver(cloud[:, 0],
              cloud[:, 1],
              cloud[:, 2],
              cloudNormals[:, 0] * 0.01,
              cloudNormals[:, 1] * 0.01,
              cloudNormals[:, 2] * 0.01,
              color='blue')

    process = Process(target=findHypotheses, args=(Q, cloud, cloudNormals))
    process.start()
    ax.set_xlim3d(-0.05, 0.05)
    ax.set_ylim3d(-0.05, 0.05)
    ax.set_zlim3d(-0.05, 0.05)
    plt.show()
    process.terminate()
Example #6
def main():
    with open('Test_Scenes/Cloud_sphere.json') as fin:
        cloud = []
        screwCloud = np.array(json.load(fin))
        for p in screwCloud:
            if not np.any(np.isnan(np.array(p))):
                # if not np.any(np.isnan(np.array(p))) and np.linalg.norm(np.array(p) - np.array((0, 0, 0.4))) < 0.3:
                cloud.append(p)
        cloud = np.array(cloud)
        fullCloud = cloud  # [np.random.choice(range(len(cloud)), len(cloud))]

    erf = ERF()

    cloudNormals = pcu.estimate_normals(fullCloud,
                                        k=10,
                                        smoothing_iterations=10)
    mask = ERF.voxelFilter(fullCloud, size=0.005)
    cloud, cloudNormals = fullCloud[mask], cloudNormals[mask]
    flipNormals(cloudNormals)
    found_spheres, found_cylinders = erf.findInCloud(cloud, cloudNormals)

    ####################
    # Plotting Code    #
    ####################

    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')
    ax.set_xlim3d(-0.5, 0.5)
    ax.set_ylim3d(-0.5, 0.5)
    ax.set_zlim3d(-0.5, 0.5)
    ax.scatter(cloud[:, 0], cloud[:, 1], cloud[:, 2], color='blue', alpha=0.2)

    print("Found ", len(found_spheres), " Spheres!")
    print("Found ", len(found_cylinders), " Cylinders!")

    # Plot all of the spheres
    for ii in range(len(found_spheres)):
        r = found_spheres[ii][0]
        c = found_spheres[ii][1]
        u, v = np.mgrid[0:2 * np.pi:20j, 0:np.pi:10j]
        x = r * np.cos(u) * np.sin(v) + c[0]
        y = r * np.sin(u) * np.sin(v) + c[1]
        z = r * np.cos(v) + c[2]
        ax.plot_wireframe(x, y, z, color="r")

    print(found_cylinders)

    # Plot all of the cylinders
    for ii in range(len(found_cylinders)):
        # Plot points at the center facing up, then rotate them by the a-vector
        r = found_cylinders[ii][0]  # radius
        c = found_cylinders[ii][1]  # center point
        a = found_cylinders[ii][2]  # cylinder axis direction (aka plane direction)
        min_z = found_cylinders[ii][3]
        max_z = found_cylinders[ii][4]

        x = np.linspace(-r, r, 25)
        z = np.linspace(min_z, max_z, 5)
        xm, zm = np.meshgrid(x, z)
        ym = np.sqrt(r**2 - xm**2)

        # Rotate based on the a-vector: R = I + [v]x + [v]x^2 / (1 + z.a) is
        # the standard formula mapping the +z axis onto the unit vector a
        def skew(x):
            # return the skew-symmetric cross-product matrix used for rotating
            return np.array([[0, -x[2], x[1]],
                             [x[2], 0, -x[0]],
                             [-x[1], x[0], 0]])

        v = np.cross(np.array([0.0, 0.0, 1.0]), a)
        R = np.eye(3) + skew(v) + skew(v) @ skew(v) / (
            1 + np.dot(np.array([0.0, 0.0, 1.0]), a))

        # Rotate all of the meshgrid points
        rotated_pts = R @ np.stack((xm.flatten(), ym.flatten(), zm.flatten()))
        rotated_pts_neg_y = R @ np.stack((xm.flatten(), -ym.flatten(), zm.flatten()))
        xm = rotated_pts[0, :].reshape((5, 25))
        zm = rotated_pts[2, :].reshape((5, 25))
        ym1 = rotated_pts[1, :].reshape((5, 25))
        ym2 = rotated_pts_neg_y[1, :].reshape((5, 25))

        # Shift based on center
        xm = xm + c[0]
        ym1 = ym1 + c[1]
        ym2 = ym2 + c[1]
        zm = zm + c[2]

        ax.plot_wireframe(xm, ym1, zm, color="g")
        ax.plot_wireframe(xm, ym2, zm, color="g")

    plt.show()
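
As an aside, the rotation used in the cylinder plot is the standard identity R = I + [v]x + [v]x^2 / (1 + z.a) with v = z × a, which maps the +z axis onto a unit vector a (it is singular when a = -z). A quick standalone sanity check of that identity:

import numpy as np

def skew(x):
    # Skew-symmetric cross-product matrix [x]x
    return np.array([[0, -x[2], x[1]],
                     [x[2], 0, -x[0]],
                     [-x[1], x[0], 0]])

def align_z_to(a):
    # Rotation taking the +z axis onto the unit vector a
    z = np.array([0.0, 0.0, 1.0])
    v = np.cross(z, a)
    return np.eye(3) + skew(v) + skew(v) @ skew(v) / (1.0 + np.dot(z, a))

a = np.array([1.0, 2.0, 2.0]) / 3.0  # arbitrary unit vector
R = align_z_to(a)
assert np.allclose(R @ np.array([0.0, 0.0, 1.0]), a)  # z maps onto a
assert np.allclose(R @ R.T, np.eye(3))                # R is orthogonal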
Example #7
    def __init__(self, traj, top, load=True):
        """
        Instantiate the membrane object with loaded trajectory.

        :param str traj: Full filepath location to MDTraj-readable trajectory file (e.g., trr, xtc, dcd).
        :param str top: Full filepath location to MDTraj-readable topology file (e.g., pdb, gro).
        :param bool load: Flag to load trajectory.
        """

        self.load = load
        self.traj_file = traj
        self.top_file = top
        self.raw_leaflets = []
        self.leaflets = []
        self.min_leaflet_size = 10

        if self.load:
            self.sim = loader.load(self.traj_file, self.top_file)
            # Check that the simulation loaded before touching its topology
            if self.sim is None:
                raise FileNotFoundError("No simulation data loaded.")
            self.topol = self.sim.topology.to_dataframe()[0]

        # Populate useful one-off lookups
        # Construct set for head group filter membership testing
        self.hg_set = set(
            self.sim.topology.select(
                "name " + " or name ".join(particle_naming.headgroup_names)))

        # Collect lipid residue indices and lipid particle indices
        self.detected_lipids = [
            x.index for x in self.sim.topology.residues
            if (particle_naming.ion_names.isdisjoint({x.name})
                and particle_naming.water_names.isdisjoint({x.name}))
        ]
        logging.debug("Number of lipids detected: %s", len(self.detected_lipids))
        self.residue_names = sorted(
            set([
                self.sim.topology.residue(index).name
                for index in self.detected_lipids
            ]))

        # Collect all lipid particle indices
        self.lipid_particles = self.topol[~np.isin(
            self.topol["resName"].values,
            list(particle_naming.ion_names) +
            list(particle_naming.water_names))].index

        # OPTIONAL Collect head group particle indices - not yet sure if this is helpful
        # self.hg_particles = self.hg_set.intersection(self.lipid_particles)

        # Construct lookups linking lipid residues and particles
        self.lipid_particles_by_res = defaultdict(list)
        for index in self.topol.index:
            residue_index = self.sim.topology.atom(index).residue.index
            self.lipid_particles_by_res[residue_index].append(index)

        self.lipid_residues_by_particle = {
            index: self.sim.topology.atom(index).residue.index
            for index in self.topol.index
        }

        self.hg_particles_by_res = {
            resid:
            tuple(self.hg_set.intersection(self.lipid_particles_by_res[resid]))
            for resid in self.detected_lipids
        }

        # Pre-calculate all lipid vectors
        # NOTE: This requires the original trajectory file to be PBC-corrected, so that all molecules are whole in every frame.
        # Use numpy stack to allow indexing using different numbers of head group particles in each lipid
        self.hg_centroids = np.stack([
            get_centroid_of_particles(self.sim, self.hg_particles_by_res[resid])
            for resid in self.detected_lipids
        ], axis=1)

        # Use numpy stack to allow indexing using different numbers of particles found in each lipid
        self.com_centroids = np.stack([
            get_centroid_of_particles(self.sim, self.lipid_particles_by_res[resid])
            for resid in self.detected_lipids
        ], axis=1)

        self.vectors = self.com_centroids - self.hg_centroids

        # Precalculate local neighborhood lipid vector normals
        # This is not done in a PBC-aware fashion. It therefore tends to mess up in the corners if too many nearest
        # neighbors are selected for the normal estimation, by dragging the vectors towards the box COM.
        self.normals = np.asarray(
            [pcu.estimate_normals(frame, k=20) for frame in self.hg_centroids])

        # The normal detection from the point cloud doesn't correctly estimate the Z-direction. The leaflets are
        # assigned using the lipid vectors rather than the normal vectors. Here, the lipid vectors can also be
        # used to correct the polarity of the normals.

        # Correct "positive" normals - these are the normals which point up from the upper leaflet towards
        # +ve Z-direction.
        # Where the lipid vectors and normals are negative (i.e., pointing down-Z) make the normals positive (i.e., pointing up-Z)
        # Likewise, correct "negative" normals - those pointing down from the lower leaflet towards -ve Z
        # Where the lipid vectors and normals are positive (i.e., pointing up-Z) make the normals negative (i.e., pointing down-Z)
        pos_inversion = (self.vectors[:, :, 2] < 0) & (self.normals[:, :, 2] < 0)
        neg_inversion = (self.vectors[:, :, 2] > 0) & (self.normals[:, :, 2] > 0)

        inversion_slice = 1 + (np.logical_or(pos_inversion, neg_inversion) * -2)
        self.normals[:, :, 2] = self.normals[:, :, 2] * inversion_slice

        # Detect leaflets using vector alignment
        self.detect_leaflets()

        # Store lookup for residue leaflet occupancy
        self.leaflet_occupancy_by_resid = defaultdict(list)
        # Shape: X[resid] = [upper upper lower upper ... ]
        for frame_leaflets in self.leaflets:
            for resid in self.detected_lipids:
                if resid in set(frame_leaflets["upper"]):
                    self.leaflet_occupancy_by_resid[resid].append("upper")
                elif resid in set(frame_leaflets["lower"]):
                    self.leaflet_occupancy_by_resid[resid].append("lower")
                elif resid in set(frame_leaflets["aggregate"]):
                    self.leaflet_occupancy_by_resid[resid].append("aggregate")
                else:
                    self.leaflet_occupancy_by_resid[resid].append("none")
Example #8
    def set_scene(self, cloud):
        self.Scene = cloud
        # Positional arguments correspond to k and smoothing_iterations
        self.SceneNormals = pcu.estimate_normals(cloud, k=10, smoothing_iterations=3)
        self.SceneKd = KDTree(cloud)
Example #9
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 25 11:21:05 2020

@author: tamir
"""
import os
import numpy as np
import point_cloud_utils as pcu

THIS_DIR = os.path.dirname(os.path.abspath(__file__))
points = os.path.join(THIS_DIR, "data/point_cloud.obj")

# v is an nv by 3 NumPy array of vertices
v, f, n = pcu.read_obj(points)

# Estimate a normal at each point (row of v) using its 5 nearest neighbors
n = pcu.estimate_normals(v, k=5)

np.testing.assert_allclose(n[0],
                           np.asarray([0.96283305, 0.11186423, 0.24584327]))
Example #10
    smooth_vtx = fil.filter()

    # MLS smoothing
    tree = smooth_vtx.make_kdtree()
    mls = smooth_vtx.make_moving_least_squares()
    mls.set_Compute_Normals(True)
    mls.set_polynomial_fit(True)
    mls.set_Search_Method(tree)
    mls.set_search_radius(10.0)
    mls_point = mls.process()
    s_vtx_n = np.asarray(mls_point)

    print('filter complete')

    # Compute the normal vectors
    n = pcu.estimate_normals(s_vtx_n, k=16)

    # Triangle mesh reconstruction
    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(s_vtx_n)
    pcd.normals = o3d.utility.Vector3dVector(n)

    # Larger depth preserves more detail
    mesh, densities = o3d.geometry.TriangleMesh.create_from_point_cloud_poisson(
        pcd, depth=7)

    # Remove the extra planar points left over after the Poisson reconstruction
    vertices_to_remove = remove(mesh, vtx_middle, reconstruct_shape_l,
                                reconstruct_shape_r)
    mesh.remove_vertices_by_mask(vertices_to_remove)
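
Taken together, these examples share one pattern: estimate per-point normals with pcu.estimate_normals, then hand the points and normals to a Poisson reconstruction. A minimal end-to-end sketch using the same calls as Examples #9 and #10 (the input path is hypothetical):

import open3d as o3d
import point_cloud_utils as pcu

# v is an nv by 3 array of points (hypothetical input file)
v, _, _ = pcu.read_obj("data/point_cloud.obj")

# Estimate one normal per point from its 16 nearest neighbors
n = pcu.estimate_normals(v, k=16)

pcd = o3d.geometry.PointCloud()
pcd.points = o3d.utility.Vector3dVector(v)
pcd.normals = o3d.utility.Vector3dVector(n)

# Larger depth preserves more detail at the cost of time and memory
mesh, densities = o3d.geometry.TriangleMesh.create_from_point_cloud_poisson(pcd, depth=7)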