Example #1
def test_obj_info(tmpdir):
    ply0 = PlyData([], text=True, obj_info=["test obj_info"])
    test_file = tmpdir.join("test.ply")
    ply0.write(str(test_file))

    ply0_str = test_file.read("rb").decode("ascii")
    assert ply0_str.startswith("ply\r\nformat ascii 1.0\r\nobj_info test obj_info\r\n")

    ply1 = PlyData.read(str(test_file))
    assert len(ply1.obj_info) == 1
    assert ply1.obj_info[0] == "test obj_info"
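A minimal sketch (not from any project above; assumes only numpy and plyfile) showing that obj_info round-trips alongside regular comments, through an in-memory buffer:

import numpy as np
from io import BytesIO
from plyfile import PlyData, PlyElement

vertex = np.array([(0.0, 0.0, 0.0)], dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
ply = PlyData([PlyElement.describe(vertex, 'vertex')], text=True,
              comments=['a regular comment'], obj_info=['an obj_info line'])
buf = BytesIO()
ply.write(buf)  # the header now carries both keywords
buf.seek(0)
reread = PlyData.read(buf)
assert reread.comments == ['a regular comment']
assert reread.obj_info == ['an obj_info line']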
Example #2
    def read_ply(self, file_name):
        num_samples = self.num_samples // len(self.files_list)
        if self.file_index == len(self.files_list) - 1:
            num_samples = num_samples + (self.num_samples - (num_samples * len(self.files_list)))

        root, ext = os.path.splitext(file_name)
        if not os.path.isfile(root + ".npy"):
            ply = PlyData.read(file_name)
            vertex = ply['vertex']
            (x, y, z) = (vertex[t] for t in ('x', 'y', 'z'))
            # stack the property columns into an (N, 3) array so np.save gets real data
            points = np.column_stack((x, y, z))
            np.save(root + ".npy", points)
        else:
            points = np.load(root + ".npy")

        # load normals
        if os.path.isfile(root + "_normals" + ".ply"):
            if not os.path.isfile(root + "_normals" + ".npy"):
                ply1 = PlyData.read(root + "_normals" + ".ply")
                vertex = ply1['vertex']
                (nx, ny, nz) = (vertex[t] for t in ('nx', 'ny', 'nz'))
                self.normals = np.column_stack((nx, ny, nz))
                np.save(root + "_normals" + ".npy", self.normals)
            else:
                self.normals = np.load(root + "_normals" + ".npy")

        if self.add_noise:
            self.data = utils.add_noise_normal(points, std=self.noise_std)
        else:
            self.data = np.asarray(points)

        self.pc_diameter = utils.get_pc_diameter(self.data)
        self.l = self.relL * self.pc_diameter

        rot = utils.angle_axis_to_rotation(self.rotation_angle, self.rotation_axis)
        self.data = utils.transform_pc(self.data, rot)

        # plotutils.show_pc(self.data)
        # mlab.show()

        # TODO: better sampling
        print("sampling file:", file_name)
        self.samples, self.sample_indices = Sampler.sample(self.data, -1, min_num_point=-1, file_name=file_name, sampling_algorithm=self.sampling_algorithm)
        # self.samples, self.sample_indices = Sampler.sample(self.data, -1, num_samples, file_name=file_name, sampling_algorithm=self.sampling_algorithm)
        # self.samples = self.samples[0:num_samples]
        # self.sample_indices = self.sample_indices[0:num_samples]

        self.tree = spatial.KDTree(self.data)
        return self.data
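The per-property extraction above can be done without Python-level loops; a hedged sketch of the same (N, 3) load, not the project's own helper:

import numpy as np
from plyfile import PlyData

def load_xyz(file_name):
    # returns an (N, 3) float array of vertex coordinates
    vertex = PlyData.read(file_name)['vertex']
    return np.column_stack([vertex['x'], vertex['y'], vertex['z']])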
Example #3
    def save_ply(self, filename):
        vertex = np.array([tuple(i) for i in self.v], dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
        face = np.array([(tuple(i), 0, 100, 255) for i in self.f],
            dtype=[('vertex_indices', 'i4', (3,)),
            ('red', 'u1'), ('green', 'u1'),
            ('blue', 'u1')])
        edge = np.array([(tuple(i)[0], tuple(i)[1], 255, 255, 255) for i in self.e],
            dtype=[('vertex1', 'i4'), ('vertex2', 'i4'),
            ('red', 'u1'), ('green', 'u1'),
            ('blue', 'u1')])
        el = PlyElement.describe(vertex, 'vertex')
        el2 = PlyElement.describe(face, 'face')
        el3 = PlyElement.describe(edge, 'edge')
        plydata = PlyData([el, el2, el3])
        plydata.write(filename)
Example #4
def ply_plot(ply_file, opacity=1, color=(1, 1, 1)):
    '''
    Plot vertices and triangles from a PlyData instance. Assumptions:
        `ply' has a 'vertex' element with 'x', 'y', and 'z'
            properties;
        `ply' has a 'face' element with an integral list property
            'vertex_indices', all of whose elements have length 3.
    '''
    ply = PlyData.read(ply_file)

    vertex = ply['vertex'].data

    (x, y, z) = (vertex[t] for t in ('x', 'y', 'z'))

    # mlab.points3d(x, y, z, color=(1, 1, 1), mode='point')

    tri_idx = ply['face'].data['vertex_indices']
    idx_dtype = tri_idx[0].dtype

    triangles = numpy.fromiter(tri_idx, [('data', idx_dtype, (3,))],
                               count=len(tri_idx))['data']

    mesh = mlab.triangular_mesh(x, y, z, triangles,
                                color=color,
                                opacity = opacity)
    return mesh
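A hedged alternative to the fromiter call above: when every face is known to be a triangle, numpy.vstack over the list property yields the same (N, 3) index array. A small sketch under that assumption:

import numpy
from plyfile import PlyData

def tri_indices(ply_file):
    # assumes all 'vertex_indices' rows have length 3
    return numpy.vstack(PlyData.read(ply_file)['face'].data['vertex_indices'])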
Example #5
def main():
    res_folder = opt.result_dir
    ply_folder = opt.scan_path
    output_dir = opt.output_dir
    os.makedirs(output_dir, exist_ok=True)

    reader_ins = Benchmark_reader(res_folder)
    for folder in os.listdir(res_folder):
        if os.path.isdir(os.path.join(res_folder, folder)):
            continue
        print(folder)
        # ply reader
        ply_file = os.path.join(ply_folder, folder.split('.')[0], folder.split('.')[0]+'_vh_clean_2.ply')
        ply_data = PlyData.read(ply_file)
        points = []
        for point in ply_data.elements[0].data:
            points.append([point[0], point[1], point[2]])
        points = np.array(points)
        colors = np.zeros_like(points)

        # instance reader
        instances = reader_ins[folder]
        for instance_idx, instance_key in enumerate(instances.keys()):
            r, g, b = create_color_palette()[int((instance_idx + 1)%41)]
            colors[instances[instance_key]['points'].nonzero()[0].astype(np.int32)] = [r,g,b]

        output_file = os.path.join(output_dir, folder.split('.')[0] + '.ply')
        write_ply(points, colors, None, output_file)
Example #6
def main():
    parser = ArgumentParser()
    parser.add_argument("ply_filename")

    args = parser.parse_args()

    plot(PlyData.read(args.ply_filename))
Example #7
def readMesh_PLY(filename, output="soup"):

    if output != "soup":
        raise Exception("Mesh types other than soup not yet supported")

    # Read the actual file
    # TODO This takes a long time, maybe try to replace with something faster of my own?
    plydata = PlyData.read(filename)

    # Read vertices
    # If the mesh has more than three columns of vertex data, ignore the later columns
    # (for instance, Stanford Mesh Repo meshes store intensity and confidence here)
    nVerts = plydata["vertex"].count
    verts = np.zeros((nVerts, 3))
    verts[:, 0] = np.array(plydata["vertex"].data["x"])
    verts[:, 1] = np.array(plydata["vertex"].data["y"])
    verts[:, 2] = np.array(plydata["vertex"].data["z"])

    # Read faces
    faces = make2d(plydata["face"].data["vertex_indices"])

    # Build a mesh from these vertices and faces
    mesh = TriSoupMesh(verts, faces)

    return mesh
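The make2d helper used above flattens the ragged 'vertex_indices' object array into a dense 2-D array; a rough, hypothetical reimplementation (assuming equal-length rows):

import numpy as np

def make2d(array, dtype=None):
    # stack an object array of equal-length rows into a 2-D array
    out = np.vstack(array)
    return out if dtype is None else out.astype(dtype)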
Example #8
def main():
    parser = ArgumentParser()
    parser.add_argument('ply_filename')

    args = parser.parse_args()

    plot(PlyData.read(args.ply_filename))
    mlab.show()
Example #9
    def read_ply(self, file_name, num_samples=1000, sample_class_start=0, add_noise=False,
                 noise_prob=0.3, noise_factor=0.02, noise_std=0.1, sampling_algorithm=SampleAlgorithm.Uniform,
                 rotation_axis=[0, 0, 1], rotation_angle=0):

        root, ext = os.path.splitext(file_name)
        if not os.path.isfile(root + ".npy"):
            ply = PlyData.read(file_name)
            vertex = ply['vertex']
            (x, y, z) = (vertex[t] for t in ('x', 'y', 'z'))
            points = np.column_stack((x, y, z))
            np.save(root + ".npy", points)
        else:
            points = np.load(root + ".npy")

        # load normals
        if os.path.isfile(root + "_normals" + ".ply"):
            if not os.path.isfile(root + "_normals" + ".npy"):
                ply1 = PlyData.read(root + "_normals" + ".ply")
                vertex = ply1['vertex']
                (nx, ny, nz) = (vertex[t] for t in ('nx', 'ny', 'nz'))
                self.normals = np.column_stack((nx, ny, nz))
                np.save(root + "_normals" + ".npy", self.normals)
            else:
                self.normals = np.load(root + "_normals" + ".npy")

        if add_noise:
            print("adding noise to model..")
            mr = utils.model_resolution(np.array(points))
            # mr = 0.404
            print("model resolution:", mr)
            self.data = utils.add_noise_normal(np.array(points), mr, noise_std)
        else:
            self.data = np.asarray(points)
        rot = utils.angle_axis_to_rotation(rotation_angle, rotation_axis)
        self.data = utils.transform_pc(self.data, rot)
        # plotutils.show_pc(self.data)
        # mlab.show()
        # TODO: better sampling
        self.samples, self.sample_indices = Sampler.sample(self.data, -1, num_samples - 1, file_name=file_name, pose=rot, sampling_algorithm=sampling_algorithm)
        self.tree = spatial.KDTree(self.data)
        self.sample_class_start = sample_class_start
        self.sample_class_current = sample_class_start
        self.num_samples = self.samples.shape[0]
        print("num samples:", self.num_samples)
        logging.basicConfig(filename='example.log', level=logging.DEBUG)
        return self.data
Example #10
def read_ply_cloud(filename):
    ply_data = PlyData.read(filename)
    points = ply_data['vertex'].data.copy()
    # size the output from the file rather than assuming a fixed point count
    cloud = np.empty([len(points), 3])
    for i in range(len(points)):
        point = points[i]
        p = np.array([point[0], point[1], point[2]])
        cloud[i] = p
    return np.array(cloud)
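The loop above copies one row at a time; a hedged vectorized equivalent built on the structured columns:

import numpy as np
from plyfile import PlyData

def read_ply_cloud_fast(filename):
    v = PlyData.read(filename)['vertex']
    return np.stack([v['x'], v['y'], v['z']], axis=1)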
Example #11
def test_write_stream(tmpdir, tet_ply_txt):
    ply0 = tet_ply_txt
    test_file = tmpdir.join("test.ply")

    with test_file.open("wb") as f:
        tet_ply_txt.write(f)

    ply1 = PlyData.read(str(test_file))
    verify(ply0, ply1)
Example #12
def read_str(string, tmpdir, name="test.ply"):
    """
    Utility: create a PlyData instance from a string.

    """
    filename = tmpdir.join(name)
    with filename.open("wb") as f:
        f.write(string)
    return PlyData.read(str(filename))
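A hedged example of driving read_str from a test; the minimal ASCII header with a zero-count element is illustrative:

def test_minimal_header(tmpdir):
    ply = read_str(b"ply\nformat ascii 1.0\n"
                   b"element vertex 0\n"
                   b"property float x\n"
                   b"end_header\n", tmpdir)
    assert ply['vertex'].count == 0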
Example #13
def write_read(ply, tmpdir, name="test.ply"):
    """
    Utility: serialize/deserialize a PlyData instance through a
    temporary file.

    """
    filename = tmpdir.join(name)
    ply.write(str(filename))
    return PlyData.read(str(filename))
Example #14
def read_mesh_vertices(filename):
    assert os.path.isfile(filename)
    with open(filename, 'rb') as f:
        plydata = PlyData.read(f)
        num_verts = plydata['vertex'].count
        vertices = np.zeros(shape=[num_verts, 3], dtype=np.float32)
        vertices[:,0] = plydata['vertex'].data['x']
        vertices[:,1] = plydata['vertex'].data['y']
        vertices[:,2] = plydata['vertex'].data['z']
    return vertices
Example #15
    def load_ply(self, path):
        plyData = PlyData.read(path)
        data = plyData['vertex']
        vertex_number = len(data[:])
        vertex_data = np.zeros((vertex_number, 3), dtype='int64')
        for i in range(0, vertex_number):
            vertex_data[i][0] = data[i][0]
            vertex_data[i][1] = data[i][1]
            vertex_data[i][2] = data[i][2]
        # print(vertex_data)
        return vertex_data
Example #16
def main():
    parser = ArgumentParser()
    parser.add_argument('ply_filename')

    args = parser.parse_args()
    # file1 = open(args.ply_filename,'r')

    
    # plot(PlyData.read())
    # plot(file1)
    plot(PlyData.read(args.ply_filename))
Example #17
def convert(inputfp, outputfp):
    print('converting %s to %s' % (inputfp, outputfp))
    plydata = PlyData.read(inputfp)
    vertex = np.array(plydata.elements[0].data.tolist(), dtype=np.float32)
    faces = plydata.elements[1].data.tolist()
    faces = np.array([each[0] for each in faces], dtype=np.int32)
    gv = gifti.GiftiDataArray.from_array(vertex, intent=1008)
    gf = gifti.GiftiDataArray.from_array(faces, intent=1009)
    g = gifti.GiftiImage()
    g.add_gifti_data_array(gv)
    g.add_gifti_data_array(gf)
    gio.write(g, outputfp)
Example #18
    def load_from_ply(self, filename, rotationFlag):

        plydata = PlyData.read(filename)
        self.plydata = plydata

        self.f = np.vstack(plydata['face'].data['vertex_indices'])
        if rotationFlag is not None:
            x = -plydata['vertex'].data['z']
            y = plydata['vertex'].data['x']
            z = -plydata['vertex'].data['y']
        else:
            x = plydata['vertex'].data['x']
            y = plydata['vertex'].data['y']
            z = plydata['vertex'].data['z']
        self.v = np.zeros([x.size, 3])
        self.v[:, 0] = x
        self.v[:, 1] = y
        self.v[:, 2] = z
Example #19
def ply_to_patch(ply_file_path, connection_string, pcid, writing_query, additional_offset, grouping_rules):
    """ This function reads a ply file, groups points into 1 m^3 patches, and converts the patches. """
    from plyfile import PlyData
    import datetime

    print('\t working on ply file : ', ply_file_path)
    print('\t importing ply file ', datetime.datetime.now())
    plydata = PlyData.read(ply_file_path)
    print('\t grouping points', datetime.datetime.now())
    numpy_spec_patch = grouping_ply_data(plydata, grouping_rules)

    # to order the patch
    # sorted_points = np.sort(patch[1], axis=0, kind='quicksort', order=('GPS_time'))

    # send patch to database
    print('\t sending patch to database ply file ', datetime.datetime.now())
    return making_pgpatch(numpy_spec_patch, connection_string, pcid, writing_query, ply_file_path, additional_offset)
Example #20
def readPLY(fileName):
    plydata = PlyData.read(fileName)
    verts = []
    faces = []
    for ele in plydata.elements:
        if ele.name == 'vertex':
            for v in plydata['vertex']:
                if len(v) == 3:
                    verts.append(np.asarray([v[0], v[1], v[2]]))
                else:
                    verts.append(np.asarray([v[0][0], v[0][1], v[0][2]]))
        if ele.name == 'face':
            for f in plydata['face']:
                if len(f) == 3:
                    faces.append(np.asarray([f[0], f[1], f[2]]))
                else:
                    faces.append(np.asarray([f[0][0], f[0][1], f[0][2]]))
    normals = []
    return verts, faces, normals
Example #21
def readPlyFile(filename):
    """
    Uses the plyfile Python package to read a ply file.
    Gets around issues with pcl having a bad ply writer for pointclouds
    :param filename:
    :type filename: str
    :return: the point cloud
    :rtype: vtkPolyData
    """

    from plyfile import PlyData

    plydata = PlyData.read(filename)
    vertex_data = plydata['vertex'].data # numpy array with fields ['x', 'y', 'z']
    pts = np.zeros([vertex_data.size, 3])
    pts[:, 0] = vertex_data['x']
    pts[:, 1] = vertex_data['y']
    pts[:, 2] = vertex_data['z']

    return vnp.numpyToPolyData(pts)
Example #22
def run_point_cloud(self, voxel_world_id, threshold):
    import voxel_globe.tools

    import voxel_globe.meta.models as models

    import boxm2_adaptor
    import boxm2_mesh_adaptor

    from plyfile import PlyData

    voxel_world = models.VoxelWorld.objects.get(id=voxel_world_id)

    with voxel_globe.tools.task_dir("voxel_viewer") as processing_dir:
        scene_path = os.path.join(voxel_world.directory, "scene.xml")
        scene, cache = boxm2_adaptor.load_cpp(scene_path)
        ply_filename = os.path.join(processing_dir, "model.ply")
        boxm2_mesh_adaptor.gen_color_point_cloud(scene, cache, ply_filename, 0.5, "")

        ply = PlyData.read(str(ply_filename))

        return ply.elements[0].data
Example #23
    def sample_ISS(file_name, min_num_point, pose):
        root, ext = os.path.splitext(file_name)
        in_file = root + ".ply"
        out_file = root + "_iss.ply"
        if not os.path.isfile(out_file):
            print("file doesn't exist..................")
            args = ["./iss_detect", in_file, out_file]
            popen = subprocess.Popen(args, stdout=subprocess.PIPE)
            popen.wait()
            output = popen.stdout.read()
            print(output)
        pc = np.load(root + '.npy')
        tree = spatial.KDTree(pc)
        ply = PlyData.read(out_file)
        vertex = ply['vertex']
        (x, y, z) = (vertex[t] for t in ('x', 'y', 'z'))
        pc_iss = np.column_stack((x, y, z))
        indices = np.zeros((pc_iss.shape[0],))
        for pt_i, samplept in enumerate(pc_iss):
            _, index = tree.query(samplept, k=1)
            indices[pt_i] = index
        pc_iss = utils.transform_pc(pc_iss, pose)

        # min_num_point = min(int(pc_iss.shape[0] / 10), 200)
        if min_num_point < 0:
            # min_num_point = min(int(pc_iss.shape[0] / 10), 200)
            # min_num_point = min(int(pc_iss.shape[0] / 1), 300)
            # min_num_point = min(int(pc_iss.shape[0] / 1), 500)
            min_num_point = int(pc_iss.shape[0] / 1)

        if min_num_point >= pc_iss.shape[0]:
            return pc_iss, indices

        sample_step = int(pc_iss.shape[0] / min_num_point)
        pc_iss_samples, _ = Sampler.sample_uniform(pc_iss, sample_step)
        indices_samples, _ = Sampler.sample_uniform(indices, sample_step)
        assert pc_iss_samples.shape[0] == indices_samples.shape[0]
        print(",,,,,,,,,,,,,,,,,,,,,,,,,,,,,pc_iss shape:", pc_iss_samples.shape)
        return pc_iss_samples, indices_samples
Example #24
def Slab_SVM(file_name, v, sigma):
    plydata = PlyData.read(open(file_name, 'rb'))
    vertexes = plydata['vertex'][:]

    x = np.zeros((len(vertexes), 3))
    for i in range(len(vertexes)):
        x[i] = np.array([vertexes[i][0], vertexes[i][1], vertexes[i][2]])

    file_name_points = len(x)

    x_0 = plydata['vertex']['x']
    x_1 = plydata['vertex']['y']
    x_2 = plydata['vertex']['z']
    # createPlot(x_0, x_1, x_2, file_name)

    y = np.zeros(len(x))
    x, ignore_x, y, ignore_y = train_test_split(x, y, train_size=0.01, random_state=8)
    len_x = len(x)

    risk = createHyperPlane(x, v, sigma)

    print(file_name, 'points:', file_name_points, 'subsample:', len_x, 'v', v, 'sigma', sigma, 'risk', risk)
Example #25
def create_polygons(file, seg_id, which_type):

    # print("Detecting convex hull")
    try:
        plydata = PlyData.read(file)

        x = plydata.elements[0].data['x']
        y = plydata.elements[0].data['y']
        z = plydata.elements[0].data['z']
        seg = plydata.elements[0].data['segment_id']
        # import las file
        # data = File(file, mode="r")
        # x = data.x
        # y = data.y
        # z = data.z
        # seg = data.segment_id

        points = np.vstack([x, y, z, seg]).transpose()
        # print(points)
        xy = points[:, 0:2]
        # shift_value = np.mean(xy, axis=0)
        # xy_shift = xy - shift_value

        if len(points) != 0:
            un_seg = np.unique(points[:, 3])
            # print(un_seg)
            for i in range(len(un_seg)):

                if un_seg[i] in seg_id:
                    # indexes of points belonging to this segment
                    seg_p_i = np.where(points[:, 3] == un_seg[i])
                    xy_seg = xy[seg_p_i]
                    # print(xy_seg)
                    # print('len unique x: ', len(np.unique(xy_seg[:, 0])))
                    # print('len unique y: ', len(np.unique(xy_seg[:, 1])))
                    if len(xy_seg) < 3 or len(np.unique(xy_seg[:, 0])) == 1 or len(np.unique(xy_seg[:, 1])) == 1:
                        pass
                        # hull_pts = xy_seg  # or should we then skip the segment?

                    else:
                        hull = ConvexHull(xy_seg)
                        hull_indices = hull.vertices
                        # print(hull_indices)
                        '''
                        ## plot them
                        plt.plot(points[:, 0], points[:, 1], 'o')
                        for simplex in hull.simplices:
                            plt.plot(xy_seg[simplex, 0], xy_seg[simplex, 1], 'k-')
                        plt.plot(xy_seg[hull.vertices, 0], xy_seg[hull.vertices, 1], 'r--', lw=2)
                        plt.plot(xy_seg[hull.vertices[0], 0], xy_seg[hull.vertices[0], 1], 'ro')
                        plt.show()
                        '''
                        hull_pts = xy_seg[hull_indices]
                        # print(hull_pts)
                        # hull_pts = hull_pts_shifted + shift_value
                        poly.append(Polygon(hull_pts))

                        # make_np = np.array(seg_id)
                        # make_np_t = np.array(which_type)
                        loct = seg_id.index(un_seg[i])
                        # print('test seg id = ', seg_id[loct])
                        # print('test un_seg[i] = ', un_seg[i])
                        # loct = np.where(make_np == un_seg[i])

                        t = which_type[loct]
                        # print(t)
                        # t = make_np_t[loct]

                        attr.append([inputFileName, un_seg[i], t])
                        


    except Exception as e:
        print(e)


    # write to file        
    with fiona.open('objects_1906.shp', 'w', 'ESRI Shapefile', schema) as c:
        for j in range(len(poly)):
            c.write({
                'geometry': mapping(poly[j]),
                'properties': {'id': attr[j][0] , 'seg': attr[j][1], 'type' : attr[j][2]}})
Example #26
    # plt.draw()
    # fig = plt.figure()
    # ax = fig.gca(projection='3d')
    # ax.plot_trisurf(x, y, np.array(v).flatten(), cmap=cm.jet, linewidth=0.2)
    # ax.set_title('Actual')
    # plt.draw()
    # plt.show()

    return a, D, indices_chosen


def main(x, y, z, ftype):
    if (ftype == 'quad'):
        a = QuadSurfFit(x, y, z)

    elif (ftype == 'lin'):
        a = LinSurfFit(x, y, z)

    return a


if __name__ == "__main__":
    plydata = PlyData.read('example-1.ply')
    x = (plydata['vertex']['x'])
    y = (plydata['vertex']['y'])
    z = (plydata['vertex']['z'])

    a = main(x, y, z, 'quad')
    print('final a = ')
    print(a)
Example #27
def export_ply(pc, filename):
    vertex = np.zeros(pc.shape[0], dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
    for i in range(pc.shape[0]):
        vertex[i] = (pc[i][0], pc[i][1], pc[i][2])
    ply_out = PlyData([PlyElement.describe(vertex, 'vertex', comments=['vertices'])])
    ply_out.write(filename)
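The per-row fill above can be replaced by three column assignments on the structured array; a sketch with the same output, assuming only numpy and plyfile:

import numpy as np
from plyfile import PlyData, PlyElement

def export_ply_vectorized(pc, filename):
    vertex = np.empty(pc.shape[0], dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
    vertex['x'], vertex['y'], vertex['z'] = pc[:, 0], pc[:, 1], pc[:, 2]
    PlyData([PlyElement.describe(vertex, 'vertex', comments=['vertices'])]).write(filename)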
Example #28
def write_ply(points, filename, text=True):
    """ input: Nx3, write points to filename as PLY format. """
    points = [(points[i,0], points[i,1], points[i,2]) for i in range(points.shape[0])]
    vertex = np.array(points, dtype=[('x', 'f4'), ('y', 'f4'),('z', 'f4')])
    el = PlyElement.describe(vertex, 'vertex', comments=['vertices'])
    PlyData([el], text=text).write(filename)
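A hedged usage sketch for write_ply above (random data, hypothetical filename):

import numpy as np

pts = np.random.rand(100, 3).astype(np.float32)  # any Nx3 array works
write_ply(pts, 'random_cloud.ply', text=True)    # text=True keeps the output human-readable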
Example #29
os.makedirs(args.outdir, exist_ok=True)

# Read mesh data
fn, ext = os.path.splitext(args.input)
pn, fn = os.path.split(fn)
fn = os.path.join(pn, os.path.basename(fn).rsplit('_', 1)[0])
if args.edge_length is None:
    args.edge_length = fn + "_edge.csv"
if args.boundary_vertex is None and args.lambda_bdvert > 0:
    cfn = fn + "_boundary.csv"
    if os.path.isfile(cfn):
        args.boundary_vertex = cfn

#
plydata = PlyData.read(args.input)
vert = np.vstack(
    [plydata['vertex']['x'], plydata['vertex']['y'],
     plydata['vertex']['z']]).astype(np.float64).T
face = plydata['face']['vertex_indices']
print("reading edge length from ", args.edge_length)
edgedat = np.loadtxt(args.edge_length, delimiter=",")
if edgedat.shape[1] == 4:  # with weight information
    edgedat = np.array([[i, j, l, w]
                        for i, j, l, w in edgedat
                        if i < j])
Example #30
    return normal_pca.transform(df)


def do_random_pca(df):
    # Import the libraries required for PCA:
    from sklearn.decomposition import PCA
    # Reduce to 2D randomized:
    random_pca = PCA(n_components=2, svd_solver='randomized')
    # Train PCA on the armadillo and Project to 2D:
    return random_pca.fit_transform(df)


plt.style.use('ggplot')
reduce_factor = 100
# Load up the scanned armadillo:
ply = PlyData.read('datasets/stanford_armadillo.ply')
armadillo = pd.DataFrame({
    'x': ply['vertex']['z'][::reduce_factor],
    'y': ply['vertex']['x'][::reduce_factor],
    'z': ply['vertex']['y'][::reduce_factor]
})
# Render the original armadillo:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.set_title('Armadillo 3D')
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
ax.scatter(armadillo['x'],
           armadillo['y'],
           armadillo['z'])
Example #31
]
print((len(filenames_training)))
print((len(filenames_testing)))

f_training = h5py.File("./hdf5_data/data_training.h5", 'w')
f_testing = h5py.File("./hdf5_data/data_testing.h5", 'w')

a_data_training = np.zeros((len(filenames_training), 512, 3))
a_pid_training = np.zeros((len(filenames_training), 512), dtype=np.uint8)

a_data_testing = np.zeros((len(filenames_testing), 512, 3))
a_pid_testing = np.zeros((len(filenames_testing), 512), dtype=np.uint8)

for i in range(0, len(filenames_training)):
    print(filenames_training[i])
    plydata = PlyData.read("./ply_dir/" + filenames_training[i] + ".ply")
    piddata = [
        line.rstrip()
        for line in open("./seg_dir/" + filenames_training[i] + ".seg", 'r')
    ]
    for j in range(0, 512):
        a_data_training[i, j] = [
            plydata['vertex']['x'][j], plydata['vertex']['y'][j],
            plydata['vertex']['z'][j]
        ]
        a_pid_training[i, j] = int(piddata[j])
data = f_training.create_dataset("data", data=a_data_training)
pid = f_training.create_dataset("pid", data=a_pid_training)

for i in range(0, len(filenames_testing)):
    plydata = PlyData.read("./ply_dir/" + filenames_testing[i] + ".ply")
Example #32
import matplotlib
import pandas as pd
import datetime

from mpl_toolkits.mplot3d import Axes3D
from plyfile import PlyData, PlyElement

# Every 100 data samples, we save 1. If things run too
# slow, try increasing this number. If things run too fast,
# try decreasing it... =)
reduce_factor = 100

# Look pretty...
matplotlib.style.use('ggplot')

# Load up the scanned armadillo
plyfile = PlyData.read('Datasets/stanford_armadillo.ply')
armadillo = pd.DataFrame({
    'x': plyfile['vertex']['z'][::reduce_factor],
    'y': plyfile['vertex']['x'][::reduce_factor],
    'z': plyfile['vertex']['y'][::reduce_factor]
})


def do_PCA(armadillo):
    #
    # TODO: Write code to import the libraries required for PCA.
    # Then, train your PCA on the armadillo dataframe. Finally,
    # drop one dimension (reduce it down to 2D) and project the
    # armadillo down to the 2D principal component feature space.
    #
    # NOTE: Be sure to RETURN your projected armadillo!
Example #33
def train(param):
    torch.manual_seed(0)
    np.random.seed(0)

    print("**********Initiate Network**********")
    model = graphAE.Model(param)

    model.cuda()
    optimizer = torch.optim.Adam(params=model.parameters(),
                                 lr=param.lr,
                                 weight_decay=param.weight_decay)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                param.lr_decay_epoch_step,
                                                gamma=param.lr_decay)

    if (param.read_weight_path != ""):
        print("load " + param.read_weight_path)
        checkpoint = torch.load(param.read_weight_path)
        model.load_state_dict(checkpoint['model_state_dict'])
        optimizer.load_state_dict(checkpoint['optimizer_state_dict'])

    model.compute_param_num()

    model.train()

    print("**********Get training ply fn list from**********", param.pcs_train)
    ##get ply file lst
    pc_lst_train = np.load(param.pcs_train)
    param.iter_per_epoch = int(len(pc_lst_train) / param.batch)
    param.end_iter = param.iter_per_epoch * param.epoch
    print("**********Get evaluating ply fn list from**********",
          param.pcs_evaluate)
    pc_lst_evaluate = np.load(param.pcs_evaluate)
    #print ("**********Get test ply fn list from**********", param.pcs_evaluate)
    #pc_lst_test = np.load(param.pcs_test)

    np.random.shuffle(pc_lst_train)
    np.random.shuffle(pc_lst_evaluate)

    pc_lst_train[:, :, 0:3] -= pc_lst_train[:, :, 0:3].mean(1).reshape(
        (-1, 1, 3)).repeat(param.point_num, 1)
    pc_lst_evaluate[:, :, 0:3] -= pc_lst_evaluate[:, :, 0:3].mean(1).reshape(
        (-1, 1, 3)).repeat(param.point_num, 1)
    #pc_lst_test[:,:,0:3] -= pc_lst_test[:,:,0:3].mean(1).reshape((-1,1,3)).repeat(param.point_num, 1)

    template_plydata = PlyData.read(param.template_ply_fn)

    print("**********Start Training**********")

    min_geo_error = 123456  # large sentinel; any real evaluation error is smaller
    for i in range(param.start_epoch, param.epoch + 1):

        if (((i % param.evaluate_epoch == 0) and (i != 10))
                or (i == param.epoch)):
            print("###Evaluate", "epoch", i, "##########################")
            with torch.no_grad():
                torch.manual_seed(0)
                np.random.seed(0)
                geo_error = evaluate(param,
                                     model,
                                     pc_lst_evaluate,
                                     i,
                                     template_plydata,
                                     suffix="_eval")
                if (geo_error < min_geo_error):
                    min_geo_error = geo_error
                    print("###Save Weight")
                    path = param.write_weight_folder + "model_epoch%04d" % i + ".weight"
                    torch.save(
                        {
                            'model_state_dict': model.state_dict(),
                            'optimizer_state_dict': optimizer.state_dict()
                        }, path)

        torch.manual_seed(i)
        np.random.seed(i)

        for j in range(param.iter_per_epoch):
            train_one_iteration(param, model, optimizer, pc_lst_train, i, j)

        scheduler.step()
Example #34
from plyfile import PlyData, PlyElement
import sys
from auto_rotate import make_rotator, gen_ply_vectrices, apply_rotation, write_rotated_ply

if __name__ == '__main__':
    path = sys.argv[1]
    degree = float(sys.argv[2])
    rotator = make_rotator(degree * 0.0174533)
    rotated_vertrices = []
    input_ply = PlyData.read(path)

    #apply rotation
    for t in gen_ply_vectrices(input_ply):
        x, y, z = t[0], t[1], t[2]
        newx, newy, newz = apply_rotation(x, y, z, rotator)
        rotated_vertrices.append((newx, newy, newz))

    #make new ply
    write_rotated_ply(rotated_vertrices, input_ply, 'rotated_by_degree.ply')
Example #35
import matplotlib
import pandas as pd
import datetime

from mpl_toolkits.mplot3d import Axes3D
from plyfile import PlyData, PlyElement

# Every 100 data samples, we save 1. If things run too
# slow, try increasing this number. If things run too fast,
# try decreasing it... =)
reduce_factor = 100

# Look pretty...
matplotlib.style.use('ggplot')

# Load up the scanned armadillo
plyfile = PlyData.read(
    '/Users/zmandell/Downloads/DAT210x-master/Module4/Datasets/stanford_armadillo.ply'
)
armadillo = pd.DataFrame({
    'x': plyfile['vertex']['z'][::reduce_factor],
    'y': plyfile['vertex']['x'][::reduce_factor],
    'z': plyfile['vertex']['y'][::reduce_factor]
})


def do_PCA(armadillo):
    #
    # TODO: Write code to import the libraries required for PCA.
    # Then, train your PCA on the armadillo dataframe. Finally,
    # drop one dimension (reduce it down to 2D) and project the
    # armadillo down to the 2D principal component feature space.
    #
Example #36
def load_ply_data(filename, point_num):
    plydata = PlyData.read(filename)
    pc = plydata['vertex'].data[:point_num]
    pc_array = np.array([[pc[i][0], pc[i][1], pc[i][2]]
                         for i in range(len(pc))])
    return pc_array
Example #37
            q = model["trs"]["rotation"]
            s = model["trs"]["scale"]

            id_cad = model["id_cad"]
            catid_cad = model["catid_cad"]

            outdir = os.path.abspath(opt.out + "/" + id_scan)
            pathlib.Path(outdir).mkdir(parents=True, exist_ok=True) 


            if scan_file == "": # <-- do just once, because scene is same for all cad models
                scan_file = params["scannet"] + "/" + id_scan + "/" + id_scan + "_vh_clean_2.ply"
                Mscan = make_M_from_tqs(r["trs"]["translation"], r["trs"]["rotation"], r["trs"]["scale"])
                assert os.path.exists(scan_file), scan_file + " does not exist."
                with open(scan_file, 'rb') as read_file:
                    mesh_scan = PlyData.read(read_file)
                for v in mesh_scan["vertex"]: 
                    v1 = np.array([v[0], v[1], v[2], 1])
                    v1 = np.dot(Mscan, v1)

                    v[0] = v1[0]
                    v[1] = v1[1]
                    v[2] = v1[2]

                with open(outdir + "/scan.ply", mode='wb') as f:
                    PlyData(mesh_scan).write(f)

            cad_file = params["shapenet"] + "/" + catid_cad + "/" + id_cad  + "/models/model_normalized.obj"
            cad_mesh = pywavefront.Wavefront(cad_file, collect_faces=True, parse=True)
            Mcad = make_M_from_tqs(t, q, s)
Example #38
def parse_ply_file(ply_fn):
    '''
  element vertex 1522546
  property float x
  property float y
  property float z
  property float nx
  property float ny
  property float nz
  property float tx
  property float ty
  property uchar red
  property uchar green
  property uchar blue

  element face 3016249
  property list uchar int vertex_indices
  property int material_id
  property int segment_id
  property int category_id
  '''
    with open(ply_fn, 'rb') as ply_fo:
        plydata = PlyData.read(ply_fo)
        num_ele = len(plydata.elements)
        num_vertex = plydata['vertex'].count
        num_face = plydata['face'].count
        data_vertex = plydata['vertex'].data
        data_face = plydata['face'].data

        ## vertex
        vertex_eles = [
            'x', 'y', 'z', 'nx', 'ny', 'nz', 'tx', 'ty', 'red', 'green', 'blue'
        ]
        datas_vertex = {}
        for e in vertex_eles:
            datas_vertex[e] = np.expand_dims(data_vertex[e], axis=-1)
        vertex_xyz = np.concatenate(
            [datas_vertex['x'], datas_vertex['y'], datas_vertex['z']], axis=1)
        vertex_nxnynz = np.concatenate(
            [datas_vertex['nx'], datas_vertex['ny'], datas_vertex['nz']],
            axis=1)
        vertex_rgb = np.concatenate(
            [datas_vertex['red'], datas_vertex['green'], datas_vertex['blue']],
            axis=1)

        ## face
        vertex_idx_per_face = data_face['vertex_indices']
        vertex_idx_per_face = np.concatenate(vertex_idx_per_face, axis=0)
        vertex_idx_per_face = np.reshape(vertex_idx_per_face, [-1, 3])

        datas = {}
        datas['xyz'] = vertex_xyz  # (N,3)
        datas['nxnynz'] = vertex_nxnynz  # (N,3)
        datas['color'] = vertex_rgb  # (N,3)
        datas['vidx_per_face'] = vertex_idx_per_face  # (F,3)
        datas['label_material'] = np.expand_dims(data_face['material_id'],
                                                 1)  # (F,1)
        datas['label_instance'] = np.expand_dims(data_face['segment_id'], 1)
        datas['label_raw_category'] = np.expand_dims(data_face['category_id'],
                                                     1)
        label_category = get_cat40_from_rawcat(data_face['category_id'])
        datas['label_category'] = np.expand_dims(label_category, 1)

        return datas
Example #39
def extract_ply_data(filenames, min_points=1024):
    """
    Collects all the point data from ply files, and places it into a list.
    If the file does not contain enough points, ignore the file.


    Param:
        filenames: A list of filenames to extract data from. Use collect_files to get this list.

        min_points: The minimum number of points needed to allow the file to be extracted.
    """
    data_list = []  #(x,y,z,dtype=np.float32)
    normal_list = []  #(normal,dtype=np.float32)
    label_list = []  #(dtype=uint8)
    point_labels = []
    face_list = []

    data_lengths = []

    data = None
    face_length = 0

    for file_count in range(0, len(filenames)):

        #get the filename (purely for print)
        file_name = filenames[file_count][filenames[file_count].rfind('\\') +
                                          1:]

        plydata = PlyData.read(filenames[file_count])

        #collect all of the data from the ply [[x,y,z,r,g,b][...]]
        if data is not None:
            face_length += len(data)

        data = plydata['vertex'][:][:]

        #print("Part number of points:" +str(len(data)))
        #do not allow files that have less than min_points since this is our sample size.
        if len(data) >= min_points:

            #normal = calculate_normals(filenames[file_count])
            label = get_label(filenames[file_count])

            face = plydata['face'][:][:]

            #add data and label to their respective lists.
            if label != -1:
                print(file_name + ": (" + label[0].capitalize() + ") " +
                      str(len(data)) + " points | " + str(len(face)) +
                      " faces")
                label_val = [label[1]]
                data_lengths.append(len(data))
                data_list.extend(data)
                label_list.append(label[1])
                point_labels.extend(label_val * len(data))

                #Offset face indices to the length of the last set of data
                face = np.array(face.tolist())
                if file_count != 0:
                    face += face_length

                face_list.extend(face)

            else:
                print("Skipping " +
                      str(filenames[file_count] + ": Label Does Not Exist"))
        else:
            print("Skipping " +
                  str(filenames[file_count] + ": Not Enough Data"))

    print("Number of Elements in Data: " + str(len(data_list)) +
          " Total Faces: " + str(len(face_list)) + "\n")
    #print(len(data_list))

    return data_list, data_lengths, normal_list, label_list, point_labels, face_list
Example #40
    def process_file(i_mesh, f):
        nonlocal i_sample

        # Read the mesh

        # libigl loader seems to leak memory in loop?
        # verts, faces = utils.read_mesh(f)

        plydata = PlyData.read(f)
        verts = np.vstack((plydata['vertex']['x'], plydata['vertex']['y'],
                           plydata['vertex']['z'])).T
        tri_data = plydata['face'].data['vertex_indices']
        faces = np.vstack(tri_data)

        # Compute total sample counts
        n_vert_sample_tot = int(args.neigh_size / args.model_frac *
                                (1. - args.n_add))
        n_surf_sample_tot = int(args.surface_size / (args.model_frac))

        # sample points
        vert_sample_pos, vert_sample_normal = get_samples(
            verts, faces, n_vert_sample_tot)

        if (args.n_add > 0):
            n_vert_sample_noise = int(args.neigh_size / args.model_frac *
                                      (args.n_add))
            vert_sample_noise_pos, vert_sample_noise_normal = get_samples(
                verts, faces, n_vert_sample_noise)
            vert_sample_noise_pos += np.random.randn(n_vert_sample_noise,
                                                     3) * args.on_surface_dev

            vert_sample_pos = np.concatenate(
                (vert_sample_pos, vert_sample_noise_pos), axis=0)
            vert_sample_normal = np.concatenate(
                (vert_sample_normal, vert_sample_noise_normal), axis=0)

        surf_sample_pos, surf_sample_normal = get_samples(
            verts, faces, n_surf_sample_tot)

        # Build nearest-neighbor structure
        kd_tree_vert = spatial.KDTree(vert_sample_pos)
        kd_tree_surf = spatial.KDTree(surf_sample_pos)

        # Randomly sample vertices
        last_sample = i_sample + counts[i_mesh]
        while i_sample < last_sample:

            print("generating sample {} / {}  on mesh {}".format(
                i_sample, args.n_samples, f))

            # Random vertex
            ind = np.random.randint(vert_sample_pos.shape[0])
            center = surf_sample_pos[ind, :]

            _, neigh_vert = kd_tree_vert.query(center, k=args.neigh_size)
            _, neigh_surf = kd_tree_surf.query(center, k=args.surface_size)

            result_vert_pos = vert_sample_pos[neigh_vert, :]
            result_vert_normal = vert_sample_normal[neigh_vert, :]
            result_surf_pos = surf_sample_pos[neigh_surf, :]
            result_surf_normal = surf_sample_normal[neigh_surf, :]

            # Write out the result
            out_filename = os.path.join(
                args.output_dir,
                "neighborhood_points_{:06d}.npz".format(i_sample))
            np.savez(out_filename,
                     vert_pos=result_vert_pos,
                     vert_normal=result_vert_normal,
                     surf_pos=result_surf_pos,
                     surf_normal=result_surf_normal)

            i_sample = i_sample + 1
Example #41
def read_ply_points(ply_path):
    ply = PlyData.read(ply_path)
    data = ply.elements[0].data
    points = np.stack([data['x'], data['y'], data['z']], axis=1)
    return points
Example #42
def test_write_read(tet_ply_txt, tmpdir, text, byte_order):
    ply0 = PlyData(tet_ply_txt.elements, text, byte_order,
                   tet_ply_txt.comments)
    ply1 = write_read(ply0, tmpdir)
    verify(ply0, ply1)
Example #43
    else:
        assert (prop0 == prop1).all()


vertex = numpy.array([(0, 0, 0), (0, 1, 1), (1, 0, 1), (1, 1, 0)],
                     dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])

face = numpy.array([([0, 1, 2], 255, 255, 255), ([0, 2, 3], 255, 0, 0),
                    ([0, 1, 3], 0, 255, 0), ([1, 2, 3], 0, 0, 255)],
                   dtype=[('vertex_indices', 'i4', (3, )), ('red', 'u1'),
                          ('green', 'u1'), ('blue', 'u1')])

print("Assembling initial PlyData instance...")
ply0 = PlyData([
    PlyElement.describe(vertex, 'vertex', comments=['tetrahedron vertices']),
    PlyElement.describe(face, 'face')
],
               text=True,
               comments=['single tetrahedron with colored faces'])

print("Writing test0.ply (ascii)...")
ply0.write('test0.ply')

print("Reading test0.ply...")
ply1 = PlyData.read('test0.ply')

print("(verifying result...)")
verify(ply0, ply1)

print("Writing test1.ply (binary_little_endian)...")
ply1.text = False
ply1.byte_order = '<'
Example #44
def test_header_parse_error(s, line):
    with Raises(PlyHeaderParseError) as e:
        PlyData.read(BytesIO(s))
    assert e.exc_val.line == line
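This test is parameterized elsewhere; a hedged sketch of equivalent standalone pytest cases (the malformed headers and expected line numbers are illustrative assumptions):

import pytest
from io import BytesIO
from plyfile import PlyData, PlyHeaderParseError

@pytest.mark.parametrize("s, line", [
    (b"plyy\nformat ascii 1.0\nend_header\n", 1),  # bad magic string
    (b"ply\nformat ascii 2.0\nend_header\n", 2),   # unsupported version
])
def test_header_parse_error_cases(s, line):
    with pytest.raises(PlyHeaderParseError) as e:
        PlyData.read(BytesIO(s))
    assert e.value.line == line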
Example #45
def read_ply(filename):
    """ read XYZ point cloud from filename PLY file """
    plydata = PlyData.read(filename)
    pc = plydata['vertex'].data
    pc_array = np.array([[x, y, z] for x,y,z in pc])
    return pc_array
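When the vertex element carries extra properties (normals, color), the per-row tuple unpacking above breaks; a hedged alternative that selects just x/y/z via numpy's recfunctions:

import numpy as np
from numpy.lib.recfunctions import structured_to_unstructured
from plyfile import PlyData

def read_ply_xyz(filename):
    v = PlyData.read(filename)['vertex'].data
    # multi-field indexing keeps only x/y/z, then the view is densified
    return structured_to_unstructured(v[['x', 'y', 'z']], dtype=np.float32)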
Example #46
def make_ply_string(dest_path, indices, vertices, rgba_color):
    """
    Creates a ply str that can be included into a .k.zip for rendering
    in KNOSSOS.
    # TODO: write out normals

    Parameters
    ----------
    indices : np.array
    vertices : np.array
    rgba_color : Tuple[uint8] or np.array

    Returns
    -------
    str
    """
    # create header
    vertices = vertices.astype(np.float32)
    indices = indices.astype(np.int32)
    if not indices.ndim == 2:
        indices = np.array(indices, dtype=int).reshape((-1, 3))
    if not vertices.ndim == 2:
        vertices = np.array(vertices, dtype=np.float32).reshape((-1, 3))
    if len(rgba_color) != len(vertices) and len(rgba_color) == 4:
        # TODO: create per tree color instead of per vertex color
        rgba_color = np.array([rgba_color for i in range(len(vertices))],
                              dtype=np.uint8)
    else:
        if not (len(rgba_color) == len(vertices) and len(rgba_color[0]) == 4):
            msg = 'Color array has to be RGBA and to provide a color value f' \
                  'or every vertex!'
            log_proc.error(msg)
            raise ValueError(msg)
    if type(rgba_color) is list:
        rgba_color = np.array(rgba_color, dtype=np.uint8)
        log_proc.warn("Color input is list. It will now be converted "
                      "automatically, data will be unusable if not normalized"
                      " between 0 and 255. min/max of data:"
                      " {}, {}".format(rgba_color.min(), rgba_color.max()))
    elif not np.issubdtype(rgba_color.dtype, np.uint8):
        log_proc.warn("Color array is not of type integer or unsigned integer."
                      " It will now be converted automatically, data will be "
                      "unusable if not normalized between 0 and 255."
                      "min/max of data: {}, {}".format(rgba_color.min(),
                                                       rgba_color.max()))
        rgba_color = np.array(rgba_color, dtype=np.uint8)
    # handle the list/dtype cases first: a plain list has no .ndim attribute
    if not rgba_color.ndim == 2:
        rgba_color = np.array(rgba_color, dtype=int).reshape((-1, 4))
    # ply file requires 1D object arrays,
    vertices = np.concatenate(
        [vertices.astype(object),
         rgba_color.astype(object)], axis=1)
    vertices = np.array([tuple(el) for el in vertices],
                        dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4'),
                               ('red', 'u1'), ('green', 'u1'), ('blue', 'u1'),
                               ('alpha', 'u1')])
    # ply file requires 1D object arrays.
    indices = np.array([tuple([el], ) for el in indices],
                       dtype=[('vertex_indices', 'i4', (3, ))])
    PlyData([
        PlyElement.describe(vertices, 'vertex'),
        PlyElement.describe(indices, 'face')
    ]).write(dest_path)
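A hedged usage sketch for make_ply_string above (a single red triangle; the output path is illustrative):

import numpy as np

verts = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]])
tris = np.array([[0, 1, 2]])
make_ply_string('triangle.ply', tris, verts, (255, 0, 0, 255))  # one RGBA broadcast to every vertex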
Example #47
        for m in image_metrics_network:
            m.update_state(gt_probs, pred_probs)

        q.put((tf.transpose(primitive_indices, (1, 0)).numpy(),
               tf.transpose(pred_probs,
                            (1, 0, 2)).numpy(), index, color, gt_probs))
    stream_out.stop()
    print("Computing primitive annotations...")
    pred_primitive_annotations = tf.convert_to_tensor(aggregator.get())
    pred_primitive_dontcare = tf.reduce_sum(pred_primitive_annotations,
                                            axis=-1) < dont_care_threshold

    # Evaluate vertex metrics
    if args.mode == "triangles" and args.mesh == "scannet" and float(
            args.simplify) == 1.0:
        gt_mesh = PlyData.read(
            os.path.join(scene, name + "_vh_clean_2.labels.ply"))

        # Load mapping from vertex to faces
        print("Creating map between faces and vertices...")
        keys = list(gt_mesh["face"].data.dtype.fields.keys())
        if len(keys) != 1:
            print(f"Invalid mesh file: {input_mesh_file}")
            sys.exit(-1)
        face_to_vertices = np.asarray(
            [np.asarray(d) for d in gt_mesh["face"].data[keys[0]]])
        vertex_to_faces_dict = defaultdict(set)
        for face, vertices in enumerate(face_to_vertices):
            for vertex in vertices:
                vertex_to_faces_dict[vertex].add(face)
        max_len = max([len(faces) for faces in vertex_to_faces_dict.values()])
        vertex_to_faces = []
Example #48
def load_ply_normal(filename, point_num):
    plydata = PlyData.read(filename)
    pc = plydata['normal'].data[:point_num]
    pc_array = np.array([[x, y, z] for x, y, z in pc])
    return pc_array
Example #49
    with open(ARGS_.scene_json) as f:
        #skeleton = json.load(f)["frames"][60]["skeletons"][0]["bones"]
        skeleton = json.load(f)["skeletons"][0]["pose_reference"]

    pose_ref = None
    #with open(ARGS_.pose_reference) as f:
    #  pose_reference = json.load(f)
    pose_ref = pose_reference(skeleton, bones_names)

    output_error = None
    with open(ARGS_.output_error) as f:
        output_error = json.load(f)

    cloud = None
    with open(ARGS_.pointcloud, 'rb') as f:
        cloud = PlyData.read(f)
        cloud = [
            cloud['vertex']['x'], cloud['vertex']['y'], cloud['vertex']['z']
        ]
        #np.vstack((cloud['vertex']['x'],
        #  cloud['vertex']['y'],
        #  cloud['vertex']['z']))

    #pose = rebuild_pose_from_quaternion(pose_ref, output_error['output'])
    pose = rebuild_pose_from_quaternion3(pose_ref,
                                         output_error['output_ground_truth'])

    print(len(pose))

    plot_pose(pose, cloud, title="Pose")
    plt.show()
Example #50
import sys
import os
sys.path.insert(0, './src/core')
from numpy.random import uniform, seed
from matplotlib.mlab import griddata  # removed in modern matplotlib; needs an old release
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm
from pylab import *
import numpy as np
from plyfile import PlyData, PlyElement
import scipy

matplotlib.style.use('classic') #in matplotlib >= 1.5.1

plydata = PlyData.read(open(os.path.expanduser(decos_dem_file)))

vertex = plydata['vertex'].data

[px, py, pz] = (vertex[t] for t in ('x', 'y', 'z'))

# define grid.
npts=500
xi = np.linspace(min(px), max(px), npts)
yi = np.linspace(min(py), max(py), npts)

# grid the data.
zi = griddata(px, py, pz, xi, yi, interp='linear')

############
### PLOT ###
Example #51
import matplotlib
import pandas as pd
from mpl_toolkits.mplot3d import Axes3D
from plyfile import PlyData, PlyElement


# Every 100 data samples, we save 1. If things run too
# slow, try increasing this number. If things run too fast,
# try decreasing it... =)
reduce_factor = 10


# Look pretty...
matplotlib.style.use('ggplot')


# Load up the scanned armadillo
plyfile = PlyData.read('Datasets/stanford_armadillo.ply')
armadillo = pd.DataFrame({
  'x':plyfile['vertex']['z'][::reduce_factor],
  'y':plyfile['vertex']['x'][::reduce_factor],
  'z':plyfile['vertex']['y'][::reduce_factor]
})



def do_PCA(armadillo):
  #
  # TODO: Write code to import the libraries required for PCA.
  # Then, train your PCA on the armadillo dataframe. Finally,
  # drop one dimension (reduce it down to 2D) and project the
  # armadillo down to the 2D principal component feature space.
  #
Example #52
def get_point_cloud(point_cloud_id, number_points=None, history=None):
  from voxel_globe.meta import models
  from vpgl_adaptor import convert_local_to_global_coordinates_array, create_lvcs
  import os
  import numpy as np
  from plyfile import PlyData

  point_cloud = models.PointCloud.objects.get(id=point_cloud_id).history(history)

  lvcs = create_lvcs(point_cloud.origin[1], point_cloud.origin[0], point_cloud.origin[2], 'wgs84')

  ply = PlyData.read(str(os.path.join(point_cloud.directory, 'error.ply')))
  data = ply.elements[0].data

  if number_points:
    try:
      import heapq
      data = np.array(heapq.nlargest(number_points, ply.elements[0].data, 
                                     key=lambda x:x['prob']))
    except IndexError: # not a correctly formatted ply file. HACK A CODE!
      #This is a hack-a-code for Tom's ply file
      data = ply.elements[0].data.astype([('x', '<f4'), ('y', '<f4'), 
          ('z', '<f4'), ('red', 'u1'), ('green', 'u1'), ('blue', 'u1'), 
          ('prob', '<f4')])
      import copy
      blah = copy.deepcopy(data['y'])
      data['y'] = data['z']
      data['z'] = -blah
      blah = copy.deepcopy(data['blue'])
      data['blue'] = data['green']
      data['green'] = blah

      data['prob'] = abs(data['x'] - 10 - sum(data['x'])/len(data['x'])) \
                   + abs(data['y'] + 30 - sum(data['y'])/len(data['y'])) \
                   + abs(data['z'] - sum(data['z'])/len(data['z']))
      data['prob'] = max(data['prob']) - data['prob']

      data = np.array(heapq.nlargest(number_points, data, 
                                     key=lambda x:x['prob']))
      print(data['prob'])



  
  lla = convert_local_to_global_coordinates_array(lvcs, data['x'].tolist(), data['y'].tolist(), data['z'].tolist())

  latitude = np.array(lla[0])
  longitude = np.array(lla[1])
  altitude = np.array(lla[2])
  color = list(map(lambda r, g, b: '#%02x%02x%02x' % (r, g, b), data['red'], data['green'], data['blue']))

  return_data = {"latitude": latitude, "longitude": longitude,
                 "altitude": altitude, "color": color}

  try:
    return_data['le'] = data['le']
  except ValueError:
    return_data['le'] = (-np.ones(len(latitude))).tolist()
  try:
    return_data['ce'] = data['ce']
  except ValueError:
    return_data['ce'] = (-np.ones(len(latitude))).tolist()

  return return_data
Example #53
def export_ply(pc, filename):
    vertex = np.zeros(pc.shape[0], dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
    for i in range(pc.shape[0]):
        vertex[i] = (pc[i][0], pc[i][1], pc[i][2])
    ply_out = PlyData([PlyElement.describe(vertex, 'vertex', comments=['vertices'])])
    ply_out.write(filename)
Example #54
import numpy as np
from numpy.linalg import norm
from plyfile import PlyData, make2d
import zipfile

filename = PINS.filename.get()

if filename.endswith(".zip"):
    zipf = zipfile.ZipFile(filename)
    assert len(zipf.namelist()) == 1
    zply = zipf.open(zipf.namelist()[0])
    plydata = PlyData.read(zply)
else:
    plydata = PlyData.read(filename)
elements = {e.name: e for e in plydata.elements}
vertices = elements["vertex"].data
faces = elements["face"].data

#faces => indices + edges
indices = make2d(faces['vertex_indices']).astype(np.uint16)
assert indices.shape[1] == 3  #triangles only
edges = set()
for triangle in indices:
    for p1, p2 in ((0, 1), (1, 2), (2, 0)):
        i1, i2 = triangle[p1], triangle[p2]
        if i2 < i1: i1, i2 = i2, i1
        edges.add((i1, i2))
edges = np.array(list(edges), dtype=np.uint16)

#vertices => coordinates + normals
for n in range(len(vertices.dtype)):
Example #55
            elev = read_view_angle(view_file, i) * 40.
            azim = read_view_angle(view_file, i, elev=False) * 180.
            elev = elev[0][0]
            #if elev < 0:
            #    elev += 360.
            azim = azim[0][0]
            #if azim < 0:
            #    azim += 360.
            i = 0
            t += 1
            for path in paths:
                target_path = os.path.join(PLOTS_PATH, tag + "-" + str(i) + ".png")
                ply_file = os.path.join(path, tag + ".ply")
                i += 1

                ply_data = PlyData.read(ply_file)
                points = ply_data['vertex']
                pc = np.vstack([points['x'], points['y'], points['z']]).T
                fig = plot_3d_point_cloud(pc[:, 0], 
                                          pc[:, 1], 
                                          pc[:, 2],
                                          show=False,
                                          elev=elev,
                                          azim=azim,
                                          colorize='rainbow',
                                          filename=target_path)
                #fig.close('all')
                image = np.array(Image.open(target_path)) / 255.0
                images.append(image)
            print(str(t), view_file, ply_file, "-->", target_path, elev, azim)
            plot_images(1, 5, images, tag + ".png", dir_name="point_clouds")
Example #56
def filter_depth(scan_folder, out_folder, plyfilename):
    # the pair file
    pair_file = os.path.join(scan_folder, "pair.txt")
    # for the final point cloud
    vertexs = []
    vertex_colors = []

    pair_data = read_pair_file(pair_file)
    score_data = read_score_file(pair_file)

    nviews = len(pair_data)
    # TODO: hardcode size
    # used_mask = [np.zeros([296, 400], dtype=np.bool) for _ in range(nviews)]

    # for each reference view and the corresponding source views
    ct2 = -1

    for ref_view, src_views in pair_data:

        ct2 += 1

        # load the camera parameters
        ref_intrinsics, ref_extrinsics = read_camera_parameters(
            os.path.join(scan_folder, 'cams/{:0>8}.txt'.format(ref_view)))
        # load the reference image
        ref_img = read_img(
            os.path.join(scan_folder, 'images/{:0>8}.jpg'.format(ref_view)))

        # load the estimated depth of the reference view
        ref_depth_est = read_pfm(
            os.path.join(out_folder,
                         'depth_est_0/{:0>8}.pfm'.format(ref_view)))[0]

        import cv2

        #ref_img=cv2.pyrDown(ref_img)

        #ref_depth_est=cv2.pyrUp(ref_depth_est)
        #ref_depth_est=cv2.pyrDown(ref_depth_est)

        # load the photometric mask of the reference view
        confidence = read_pfm(
            os.path.join(out_folder,
                         'confidence_0/{:0>8}.pfm'.format(ref_view)))[0]

        #confidence=cv2.pyrUp(confidence)
        #confidence=cv2.pyrDown(confidence)

        #ref_img=cv2.pyrDown(ref_img)
        #ref_depth_est=cv2.resize(ref_depth_est,(ref_img.shape[1],ref_img.shape[0]))
        #confidence=cv2.resize(confidence,(ref_img.shape[1],ref_img.shape[0]))

        photo_mask = confidence > 0.3

        # photo_mask = confidence>=0

        # photo_mask = confidence > confidence.mean()

        # ref_depth_est=ref_depth_est * photo_mask

        all_srcview_depth_ests = []
        all_srcview_x = []
        all_srcview_y = []
        all_srcview_geomask = []
        # compute the geometric mask
        geo_mask_sum = 0
        geo_mask_sums = []
        ct = 0
        for src_view in src_views:
            ct = ct + 1
            # camera parameters of the source view
            src_intrinsics, src_extrinsics = read_camera_parameters(
                os.path.join(scan_folder, 'cams/{:0>8}.txt'.format(src_view)))
            # the estimated depth of the source view
            src_depth_est = read_pfm(
                os.path.join(out_folder,
                             'depth_est_0/{:0>8}.pfm'.format(src_view)))[0]
            #src_depth_est=cv2.resize(src_depth_est,(ref_img.shape[1],ref_img.shape[0]))
            #src_depth_est=cv2.pyrUp(src_depth_est)
            #src_depth_est=cv2.pyrDown(src_depth_est)

            src_confidence = read_pfm(
                os.path.join(out_folder,
                             'confidence_0/{:0>8}.pfm'.format(src_view)))[0]

            # src_mask=src_confidence>0.1
            # src_mask=src_confidence>src_confidence.mean()

            # src_depth_est=src_depth_est*src_mask

            masks, geo_mask, depth_reprojected, x2d_src, y2d_src = check_geometric_consistency(
                ref_depth_est, ref_intrinsics, ref_extrinsics, src_depth_est,
                src_intrinsics, src_extrinsics)

            if (ct == 1):
                for i in range(2, 11):
                    geo_mask_sums.append(masks[i - 2].astype(np.int32))
            else:
                for i in range(2, 11):
                    geo_mask_sums[i - 2] += masks[i - 2].astype(np.int32)

            geo_mask_sum += geo_mask.astype(np.int32)

            all_srcview_depth_ests.append(depth_reprojected)

            # all_srcview_x.append(x2d_src)
            # all_srcview_y.append(y2d_src)
            # all_srcview_geomask.append(geo_mask)

        geo_mask = geo_mask_sum >= 10

        for i in range(2, 11):
            geo_mask = np.logical_or(geo_mask, geo_mask_sums[i - 2] >= i)

        depth_est_averaged = (sum(all_srcview_depth_ests) +
                              ref_depth_est) / (geo_mask_sum + 1)

        if (not isinstance(geo_mask, bool)):

            final_mask = np.logical_and(photo_mask, geo_mask)

            os.makedirs(os.path.join(out_folder, "mask"), exist_ok=True)

            save_mask(
                os.path.join(out_folder,
                             "mask/{:0>8}_photo.png".format(ref_view)),
                photo_mask)
            save_mask(
                os.path.join(out_folder,
                             "mask/{:0>8}_geo.png".format(ref_view)), geo_mask)
            save_mask(
                os.path.join(out_folder,
                             "mask/{:0>8}_final.png".format(ref_view)),
                final_mask)

            print(
                "processing {}, ref-view{:0>2}, photo/geo/final-mask:{}/{}/{}".
                format(scan_folder, ref_view, photo_mask.mean(),
                       geo_mask.mean(), final_mask.mean()))

            if args.display:
                import cv2
                cv2.imshow('ref_img', ref_img[:, :, ::-1])
                cv2.imshow('ref_depth', ref_depth_est / 800)
                cv2.imshow('ref_depth * photo_mask',
                           ref_depth_est * photo_mask.astype(np.float32) / 800)
                cv2.imshow('ref_depth * geo_mask',
                           ref_depth_est * geo_mask.astype(np.float32) / 800)
                cv2.imshow('ref_depth * mask',
                           ref_depth_est * final_mask.astype(np.float32) / 800)
                cv2.waitKey(0)

            height, width = depth_est_averaged.shape[:2]
            x, y = np.meshgrid(np.arange(0, width), np.arange(0, height))
            # valid_points = np.logical_and(final_mask, ~used_mask[ref_view])
            valid_points = final_mask
            print("valid_points", valid_points.mean())
            x, y, depth = x[valid_points], y[valid_points], depth_est_averaged[
                valid_points]
            color = ref_img[valid_points]  # hardcoded for DTU dataset
            xyz_ref = np.matmul(np.linalg.inv(ref_intrinsics),
                                np.vstack((x, y, np.ones_like(x))) * depth)
            xyz_world = np.matmul(np.linalg.inv(ref_extrinsics),
                                  np.vstack((xyz_ref, np.ones_like(x))))[:3]
            vertexs.append(xyz_world.transpose((1, 0)))
            vertex_colors.append((color * 255).astype(np.uint8))

            # # set used_mask[ref_view]
            # used_mask[ref_view][...] = True
            # for idx, src_view in enumerate(src_views):
            #     src_mask = np.logical_and(final_mask, all_srcview_geomask[idx])
            #     src_y = all_srcview_y[idx].astype(np.int)
            #     src_x = all_srcview_x[idx].astype(np.int)
            #     used_mask[src_view][src_y[src_mask], src_x[src_mask]] = True

    vertexs = np.concatenate(vertexs, axis=0)
    vertex_colors = np.concatenate(vertex_colors, axis=0)
    vertexs = np.array([tuple(v) for v in vertexs],
                       dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
    vertex_colors = np.array([tuple(v) for v in vertex_colors],
                             dtype=[('red', 'u1'), ('green', 'u1'),
                                    ('blue', 'u1')])

    vertex_all = np.empty(len(vertexs),
                          vertexs.dtype.descr + vertex_colors.dtype.descr)
    for prop in vertexs.dtype.names:
        vertex_all[prop] = vertexs[prop]
    for prop in vertex_colors.dtype.names:
        vertex_all[prop] = vertex_colors[prop]

    el = PlyElement.describe(vertex_all, 'vertex')
    PlyData([el]).write(plyfilename)
    print("saving the final model to", plyfilename)
Example #57
import numpy as np
from plyfile import PlyData, PlyElement

def outputPly(data, name):
    d2 = np.empty(data.shape[0], dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
    for i in range(data.shape[0]):
        d2[i] = tuple(data[i])
    el = PlyElement.describe(d2, 'vertex')
    PlyData([el]).write(name + '.ply')
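A quick smoke test with a made-up cloud (the file name is arbitrary):

points = np.random.rand(1000, 3).astype(np.float32)
outputPly(points, 'demo')  # writes demo.ply with 1000 xyz vertices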
Example #58
	def __init__(self, model_path, K, H, W, backend="pyglet"):
		app.use_app(backend)
		app.Canvas.__init__(self, show=False, size=(W,H))
		self.size = (W,H)  #(W,H)
		self.K = K.copy()

		fragment_code = """
			#version 130

			varying vec3 fragpos;
			varying vec3 normal;

			varying vec3 fragmentColor;
			out vec4 color;

			varying vec3 viewPos;
			uniform vec3 light_direction;

			void main()
			{
						vec3 norm = normal;
						vec3 lightDirA = normalize(-light_direction - fragpos);
						vec3 diffuseA = vec3(0.4, 0.4, 0.4) * max(dot(norm, lightDirA), 0.0);
						vec4 colors = vec4(fragmentColor, 1);
						vec3 light_3 = diffuseA + vec3(0.65, 0.65, 0.65);
						vec4 light = vec4(light_3, 1.0f);
						color = clamp(light * colors, 0.0, 1.0);
			}
			"""

		vertex_code = """
			#version 130

			attribute vec3 a_position;
			attribute vec3 a_color;
			attribute vec3 a_normal;

			varying vec3 fragmentColor;
			varying vec3 normal;
			varying vec3 fragpos;
			varying vec3 viewPos;

			uniform mat4 view;
			uniform mat4 proj;

			void main()
			{
				fragmentColor = a_color;
				gl_Position = proj * view * vec4(a_position, 1.0);
				fragpos = a_position;
				viewPos = vec3(view);
				normal = a_normal;
			}
			"""


		print('model_path: ',model_path)
		if '.ply' in model_path:
			ply = PlyData.read(model_path)
			vertices = np.stack((ply['vertex']['x'].reshape(-1), ply['vertex']['y'].reshape(-1), ply['vertex']['z'].reshape(-1)), axis=-1)
			assert vertices.shape[1]==3
			face_indices = ply['face']['vertex_indices']
			face_indices = np.stack(face_indices, axis=0)
			assert face_indices.shape[1]==3
			vertex_color = np.stack((ply['vertex']['red'].reshape(-1), ply['vertex']['green'].reshape(-1), ply['vertex']['blue'].reshape(-1)), axis=-1)
			vertex_normal = np.stack((ply['vertex']['nx'].reshape(-1), ply['vertex']['ny'].reshape(-1), ply['vertex']['nz'].reshape(-1)), axis=-1)
			vertex_normal = vertex_normal/np.linalg.norm(vertex_normal, axis=1).reshape(-1,1)
		else:
			print('vispy model_path has to be a ply file')
			raise RuntimeError

		self.data = np.ones(vertices.shape[0], [('a_position', np.float32, 3),('a_color', np.float32, 3),('a_normal', np.float32, 3)])
		self.data['a_position'] = vertices.copy()
		self.data['a_color'] = vertex_color/255.0
		self.data['a_normal'] = vertex_normal.copy()

		self.vertex_buffer = gloo.VertexBuffer(self.data)
		self.index_buffer = gloo.IndexBuffer(face_indices.reshape(-1).astype(np.uint32))

		self.program = gloo.Program(vertex_code, fragment_code)
		self.program.bind(self.vertex_buffer)
		self.update_cam_mat(self.K, 0, self.size[0], self.size[1], 0)

		self.fbo = gloo.FrameBuffer(gloo.Texture2D(shape=(self.size[1],self.size[0],3)), gloo.RenderBuffer(self.size[::-1]))
		self.rgb = None
		self.depth = None
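One fragile spot above is the bare division when normalizing vertex_normal: a zero-length normal yields NaNs. A defensive variant, assuming nothing beyond numpy:

import numpy as np

def normalize_rows(vectors, eps=1e-12):
    # divide each row by its length, clamping to eps so degenerate
    # (all-zero) normals stay finite instead of turning into NaNs
    norms = np.linalg.norm(vectors, axis=1, keepdims=True)
    return vectors / np.maximum(norms, eps)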
Example #59
import argparse
import copy
from plyfile import PlyData

parser = argparse.ArgumentParser(description='Modifies ply file to be used for rendering.\n Example: ./plyScale.py -f input.ply -o out.ply -r 210 -cutX 10.0')
parser.add_argument('-f','--inputFile', help='Input file name', required=True)
parser.add_argument('-o','--outputFile', help='Output file name', required=True)
parser.add_argument('--lx', help='Desired size of bounding box (X axis)', required=False, default="0")
parser.add_argument('--ly', help='Desired size of bounding box (Y axis)', required=False, default="0")
parser.add_argument('--lz', help='Desired size of bounding box (Z axis)', required=False, default="0")
parser.add_argument('-r','--order', help='Reorder axis. By default 012, to swap x and z use 210', required=False, default="012")
helpStringForCut = 'Remove all the faces which are above specified value for %s. The origin is in the center of mass. If axis reordering was applied, axis are in new coordinates.'
parser.add_argument('--cutX', help=helpStringForCut%('X'), required=False, default="none")
parser.add_argument('--cutY', help=helpStringForCut%('Y'), required=False, default="none")
parser.add_argument('--cutZ', help=helpStringForCut%('Z'), required=False, default="none")
args = vars(parser.parse_args())

desiredBox = [float(args['lx']), float(args['ly']), float(args['lz'])]

plydata = PlyData.read(args['inputFile'])
vertices = plydata['vertex'].data

# swap coords
order = args['order']
if (order != "012"):
    idx =  [int(order[i]) for i in range(0, len(order))]
    assert(len(idx) == 3)
    print "Swapping axis!"
    for i in range(0, len(vertices)):
        v = copy.deepcopy(vertices[i])
        for dim in range(0, 3):
            vertices[i][dim] = v[ idx[dim] ]


# Current box
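The example is cut off at the bounding-box step. A plausible continuation under that comment, assuming the intent is to scale the model so its bounding box matches desiredBox, with zero entries meaning "leave that axis alone"; the variable names from here on are ours:

import numpy as np

coords = np.vstack([vertices['x'], vertices['y'], vertices['z']]).T
currentBox = coords.max(axis=0) - coords.min(axis=0)
print("Current bounding box: %s" % currentBox)

for dim, prop in enumerate(('x', 'y', 'z')):
    if desiredBox[dim] > 0 and currentBox[dim] > 0:
        vertices[prop] *= desiredBox[dim] / currentBox[dim]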
Example #60
def filter_depth(dataset_root, scan, out_folder, plyfilename):

    print("Starting fusion for:"+out_folder)

    # the pair file
    pair_file = os.path.join(dataset_root,'Cameras/pair.txt')
    # for the final point cloud
    vertexs = []
    vertex_colors = []

    pair_data = read_pair_file(pair_file)
    nviews = len(pair_data)

    # for each reference view and the corresponding source views
    for ref_view, src_views in pair_data:
        # load the camera parameters
        ref_intrinsics, ref_extrinsics = read_camera_parameters(
            os.path.join(dataset_root, 'Cameras/{:0>8}_cam.txt'.format(ref_view)))

        # load the reference image
        ref_img = read_img(os.path.join(dataset_root, "Rectified", scan, 'rect_{:03d}_3_r5000.png'.format(ref_view + 1)))  # images are numbered from 1
        # load the estimated depth of the reference view
        ref_depth_est, scale = read_pfm(os.path.join(out_folder, 'depth_est/{:0>8}.pfm'.format(ref_view)))
        # load the photometric mask of the reference view
        confidence, scale = read_pfm(os.path.join(out_folder, 'confidence/{:0>8}.pfm'.format(ref_view)))
        photo_mask = confidence > 0.9

        all_srcview_depth_ests = []
        all_srcview_x = []
        all_srcview_y = []
        all_srcview_geomask = []

        # compute the geometric mask
        geo_mask_sum = 0
        for src_view in src_views:
            # camera parameters of the source view
            src_intrinsics, src_extrinsics = read_camera_parameters(
                os.path.join(dataset_root, 'Cameras/{:0>8}_cam.txt'.format(src_view)))

            # the estimated depth of the source view
            src_depth_est, scale = read_pfm(os.path.join(out_folder, 'depth_est/{:0>8}.pfm'.format(src_view)))

            geo_mask, depth_reprojected, x2d_src, y2d_src = check_geometric_consistency(ref_depth_est, ref_intrinsics, ref_extrinsics,
                                                                      src_depth_est,
                                                                      src_intrinsics, src_extrinsics)
            
            geo_mask_sum += geo_mask.astype(np.int32)
            all_srcview_depth_ests.append(depth_reprojected)
            all_srcview_x.append(x2d_src)
            all_srcview_y.append(y2d_src)
            all_srcview_geomask.append(geo_mask)

        depth_est_averaged = (sum(all_srcview_depth_ests) + ref_depth_est) / (geo_mask_sum + 1)
        # at least 3 source views matched
        geo_mask = geo_mask_sum >= 3
        final_mask = np.logical_and(photo_mask, geo_mask)

        os.makedirs(os.path.join(out_folder, "mask"), exist_ok=True)
        save_mask(os.path.join(out_folder, "mask/{:0>8}_photo.png".format(ref_view)), photo_mask)
        save_mask(os.path.join(out_folder, "mask/{:0>8}_geo.png".format(ref_view)), geo_mask)
        save_mask(os.path.join(out_folder, "mask/{:0>8}_final.png".format(ref_view)), final_mask)

        print("processing {}, ref-view{:0>2}, photo/geo/final-mask:{}/{}/{}".format(scan, ref_view,
                                                                                    photo_mask.mean(),
                                                                                    geo_mask.mean(), final_mask.mean()))


        height, width = depth_est_averaged.shape[:2]
        x, y = np.meshgrid(np.arange(0, width), np.arange(0, height))
        # valid_points = np.logical_and(final_mask, ~used_mask[ref_view])
        valid_points = final_mask
        print("valid_points", valid_points.mean())
        x, y, depth = x[valid_points], y[valid_points], depth_est_averaged[valid_points]
        ref_img = np.array(ref_img)
 
        color = ref_img[valid_points]

        xyz_ref = np.matmul(np.linalg.inv(ref_intrinsics),
                            np.vstack((x, y, np.ones_like(x))) * depth)
        xyz_world = np.matmul(np.linalg.inv(ref_extrinsics),
                              np.vstack((xyz_ref, np.ones_like(x))))[:3]
        vertexs.append(xyz_world.transpose((1, 0)))
        vertex_colors.append(color.astype(np.uint8))

    vertexs = np.concatenate(vertexs, axis=0)
    vertex_colors = np.concatenate(vertex_colors, axis=0)
    vertexs = np.array([tuple(v) for v in vertexs], dtype=[('x', 'f4'), ('y', 'f4'), ('z', 'f4')])
    vertex_colors = np.array([tuple(v) for v in vertex_colors], dtype=[('red', 'u1'), ('green', 'u1'), ('blue', 'u1')])

    vertex_all = np.empty(len(vertexs), vertexs.dtype.descr + vertex_colors.dtype.descr)
    for prop in vertexs.dtype.names:
        vertex_all[prop] = vertexs[prop]
    for prop in vertex_colors.dtype.names:
        vertex_all[prop] = vertex_colors[prop]

    el = PlyElement.describe(vertex_all, 'vertex')
    print("Saving the final model to", plyfilename)
    PlyData([el], comments=['Model created by CVP-MVSNet.']).write(plyfilename)
    print("Model saved.")