Example 1
    def test_save_load_with_normals(self):
        points = torch.tensor([[0, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0]],
                              dtype=torch.float32)
        normals = torch.tensor([[0, 1, 0], [1, 0, 0], [1, 4, 1], [1, 0, 0]],
                               dtype=torch.float32)
        features = torch.rand_like(points)

        for do_features, do_normals in itertools.product([True, False],
                                                         [True, False]):
            cloud = Pointclouds(
                points=[points],
                features=[features] if do_features else None,
                normals=[normals] if do_normals else None,
            )
            device = torch.device("cuda:0")

            io = IO()
            with NamedTemporaryFile(mode="w", suffix=".ply") as f:
                io.save_pointcloud(cloud.cuda(), f.name)
                f.flush()
                cloud2 = io.load_pointcloud(f.name, device=device)
            self.assertEqual(cloud2.device, device)
            cloud2 = cloud2.cpu()
            self.assertClose(cloud2.points_padded(), cloud.points_padded())
            if do_normals:
                self.assertClose(cloud2.normals_padded(),
                                 cloud.normals_padded())
            else:
                self.assertIsNone(cloud.normals_padded())
                self.assertIsNone(cloud2.normals_padded())
            if do_features:
                self.assertClose(cloud2.features_packed(), features)
            else:
                self.assertIsNone(cloud2.features_packed())
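The test above exercises the PLY round trip through pytorch3d's IO class. A minimal sketch of the same save/load cycle outside the test harness, assuming pytorch3d is installed (the file name and point count are illustrative):

# Build a small point cloud with unit normals and write it to PLY.
import torch
from pytorch3d.io import IO
from pytorch3d.structures import Pointclouds

points = torch.rand(100, 3)
normals = torch.nn.functional.normalize(torch.rand(100, 3), dim=1)
cloud = Pointclouds(points=[points], normals=[normals])

io = IO()
io.save_pointcloud(cloud, "cloud.ply")       # positions and normals are written
cloud2 = io.load_pointcloud("cloud.ply")     # read back as a Pointclouds object
assert torch.allclose(cloud2.points_padded(), cloud.points_padded(), atol=1e-5)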
Example 2
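For reference, the function below relies on roughly the following imports; config, read_ply and get_filenames are project-local helpers, so their origin is an assumption:

# Assumed imports for eval_one_dir (project-local helpers omitted).
import csv
import os
from collections import OrderedDict, defaultdict

import numpy as np
import torch
import trimesh
from pytorch3d.loss import chamfer_distance, point_mesh_face_distance
from pytorch3d.ops import sample_points_from_meshes
from pytorch3d.structures import Meshes, Pointclouds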
def eval_one_dir(exp_dir, n_pts=50000):
    """
    Function for one directory
    """
    device = torch.device('cuda:0')
    cfg = config.load_config(os.path.join(exp_dir, 'config.yaml'))
    dataset = config.create_dataset(cfg.data, mode='val')
    meshes_gt = dataset.get_meshes().to(device)
    val_gt_pts_file = os.path.join(cfg.data.data_dir, 'val%d.ply' % n_pts)
    if os.path.isfile(val_gt_pts_file):
        points, normals = np.split(read_ply(val_gt_pts_file), 2, axis=1)
        pcl_gt = Pointclouds(
            torch.from_numpy(points[None, ...]).float(),
            torch.from_numpy(normals[None, ...]).float()).to(device)
    else:
        pcl_gt = dataset.get_pointclouds(n_pts).to(device)
        trimesh.Trimesh(pcl_gt.points_packed().cpu().numpy(),
                        vertex_normals=pcl_gt.normals_packed().cpu().numpy(),
                        process=False).export(val_gt_pts_file,
                                              vertex_normal=True)

    # evaluate the intermediate meshes saved in the vis directory
    vis_dir = os.path.join(exp_dir, 'vis')
    vis_files = sorted(get_filenames(vis_dir, '_mesh.ply'))
    iters = [int(os.path.basename(v).split('_')[0]) for v in vis_files]
    best_dict = defaultdict(lambda: float('inf'))
    vis_eval_csv = os.path.join(vis_dir, "evaluation_n%d.csv" % n_pts)
    if not os.path.isfile(vis_eval_csv):
        with open(vis_eval_csv, "w") as f:
            fieldnames = ['mtime', 'it', 'chamfer_p', 'chamfer_n', 'pf_dist']
            writer = csv.DictWriter(f,
                                    fieldnames=fieldnames,
                                    restval="-",
                                    extrasaction="ignore")
            writer.writeheader()
            mtime0 = None
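            # Evaluate each intermediate mesh; mtime is logged relative to the first file.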
            for it, vis_file in zip(iters, vis_files):
                eval_dict = OrderedDict()
                mtime = os.path.getmtime(vis_file)
                if mtime0 is None:
                    mtime0 = mtime
                eval_dict['it'] = it
                eval_dict['mtime'] = mtime - mtime0
                val_pts_file = os.path.join(
                    vis_dir,
                    os.path.basename(vis_file).replace('_mesh',
                                                       '_val%d' % n_pts))
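                # Reuse a cached sample of this mesh if present; otherwise sample and cache it.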
                if os.path.isfile(val_pts_file):
                    points, normals = np.split(read_ply(val_pts_file),
                                               2,
                                               axis=1)
                    points = torch.from_numpy(points).float().to(
                        device=device).view(1, -1, 3)
                    normals = torch.from_numpy(normals).float().to(
                        device=device).view(1, -1, 3)
                else:
                    mesh = trimesh.load(vis_file, process=False)
                    # points, normals = pcu.sample_mesh_poisson_disk(
                    #     mesh.vertices, mesh.faces,
                    #     mesh.vertex_normals.ravel().reshape(-1, 3), n_pts, use_geodesic_distance=True)
                    # p_idx = np.random.permutation(points.shape[0])[:n_pts]
                    # points = points[p_idx, ...]
                    # normals = normals[p_idx, ...]
                    # points = torch.from_numpy(points).float().to(
                    #     device=device).view(1, -1, 3)
                    # normals = torch.from_numpy(normals).float().to(
                    #     device=device).view(1, -1, 3)
                    # Faces are vertex indices, so keep them as an integer tensor.
                    meshes = Meshes(
                        torch.from_numpy(mesh.vertices[None, ...]).float(),
                        torch.from_numpy(mesh.faces[None, ...]).long()).to(device)
                    points, normals = sample_points_from_meshes(
                        meshes, n_pts, return_normals=True)
                    trimesh.Trimesh(points.cpu().numpy()[0],
                                    vertex_normals=normals.cpu().numpy()[0],
                                    process=False).export(val_pts_file,
                                                          vertex_normal=True)
                pcl = Pointclouds(points, normals)
                chamfer_p, chamfer_n = chamfer_distance(
                    points,
                    pcl_gt.points_padded(),
                    x_normals=normals,
                    y_normals=pcl_gt.normals_padded(),
                )
                eval_dict['chamfer_p'] = chamfer_p.item()
                eval_dict['chamfer_n'] = chamfer_n.item()
                pf_dist = point_mesh_face_distance(meshes_gt, pcl)
                eval_dict['pf_dist'] = pf_dist.item()
                writer.writerow(eval_dict)
                for k, v in eval_dict.items():
                    if v < best_dict[k]:
                        best_dict[k] = v
                        print('best {} so far ({}): {:.4g}'.format(
                            k, vis_file, v))

    # evaluate the final mesh in the generation directory
    gen_dir = os.path.join(exp_dir, 'generation')
    if not os.path.isdir(gen_dir):
        return

    final_file = os.path.join(gen_dir, 'mesh.ply')
    val_pts_file = final_file[:-4] + '_val%d' % n_pts + '.ply'
    if not os.path.isfile(final_file):
        return

    gen_file_csv = os.path.join(gen_dir, "evaluation_n%d.csv" % n_pts)
    if not os.path.isfile(gen_file_csv):
        with open(gen_file_csv, "w") as f:
            fieldnames = ['chamfer_p', 'chamfer_n', 'pf_dist']
            writer = csv.DictWriter(f,
                                    fieldnames=fieldnames,
                                    restval="-",
                                    extrasaction="ignore")
            writer.writeheader()
            eval_dict = OrderedDict()
            mesh = trimesh.load(final_file)
            # points, normals = pcu.sample_mesh_poisson_disk(
            #     mesh.vertices, mesh.faces,
            #     mesh.vertex_normals.ravel().reshape(-1, 3), n_pts, use_geodesic_distance=True)
            # p_idx = np.random.permutation(points.shape[0])[:n_pts]
            # points = points[p_idx, ...]
            # normals = normals[p_idx, ...]
            # points = torch.from_numpy(points).float().to(
            #     device=device).view(1, -1, 3)
            # normals = torch.from_numpy(normals).float().to(
            #     device=device).view(1, -1, 3)
            # Faces are vertex indices, so keep them as an integer tensor.
            meshes = Meshes(
                torch.from_numpy(mesh.vertices[None, ...]).float(),
                torch.from_numpy(mesh.faces[None, ...]).long()).to(device)
            points, normals = sample_points_from_meshes(meshes,
                                                        n_pts,
                                                        return_normals=True)
            trimesh.Trimesh(points.cpu().numpy()[0],
                            vertex_normals=normals.cpu().numpy()[0],
                            process=False).export(val_pts_file,
                                                  vertex_normal=True)
            pcl = Pointclouds(points, normals)
            chamfer_p, chamfer_n = chamfer_distance(
                points,
                pcl_gt.points_padded(),
                x_normals=normals,
                y_normals=pcl_gt.normals_padded(),
            )
            eval_dict['chamfer_p'] = chamfer_p.item()
            eval_dict['chamfer_n'] = chamfer_n.item()
            pf_dist = point_mesh_face_distance(meshes_gt, pcl)
            eval_dict['pf_dist'] = pf_dist.item()
            writer.writerow(eval_dict)
            for k, v in eval_dict.items():
                if v < best_dict[k]:
                    best_dict[k] = v
                    print('best {} so far ({}): {:.4g}'.format(
                        k, final_file, v))
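A possible driver for eval_one_dir, assuming a results root that contains one sub-directory per experiment (the directory layout and argument names are assumptions, not part of the original script):

# Hypothetical driver: evaluate every experiment directory under a results root.
import argparse
import os

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("root", help="directory with one sub-directory per experiment")
    parser.add_argument("--n_pts", type=int, default=50000)
    args = parser.parse_args()

    for name in sorted(os.listdir(args.root)):
        exp_dir = os.path.join(args.root, name)
        if os.path.isdir(exp_dir):
            print("evaluating", exp_dir)
            eval_one_dir(exp_dir, n_pts=args.n_pts)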