Example #1
def full_eval(opt):

    indir_root = opt.indir
    outdir_root = os.path.join(opt.outdir, opt.models + os.path.splitext(opt.modelpostfix)[0])
    datasets = opt.dataset
    if not isinstance(datasets, list):
        datasets = [datasets]
    for dataset in datasets:
        print(f'Evaluating on dataset {dataset}')
        opt.indir = os.path.join(indir_root, os.path.dirname(dataset))
        opt.outdir = os.path.join(outdir_root, os.path.dirname(dataset))
        opt.dataset = os.path.basename(dataset)

        # evaluate
        if os.path.exists(os.path.join(opt.indir, '05_query_dist')):
            opt.reconstruction = False
            points_to_surf_eval.eval_meshnet(opt)

            res_dir_eval = os.path.join(opt.outdir, 'eval')

            evaluation.eval_predictions(
                os.path.join(res_dir_eval, 'eval'),
                os.path.join(opt.indir, '05_query_dist'),
                os.path.join(res_dir_eval, 'rme_comp_res.csv'),
                unsigned=False)

        # reconstruct
        start = time.time()
        opt.reconstruction = True
        points_to_surf_eval.eval_meshnet(opt)
        res_dir_rec = os.path.join(opt.outdir, 'rec')
        end = time.time()
        print('Inference of SDF took: {}'.format(end - start))

        start = time.time()
        imp_surf_dist_ms_dir = os.path.join(res_dir_rec, 'dist_ms')
        query_pts_ms_dir = os.path.join(res_dir_rec, 'query_pts_ms')
        vol_out_dir = os.path.join(res_dir_rec, 'vol')
        mesh_out_dir = os.path.join(res_dir_rec, 'mesh')
        sdf.implicit_surface_to_mesh_directory(
            imp_surf_dist_ms_dir, query_pts_ms_dir,
            vol_out_dir, mesh_out_dir,
            opt.query_grid_resolution,
            opt.sigma,
            opt.certainty_threshold,
            opt.workers)
        end = time.time()
        print('Sign propagation took: {}'.format(end - start))

        new_meshes_dir_abs = os.path.join(res_dir_rec, 'mesh')
        ref_meshes_dir_abs = os.path.join(opt.indir, '03_meshes')
        csv_file = os.path.join(res_dir_rec, 'hausdorff_dist_pred_rec.csv')
        evaluation.mesh_comparison(
            new_meshes_dir_abs=new_meshes_dir_abs,
            ref_meshes_dir_abs=ref_meshes_dir_abs,
            num_processes=opt.workers,
            report_name=csv_file,
            samples_per_model=10000,
            dataset_file_abs=os.path.join(opt.indir, opt.dataset))
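
A minimal usage sketch (not part of the original listing), assuming opt is an argparse-style namespace that carries the fields full_eval reads above; all concrete values here are illustrative placeholders:

# Hypothetical call to full_eval; every value below is an assumed placeholder.
from types import SimpleNamespace

opt = SimpleNamespace(
    indir='datasets',                  # root holding one sub-directory per dataset
    outdir='results',                  # root for the 'eval' and 'rec' outputs
    models='p2s_max',                  # model name (assumed)
    modelpostfix='_model_249.pth',     # checkpoint postfix (assumed)
    dataset=['abc/testset.txt'],       # dataset file(s): directory plus set file name
    query_grid_resolution=256,         # reconstruction grid resolution (assumed)
    sigma=5,                           # sign-propagation smoothing (assumed)
    certainty_threshold=13,            # sign-propagation certainty cutoff (assumed)
    workers=12,                        # number of worker processes
)
full_eval(opt)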
Example #2
        #        reconstructed_meshes_raw = '/home/perler/repos/AtlasNet/log/atlasnet_ae_imp_surf_14_25_squares/' + dataset + '/'
        #        reconstructed_meshes = '/home/perler/repos/AtlasNet/log/atlasnet_ae_imp_surf_14_25_squares/' + dataset + '/ply/'
        #        report_file = '/home/perler/repos/AtlasNet/log/atlasnet_ae_imp_surf_14_25_squares/' + dataset + '.csv'
        #
        #        # for AtlasNet
        #        revert_atlasnet_transform(
        #            in_dir_abs=reconstructed_meshes_raw,
        #            out_dir_abs=reconstructed_meshes,
        #            ref_meshes_dir_abs=original_meshes,
        #            #ref_meshes_dir_abs=original_pc,  # for real-world, no mesh exists
        #            num_processes=num_processes)
        #
        #        utils_eval.mesh_comparison(
        #            new_meshes_dir_abs=reconstructed_meshes, ref_meshes_dir_abs=original_meshes,
        #            num_processes=num_processes, report_name=report_file,
        #            samples_per_model=10000, dataset_file_abs=val_set_file_abs)

        # DeepSDF train imp_surf_14, val imp_surf_14
        original_meshes = '/data/datasets/own/' + dataset + '/03_meshes'
        val_set_file_abs = '/data/datasets/own/' + dataset + '/testset.txt'
        reconstructed_meshes = '/home/perler/repos/DeepSDF/examples/' + dataset + '/Reconstructions/1000/Meshes/' + dataset + '/03_meshes/'
        report_file = '/home/perler/repos/DeepSDF/examples/' + dataset + '/Reconstructions/1000/Meshes/' + dataset + '/imp_surf_14_comp.csv'

        print('### chamfer distance: {}'.format(dataset))
        evaluation.mesh_comparison(new_meshes_dir_abs=reconstructed_meshes,
                                   ref_meshes_dir_abs=original_meshes,
                                   num_processes=num_processes,
                                   report_name=report_file,
                                   samples_per_model=10000,
                                   dataset_file_abs=val_set_file_abs)
Example #3
def main(dataset_name: str):

    # meshlabserver = "C:\\Program Files\\VCG\\MeshLab\\meshlabserver.exe"
    meshlabserver = '/home/perler/repos/meshlab/src/distrib/meshlabserver'

    num_processes = 12
    # num_processes = 1
    # base_dir = '../../datasets'
    base_dir = '/data/datasets/own/'

    #dataset_dir = 'implicit_surf_8'
    #dataset_dir = 'implicit_surf_real_world'
    dataset_dir = dataset_name

    config_file = os.path.join(base_dir, dataset_dir, 'settings.ini')
    config = configparser.ConfigParser()
    read_config(config, config_file)
    print('Processing dataset: ' + config_file)

    do_clean = False
    filter_broken_inputs = True

    dirs_to_clean = \
        ['00_base_meshes',
         '01_base_meshes_ply',
         '02_meshes_cleaned',
         '03_meshes',
         '04_pts', '04_blensor_py',  # '04_pcd', '04_pts_noisefree',
         '05_patch_dists', '05_patch_ids', '05_query_dist', '05_query_pts',
         '05_patch_ids_grid', '05_query_pts_grid', '05_query_dist_grid',
         '06_poisson_rec', '06_mc_gt_recon', '06_poisson_rec_gt_normals',
         '06_normals', '06_normals/pts', '06_dist_from_p_normals']
    dirs_to_clean_remainders = \
        ['00_base_meshes',
         '01_base_meshes_ply',
         '02_meshes_cleaned',
         '03_meshes',
         '04_pts', '04_blensor_py',  # '04_pcd', '04_pts_noisefree',
         '05_patch_dists', '05_patch_ids', '05_query_dist', '05_query_pts',
         '05_patch_ids_grid', '05_query_pts_grid', '05_query_dist_grid',
         '06_poisson_rec', '06_mc_gt_recon', '06_poisson_rec_gt_normals',
         '06_normals', '06_normals/pts', '06_dist_from_p_normals']

    # clean old dataset
    if do_clean:
        for dir_to_clean in dirs_to_clean:
            shutil.rmtree(os.path.join(base_dir, dataset_dir, dir_to_clean), ignore_errors=True)

    if filter_broken_inputs:  # the user might have removed unwanted input meshes after some processing
        clean_up_broken_inputs(base_dir=base_dir,
                               dataset_dir=dataset_dir,
                               final_out_dir='00_base_meshes',
                               final_out_extension=None,
                               clean_up_dirs=dirs_to_clean_remainders,
                               broken_dir='broken')

    start = time.time()
    print('### reconstruct poisson with pcpnet normals')
    dirs = (
        os.path.join(base_dir, dataset_dir, '04_pts_vis'),
        os.path.join(base_dir, dataset_dir, '06_normals_pcpnet'),
    )
    endings_per_dir = (
        '.xyz',
        '.normals',
    )
    file_utils.concat_txt_dirs(ref_dir=os.path.join(base_dir, dataset_dir,
                                                    '06_normals_pcpnet'),
                               ref_ending='.normals',
                               dirs=dirs,
                               endings_per_dir=endings_per_dir,
                               out_dir=os.path.join(base_dir, dataset_dir,
                                                    '07_pts_normals_pcpnet'),
                               out_ending='.xyz')
    print('### poisson reconstruction from pcpnet normals')
    apply_meshlab_filter(base_dir=base_dir,
                         dataset_dir=dataset_dir,
                         pts_dir='07_pts_normals_pcpnet',
                         recon_mesh_dir='06_poisson_rec_pcpnet_normals',
                         num_processes=num_processes,
                         filter_file='poisson.mlx',
                         meshlabserver_bin=meshlabserver)
    end = time.time()
    print('SPSR with PCPNet normals took: {}'.format(end - start))
    print(
        '### normal estimation and poisson reconstruction pcpnet - hausdorff distance'
    )
    new_meshes_dir_abs = os.path.join(base_dir, dataset_dir,
                                      '06_poisson_rec_pcpnet_normals')
    ref_meshes_dir_abs = os.path.join(base_dir, dataset_dir, '03_meshes')
    csv_file = os.path.join(base_dir, dataset_dir,
                            'comp_poisson_rec_pcpnet_normals.csv')
    val_set_file_abs = os.path.join(base_dir, dataset_dir, 'valset.txt')
    evaluation.mesh_comparison(new_meshes_dir_abs=new_meshes_dir_abs,
                               ref_meshes_dir_abs=ref_meshes_dir_abs,
                               num_processes=num_processes,
                               report_name=csv_file,
                               samples_per_model=10000,
                               dataset_file_abs=val_set_file_abs)

    print('### get ground truth normals for point cloud')
    utils.get_pts_normals(base_dir=base_dir,
                          dataset_dir=dataset_dir,
                          dir_in_pointcloud='04_pts',
                          dir_in_meshes='03_meshes',
                          dir_out_normals='06_normals',
                          samples_per_model=100000,
                          num_processes=num_processes)
    print('### poisson reconstruction from gt normals')
    apply_meshlab_filter(base_dir=base_dir,
                         dataset_dir=dataset_dir,
                         pts_dir='06_normals/pts',
                         recon_mesh_dir='06_poisson_rec_gt_normals',
                         num_processes=num_processes,
                         filter_file='../../poisson.mlx',
                         meshlabserver_bin=meshlabserver)
    print(
        '### normal estimation and poisson reconstruction gt normals - hausdorff distance'
    )
    new_meshes_dir_abs = os.path.join(base_dir, dataset_dir,
                                      '06_poisson_rec_gt_normals')
    ref_meshes_dir_abs = os.path.join(base_dir, dataset_dir, '03_meshes')
    csv_file = os.path.join(base_dir, dataset_dir,
                            'comp_poisson_rec_gt_normals.csv')
    val_set_file_abs = os.path.join(base_dir, dataset_dir, 'valset.txt')
    evaluation.mesh_comparison(new_meshes_dir_abs=new_meshes_dir_abs,
                               ref_meshes_dir_abs=ref_meshes_dir_abs,
                               num_processes=num_processes,
                               report_name=csv_file,
                               samples_per_model=10000,
                               dataset_file_abs=val_set_file_abs)
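
A minimal invocation sketch (assumed, not part of the original listing); the dataset name is only illustrative and is taken from the commented-out alternatives near the top of the function:

# Hypothetical entry point for this preprocessing/evaluation script.
if __name__ == '__main__':
    main(dataset_name='implicit_surf_real_world')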
Example #4
            '--epsilon',
            str(rec_epsilon),
        ]
        recon_opt = points_to_surf_eval.parse_arguments(recon_params)
        points_to_surf_eval.points_to_surf_eval(recon_opt)

        # reconstruct meshes from predicted SDFs
        imp_surf_dist_ms_dir = os.path.join(res_dir_rec, 'dist_ms')
        query_pts_ms_dir = os.path.join(res_dir_rec, 'query_pts_ms')
        vol_out_dir = os.path.join(res_dir_rec, 'vol')
        mesh_out_dir = os.path.join(res_dir_rec, 'mesh')
        sdf.implicit_surface_to_mesh_directory(imp_surf_dist_ms_dir,
                                               query_pts_ms_dir, vol_out_dir,
                                               mesh_out_dir, grid_resolution,
                                               sigma, certainty_threshold,
                                               workers)

        # get Hausdorff distance for reconstructed meshes
        new_meshes_dir_abs = os.path.join(res_dir_rec, 'mesh')
        ref_meshes_dir_abs = os.path.join(in_dir_test, '03_meshes')
        csv_file = os.path.join(res_dir_rec, 'hausdorff_dist_pred_rec.csv')
        evaluation.mesh_comparison(new_meshes_dir_abs=new_meshes_dir_abs,
                                   ref_meshes_dir_abs=ref_meshes_dir_abs,
                                   num_processes=workers,
                                   report_name=csv_file,
                                   samples_per_model=10000,
                                   dataset_file_abs=os.path.join(
                                       in_dir_test, test_set))

    print('Points2Surf is finished!')
Example #5
def main(dataset_name: str):

    # meshlabserver = "C:\\Program Files\\VCG\\MeshLab\\meshlabserver.exe"
    meshlabserver = '~/repos/meshlab/src/distrib/meshlabserver'

    num_processes = 12
    # num_processes = 1

    base_dir = 'datasets'
    dataset_dir = dataset_name

    print('Processing dataset: ' + dataset_name)

    filter_broken_inputs = True

    dirs_to_clean = \
        ['00_base_meshes',
         '01_base_meshes_ply',
         '02_meshes_cleaned',
         '03_meshes',
         '04_pts', '04_blensor_py',
         '05_patch_dists', '05_patch_ids', '05_query_dist', '05_query_pts',
         '05_patch_ids_grid', '05_query_pts_grid', '05_query_dist_grid',
         '06_poisson_rec', '06_mc_gt_recon', '06_poisson_rec_gt_normals',
         '06_normals', '06_normals/pts', '06_dist_from_p_normals']

    if filter_broken_inputs:  # the user might have removed unwanted input meshes after some processing
        clean_up_broken_inputs(base_dir=base_dir, dataset_dir=dataset_dir,
                               final_out_dir='00_base_meshes', final_out_extension=None,
                               clean_up_dirs=dirs_to_clean, broken_dir='broken')

    start = time.time()
    print('### reconstruct poisson with pcpnet normals')
    dirs = (os.path.join(base_dir, dataset_dir, '04_pts_vis'),
            os.path.join(base_dir, dataset_dir, '06_normals_pcpnet'),)
    endings_per_dir = ('.xyz', '.normals')
    file_utils.concat_txt_dirs(
       ref_dir=os.path.join(base_dir, dataset_dir, '06_normals_pcpnet'), ref_ending='.normals',
       dirs=dirs, endings_per_dir=endings_per_dir,
       out_dir=os.path.join(base_dir, dataset_dir, '07_pts_normals_pcpnet'), out_ending='.xyz')
    print('### poisson reconstruction from pcpnet normals')
    apply_meshlab_filter(base_dir=base_dir, dataset_dir=dataset_dir, pts_dir='07_pts_normals_pcpnet',
                         recon_mesh_dir='06_poisson_rec_pcpnet_normals', num_processes=num_processes,
                         filter_file='poisson.mlx', meshlabserver_bin=meshlabserver)
    end = time.time()
    print('SPSR with PCPNet normals took: {}'.format(end - start))
    print('### normal estimation and poisson reconstruction pcpnet - hausdorff distance')
    new_meshes_dir_abs = os.path.join(base_dir, dataset_dir, '06_poisson_rec_pcpnet_normals')
    ref_meshes_dir_abs = os.path.join(base_dir, dataset_dir, '03_meshes')
    csv_file = os.path.join(base_dir, dataset_dir, 'comp_poisson_rec_pcpnet_normals.csv')
    val_set_file_abs = os.path.join(base_dir, dataset_dir, 'valset.txt')
    evaluation.mesh_comparison(new_meshes_dir_abs=new_meshes_dir_abs, ref_meshes_dir_abs=ref_meshes_dir_abs,
                               num_processes=num_processes, report_name=csv_file,
                               samples_per_model=10000, dataset_file_abs=val_set_file_abs)

    # this works only when GT meshes are available
    print('### get ground truth normals for point cloud')
    utils.get_pts_normals(base_dir=base_dir, dataset_dir=dataset_dir,
                          dir_in_pointcloud='04_pts', dir_in_meshes='03_meshes',
                          dir_out_normals='06_normals', samples_per_model=100000, num_processes=num_processes)
    print('### poisson reconstruction from gt normals')
    apply_meshlab_filter(base_dir=base_dir, dataset_dir=dataset_dir, pts_dir='06_normals/pts',
                         recon_mesh_dir='06_poisson_rec_gt_normals', num_processes=num_processes,
                         filter_file='poisson.mlx', meshlabserver_bin=meshlabserver)
    print('### normal estimation and poisson reconstruction gt normals - hausdorff distance')
    new_meshes_dir_abs = os.path.join(base_dir, dataset_dir, '06_poisson_rec_gt_normals')
    ref_meshes_dir_abs = os.path.join(base_dir, dataset_dir, '03_meshes')
    csv_file = os.path.join(base_dir, dataset_dir, 'comp_poisson_rec_gt_normals.csv')
    val_set_file_abs = os.path.join(base_dir, dataset_dir, 'valset.txt')
    evaluation.mesh_comparison(new_meshes_dir_abs=new_meshes_dir_abs, ref_meshes_dir_abs=ref_meshes_dir_abs,
                               num_processes=num_processes, report_name=csv_file,
                               samples_per_model=10000, dataset_file_abs=val_set_file_abs)