Example no. 1
0
def cal_chamfer(dataset, class_name, instance_name, experiment_directory, checkpoint, data_dir):
    """Compute the chamfer distance for one reconstructed shape.

    Loads the reconstructed mesh, the ground-truth surface samples (.ply)
    and the normalization parameters (.npz) for the given shape, computes
    the chamfer distance (undoing normalization via offset/scale), prints
    it, and returns ``(shape_id, chamfer_dist)`` where ``shape_id`` is the
    dataset/class/instance path.
    """
    mesh_path = ws.get_reconstructed_mesh_filename(
        experiment_directory, checkpoint, dataset, class_name, instance_name
    )
    gt_samples_path = os.path.join(
        data_dir, "SurfaceSamples", dataset, class_name, instance_name + ".ply"
    )
    norm_params_path = os.path.join(
        data_dir, "NormalizationParameters", dataset, class_name, instance_name + ".npz"
    )

    gt_points = trimesh.load(gt_samples_path)
    recon_mesh = trimesh.load(mesh_path)
    norm_params = np.load(norm_params_path)

    chamfer_dist = deep_sdf.metrics.chamfer.compute_trimesh_chamfer(
        gt_points,
        recon_mesh,
        norm_params["offset"],
        norm_params["scale"],
    )

    print(instance_name, chamfer_dist)
    return os.path.join(dataset, class_name, instance_name), chamfer_dist
Example no. 2
0
def evaluate(experiment_directory, checkpoint, data_dir, split_filename):
    """Evaluate reconstructions for every shape listed in a split file.

    For each dataset/class/instance in the JSON split, loads the
    ground-truth surface samples and the reconstruction (both .npy arrays
    of flattened 2-D points), computes the chamfer distance between them,
    and writes all results to "chamfer.csv" in the experiment's
    evaluation directory.
    """
    with open(split_filename, "r") as f:
        split = json.load(f)

    chamfer_results = []

    for dataset in split:
        for class_name in split[dataset]:
            for instance_name in split[dataset][class_name]:
                shape_id = os.path.join(dataset, class_name, instance_name)

                # Lazy %-args: the message is only formatted when DEBUG
                # logging is actually enabled.
                logging.debug("evaluating %s", shape_id)

                reconstructed_mesh_filename = ws.get_reconstructed_mesh_filename(
                    experiment_directory, checkpoint, dataset, class_name,
                    instance_name)

                logging.debug('reconstructed mesh is "%s"',
                              reconstructed_mesh_filename)

                ground_truth_samples_filename = os.path.join(
                    data_dir,
                    "SurfaceSamples",
                    dataset,
                    class_name,
                    instance_name + ".npy",
                )

                logging.debug("ground truth samples are %s",
                              ground_truth_samples_filename)

                # Both files store flattened arrays of 2-D points; reshape
                # to (N, 2) before computing the distance. No normalization
                # parameters are applied here (unlike the mesh-based path).
                ground_truth_points = np.load(ground_truth_samples_filename)
                ground_truth_points = ground_truth_points.reshape(-1, 2)
                reconstruction = np.load(reconstructed_mesh_filename)
                reconstruction = reconstruction.reshape(-1, 2)

                chamfer_dist = deep_sdf.metrics.chamfer.compute_trimesh_chamfer(
                    ground_truth_points, reconstruction)

                logging.debug("chamfer distance: %s", chamfer_dist)

                chamfer_results.append((shape_id, chamfer_dist))

    with open(
            os.path.join(
                ws.get_evaluation_dir(experiment_directory, checkpoint, True),
                "chamfer.csv"),
            "w",
    ) as f:
        f.write("shape, chamfer_dist\n")
        for result in chamfer_results:
            f.write("{}, {}\n".format(result[0], result[1]))
Example no. 3
0
def evaluate_one_instance(dataset, class_name, instance_name, experiment_directory, checkpoint, data_dir):
    """Evaluate one reconstructed shape against its ground truth.

    Computes the chamfer and earth-mover's distances between the
    reconstructed mesh and the ground-truth surface samples, undoing the
    normalization stored alongside the samples.

    Returns:
        ``(shape_id, chamfer_dist, earthmover_dist)``. If the
        reconstruction file does not exist the instance is skipped and
        ``("", 0, 0)`` is returned instead.
    """
    logging.debug(
        "evaluating " + os.path.join(dataset, class_name, instance_name)
    )
    reconstructed_mesh_filename = ws.get_reconstructed_mesh_filename(
        experiment_directory, checkpoint, dataset, class_name, instance_name
    )

    logging.debug(
        'reconstructed mesh is "' + reconstructed_mesh_filename + '"'
    )

    if not os.path.isfile(reconstructed_mesh_filename):
        print('[WARNING] Skipping %s as it doesn\'t exists' % reconstructed_mesh_filename)
        # Bug fix: return a 3-tuple to match the success path below, so
        # callers unpacking three values do not crash on skipped shapes.
        return "", 0, 0

    ground_truth_samples_filename = os.path.join(
        data_dir,
        "SurfaceSamples",
        dataset,
        class_name,
        instance_name + ".ply",
    )

    logging.debug(
        "ground truth samples are " + ground_truth_samples_filename
    )

    normalization_params_filename = os.path.join(
        data_dir,
        "NormalizationParameters",
        dataset,
        class_name,
        instance_name + ".npz",
    )

    # Bug fix: this previously logged ground_truth_samples_filename
    # (copy-paste error) instead of the normalization-parameters path.
    logging.debug(
        "normalization params are " + normalization_params_filename
    )

    ground_truth_points = trimesh.load(ground_truth_samples_filename)
    reconstruction = trimesh.load(reconstructed_mesh_filename)

    normalization_params = np.load(normalization_params_filename)

    chamfer_dist = deep_sdf.metrics.chamfer.compute_trimesh_chamfer(
        ground_truth_points,
        reconstruction,
        normalization_params["offset"],
        normalization_params["scale"],
    )

    earthmover_dist = deep_sdf.metrics.emd.compute_trimesh_emd(
        ground_truth_points,
        reconstruction,
        normalization_params["offset"],
        normalization_params["scale"],
    )

    logging.debug("chamfer distance: " + str(chamfer_dist))

    return os.path.join(dataset, class_name, instance_name), chamfer_dist, earthmover_dist