Example #1
if __name__ == '__main__':
    # parse arguments:
    arguments = get_arguments()

    # load point cloud:
    point_cloud = read_modelnet40_normal(arguments.input)
    # compute surface normals:
    # point_cloud.estimate_normals(
    #     search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30)
    # )
    # build search tree:
    search_tree = o3d.geometry.KDTreeFlann(point_cloud)

    # detect keypoints:
    keypoints = detect(point_cloud, search_tree, arguments.radius)

    # visualize:

    # paint the whole cloud grey so that keypoints stand out:
    point_cloud.paint_uniform_color([0.50, 0.50, 0.50])
    # define a region of interest (ROI) from the cloud's bounding box:
    max_bound = point_cloud.get_max_bound()
    min_bound = point_cloud.get_min_bound()
    center = (min_bound + max_bound) / 2.0

    # keep a 0.1-thick slab at the top of the model along y
    # and the half of the cloud with z above the center:
    min_bound[1] = max_bound[1] - 0.1
    min_bound[2] = center[2]
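    # NOTE: the original example is truncated here; the continuation below is a
    # hedged sketch (an assumption, not the original code) of how the ROI crop
    # and keypoint rendering might be completed.
    roi = o3d.geometry.AxisAlignedBoundingBox(min_bound, max_bound)
    point_cloud_roi = point_cloud.crop(roi)

    # highlight detected keypoints in red (assumes detect() returns a DataFrame
    # with an 'id' column of point indices, as in the examples below):
    keypoints_in_model = point_cloud.select_by_index(keypoints['id'].values)
    keypoints_in_model.paint_uniform_color([1.0, 0.0, 0.0])

    # render the cropped cloud together with the highlighted keypoints:
    o3d.visualization.draw_geometries([point_cloud_roi, keypoints_in_model])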
Example #2
def main(input_dir, radius, bins, num_evaluations):
    """
    Run point cloud registration on the Shenlan dataset
    """
    registration_results = io.read_registration_results(
        os.path.join(input_dir, 'reg_result.txt'))

    # init output
    df_output = io.init_output()

    for i, r in progressbar.progressbar(list(registration_results.iterrows())):
        # limit the number of pairs processed (each pair blocks on an
        # interactive visualization window):
        if i >= num_evaluations:
            break

        # parse point cloud index:
        idx_target = int(r['idx1'])
        idx_source = int(r['idx2'])

        # load point clouds, remove outliers, and build search trees:
        pcd_source = io.read_point_cloud_bin(
            os.path.join(input_dir, 'point_clouds', f'{idx_source}.bin'))
        pcd_source, idx_inliers = pcd_source.remove_radius_outlier(
            nb_points=4, radius=radius)
        search_tree_source = o3d.geometry.KDTreeFlann(pcd_source)

        pcd_target = io.read_point_cloud_bin(
            os.path.join(input_dir, 'point_clouds', f'{idx_target}.bin'))
        pcd_target, idx_inliers = pcd_target.remove_radius_outlier(
            nb_points=4, radius=radius)
        search_tree_target = o3d.geometry.KDTreeFlann(pcd_target)

        # detect keypoints:
        keypoints_source = detect(pcd_source, search_tree_source, radius)
        keypoints_target = detect(pcd_target, search_tree_target, radius)

        # compute FPFH descriptors at the keypoints:
        pcd_source_keypoints = pcd_source.select_by_index(
            keypoints_source['id'].values)
        fpfh_source_keypoints = o3d.registration.compute_fpfh_feature(
            pcd_source_keypoints,
            o3d.geometry.KDTreeSearchParamHybrid(radius=5 * radius,
                                                 max_nn=100)).data

        pcd_target_keypoints = pcd_target.select_by_index(
            keypoints_target['id'].values)
        fpfh_target_keypoints = o3d.registration.compute_fpfh_feature(
            pcd_target_keypoints,
            o3d.geometry.KDTreeSearchParamHybrid(radius=5 * radius,
                                                 max_nn=100)).data

        # distance thresholds for RANSAC matching and ICP refinement:
        distance_threshold_init = 1.5 * radius
        distance_threshold_final = 1.0 * radius

        # RANSAC for initial estimation:
        init_result = ransac_match(
            pcd_source_keypoints,
            pcd_target_keypoints,
            fpfh_source_keypoints,
            fpfh_target_keypoints,
            ransac_params=RANSACParams(
                max_workers=5,
                num_samples=4,
                max_correspondence_distance=distance_threshold_init,
                max_iteration=200000,
                max_validation=500,
                max_refinement=30),
            checker_params=CheckerParams(
                max_correspondence_distance=distance_threshold_init,
                max_edge_length_ratio=0.9,
                normal_angle_threshold=None))

        # exact ICP for refined estimation:
        final_result = exact_match(pcd_source, pcd_target, search_tree_target,
                                   init_result.transformation,
                                   distance_threshold_final, 60)

        # visualize:
        visualize.show_registration_result(pcd_source_keypoints,
                                           pcd_target_keypoints,
                                           init_result.correspondence_set,
                                           pcd_source, pcd_target,
                                           final_result.transformation)

        # add result:
        io.add_to_output(df_output, idx_target, idx_source,
                         final_result.transformation)

    # write output:
    io.write_output(os.path.join(input_dir, 'reg_result_yaogefad.txt'),
                    df_output)
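The snippet above defines main() but not how it is invoked. The entry point below is a minimal argparse-based sketch; the flag names and default values are assumptions, not part of the original example:

if __name__ == '__main__':
    import argparse

    # hypothetical CLI wrapper; flag names and defaults are assumptions:
    parser = argparse.ArgumentParser(
        description='Point cloud registration on the Shenlan dataset.')
    parser.add_argument('--input_dir', required=True,
                        help='directory containing reg_result.txt and point_clouds/')
    parser.add_argument('--radius', type=float, default=0.5,
                        help='support radius for outlier removal and keypoint detection')
    parser.add_argument('--bins', type=int, default=11,
                        help='descriptor histogram bins (forwarded to main)')
    parser.add_argument('--num_evaluations', type=int, default=1,
                        help='number of registration pairs to process')
    args = parser.parse_args()

    main(args.input_dir, args.radius, args.bins, args.num_evaluations)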
Example #3
def main(input_dir, radius, bins, num_evaluations):
    """
    Run pose estimation on a given point cloud pair
    """
    # load source & target point clouds:
    pcd_source = o3d.io.read_point_cloud(os.path.join(input_dir, "first.pcd"))
    pcd_source = pcd_source.voxel_down_sample(voxel_size=0.05)
    pcd_source.estimate_normals(
        search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1,
                                                          max_nn=30))
    pcd_target = o3d.io.read_point_cloud(os.path.join(input_dir, "second.pcd"))
    pcd_target = pcd_target.voxel_down_sample(voxel_size=0.05)
    pcd_target.estimate_normals(
        search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1,
                                                          max_nn=30))

    # remove radius outliers and build search trees:
    pcd_source, idx_inliers = pcd_source.remove_radius_outlier(nb_points=4,
                                                               radius=radius)
    search_tree_source = o3d.geometry.KDTreeFlann(pcd_source)

    pcd_target, idx_inliers = pcd_target.remove_radius_outlier(nb_points=4,
                                                               radius=radius)
    search_tree_target = o3d.geometry.KDTreeFlann(pcd_target)

    # detect keypoints:
    keypoints_source = detect(pcd_source, search_tree_source, radius)
    keypoints_target = detect(pcd_target, search_tree_target, radius)

    # compute FPFH descriptors at the keypoints:
    pcd_source_keypoints = pcd_source.select_by_index(
        keypoints_source['id'].values)
    fpfh_source_keypoints = o3d.registration.compute_fpfh_feature(
        pcd_source_keypoints,
        o3d.geometry.KDTreeSearchParamHybrid(radius=5 * radius,
                                             max_nn=100)).data

    pcd_target_keypoints = pcd_target.select_by_index(
        keypoints_target['id'].values)
    fpfh_target_keypoints = o3d.registration.compute_fpfh_feature(
        pcd_target_keypoints,
        o3d.geometry.KDTreeSearchParamHybrid(radius=5 * radius,
                                             max_nn=100)).data

    # distance thresholds for RANSAC matching and ICP refinement:
    distance_threshold_init = 1.5 * radius
    distance_threshold_final = 1.0 * radius

    # RANSAC for initial estimation:
    init_result = ransac_match(
        pcd_source_keypoints,
        pcd_target_keypoints,
        fpfh_source_keypoints,
        fpfh_target_keypoints,
        ransac_params=RANSACParams(
            max_workers=5,
            num_samples=4,
            max_correspondence_distance=distance_threshold_init,
            max_iteration=200000,
            max_validation=500,
            max_refinement=30),
        checker_params=CheckerParams(
            max_correspondence_distance=distance_threshold_init,
            max_edge_length_ratio=0.9,
            normal_angle_threshold=None))

    # exact ICP for refined estimation:
    final_result = exact_match(pcd_source, pcd_target, search_tree_target,
                               init_result.transformation,
                               distance_threshold_final, 60)

    # visualize:
    visualize.show_registration_result(pcd_source_keypoints,
                                       pcd_target_keypoints,
                                       init_result.correspondence_set,
                                       pcd_source, pcd_target,
                                       final_result.transformation)

    # init output
    df_output = io.init_output()

    # add result:
    io.add_to_output(df_output, 2, 1, final_result.transformation)

    # write output:
    io.write_output(os.path.join(input_dir, 'reg_result_yaogefad.txt'),
                    df_output)
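All three examples rely on a detect() helper that is not shown on this page. The sketch below only illustrates the interface they appear to assume: a pandas DataFrame whose 'id' column holds keypoint indices into the input cloud, as used by select_by_index(keypoints['id'].values). The simplified ISS-style saliency test, the thresholds, and the omission of non-maximum suppression are assumptions, not the original implementation:

import numpy as np
import pandas as pd
import open3d as o3d

def detect(point_cloud, search_tree, radius, gamma_21=0.975, gamma_32=0.975):
    """Simplified ISS-style keypoint detection (illustrative sketch only)."""
    points = np.asarray(point_cloud.points)

    records = []
    for idx_center, center in enumerate(points):
        # neighbors within the support radius (the query point is included):
        k, idx_neighbors, _ = search_tree.search_radius_vector_3d(center, radius)
        if k < 6:
            continue

        # eigenvalues of the neighborhood covariance, sorted in descending order:
        neighborhood = points[np.asarray(idx_neighbors), :]
        eigenvalues = np.sort(np.linalg.eigvalsh(np.cov(neighborhood.T)))[::-1]
        if eigenvalues[0] <= 0.0 or eigenvalues[1] <= 0.0:
            continue

        # ISS saliency test on the eigenvalue ratios:
        if (eigenvalues[1] / eigenvalues[0] < gamma_21 and
                eigenvalues[2] / eigenvalues[1] < gamma_32):
            records.append((idx_center, *eigenvalues))

    keypoints = pd.DataFrame(
        records, columns=['id', 'lambda_0', 'lambda_1', 'lambda_2'])
    # most salient responses first; non-maximum suppression is omitted here:
    return keypoints.sort_values('lambda_2', ascending=False)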