def multiscale_icp(source,
                   target,
                   voxel_size,
                   max_iter,
                   config,
                   init_transformation=np.identity(4)):
    """Run coarse-to-fine ICP over a pyramid of voxel sizes.

    :param source: source point cloud (Open3D geometry)
    :param target: target point cloud (Open3D geometry)
    :param voxel_size: list of downsampling voxel sizes, one per scale
    :param max_iter: list of ICP iteration counts, parallel to voxel_size
    :param config: dict with "voxel_size", "icp_method" and "debug_mode" keys
    :param init_transformation: 4x4 initial guess mapping source -> target
    :return: (transformation, information_matrix) computed at the finest scale
    :raises ValueError: on empty/mismatched scale lists or an unknown
        config["icp_method"] (was a NameError at the return in the original)
    """
    if not max_iter or len(voxel_size) != len(max_iter):
        raise ValueError("voxel_size and max_iter must be non-empty lists "
                         "of the same length")
    icp_method = config["icp_method"]
    current_transformation = init_transformation
    result_icp = None
    information_matrix = None
    # Multi-scale approach: coarse alignment first, refined at finer scales.
    for scale, (scale_voxel, scale_iter) in enumerate(zip(voxel_size,
                                                          max_iter)):
        # NOTE(review): the threshold uses the global config voxel size, not
        # the per-scale one (unlike the information matrix below) — confirm
        # this is intended.
        distance_threshold = config["voxel_size"] * 1.4
        print("voxel_size {}".format(scale_voxel))
        source_down = voxel_down_sample(source, scale_voxel)
        target_down = voxel_down_sample(target, scale_voxel)
        criteria = o3d.registration.ICPConvergenceCriteria(
            max_iteration=scale_iter)
        if icp_method == "point_to_point":
            result_icp = o3d.registration.registration_icp(
                source_down, target_down, distance_threshold,
                current_transformation,
                o3d.registration.TransformationEstimationPointToPoint(),
                criteria)
        elif icp_method in ("point_to_plane", "color"):
            # Both of these estimators need per-point normals on the
            # downsampled clouds.
            search_param = o3d.geometry.KDTreeSearchParamHybrid(
                radius=scale_voxel * 2.0, max_nn=30)
            estimate_normals(source_down, search_param)
            estimate_normals(target_down, search_param)
            if icp_method == "point_to_plane":
                result_icp = o3d.registration.registration_icp(
                    source_down, target_down, distance_threshold,
                    current_transformation,
                    o3d.registration.TransformationEstimationPointToPlane(),
                    criteria)
            else:  # "color"
                result_icp = o3d.registration.registration_colored_icp(
                    source_down, target_down, scale_voxel,
                    current_transformation,
                    o3d.registration.TransformationEstimationForColoredICP(),
                    o3d.registration.ICPConvergenceCriteria(
                        relative_fitness=1e-6,
                        relative_rmse=1e-6,
                        max_iteration=scale_iter))
        else:
            raise ValueError("unknown icp_method: {}".format(icp_method))
        current_transformation = result_icp.transformation
        # The pairwise information matrix is only needed for the final
        # (finest) scale.
        if scale == len(max_iter) - 1:
            information_matrix = \
                o3d.registration.get_information_matrix_from_point_clouds(
                    source_down, target_down, scale_voxel * 1.4,
                    result_icp.transformation)

    if config["debug_mode"]:
        draw_registration_result_original_color(source, target,
                                                result_icp.transformation)
    return (result_icp.transformation, information_matrix)
Exemplo n.º 2
0
def icp_align_clouds(source,
                     target,
                     threshold,
                     show_on_visualizer=False,
                     max_iterations=50):
    """Align source onto target: global registration, then iterative ICP.

    Applies transforms to source.point_cloud in place, one ICP step at a
    time, optionally animating each step in an Open3D visualizer window.

    :param source: object exposing a .point_cloud Open3D geometry (mutated)
    :param target: object exposing a .point_cloud Open3D geometry
    :param threshold: max correspondence distance for ICP
    :param show_on_visualizer: when True, animate the alignment in a window
    :param max_iterations: number of single-step ICP refinements
    :return: (source.point_cloud, target.point_cloud) after alignment
    """
    from open3d.open3d import registration, visualization

    # Rough global alignment used as the starting pose for ICP.
    # ("treshold" is the callee's actual, misspelled, keyword name.)
    result = execute_global_registration(source.point_cloud,
                                         target.point_cloud,
                                         normals_radius=threshold * 10,
                                         treshold=threshold)
    estimate_normals(cloud=source.point_cloud,
                     search_param=KDTreeSearchParamHybrid(threshold, 30))
    estimate_normals(cloud=target.point_cloud,
                     search_param=KDTreeSearchParamHybrid(threshold, 30))

    def _icp_step():
        # One ICP iteration from identity: the source cloud is moved in
        # place after each step, so each step starts from the current pose.
        return registration_icp(source.point_cloud, target.point_cloud,
                                threshold, np.identity(4),
                                TransformationEstimationPointToPoint(),
                                ICPConvergenceCriteria(max_iteration=1))

    if show_on_visualizer:
        vis = visualization.Visualizer()
        vis.create_window("ICP ALIGNMENT", 800, 600)
        vis.add_geometry(source.point_cloud)
        vis.add_geometry(target.point_cloud)
        source.point_cloud.transform(result.transformation)

        for _ in range(max_iterations):
            reg_p2l = _icp_step()
            t = reg_p2l.transformation
            # Constrain the update to in-plane motion: keep the x/y rotation
            # block and the full translation, zero the out-of-plane rotation
            # terms. NOTE(review): the non-visualizer branch below applies
            # the unconstrained transform — confirm the asymmetry is wanted.
            trans_matrix_z = [[t[0][0], t[0][1], 0.0, t[0][3]],
                              [t[1][0], t[1][1], 0.0, t[1][3]],
                              [0.0, 0.0, 1, t[2][3]],
                              [0.0, 0.0, 0.0, 1]]
            source.point_cloud.transform(trans_matrix_z)
            vis.update_geometry()
            vis.poll_events()
            vis.update_renderer()
        vis.run()
        vis.destroy_window()
    else:
        source.point_cloud.transform(result.transformation)
        for _ in range(max_iterations):
            reg_p2l = _icp_step()
            source.point_cloud.transform(reg_p2l.transformation)
    return source.point_cloud, target.point_cloud
def preprocess_point_cloud(pcd, config):
    """Downsample a point cloud and compute its FPFH descriptor.

    Normals are estimated with a search radius of 2x the voxel size and
    FPFH features with 5x, as in the standard Open3D pipeline.

    :param pcd: input Open3D point cloud
    :param config: dict providing the "voxel_size" key
    :return: (downsampled cloud, FPFH feature) tuple
    """
    voxel = config["voxel_size"]
    downsampled = voxel_down_sample(pcd, voxel)
    normal_param = o3d.geometry.KDTreeSearchParamHybrid(radius=2.0 * voxel,
                                                        max_nn=30)
    estimate_normals(downsampled, normal_param)
    feature_param = o3d.geometry.KDTreeSearchParamHybrid(radius=5.0 * voxel,
                                                         max_nn=100)
    features = o3d.registration.compute_fpfh_feature(downsampled,
                                                     feature_param)
    return (downsampled, features)
Exemplo n.º 4
0
def refine_registration(source, target, treshold, transformation):
    '''
        Point-to-plane ICP refinement, run after global registration.
    :param source: [in] source point cloud
    :param target: [in] target point cloud
    :param treshold: [in] max correspondence distance for ICP
        (sic: the parameter name is misspelled "treshold")
    :param transformation: [in] initial transformation matrix, typically the
        result of global registration
    :return: [out] ICP registration result containing fitness, inlier RMSE
        and the refined transformation matrix
    '''
    # Point-to-plane ICP requires normals on both clouds.
    estimate_normals(source, KDTreeSearchParamKNN())
    estimate_normals(target, KDTreeSearchParamKNN())
    # NOTE(review): max_iteration=1 performs a single ICP step — confirm this
    # is intended rather than the library default iteration count.
    reg_p2p = registration_icp(source, target, treshold, transformation,
                               TransformationEstimationPointToPlane(),
                               ICPConvergenceCriteria(max_iteration=1))

    return reg_p2p
Exemplo n.º 5
0
    def forward(self, xyz):
        """Compute per-point 33-dimensional FPFH features for a batch.

        Assumes xyz is a (batch, 3, n_points) tensor — TODO confirm with
        caller. Returns a (batch, 33, n_points) float CUDA tensor.
        """
        points = xyz.transpose(1, 2).cpu().numpy()
        features = np.zeros((points.shape[0], 33, points.shape[1]))

        for idx, cloud_points in enumerate(points):
            cloud = o3d.geometry.PointCloud()
            cloud.points = o3d.utility.Vector3dVector(cloud_points)
            estimate_normals(
                cloud,
                o3d.geometry.KDTreeSearchParamHybrid(
                    radius=self.radius_normal, max_nn=30))
            fpfh = o3d.registration.compute_fpfh_feature(
                cloud,
                o3d.geometry.KDTreeSearchParamHybrid(
                    radius=self.radius_feature, max_nn=100))
            features[idx] = fpfh.data

        return torch.from_numpy(features).float().cuda()
Exemplo n.º 6
0
def preprocess_point_cloud(pcd, voxel_size):
    '''
        Method used for computing point cloud features.
        @ The FPFH feature is a 33-dimensional vector that describes the local geometric property of a point.
        NOTE: despite the name and the log output, downsampling is currently
        disabled (see the commented-out call below) — the input cloud is
        returned unchanged alongside its features.
    :param pcd: [in] Open3D point cloud
    :param voxel_size: [in] Threshold for the voxel size.
    :return: [out] The (un-downsampled) cloud along with its FPFH features.
    '''
    print(":: Downsample with a voxel size %.3f." % voxel_size)
    # NOTE(review): downsampling disabled — the print above is misleading;
    # confirm this pass-through is intentional.
    pcd_down = pcd  #voxel_down_sample(pcd, voxel_size)
    radius_normal = voxel_size * 1.5
    print(":: Estimate normal with search radius %.3f." % radius_normal)

    # FPFH needs per-point normals.
    estimate_normals(pcd_down,
                     KDTreeSearchParamHybrid(radius=radius_normal, max_nn=30))
    radius_feature = voxel_size * 5
    print(":: Compute FPFH feature with search radius %.3f." % radius_feature)
    pcd_fpfh = compute_fpfh_feature(
        pcd_down, KDTreeSearchParamHybrid(radius=radius_feature, max_nn=100))
    return pcd_down, pcd_fpfh
Exemplo n.º 7
0
 def compute_normals(self, radius, max_nn):
     """Estimate per-point normals of this object's point cloud in place.

     Uses a hybrid KD-tree search: neighbours within `radius`, capped at
     `max_nn` points.
     """
     geometry.estimate_normals(self.point_cloud, geometry.KDTreeSearchParamHybrid(radius=radius, max_nn=max_nn))
Exemplo n.º 8
0
# Load the cropped camera-3 scan from disk.
cam3pcd = o3d.io.read_point_cloud("cam3_crop.ply")

# Visual sanity check of the raw vs. hand-cropped scans.
o3d.visualization.draw_geometries([cam3_raw, cam3_hand])

# Simple transform: translate 1 unit along z (appears unused below — the
# initial guess actually comes from Thb).
T = np.eye(4)
T[2, 3] = 1
# T_test=np.array([[ 1 , 0.0 ,-0.99191778 , 100.25755102],
#  [ 0.02542272 ,-0.99475981 ,-0.09902831 , 0.01798585],
#  [-0.99669383 ,-0.01757531 ,-0.0793254 ,  1.12117939],
#  [ 0.     ,     0.     ,     0.     ,     1.        ]])

source = cam3pcd
target = pcd

# Both clouds need normals for the registration evaluation below.
estimate_normals(cloud=source,
                 search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.01,
                                                                   max_nn=30))
estimate_normals(cloud=target,
                 search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.01,
                                                                   max_nn=30))

# Max correspondence distance used when evaluating the registration.
threshold = 0.02

# Initial transform Thb — presumably a calibration matrix defined earlier in
# the file; verify against its definition.
trans_init = Thb

# NOTE(review): Python 2 print statements — the rest of this file uses
# Python 3 print(); these two lines will not parse under Python 3.
print "init Thb"
print Thb

draw_registration_result(source, target, trans_init)
print("Initial alignment")
evaluation = o3d.registration.evaluate_registration(source, target, threshold,