Ejemplo n.º 1
0
    def globalRansacMatchAndICPRefine(self):
        """Coarse-to-fine registration of the source cloud onto the target.

        First computes a coarse alignment with Open3D's fast global
        registration on FPFH features, then refines that transform with
        point-to-plane ICP on the downsampled clouds.

        Returns:
            (coarse_result, refined_result): the fast-global-registration
            result and the ICP refinement result, in that order.
        """
        source_down, source_fpfh, target_down, target_fpfh = self.prepareDataset()

        # Coarse stage: feature-based fast global registration.
        coarse_option = open3d.FastGlobalRegistrationOption(
            maximum_correspondence_distance=self._distance_threshold_ransac_m)
        result_ransac = open3d.registration_fast_based_on_feature_matching(
            source_down, target_down, source_fpfh, target_fpfh, coarse_option)
        if self._show_result:
            self.showMatchResult(source_down, target_down, result_ransac.transformation)

        # Fine stage: point-to-plane ICP seeded with the coarse transform,
        # run on the same downsampled clouds.
        result_icp = open3d.registration_icp(
            source_down, target_down, self._distance_threshold_icp_m,
            result_ransac.transformation,
            open3d.TransformationEstimationPointToPlane())
        if self._show_result:
            # Visualize the refined alignment on the full-resolution clouds.
            self.showMatchResult(self._source, self._target, result_icp.transformation)
        return result_ransac, result_icp
Ejemplo n.º 2
0
def fast_global_registration():
    """
    Execute fast global registration

    Reads point positions ("P") and per-point FPFH features ("fpfh") from the
    node's two inputs, runs Open3D fast global registration, optionally
    transforms the first input's geometry, and stores the resulting 4x4
    transform in a global "xform" attribute.

    Raises:
        hou.NodeError: if either input lacks the "fpfh" point attribute.

    Based on http://www.open3d.org/docs/tutorial/Advanced/fast_global_registration.html
    """
    node = hou.pwd()
    node_geo = node.geometry()
    node_geo_target = node.inputs()[1].geometry()
    voxel_size = node.parm("voxel_size").eval()
    transform = node.parm("transform").eval()

    # Both inputs must carry the precomputed FPFH feature attribute.
    has_fpfh_source = bool(node_geo.findPointAttrib("fpfh"))
    has_fpfh_target = bool(node_geo_target.findPointAttrib("fpfh"))

    if not has_fpfh_source or not has_fpfh_target:
        raise hou.NodeError("One of the inputs does not have 'fpfh' attribute.")

    # to numpy -- np.frombuffer replaces the deprecated np.fromstring
    np_pos_str_source = node_geo.pointFloatAttribValuesAsString("P", float_type=hou.numericData.Float32)
    np_pos_source = np.frombuffer(np_pos_str_source, dtype=np.float32).reshape(-1, 3)
    np_fpfh_str_source = node_geo.pointFloatAttribValuesAsString("fpfh", float_type=hou.numericData.Float32)
    np_fpfh_size = node_geo.findPointAttrib("fpfh").size()
    np_fpfh_source = np.frombuffer(np_fpfh_str_source, dtype=np.float32).reshape(-1, np_fpfh_size)
    # Open3D Feature.data expects a (feature_dim, num_points) layout.
    np_fpfh_source = np.swapaxes(np_fpfh_source, 1, 0)

    np_pos_str_target = node_geo_target.pointFloatAttribValuesAsString("P", float_type=hou.numericData.Float32)
    np_pos_target = np.frombuffer(np_pos_str_target, dtype=np.float32).reshape(-1, 3)
    np_fpfh_str_target = node_geo_target.pointFloatAttribValuesAsString("fpfh", float_type=hou.numericData.Float32)
    np_fpfh_target = np.frombuffer(np_fpfh_str_target, dtype=np.float32).reshape(-1, np_fpfh_size)
    np_fpfh_target = np.swapaxes(np_fpfh_target, 1, 0)

    # to open3d
    source = open3d.PointCloud()
    source.points = open3d.Vector3dVector(np_pos_source.astype(np.float64))

    source_fpfh = open3d.registration.Feature()
    source_fpfh.resize(np_fpfh_source.shape[0], np_fpfh_source.shape[1])
    source_fpfh.data = np_fpfh_source.astype(np.float64)

    target = open3d.PointCloud()
    target.points = open3d.Vector3dVector(np_pos_target.astype(np.float64))

    target_fpfh = open3d.registration.Feature()
    # Bug fix: size the target feature container from the *target* features
    # (the original used the source shape, which breaks when the inputs have
    # different point counts).
    target_fpfh.resize(np_fpfh_target.shape[0], np_fpfh_target.shape[1])
    target_fpfh.data = np_fpfh_target.astype(np.float64)

    # registration
    registration = open3d.registration_fast_based_on_feature_matching(
        source, target, source_fpfh, target_fpfh,
        open3d.FastGlobalRegistrationOption(
            maximum_correspondence_distance=voxel_size * 0.5))

    # Open3D transforms are column-major relative to Houdini's row vectors,
    # hence the transpose.
    registration_xform = hou.Matrix4(registration.transformation)
    registration_xform = registration_xform.transposed()

    # to houdini
    if transform:
        node_geo.transform(registration_xform)

    node_geo.addAttrib(hou.attribType.Global, "xform", default_value=(0.0,)*16, create_local_variable=False)
    node_geo.setGlobalAttribValue("xform", registration_xform.asTuple())
Ejemplo n.º 3
0
def execute_fast_global_registration(source_down, target_down, source_fpfh,
                                     target_fpfh, voxel_size):
    """Run Open3D fast global registration on pre-downsampled clouds.

    The correspondence distance threshold is derived from the sampling
    voxel size (5x the voxel edge length).
    """
    distance_threshold = voxel_size * 5
    print(":: Apply fast global registration with distance threshold %.3f"
          % distance_threshold)
    option = open3d.FastGlobalRegistrationOption(
        maximum_correspondence_distance=distance_threshold)
    return open3d.registration_fast_based_on_feature_matching(
        source_down, target_down, source_fpfh, target_fpfh, option)
Ejemplo n.º 4
0
def execute_fast_global_registration(source_down: op3.PointCloud,
                                     target_down: op3.PointCloud,
                                     source_fpfh: op3.PointCloud,
                                     target_fpfh: op3.PointCloud,
                                     voxel_size: float,
                                     verbose=False):
    """
    Find registertration to transform source point cloud to target point cloud
    :param source, target: 2 objects of Open3D, that are point clouds of source and target
    :param voxel_size: a float value, that is how sparse you want the sample points is
    :param verbose: a boolean value, display notification and visualization when True and no notification when False
    :return: a transformation object
    """
    # Correspondences further apart than half a voxel are rejected.
    distance_threshold = voxel_size * 0.5
    if verbose:
        print(":: Apply fast global registration with distance threshold %.3f" %
              distance_threshold)
    option = op3.FastGlobalRegistrationOption(
        maximum_correspondence_distance=distance_threshold)
    return op3.registration_fast_based_on_feature_matching(
        source_down, target_down, source_fpfh, target_fpfh, option)
Ejemplo n.º 5
0
def icp_o3_gicp_fast(file_idx,
                     cfg,
                     refine=None,
                     refine_radius=0.05,
                     precomputed_results=None):
    """Fast global registration between a point-cloud pair, with optional ICP refinement.

    :param file_idx: index of the point-cloud pair to load via load_pountclouds
    :param cfg: config object; reads cfg.evaluation.special.icp.with_constraint
    :param refine: None for no refinement, or 'p2p' for point-to-point ICP
    :param refine_radius: max correspondence distance for the ICP refinement
    :param precomputed_results: optional (translation, angle, center) tuple that
        replaces the global-registration step with get_mat_angle
    :return: (transformation, pc1_centroid, elapsed_seconds)
    :raises ValueError: if refine is neither None nor 'p2p'
    """
    # Validate up front -- the original used `assert False` in the else
    # branch, which is stripped under `python -O` and gives no message.
    if refine is not None and refine != 'p2p':
        raise ValueError("Unsupported refine mode: %r (expected None or 'p2p')" % (refine,))

    pc1, pc2, pc1_centroid = load_pountclouds(file_idx, cfg)
    voxel_size = 0.05
    distance_threshold = voxel_size * 0.5
    start = time.time()
    if precomputed_results is None:
        # Coarse alignment: downsample, extract features, fast global match.
        source_down, target_down, source_fpfh, target_fpfh = ICP._icp_global_prepare_dataset(
            pc1, pc2, voxel_size)
        reg_res = o3.registration_fast_based_on_feature_matching(
            source_down, target_down, source_fpfh, target_fpfh,
            o3.FastGlobalRegistrationOption(
                with_constraint=cfg.evaluation.special.icp.with_constraint,
                maximum_correspondence_distance=distance_threshold))
        transformation = reg_res.transformation
    else:
        # A prediction is already available; rebuild the transform from it.
        precomp_pred_translation, precomp_pred_angle, precomp_pred_center = precomputed_results
        transformation = get_mat_angle(precomp_pred_translation,
                                       precomp_pred_angle, precomp_pred_center)

    if refine is None:
        time_elapsed = time.time() - start
        return transformation, pc1_centroid, time_elapsed

    # refine == 'p2p': point-to-point ICP on the full-resolution clouds,
    # seeded with the coarse/precomputed transform.
    reg_p2p = o3.registration_icp(
        pc1, pc2, refine_radius, transformation,
        o3.TransformationEstimationPointToPoint(
            with_constraint=cfg.evaluation.special.icp.with_constraint,
            with_scaling=False))
    time_elapsed = time.time() - start
    return reg_p2p.transformation, pc1_centroid, time_elapsed
Ejemplo n.º 6
0
# NOTE(review): Python 2 script fragment (bare `print` statements); depends on
# names defined earlier in the file (source_save, voxel_size,
# preprocess_point_cloud, distance_threshold_icp, pn, ...).

# Track the camera pose as a single point at the origin so it can be carried
# through the same transforms as the source cloud.
cam_pose_pcl.points = pn.Vector3dVector(np.array([[0, 0, 0]]))

source = copy.deepcopy(source_save)
target = pn.voxel_down_sample(pn_target_outer_hull_pcl, target_voxel_size)

# Downsample both clouds and compute FPFH features for matching.
target_down, target_fpfh = preprocess_point_cloud(target, voxel_size)
source_down, source_fpfh = preprocess_point_cloud(source, voxel_size)
# Color the clouds (source blue-ish, target orange) and preview the
# pre-registration state.
source_down.paint_uniform_color([0, 0.651, 0.929])
target_down.paint_uniform_color([1, 0.706, 0])
pn.draw_geometries([source_down, target_down])

distance_threshold = 3 * voxel_size
start_time = time.time()

# Coarse alignment: fast global registration on the FPFH features.
result = pn.registration_fast_based_on_feature_matching(
    source_down, target_down, source_fpfh, target_fpfh,
    pn.FastGlobalRegistrationOption(
        maximum_correspondence_distance=distance_threshold))
print result
print time.time() - start_time
# Apply the coarse transform to the camera marker and both source copies.
cam_pose_pcl.transform(result.transformation)
source.transform(result.transformation)
source_down.transform(result.transformation)

# Preview the coarse alignment.
pn.draw_geometries([source_down, target_down])
print cam_pose_pcl.points[0]
#%%
# Refinement: point-to-plane ICP from identity, since the clouds were
# already transformed in place above.
result_icp = pn.registration_icp(source, target, distance_threshold_icp,
                                 np.eye(4),
                                 pn.TransformationEstimationPointToPlane())
print result_icp
cam_pose_pcl.transform(result_icp.transformation)