def run(config): print("register fragments.") o3d.utility.set_verbosity_level(o3d.utility.VerbosityLevel.Debug) ply_file_names = get_file_list( join(config["path_dataset"], config["folder_fragment"]), ".ply") make_clean_folder(join(config["path_dataset"], config["folder_scene"])) make_posegraph_for_scene(ply_file_names, config) optimize_posegraph_for_scene(config["path_dataset"], config)
def optimize_posegraph_for_refined_scene(path_dataset, config):
    pose_graph_name = join(path_dataset, config["template_refined_posegraph"])
    pose_graph_optimized_name = join(
        path_dataset, config["template_refined_posegraph_optimized"])
    run_posegraph_optimization(
        pose_graph_name,
        pose_graph_optimized_name,
        max_correspondence_distance=config["voxel_size"] * 1.4,
        preference_loop_closure=config["preference_loop_closure_registration"])
def optimize_posegraph_for_fragment(path_dataset, fragment_id, config):
    pose_graph_name = join(path_dataset,
                           config["template_fragment_posegraph"] % fragment_id)
    pose_graph_optimized_name = join(
        path_dataset,
        config["template_fragment_posegraph_optimized"] % fragment_id)
    run_posegraph_optimization(
        pose_graph_name,
        pose_graph_optimized_name,
        max_correspondence_distance=config["max_depth_diff"],
        preference_loop_closure=config["preference_loop_closure_odometry"])
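# Both wrappers above delegate to `run_posegraph_optimization`, which is not
# defined in this file. A minimal sketch, assuming it wraps Open3D's pose
# graph global optimization; the edge-prune threshold is an assumed default:
def run_posegraph_optimization(pose_graph_name, pose_graph_optimized_name,
                               max_correspondence_distance,
                               preference_loop_closure):
    # Load the pose graph, prune unreliable loop-closure edges, and optimize
    # with Levenberg-Marquardt, then write the result back to disk.
    method = o3d.pipelines.registration.GlobalOptimizationLevenbergMarquardt()
    criteria = o3d.pipelines.registration.GlobalOptimizationConvergenceCriteria()
    option = o3d.pipelines.registration.GlobalOptimizationOption(
        max_correspondence_distance=max_correspondence_distance,
        edge_prune_threshold=0.25,
        preference_loop_closure=preference_loop_closure,
        reference_node=0)
    pose_graph = o3d.io.read_pose_graph(pose_graph_name)
    o3d.pipelines.registration.global_optimization(pose_graph, method,
                                                   criteria, option)
    o3d.io.write_pose_graph(pose_graph_optimized_name, pose_graph)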
def make_pointcloud_for_fragment(path_dataset, color_files, depth_files,
                                 fragment_id, n_fragments, intrinsic, config):
    mesh = integrate_rgb_frames_for_fragment(
        color_files, depth_files, fragment_id, n_fragments,
        join(path_dataset,
             config["template_fragment_posegraph_optimized"] % fragment_id),
        intrinsic, config)
    # Keep only the vertices of the integrated mesh as the fragment point cloud.
    pcd = o3d.geometry.PointCloud()
    pcd.points = mesh.vertices
    pcd.colors = mesh.vertex_colors
    pcd_name = join(path_dataset,
                    config["template_fragment_pointcloud"] % fragment_id)
    o3d.io.write_point_cloud(pcd_name, pcd, write_ascii=False, compressed=True)
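# `integrate_rgb_frames_for_fragment` is not shown above. A minimal sketch,
# assuming TSDF integration with Open3D's ScalableTSDFVolume. The config key
# "tsdf_cubic_size", the sdf_trunc value, and the helper `read_rgbd_image`
# (which builds an RGBDImage from a color/depth pair) are assumptions here:
def integrate_rgb_frames_for_fragment(color_files, depth_files, fragment_id,
                                      n_fragments, pose_graph_name, intrinsic,
                                      config):
    pose_graph = o3d.io.read_pose_graph(pose_graph_name)
    volume = o3d.pipelines.integration.ScalableTSDFVolume(
        voxel_length=config["tsdf_cubic_size"] / 512.0,
        sdf_trunc=0.04,
        color_type=o3d.pipelines.integration.TSDFVolumeColorType.RGB8)
    for i in range(len(pose_graph.nodes)):
        i_abs = fragment_id * config['n_frames_per_fragment'] + i
        rgbd = read_rgbd_image(color_files[i_abs], depth_files[i_abs], False,
                               config)
        pose = pose_graph.nodes[i].pose
        # TSDF integration expects the extrinsic (world-to-camera) matrix.
        volume.integrate(rgbd, intrinsic, np.linalg.inv(pose))
    mesh = volume.extract_triangle_mesh()
    mesh.compute_vertex_normals()
    return mesh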
def make_posegraph_for_refined_scene(ply_file_names, config):
    pose_graph = o3d.io.read_pose_graph(
        join(config["path_dataset"],
             config["template_global_posegraph_optimized"]))

    n_files = len(ply_file_names)
    matching_results = {}
    for edge in pose_graph.edges:
        s = edge.source_node_id
        t = edge.target_node_id
        matching_results[s * n_files + t] = \
            matching_result(s, t, edge.transformation)

    if config["python_multi_threading"]:
        from joblib import Parallel, delayed
        import multiprocessing
        MAX_THREAD = min(multiprocessing.cpu_count(),
                         max(len(pose_graph.edges), 1))
        results = Parallel(n_jobs=MAX_THREAD)(
            delayed(register_point_cloud_pair)(
                ply_file_names, matching_results[r].s, matching_results[r].t,
                matching_results[r].transformation, config)
            for r in matching_results)
        for i, r in enumerate(matching_results):
            matching_results[r].transformation = results[i][0]
            matching_results[r].information = results[i][1]
    else:
        for r in matching_results:
            (matching_results[r].transformation,
             matching_results[r].information) = \
                register_point_cloud_pair(ply_file_names,
                                          matching_results[r].s,
                                          matching_results[r].t,
                                          matching_results[r].transformation,
                                          config)

    pose_graph_new = o3d.pipelines.registration.PoseGraph()
    odometry = np.identity(4)
    pose_graph_new.nodes.append(
        o3d.pipelines.registration.PoseGraphNode(odometry))
    for r in matching_results:
        (odometry, pose_graph_new) = update_posegraph_for_scene(
            matching_results[r].s, matching_results[r].t,
            matching_results[r].transformation,
            matching_results[r].information, odometry, pose_graph_new)
    print(pose_graph_new)
    o3d.io.write_pose_graph(
        join(config["path_dataset"], config["template_refined_posegraph"]),
        pose_graph_new)
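# `matching_result` is the small bookkeeping container used above and in
# make_posegraph_for_scene below. A minimal sketch of the fields it must hold,
# inferred from the two- and three-argument call sites:
class matching_result:

    def __init__(self, s, t, trans=np.identity(4)):
        # Source/target fragment indices plus the pairwise registration result.
        self.s = s
        self.t = t
        self.success = False
        self.transformation = trans
        self.information = np.identity(6)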
def run(config): print("making fragments from RGBD sequence.") make_clean_folder(join(config["path_dataset"], config["folder_fragment"])) [color_files, depth_files, pose_files] = get_rgbd_file_lists(config["path_dataset"], config["has_tracking"]) n_files = len(color_files) n_fragments = int( math.ceil(float(n_files) / config['n_frames_per_fragment'])) if config["python_multi_threading"] is True: from joblib import Parallel, delayed import multiprocessing import subprocess MAX_THREAD = min(multiprocessing.cpu_count(), n_fragments) Parallel(n_jobs=MAX_THREAD)(delayed(process_single_fragment) (fragment_id, color_files, depth_files, pose_files, n_files, n_fragments, config) for fragment_id in range(n_fragments)) else: for fragment_id in range(n_fragments): process_single_fragment(fragment_id, color_files, depth_files, pose_files, n_files, n_fragments, config)
def compute_initial_registration(s, t, source_down, target_down, source_fpfh,
                                 target_fpfh, path_dataset, config):

    if t == s + 1:  # odometry case
        print("Using RGBD odometry")
        pose_graph_frag = o3d.io.read_pose_graph(
            join(path_dataset,
                 config["template_fragment_posegraph_optimized"] % s))
        n_nodes = len(pose_graph_frag.nodes)
        transformation_init = np.linalg.inv(pose_graph_frag.nodes[n_nodes -
                                                                  1].pose)
        (transformation, information) = \
            multiscale_icp(source_down, target_down,
                           [config["voxel_size"]], [50], config,
                           transformation_init)
    else:  # loop closure case
        (success, transformation, information) = \
            register_point_cloud_fpfh(source_down, target_down, source_fpfh,
                                      target_fpfh, config)
        if not success:
            print("No reasonable solution. Skip this pair")
            return (False, np.identity(4), np.zeros((6, 6)))
    print(transformation)

    if config["debug_mode"]:
        draw_registration_result(source_down, target_down, transformation)
    return (True, transformation, information)
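# `register_point_cloud_fpfh` is not shown above. A minimal sketch of the
# loop-closure branch, assuming RANSAC over FPFH feature correspondences; the
# thresholds and rejection heuristics below are assumptions, and other global
# registration methods could be substituted:
def register_point_cloud_fpfh(source, target, source_fpfh, target_fpfh,
                              config):
    distance_threshold = config["voxel_size"] * 1.4
    result = o3d.pipelines.registration.registration_ransac_based_on_feature_matching(
        source, target, source_fpfh, target_fpfh, False, distance_threshold,
        o3d.pipelines.registration.TransformationEstimationPointToPoint(False),
        4,
        [
            o3d.pipelines.registration.
            CorrespondenceCheckerBasedOnEdgeLength(0.9),
            o3d.pipelines.registration.CorrespondenceCheckerBasedOnDistance(
                distance_threshold)
        ],
        o3d.pipelines.registration.RANSACConvergenceCriteria(1000000, 0.999))
    if result.transformation.trace() == 4.0:
        # An identity transform signals that no solution was found.
        return (False, np.identity(4), np.zeros((6, 6)))
    information = o3d.pipelines.registration.get_information_matrix_from_point_clouds(
        source, target, distance_threshold, result.transformation)
    if information[5, 5] / min(len(source.points), len(target.points)) < 0.3:
        # Too few correspondences support the estimated transform.
        return (False, np.identity(4), np.zeros((6, 6)))
    return (True, result.transformation, information)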
def make_posegraph_for_fragment(path_dataset, sid, eid, color_files,
                                depth_files, pose_files, fragment_id,
                                n_fragments, intrinsic, with_opencv, config):
    o3d.utility.set_verbosity_level(o3d.utility.VerbosityLevel.Error)
    pose_graph = o3d.pipelines.registration.PoseGraph()
    # If tracking poses are available, use them to initialize the odometry;
    # otherwise start from the identity.
    if config["has_tracking"]:
        trans_odometry = np.linalg.inv(read_pose(pose_files[0]))
    else:
        trans_odometry = np.identity(4)
    pose_graph.nodes.append(
        o3d.pipelines.registration.PoseGraphNode(
            np.linalg.inv(trans_odometry)))
    for s in range(sid, eid):
        for t in range(s + 1, eid):
            # odometry
            if t == s + 1:
                print(
                    "Fragment %03d / %03d :: RGBD matching between frame : %d and %d"
                    % (fragment_id, n_fragments - 1, s, t))
                [success, trans, info] = register_one_rgbd_pair(
                    s, t, color_files, depth_files, pose_files, intrinsic,
                    with_opencv, config)
                trans_odometry = np.dot(trans, trans_odometry)
                trans_odometry_inv = np.linalg.inv(trans_odometry)
                pose_graph.nodes.append(
                    o3d.pipelines.registration.PoseGraphNode(
                        trans_odometry_inv))
                pose_graph.edges.append(
                    o3d.pipelines.registration.PoseGraphEdge(s - sid,
                                                             t - sid,
                                                             trans,
                                                             info,
                                                             uncertain=False))
            # keyframe loop closure
            if s % config['n_keyframes_per_n_frame'] == 0 \
                    and t % config['n_keyframes_per_n_frame'] == 0:
                print(
                    "Fragment %03d / %03d :: RGBD matching between frame : %d and %d"
                    % (fragment_id, n_fragments - 1, s, t))
                [success, trans, info] = register_one_rgbd_pair(
                    s, t, color_files, depth_files, pose_files, intrinsic,
                    with_opencv, config)
                if success:
                    pose_graph.edges.append(
                        o3d.pipelines.registration.PoseGraphEdge(
                            s - sid, t - sid, trans, info, uncertain=True))
    o3d.io.write_pose_graph(
        join(path_dataset,
             config["template_fragment_posegraph"] % fragment_id), pose_graph)
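# `register_one_rgbd_pair` is not shown above. A minimal sketch, assuming
# Open3D's RGBD odometry with the hybrid photometric/geometric Jacobian.
# `read_rgbd_image` is a hypothetical helper, the tracking-based initialization
# (including its sign convention) is an assumption, and the OpenCV wide-baseline
# initialization implied by `with_opencv` is omitted:
def register_one_rgbd_pair(s, t, color_files, depth_files, pose_files,
                           intrinsic, with_opencv, config):
    source_rgbd = read_rgbd_image(color_files[s], depth_files[s], True, config)
    target_rgbd = read_rgbd_image(color_files[t], depth_files[t], True, config)
    option = o3d.pipelines.odometry.OdometryOption()
    option.depth_diff_max = config["max_depth_diff"]
    if config["has_tracking"]:
        # Initialize from the tracked camera poses when they are available.
        odo_init = np.dot(np.linalg.inv(read_pose(pose_files[t])),
                          read_pose(pose_files[s]))
    else:
        odo_init = np.identity(4)
    [success, trans, info] = o3d.pipelines.odometry.compute_rgbd_odometry(
        source_rgbd, target_rgbd, intrinsic, odo_init,
        o3d.pipelines.odometry.RGBDOdometryJacobianFromHybridTerm(), option)
    return [success, trans, info]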
def make_posegraph_for_scene(ply_file_names, config):
    pose_graph = o3d.pipelines.registration.PoseGraph()
    odometry = np.identity(4)
    pose_graph.nodes.append(o3d.pipelines.registration.PoseGraphNode(odometry))

    n_files = len(ply_file_names)
    matching_results = {}
    for s in range(n_files):
        for t in range(s + 1, n_files):
            matching_results[s * n_files + t] = matching_result(s, t)

    if config["python_multi_threading"]:
        from joblib import Parallel, delayed
        import multiprocessing
        MAX_THREAD = min(multiprocessing.cpu_count(),
                         max(len(matching_results), 1))
        results = Parallel(n_jobs=MAX_THREAD)(
            delayed(register_point_cloud_pair)(ply_file_names,
                                               matching_results[r].s,
                                               matching_results[r].t, config)
            for r in matching_results)
        for i, r in enumerate(matching_results):
            matching_results[r].success = results[i][0]
            matching_results[r].transformation = results[i][1]
            matching_results[r].information = results[i][2]
    else:
        for r in matching_results:
            (matching_results[r].success, matching_results[r].transformation,
             matching_results[r].information) = \
                register_point_cloud_pair(ply_file_names,
                                          matching_results[r].s,
                                          matching_results[r].t, config)

    for r in matching_results:
        if matching_results[r].success:
            (odometry, pose_graph) = update_posegraph_for_scene(
                matching_results[r].s, matching_results[r].t,
                matching_results[r].transformation,
                matching_results[r].information, odometry, pose_graph)
    o3d.io.write_pose_graph(
        join(config["path_dataset"], config["template_global_posegraph"]),
        pose_graph)
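# `update_posegraph_for_scene` is not shown above. A minimal sketch: an
# odometry pair (t == s + 1) extends the node chain with a certain edge, while
# any other pair contributes only an uncertain loop-closure edge that the
# global optimization may later prune:
def update_posegraph_for_scene(s, t, transformation, information, odometry,
                               pose_graph):
    if t == s + 1:  # odometry case: chain the new fragment pose
        odometry = np.dot(transformation, odometry)
        odometry_inv = np.linalg.inv(odometry)
        pose_graph.nodes.append(
            o3d.pipelines.registration.PoseGraphNode(odometry_inv))
        pose_graph.edges.append(
            o3d.pipelines.registration.PoseGraphEdge(s,
                                                     t,
                                                     transformation,
                                                     information,
                                                     uncertain=False))
    else:  # loop closure case: edge only, no new node
        pose_graph.edges.append(
            o3d.pipelines.registration.PoseGraphEdge(s,
                                                     t,
                                                     transformation,
                                                     information,
                                                     uncertain=True))
    return (odometry, pose_graph)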