Code Example #1
def visualize_unwrapping():
    """Demonstrate the unwrapping process by color codes sections"""
    LEVEL = 2
    ico = refine_icosahedron(level=0)
    ico_s2 = GaussianAccumulatorS2Beta(level=LEVEL)
    ico_s2_organized_mesh = ico_s2.copy_ico_mesh(True)
    triangles_ico, vertices, ico_o3d = decompose(ico)
    triangles_s2_om, _, ico_o3d_s2_om = decompose(ico_s2_organized_mesh)
    icochart_slanted = refine_icochart(level=LEVEL, square=False)
    _, _, icochart_slanted_o3d = decompose(icochart_slanted)
    icochart_square = refine_icochart(level=LEVEL, square=True)
    _, _, icochart_square_o3d = decompose(icochart_square)

    colors = get_colors(range(triangles_ico.shape[0]),
                        colormap=plt.cm.tab20)[:, :3]
    colors_s2 = get_colors(range(triangles_s2_om.shape[0]),
                           colormap=plt.cm.tab20)[:, :3]

    colored_ico = assign_vertex_colors(ico_o3d, colors)
    colored_ico_s2 = assign_vertex_colors(ico_o3d_s2_om, colors_s2)
    colored_icochart, start_idx, end_idx = extract_chart(colored_ico_s2,
                                                         chart_idx=0)
    colored_icochart_slanted = assign_vertex_colors(
        icochart_slanted_o3d, colors_s2[start_idx:end_idx, :])
    colored_icochart_square = assign_vertex_colors(
        icochart_square_o3d, colors_s2[start_idx:end_idx, :])

    plot_meshes([colored_ico], [colored_ico_s2], colored_icochart,
                colored_icochart_slanted, colored_icochart_square)
Code Example #2
def example_normals(normals: np.ndarray):
    kwargs_s2 = dict(level=4)

    axis_frame = o3d.geometry.TriangleMesh.create_coordinate_frame(
        0.5).translate([-2.0, 0, 0])
    # Create Gaussian Accumulator
    ga_cpp_s2 = GaussianAccumulatorS2Beta(**kwargs_s2)
    # Integrate the normals and get open3d visualization
    colored_icosahedron = integrate_normals_and_visualize(normals, ga_cpp_s2)
    o3d.visualization.draw_geometries([colored_icosahedron, axis_frame])
    # New simplified API for finding peaks
    res = np.array(
        ga_cpp_s2.find_peaks(threshold_abs=20,
                             cluster_distance=0.1,
                             min_cluster_weight=0.2))
    print("New Detected Peaks:")
    res = sort_by_distance_from_point(res)
    print(res)

    full_image = np.asarray(ga_cpp_s2.ico_chart.image)
    plt.imshow(full_image)
    plt.xticks(np.arange(0, full_image.shape[1], step=1))
    plt.yticks(np.arange(0, full_image.shape[0], step=1))
    plt.show()

    # Don't forget to reset the GA
    ga_cpp_s2.clear_count()
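
The function above expects an (N, 3) array of unit normals. A minimal driver sketch, with made-up cluster directions and noise level purely for illustration (it assumes the same imports as the snippet itself):

import numpy as np

# Hypothetical input: synthetic unit normals clustered around three directions.
rng = np.random.default_rng(0)
directions = np.array([[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
samples = np.repeat(directions, 500, axis=0) + rng.normal(scale=0.05, size=(1500, 3))
samples = samples / np.linalg.norm(samples, axis=1, keepdims=True)  # project back onto the unit sphere
example_normals(samples)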
Code Example #3
def example_normals(normals: np.ndarray):
    LEVEL = 4
    kwargs_base = dict(level=LEVEL)
    kwargs_s2 = dict(**kwargs_base)

    axis_frame = o3d.geometry.TriangleMesh.create_coordinate_frame(
        0.5).translate([-2.0, 0, 0])
    # Create Gaussian Accumulator
    ga_cpp_s2 = GaussianAccumulatorS2Beta(**kwargs_s2)
    # Integrate the normals and get open3d visualization
    colored_icosahedron = integrate_normals_and_visualize(normals, ga_cpp_s2)
    o3d.visualization.draw_geometries([colored_icosahedron, axis_frame])
    # Create the IcoChart for unwrapping
    ico_chart_ = IcoCharts(LEVEL)
    normalized_bucket_counts_by_vertex = ga_cpp_s2.get_normalized_bucket_counts_by_vertex(
        True)
    ico_chart_.fill_image(normalized_bucket_counts_by_vertex)
    average_vertex_normals = np.asarray(
        ga_cpp_s2.get_average_normals_by_vertex(True))

    # 2D Peak Detection
    find_peaks_kwargs = dict(threshold_abs=20,
                             min_distance=1,
                             exclude_border=False,
                             indices=False)
    cluster_kwargs = dict(t=0.05, criterion='distance')
    average_filter = dict(min_total_weight=0.2)

    # New simplified API for finding peaks
    res = np.array(
        ga_cpp_s2.find_peaks(
            threshold_abs=find_peaks_kwargs['threshold_abs'],
            cluster_distance=cluster_kwargs['t'],
            min_cluster_weight=average_filter['min_total_weight']))
    print("New Detected Peaks:")
    res = sort_by_distance_from_point(res)
    print(res)

    # Old Way of finding peaks
    _, _, avg_peaks, _ = find_peaks_from_ico_charts(
        ico_chart_,
        np.asarray(normalized_bucket_counts_by_vertex),
        vertices=average_vertex_normals,
        find_peaks_kwargs=find_peaks_kwargs,
        cluster_kwargs=cluster_kwargs)
    avg_peaks = sort_by_distance_from_point(avg_peaks)
    print("Detected Peaks:")
    print(avg_peaks)

    full_image = np.asarray(ico_chart_.image)
    plt.imshow(full_image)
    plt.xticks(np.arange(0, full_image.shape[1], step=1))
    plt.yticks(np.arange(0, full_image.shape[0], step=1))
    plt.show()

    # Don't forget to reset the GA
    ga_cpp_s2.clear_count()
Code Example #4
def setup_fastgac(normals: np.ndarray, level=4):
    kwargs_s2 = dict(level=level)
    # Create Gaussian Accumulator
    ga_cpp_s2 = GaussianAccumulatorS2Beta(**kwargs_s2)
    _ = ga_cpp_s2.integrate(MatX3d(normals))

    ico_chart_ = IcoCharts(level)
    normalized_bucket_counts_by_vertex = ga_cpp_s2.get_normalized_bucket_counts_by_vertex(
        True)
    ico_chart_.fill_image(normalized_bucket_counts_by_vertex)

    return dict(ga=ga_cpp_s2, ico=ico_chart_, normals=normals)
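
A hedged usage sketch for setup_fastgac: the normal-array path and the peak-finding thresholds below are illustrative placeholders, not tuned defaults; find_peaks is called exactly as in the other examples.

import numpy as np

normals = np.load('my_normals.npy')  # placeholder: any (N, 3) array of unit normals
state = setup_fastgac(normals, level=4)
peaks = np.array(state['ga'].find_peaks(threshold_abs=20,
                                        cluster_distance=0.1,
                                        min_cluster_weight=0.2))
print(peaks)  # one row per dominant normal direction
image = np.asarray(state['ico'].image)  # unwrapped 2D histogram, already filled by setup_fastgac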
Code Example #5
def analyze_mesh(mesh):
    """Demonstrates unwrapping and peak detection of a S2 Histogram"""
    LEVEL = 4
    kwargs_opt_integrate = dict(num_nbr=12)

    # Create Gaussian Accumulator
    ga_cpp_s2 = GaussianAccumulatorS2Beta(level=LEVEL)
    # This function will integrate the normals and return an open3d mesh for visualization.
    colored_icosahedron_s2, _, _ = visualize_gaussian_integration(
        ga_cpp_s2, mesh, integrate_kwargs=kwargs_opt_integrate)
    num_triangles = ga_cpp_s2.num_buckets

    # for verification
    ico_s2_organized_mesh = ga_cpp_s2.copy_ico_mesh(True)
    _, _, ico_o3d_s2_om = decompose(ico_s2_organized_mesh)
    colors_s2 = get_colors(range(num_triangles), colormap=plt.cm.tab20)[:, :3]
    colored_ico_s2_organized_mesh = assign_vertex_colors(
        ico_o3d_s2_om, colors_s2)

    # Demonstrate the five charts for visualization
    bucket_counts = np.asarray(ga_cpp_s2.get_normalized_bucket_counts(True))
    bucket_colors = get_colors(bucket_counts)[:, :3]
    charts_triangles = []
    for chart_idx in range(5):
        chart_size = int(num_triangles / 5)
        chart_start_idx = chart_idx * chart_size
        chart_end_idx = chart_start_idx + chart_size
        icochart_square = refine_icochart(level=LEVEL, square=True)
        _, _, icochart_square_o3d = decompose(icochart_square)
        colored_icochart_square = assign_vertex_colors(
            icochart_square_o3d,
            bucket_colors[chart_start_idx:chart_end_idx, :])
        charts_triangles.append(colored_icochart_square)

    # Plot the unwrapped icosahedron
    new_charts = translate_meshes(charts_triangles,
                                  current_translation=-4.0,
                                  axis=1)
    all_charts = functools.reduce(lambda a, b: a + b, new_charts)
    plot_meshes(colored_ico_s2_organized_mesh, colored_icosahedron_s2,
                all_charts, mesh)
    avg_peaks = np.array(
        ga_cpp_s2.find_peaks(threshold_abs=25,
                             cluster_distance=0.1,
                             min_cluster_weight=0.15))
    print(avg_peaks)
    full_image = np.asarray(ga_cpp_s2.ico_chart.image)

    plt.imshow(full_image)
    plt.xticks(np.arange(0, full_image.shape[1], step=1))
    plt.yticks(np.arange(0, full_image.shape[0], step=1))
    plt.show()
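
A minimal sketch of driving analyze_mesh with an Open3D mesh. The file path is a placeholder; the Laplacian smoothing mirrors the preprocessing in Code Example #9 and may be unnecessary for clean meshes.

import open3d as o3d

mesh = o3d.io.read_triangle_mesh('path/to/mesh.ply')  # placeholder path
mesh = mesh.filter_smooth_laplacian(5)  # optional smoothing, as in Code Example #9
mesh.compute_triangle_normals()
analyze_mesh(mesh)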
Code Example #6
def main():
    print(
        "Here we are going to try out 4 different types of Gaussian Accumulators"
    )
    print("GaussianAccumulatorKDPy = GA using k-d tree implemented in scipy")
    print(
        "GaussianAccumulatorKD = GA using k-d tree implemented in C++ using nanoflann"
    )
    print(
        "GaussianAccumulatorOpt = GA spacing filling curves and local search. Optimized for top hemisphere. Don't use."
    )
    print(
        "GaussianAccumulatorS2 = GA spacing filling curves and local search. Works on full sphere. This is the really the best"
    )
    print("")
    kwargs_base = dict(level=4, max_phi=180)
    kwargs_kdd = dict(**kwargs_base, max_leaf_size=10)
    kwargs_opt = dict(**kwargs_base)
    kwargs_s2 = dict(**kwargs_base)

    kwargs_opt_integrate = dict(num_nbr=12)
    # Get an Example Mesh
    ga_py_kdd = GaussianAccumulatorKDPy(**kwargs_kdd)
    ga_cpp_kdd = GaussianAccumulatorKD(**kwargs_kdd)
    ga_cpp_opt = GaussianAccumulatorOpt(**kwargs_opt)
    ga_cpp_s2 = GaussianAccumulatorS2Beta(level=4)

    query_max_phi = kwargs_base['max_phi']

    for i, mesh in enumerate(get_mesh_data_iterator()):
        if i < 0:
            continue

        colored_icosahedron_py, normals, neighbors_py = visualize_gaussian_integration(
            ga_py_kdd, mesh, max_phi=query_max_phi)
        colored_icosahedron_cpp, normals, neighbors_cpp = visualize_gaussian_integration(
            ga_cpp_kdd, mesh, max_phi=query_max_phi)
        colored_icosahedron_opt, normals, neighbors_opt = visualize_gaussian_integration(
            ga_cpp_opt,
            mesh,
            max_phi=query_max_phi,
            integrate_kwargs=kwargs_opt_integrate)
        colored_icosahedron_s2, normals, neighbors_s2 = visualize_gaussian_integration(
            ga_cpp_s2,
            mesh,
            max_phi=query_max_phi,
            integrate_kwargs=kwargs_opt_integrate)

        print(
            "Visualing the mesh and the colorized Gaussian Accumulator of type 'GaussianAccumulatorS2'"
        )
        plot_meshes(colored_icosahedron_s2, mesh)

        # 1D Peak detection
        pcd_cpp_s2 = plot_hilbert_curve(ga_cpp_s2, plot=False)
        # 2D Peak Detection
        pcd_cpp_s2_image = get_image_peaks(ga_cpp_s2, **kwargs_base)

        normals_sorted_proj_hilbert = np.asarray(
            ga_cpp_opt.get_bucket_normals())
        normals_sorted_cube_hilbert = np.asarray(
            ga_cpp_s2.get_bucket_normals())

        print(
            "Visualize 1D Peak Detection (Left) and 2D Peak Detection (Right).\n"
        )
        plot_meshes([
            colored_icosahedron_s2,
            create_line_set(normals_sorted_cube_hilbert * 1.01), *pcd_cpp_s2
        ], [
            colored_icosahedron_s2,
            create_line_set(normals_sorted_cube_hilbert * 1.01),
            *pcd_cpp_s2_image
        ])

        ga_py_kdd.clear_count()
        ga_cpp_kdd.clear_count()
        ga_cpp_opt.clear_count()
        ga_cpp_s2.clear_count()
Code Example #7
def setup_fastgac_simple(level=4):
    ga_cpp_s2 = GaussianAccumulatorS2Beta(level=level)
    ico_chart = IcoCharts(level)
    return dict(ga=ga_cpp_s2, ico=ico_chart)
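
Unlike setup_fastgac in Code Example #4, this variant defers integration to the caller. A hedged sketch of the follow-up steps, reusing only calls that appear in the snippets above (imports for numpy, MatX3d, etc. are assumed, as in those snippets):

state = setup_fastgac_simple(level=4)
state['ga'].integrate(MatX3d(normals))  # normals: (N, 3) array of unit normals
counts = state['ga'].get_normalized_bucket_counts_by_vertex(True)
state['ico'].fill_image(counts)
image = np.asarray(state['ico'].image)  # 2D unwrapped histogram, ready for peak detection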
Code Example #8
def capture(config, video=None):
    # Configure streams
    pipeline, process_modules, filters, proj_mat, t265_device = create_pipeline(
        config)
    t265_pipeline = t265_device['pipeline']
    logging.info("Pipeline Created")

    # Long-lived objects. These are the objects that hold all the algorithms for surface extraction.
    # They need to be long-lived because they hold state (thread scheduler, image data structures, etc.).
    ll_objects = dict()
    ll_objects['pl'] = Polylidar3D(**config['polylidar'])
    ll_objects['ga'] = GaussianAccumulatorS2Beta(
        level=config['fastgac']['level'])
    ll_objects['ico'] = IcoCharts(level=config['fastgac']['level'])

    if video:
        frame_width = config['color']['width'] * 2
        frame_height = config['color']['height']
        out_vid = cv2.VideoWriter(video,
                                  cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'),
                                  30, (frame_width, frame_height))

    all_records = []
    counter = 0
    try:
        while True:
            t00 = time.perf_counter()
            try:
                color_image, depth_image, meta = get_frames(
                    pipeline, t265_pipeline, process_modules, filters, config)
            except RuntimeError:
                # This only gets thrown when in playback mode from a recorded file when frames "run out"
                logging.info("Out of frames")
                break
            t0 = time.perf_counter()
            if color_image is None or not valid_frames(
                    color_image, depth_image, **
                    config['polygon']['frameskip']):
                logging.debug("Invalid Frames")
                continue
            t1 = time.perf_counter()
            counter += 1
            # if counter < 10:
            #     continue

            try:
                # Get 6DOF Pose at appropriate timestamp
                if config['tracking']['enabled']:
                    euler_t265 = get_pose_matrix(meta['ts'])
                    logging.info('euler_t265: %r', euler_t265)

                if config['show_polygon']:
                    # planes, obstacles, timings, o3d_mesh = get_polygon(depth_image, config, ll_objects, **meta)
                    planes, obstacles, timings = get_polygon(
                        depth_image, config, ll_objects, **meta)
                    timings['t_get_frames'] = (t0 - t00) * 1000
                    timings['t_check_frames'] = (t1 - t0) * 1000
                    all_records.append(timings)

                    # Plot polygon in rgb frame
                    plot_planes_and_obstacles(planes, obstacles, proj_mat,
                                              None, color_image, config)

                # Show images
                if config.get("show_images"):
                    # Convert to open cv image types (BGR)
                    color_image_cv, depth_image_cv = colorize_images_open_cv(
                        color_image, depth_image, config)
                    # Stack both images horizontally
                    images = np.hstack((color_image_cv, depth_image_cv))
                    cv2.imshow('RealSense Color/Depth (Aligned)', images)
                    if video:
                        out_vid.write(images)
                    res = cv2.waitKey(1)
                    if res == ord('p'):
                        uid = uuid.uuid4()
                        logging.info("Saving Picture: {}".format(uid))
                        cv2.imwrite(
                            path.join(PICS_DIR, "{}_color.jpg".format(uid)),
                            color_image_cv)
                        cv2.imwrite(
                            path.join(PICS_DIR, "{}_stack.jpg".format(uid)),
                            images)
                    if res == ord('m'):
                        pass
                    to_save_frames = config['save'].get('frames')
                    if config['playback'][
                            'enabled'] and to_save_frames is not None and counter in to_save_frames:
                        logging.info("Saving Picture: {}".format(counter))
                        cv2.imwrite(
                            path.join(PICS_DIR,
                                      "{}_color.jpg".format(counter)),
                            color_image_cv)
                        cv2.imwrite(
                            path.join(PICS_DIR,
                                      "{}_stack.jpg".format(counter)), images)

                logging.info(
                    f"Frame %d; Get Frames: %.2f; Check Valid Frame: %.2f; Laplacian: %.2f; Bilateral: %.2f; Mesh: %.2f; FastGA: %.2f; Plane/Poly: %.2f; Filtering: %.2f",
                    counter, timings['t_get_frames'],
                    timings['t_check_frames'], timings['t_laplacian'],
                    timings['t_bilateral'], timings['t_mesh'],
                    timings['t_fastga_total'],
                    timings['t_polylidar_planepoly'],
                    timings['t_polylidar_filter'])
            except Exception as e:
                logging.exception("Error!")
    finally:
        pipeline.stop()
    if video is not None:
        out_vid.release()
    cv2.destroyAllWindows()

    df = pd.DataFrame.from_records(all_records)
    print(df.mean())
    if config['save'].get('timings') != "":
        df.to_csv(config['save'].get('timings', 'data/timings.csv'))
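
capture() reads a nested config dict. The skeleton below lists only the keys the function body itself touches, with placeholder values; the helpers it calls (create_pipeline, get_frames, get_polygon, colorize_images_open_cv) very likely read additional keys, so treat this as a partial sketch.

config = dict(
    polylidar=dict(),                    # kwargs forwarded to Polylidar3D(...)
    fastgac=dict(level=4),               # level for GaussianAccumulatorS2Beta and IcoCharts
    color=dict(width=640, height=480),   # used here only to size the optional video writer
    tracking=dict(enabled=False),        # enable 6DOF pose lookup from the T265
    show_polygon=True,
    show_images=True,
    polygon=dict(frameskip=dict()),      # kwargs forwarded to valid_frames(...)
    playback=dict(enabled=False),
    save=dict(frames=None, timings=''),  # frame indices to dump and a CSV path for timings
)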
Code Example #9
def main():
    EXAMPLE_INDEX = 1
    kwargs_base = dict(level=4)
    kwargs_s2 = dict(**kwargs_base)
    kwargs_opt_integrate = dict(num_nbr=12)
    query_max_phi = 175

    # Get an Example Mesh
    ga_cpp_s2 = GaussianAccumulatorS2Beta(**kwargs_s2)

    example_mesh = o3d.io.read_triangle_mesh(str(ALL_MESHES[EXAMPLE_INDEX]))
    r = ALL_MESHES_ROTATIONS[EXAMPLE_INDEX]
    example_mesh_filtered = example_mesh
    if r is not None:
        example_mesh_filtered = example_mesh_filtered.rotate(r.as_matrix())
        example_mesh_filtered = example_mesh_filtered.filter_smooth_laplacian(
            5)

    example_mesh_filtered.compute_triangle_normals()
    # np.save('fixtures/normals/basement.npy', np.asarray(example_mesh_filtered.triangle_normals))
    colored_icosahedron_s2, normals, neighbors_s2 = visualize_gaussian_integration(
        ga_cpp_s2,
        example_mesh_filtered,
        max_phi=query_max_phi,
        integrate_kwargs=kwargs_opt_integrate)

    o3d.visualization.draw_geometries([example_mesh_filtered])
    o3d.visualization.draw_geometries([colored_icosahedron_s2])

    # Visualize unwrapping
    ico_chart_ = IcoCharts(kwargs_base['level'])
    t2 = time.perf_counter()
    normalized_bucket_counts_by_vertex = ga_cpp_s2.get_normalized_bucket_counts_by_vertex(
        True)
    ico_chart_.fill_image(normalized_bucket_counts_by_vertex)

    average_bucket_normals = np.asarray(
        ga_cpp_s2.get_bucket_average_normals(True))
    pcd = o3d.geometry.PointCloud(
        o3d.utility.Vector3dVector(average_bucket_normals))
    pcd.paint_uniform_color([1, 0, 0])
    average_vertex_normals = np.asarray(
        ga_cpp_s2.get_average_normals_by_vertex(True))

    find_peaks_kwargs = dict(threshold_abs=50,
                             min_distance=1,
                             exclude_border=False,
                             indices=False)
    print(np.asarray(ico_chart_.image).shape)
    cluster_kwargs = dict(t=0.1, criterion='distance')
    _, _, avg_peaks, avg_weights = find_peaks_from_ico_charts(
        ico_chart_,
        np.asarray(normalized_bucket_counts_by_vertex),
        vertices=average_vertex_normals,
        find_peaks_kwargs=find_peaks_kwargs,
        cluster_kwargs=cluster_kwargs)
    t3 = time.perf_counter()
    print(t3 - t2)
    print(avg_peaks)
    # import ipdb; ipdb.set_trace()

    arrow_avg_peaks = get_arrow_normals(avg_peaks, avg_weights)
    wireframe = o3d.geometry.LineSet.create_from_triangle_mesh(
        colored_icosahedron_s2)
    o3d.visualization.draw_geometries(
        [colored_icosahedron_s2, *arrow_avg_peaks, wireframe])
    # o3d.visualization.draw_geometries([colored_icosahedron_s2, *arrow_avg_peaks, pcd])

    full_image = np.asarray(ico_chart_.image)

    plt.imshow(full_image)
    plt.axis('off')
    # plt.xticks(np.arange(0, full_image.shape[1], step=1))
    # plt.yticks(np.arange(0, full_image.shape[0], step=1))
    plt.show()
Code Example #10
def extract_all_dominant_plane_normals(tri_mesh,
                                       level=5,
                                       with_o3d=False,
                                       ga_=None,
                                       ico_chart_=None,
                                       **kwargs):

    # Reuse objects if provided
    if ga_ is not None:
        ga = ga_
    else:
        ga = GaussianAccumulatorS2Beta(level=level)

    if ico_chart_ is not None:
        ico_chart = ico_chart_
    else:
        ico_chart = IcoCharts(level=level)

    triangle_normals = np.asarray(tri_mesh.triangle_normals)
    triangle_normals_ds = down_sample_normals(triangle_normals, **kwargs)

    # np.savetxt('bad_normals.txt', triangle_normals_ds)
    triangle_normals_ds_mat = MatX3d(triangle_normals_ds)
    t1 = time.perf_counter()
    ga.integrate(triangle_normals_ds_mat)
    t2 = time.perf_counter()

    logging.debug(
        "Gaussian Accumulator - Normals Sampled: %d; Took (ms): %.2f",
        triangle_normals_ds.shape[0], (t2 - t1) * 1000)

    # New way of detecting peaks, all in C++
    # Only need three parameters now
    fp = kwargs['find_peaks_kwargs']
    cl = kwargs['cluster_kwargs']
    avg_filter = kwargs['average_filter']
    t3 = time.perf_counter()
    avg_peaks = np.array(
        ga.find_peaks(threshold_abs=fp['threshold_abs'],
                      cluster_distance=cl['t'],
                      min_cluster_weight=avg_filter['min_total_weight']))
    t4 = time.perf_counter()

    # Old, python library (Scipy, sklearn) way of detecting peaks
    # Should still work, this API is not deprecated
    # avg_peaks, pcd_all_peaks, arrow_avg_peaks, timings_dict = get_image_peaks(
    #     ico_chart, ga, level=level, with_o3d=with_o3d, **kwargs)

    # Create Open3D structures for visualization
    if with_o3d:
        # Visualize the Sphere
        accumulator_counts = np.asarray(ga.get_normalized_bucket_counts())
        refined_icosahedron_mesh = create_open_3d_mesh(
            np.asarray(ga.mesh.triangles), np.asarray(ga.mesh.vertices))
        color_counts = get_colors(accumulator_counts)[:, :3]
        colored_icosahedron = assign_vertex_colors(refined_icosahedron_mesh,
                                                   color_counts)
    else:
        colored_icosahedron = None

    elapsed_time_fastga = (t2 - t1) * 1000
    elapsed_time_peak = (t4 - t3) * 1000
    elapsed_time_total = elapsed_time_fastga + elapsed_time_peak

    timings = dict(t_fastga_total=elapsed_time_total,
                   t_fastga_integrate=elapsed_time_fastga,
                   t_fastga_peak=elapsed_time_peak)

    ga.clear_count()
    # return avg_peaks, pcd_all_peaks, arrow_avg_peaks, colored_icosahedron, timings
    return avg_peaks, None, None, colored_icosahedron, timings
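
extract_all_dominant_plane_normals pulls its peak-detection parameters out of **kwargs. A hedged call sketch with values mirroring the other examples; it assumes tri_mesh exposes triangle_normals and that down_sample_normals tolerates the extra keyword arguments forwarded to it.

avg_peaks, _, _, colored_ico, timings = extract_all_dominant_plane_normals(
    tri_mesh,
    level=4,
    with_o3d=False,
    find_peaks_kwargs=dict(threshold_abs=50, min_distance=1,
                           exclude_border=False, indices=False),
    cluster_kwargs=dict(t=0.1, criterion='distance'),
    average_filter=dict(min_total_weight=0.15))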