Example #1
0
def get_image_peaks(ga_cpp_s2, level=2, **kwargs):
    """Detect dominant surface normals from a Gaussian Accumulator.

    Unwraps the accumulator's per-vertex bucket counts onto a 2D ico-chart
    image, runs 2D peak detection on that image, and converts the detected
    peaks into geometries for visualization.

    Parameters
    ----------
    ga_cpp_s2 : GaussianAccumulatorS2
        Populated accumulator whose bucket counts are analyzed.
    level : int, optional
        Icosahedron refinement level used to build the chart (default 2).
        Should match the level the accumulator was built with.
    **kwargs
        Unused; accepted for call-site compatibility.

    Returns
    -------
    list
        ``[pcd_all_peaks, *arrow_avg_peaks]`` — a point cloud of all raw
        peak normals followed by arrow geometries for the averaged peaks.
    """
    ico_chart = IcoCharts(level)
    # True -> counts are normalized per vertex before unwrapping.
    normalized_bucket_counts_by_vertex = ga_cpp_s2.get_normalized_bucket_counts_by_vertex(
        True)
    ico_chart.fill_image(normalized_bucket_counts_by_vertex)

    # 2D peak detection on the unwrapped image; values are on a 0-255-like
    # scale here, hence the absolute threshold of 20.
    find_peaks_kwargs = dict(threshold_abs=20,
                             min_distance=1,
                             exclude_border=False,
                             indices=False)
    # Agglomerate nearby peaks whose pairwise distance is below t=0.2.
    cluster_kwargs = dict(t=0.2, criterion='distance')
    # Discard averaged clusters carrying less than 5% of total weight.
    average_filter = dict(min_total_weight=0.05)

    peaks, clusters, avg_peaks, avg_weights = find_peaks_from_ico_charts(
        ico_chart,
        np.asarray(normalized_bucket_counts_by_vertex),
        find_peaks_kwargs=find_peaks_kwargs,
        cluster_kwargs=cluster_kwargs,
        average_filter=average_filter)
    gaussian_normals_sorted = np.asarray(ico_chart.sphere_mesh.vertices)
    pcd_all_peaks = get_pc_all_peaks(peaks, clusters, gaussian_normals_sorted)
    arrow_avg_peaks = get_arrow_normals(avg_peaks, avg_weights)

    # NOTE(review): removed a leftover debug `print(avg_peaks)` here.
    return [pcd_all_peaks, *arrow_avg_peaks]
Example #2
0
def get_image_peaks(ico_chart,
                    ga,
                    level=2,
                    with_o3d=False,
                    find_peaks_kwargs=None,
                    cluster_kwargs=None,
                    average_filter=None,
                    **kwargs):
    """Detect dominant surface normals from a Gaussian Accumulator image.

    Unwraps the accumulator's per-vertex bucket counts onto ``ico_chart``,
    runs 2D peak detection, and optionally builds Open3D geometries for
    visualization.

    Parameters
    ----------
    ico_chart : IcoCharts
        Pre-built chart whose image buffer is filled and analyzed.
    ga : GaussianAccumulator
        Populated accumulator providing the bucket counts.
    level : int, optional
        Icosahedron refinement level (default 2); kept for API symmetry.
    with_o3d : bool, optional
        If True, also create Open3D point-cloud/arrow geometries.
    find_peaks_kwargs, cluster_kwargs, average_filter : dict, optional
        Overrides for peak detection, clustering, and cluster filtering.
        ``None`` selects the defaults below.
    **kwargs
        Unused; accepted for call-site compatibility.

    Returns
    -------
    tuple
        ``(avg_peaks, pcd_all_peaks, arrow_avg_peaks, timings)`` where the
        geometry entries are ``None`` unless ``with_o3d`` is True and
        ``timings`` maps ``t_fastga_peak`` to elapsed milliseconds.
    """
    # Mutable dicts must not be used as default argument values (they would
    # be shared across calls); fill in the historical defaults here instead.
    if find_peaks_kwargs is None:
        find_peaks_kwargs = dict(threshold_abs=2,
                                 min_distance=1,
                                 exclude_border=False,
                                 indices=False)
    if cluster_kwargs is None:
        cluster_kwargs = dict(t=0.10, criterion='distance')
    if average_filter is None:
        average_filter = dict(min_total_weight=0.01)

    normalized_bucket_counts_by_vertex = ga.get_normalized_bucket_counts_by_vertex(
        True)

    t1 = time.perf_counter()
    ico_chart.fill_image(
        normalized_bucket_counts_by_vertex)  # this takes microseconds
    # plt.imshow(np.asarray(ico_chart.image))
    # plt.show()
    peaks, clusters, avg_peaks, avg_weights = find_peaks_from_ico_charts(
        ico_chart,
        np.asarray(normalized_bucket_counts_by_vertex),
        find_peaks_kwargs=find_peaks_kwargs,
        cluster_kwargs=cluster_kwargs,
        average_filter=average_filter)
    t2 = time.perf_counter()

    gaussian_normals_sorted = np.asarray(ico_chart.sphere_mesh.vertices)
    # Create Open3D structures for visualization
    if with_o3d:
        pcd_all_peaks = get_pc_all_peaks(peaks, clusters,
                                         gaussian_normals_sorted)
        arrow_avg_peaks = get_arrow_normals(avg_peaks, avg_weights)
    else:
        pcd_all_peaks = None
        arrow_avg_peaks = None

    elapsed_time = (t2 - t1) * 1000
    timings = dict(t_fastga_peak=elapsed_time)

    # Lazy %-formatting keeps the log call cheap when DEBUG is disabled.
    logging.debug("Peak Detection - Took (ms): %.2f", elapsed_time)

    return avg_peaks, pcd_all_peaks, arrow_avg_peaks, timings
Example #3
0
def plot_hilbert_curve(ga: GaussianAccumulatorKDPy, plot=False):
    """Find peak normals by 1D peak detection along a space-filling curve.

    Sorts the accumulator's buckets by their space-filling-curve (Hilbert)
    values, runs a 1D peak/cluster detector on the resulting signal, and —
    when ``plot`` is True — draws the projected curve and the sorted
    histogram with an interactive, pick-to-toggle legend.

    Parameters
    ----------
    ga : GaussianAccumulatorKDPy
        Populated accumulator to analyze.
    plot : bool, optional
        If True, show matplotlib figures (blocks until closed).

    Returns
    -------
    list
        ``[pcd_all_peaks, *arrow_avg_peaks]`` — a point cloud of all raw
        peaks followed by arrow geometries for the averaged peaks.
    """

    normals = np.asarray(ga.get_bucket_normals())
    normalized_counts = np.asarray(ga.get_normalized_bucket_counts())
    # Keep only RGB, dropping the alpha channel.
    colors = get_colors(normalized_counts)[:, :3]
    bucket_normals_hv = np.asarray(ga.get_bucket_sfc_values())
    num_buckets = ga.num_buckets
    # Sort every per-bucket array by ascending space-filling-curve value so
    # neighboring indices are (mostly) neighboring directions on the sphere.
    idx_sort = np.argsort(bucket_normals_hv)
    bucket_normals_hv_sorted = bucket_normals_hv[idx_sort]
    colors = colors[idx_sort, :]
    accumulator_normalized_sorted = normalized_counts[idx_sort]
    gaussian_normals_sorted = normals[idx_sort, :]

    # Find Peaks using 1D signal detector
    peaks, clusters, avg_peaks, avg_weights = find_peaks_from_accumulator(
        gaussian_normals_sorted, accumulator_normalized_sorted)

    # 2D Plots
    if plot:
        class_name_str = type(ga).__name__
        if class_name_str == 'GaussianAccumulatorS2':
            # S2 accumulators have no 2D projection; only the histogram axis.
            fig, ax = plt.subplots(1, 1, figsize=(5, 5))

        else:
            # Top axis: azimuth-equidistant projection of the buckets with
            # the Hilbert curve drawn through them; bottom axis: histogram.
            proj_ = np.asarray(ga.get_bucket_projection())
            proj = proj_[idx_sort, :]
            fig, axs = plt.subplots(2, 1, figsize=(8, 10))
            ax = axs[0]
            scatter1 = ax.scatter(proj[:, 0],
                                  proj[:, 1],
                                  c=colors,
                                  label='Projected Buckets')
            scatter2 = ax.scatter(proj[peaks, :][:, 0],
                                  proj[peaks, :][:, 1],
                                  marker='x',
                                  c=clusters,
                                  label='Clusters',
                                  cmap='tab20')
            ax.set_title("Hilbert Curve with Azimuth Equidistant Projection")
            ax.set_xlabel("x*")
            ax.set_ylabel("y*")
            # Connect consecutive (SFC-sorted) buckets to trace the curve.
            line1 = ax.plot(proj[:, 0],
                            proj[:, 1],
                            c='k',
                            label='Hilbert Curve Connections')[0]
            ax.axis('equal')
            leg = ax.legend(loc='upper left', fancybox=True, shadow=True)

            # we will set up a dict mapping legend line to orig line, and enable
            # picking on the legend line
            lines = [line1, scatter1, scatter2]
            lined = dict()
            for legline, origline in zip(leg.legendHandles, lines):
                legline.set_picker(5)  # 5 pts tolerance
                lined[legline] = origline

            def onpick(event):
                # on the pick event, find the orig line corresponding to the
                # legend proxy line, and toggle the visibility
                legline = event.artist
                origline = lined[legline]
                vis = not origline.get_visible()
                origline.set_visible(vis)
                # Change the alpha on the line in the legend so we can see what lines
                # have been toggled
                if vis:
                    legline.set_alpha(1.0)
                else:
                    legline.set_alpha(0.2)
                fig.canvas.draw()

            # Reuse `ax` for the histogram below; wire up the pick handler.
            ax = axs[1]
            fig.canvas.mpl_connect('pick_event', onpick)

        ax.bar(np.arange(num_buckets), accumulator_normalized_sorted)
        # Mark detected peaks on the histogram, colored by cluster id.
        ax.scatter(peaks,
                   accumulator_normalized_sorted[peaks],
                   marker='x',
                   c=clusters,
                   cmap='tab20')

        ax.set_title("Histogram of Normal Counts sorted by Hilbert Values")
        ax.set_xlabel("Hilbert Value (Ascending)")
        ax.set_ylabel("Normal Counts")
        fig.tight_layout()
        plt.show()

    pcd_all_peaks = get_pc_all_peaks(peaks, clusters, gaussian_normals_sorted)
    arrow_avg_peaks = get_arrow_normals(avg_peaks, avg_weights)
    return [pcd_all_peaks, *arrow_avg_peaks]
Example #4
0
def main():
    """Demo driver: integrate a mesh's normals, detect peaks, visualize.

    Loads an example mesh, accumulates its triangle normals into a
    GaussianAccumulatorS2, shows the colored icosahedron, then runs 2D peak
    detection on the unwrapped ico-chart image and displays the averaged
    peak normals as arrows plus the raw chart image.
    """
    # Which entry of ALL_MESHES / ALL_MESHES_ROTATIONS to load.
    EXAMPLE_INDEX = 1
    kwargs_base = dict(level=2, max_phi=180)
    kwargs_s2 = dict(**kwargs_base)
    kwargs_opt_integrate = dict(num_nbr=12)
    # Query slightly inside the accumulator's max_phi boundary.
    query_max_phi = kwargs_base['max_phi'] - 5

    # Get an Example Mesh
    ga_cpp_s2 = GaussianAccumulatorS2(**kwargs_s2)

    example_mesh = o3d.io.read_triangle_mesh(str(ALL_MESHES[EXAMPLE_INDEX]))
    r = ALL_MESHES_ROTATIONS[EXAMPLE_INDEX]
    example_mesh_filtered = example_mesh
    # Optionally rotate into a canonical frame and smooth before analysis.
    if r is not None:
        example_mesh_filtered = example_mesh_filtered.rotate(r.as_matrix())
        example_mesh_filtered = example_mesh_filtered.filter_smooth_laplacian(
            5)

    example_mesh_filtered.compute_triangle_normals()
    # np.save('fixtures/normals/basement.npy', np.asarray(example_mesh_filtered.triangle_normals))
    # Integrate triangle normals into the accumulator and color its buckets.
    colored_icosahedron_s2, normals, neighbors_s2 = visualize_gaussian_integration(
        ga_cpp_s2,
        example_mesh_filtered,
        max_phi=query_max_phi,
        integrate_kwargs=kwargs_opt_integrate)

    # Blocking Open3D windows: the mesh, then the colored accumulator.
    o3d.visualization.draw_geometries([example_mesh_filtered])
    o3d.visualization.draw_geometries([colored_icosahedron_s2])

    # Visualize unwrapping
    ico_chart_ = IcoCharts(kwargs_base['level'])
    t2 = time.perf_counter()
    normalized_bucket_counts_by_vertex = ga_cpp_s2.get_normalized_bucket_counts_by_vertex(
        True)
    ico_chart_.fill_image(normalized_bucket_counts_by_vertex)

    # 2D peak detection on the unwrapped image (absolute threshold of 50).
    find_peaks_kwargs = dict(threshold_abs=50,
                             min_distance=1,
                             exclude_border=False,
                             indices=False)
    print(np.asarray(ico_chart_.image).shape)
    cluster_kwargs = dict(t=0.1, criterion='distance')
    # Only the averaged peaks/weights are used below.
    _, _, avg_peaks, avg_weights = find_peaks_from_ico_charts(
        ico_chart_,
        np.asarray(normalized_bucket_counts_by_vertex),
        find_peaks_kwargs=find_peaks_kwargs,
        cluster_kwargs=cluster_kwargs)
    t3 = time.perf_counter()
    # Elapsed seconds for unwrap + peak detection, then the detected normals.
    print(t3 - t2)
    print(avg_peaks)

    arrow_avg_peaks = get_arrow_normals(avg_peaks, avg_weights)
    o3d.visualization.draw_geometries(
        [colored_icosahedron_s2, *arrow_avg_peaks])

    full_image = np.asarray(ico_chart_.image)

    # Show the raw unwrapped chart image (blocks until closed).
    plt.imshow(full_image)
    plt.axis('off')
    # plt.xticks(np.arange(0, full_image.shape[1], step=1))
    # plt.yticks(np.arange(0, full_image.shape[0], step=1))
    plt.show()