Example #1
def compute_normals_and_centroids_opc(opc, convert_f64=True, **kwargs):
    """Computes the Normals and Centroid of Implicit Triangle Mesh in the Organized Point Cloud
    
    Arguments:
        opc {ndarray} -- MXNX3
    
    Keyword Arguments:
        convert_f64 {bool} -- Return F64? (default: {True})
    
    Returns:
        ndarray -- Numpy array
    """
    opc_float = (np.ascontiguousarray(opc[:, :, :3])).astype(np.float32)

    a_ref = Matrix3fRef(opc_float)

    t1 = time.perf_counter()
    normals, centroids = opf.filter.compute_normals_and_centroids(a_ref)
    t2 = time.perf_counter()
    logger.info("OPC Compute Normals and Centroids Took (ms): %.2f",
                (t2 - t1) * 1000)
    normals_float_out = np.asarray(normals)
    centroids_float_out = np.asarray(centroids)

    if not convert_f64:
        return (normals_float_out, centroids_float_out)

    return (normals_float_out.astype(np.float64),
            centroids_float_out.astype(np.float64))
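A minimal usage sketch, assuming the module-level imports these examples rely on (numpy as np, organizedpointfilters as opf, Matrix3fRef, logger) are in scope; the data here is synthetic:

import numpy as np

# Synthetic 100x100 organized point cloud lying on a slanted plane
xx, yy = np.meshgrid(np.linspace(0, 1, 100), np.linspace(0, 1, 100))
opc = np.stack([xx, yy, 0.5 * xx + 0.1 * yy], axis=-1)  # MXNX3, float64

normals, centroids = compute_normals_and_centroids_opc(opc, convert_f64=True)
print(normals.shape, centroids.shape)  # one normal/centroid per implicit triangle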
Example #2
def get_image_peaks(ico_chart, ga, level=2, with_o3d=True,
                    find_peaks_kwargs=dict(threshold_abs=3, min_distance=1, exclude_border=False, indices=False),
                    cluster_kwargs=dict(t=0.10, criterion='distance'),
                    average_filter=dict(min_total_weight=0.01),
                    **kwargs):

    normalized_bucket_counts_by_vertex = ga.get_normalized_bucket_counts_by_vertex(True)

    ico_chart.fill_image(normalized_bucket_counts_by_vertex)
    # Debug visualization of the peak image (requires matplotlib):
    # image = np.asarray(ico_chart.image)
    # plt.imshow(image); plt.show()

    t1 = time.perf_counter()
    peaks, clusters, avg_peaks, avg_weights = find_peaks_from_ico_charts(ico_chart, np.asarray(
        normalized_bucket_counts_by_vertex), find_peaks_kwargs, cluster_kwargs, average_filter)
    t2 = time.perf_counter()
    gaussian_normals_sorted = np.asarray(ico_chart.sphere_mesh.vertices)
    # Create Open3D structures for visualization
    if with_o3d:
        pcd_all_peaks = get_pc_all_peaks(peaks, clusters, gaussian_normals_sorted)
        arrow_avg_peaks = get_arrow_normals(avg_peaks, avg_weights)
    else:
        pcd_all_peaks = None
        arrow_avg_peaks = None

    elapsed_time = (t2 - t1) * 1000
    timings = dict(fastga_peak=elapsed_time)

    logger.info("Peak Detection - Took (ms): %.2f", (t2 - t1) * 1000)

    return avg_peaks, pcd_all_peaks, arrow_avg_peaks, timings
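A sketch of the surrounding workflow. The fastga import path is an assumption; GaussianAccumulatorS2, IcoCharts, and MatX3d are the types used in Example #5 below:

import numpy as np
from fastga import GaussianAccumulatorS2, IcoCharts, MatX3d  # import path assumed

level = 2
ga = GaussianAccumulatorS2(level=level)
ico_chart = IcoCharts(level=level)

# Integrate unit normals clustered around +Z into the accumulator
normals = np.tile(np.array([[0.0, 0.0, 1.0]]), (100, 1))
ga.integrate(MatX3d(normals))

avg_peaks, pcd_all_peaks, arrow_avg_peaks, timings = get_image_peaks(
    ico_chart, ga, level=level, with_o3d=False)
print(avg_peaks)  # should report a single peak near [0, 0, 1]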
Example #3
def bilateral_opc(opc, loops=5, sigma_length=0.1, sigma_angle=0.261, **kwargs):
    """Performs bilateral normal smoothing on a mesh implicit from an organized point

    Arguments:
        opc {ndarray} -- Organized Point Cloud MXNX3, Assumed Float 64

    Keyword Arguments:
        loops {int} -- How many iterations of smoothing (default: {5})
        sigma_length {float} -- Saling factor for length (default: {0.1})
        sigma_angle {float} -- Scaling factor for angle (default: {0.261})

    Returns:
        ndarray -- MX3 Triangle Normal Array, Float 64
    """
    opc_float = (np.ascontiguousarray(opc[:, :, :3])).astype(np.float32)

    a_ref = Matrix3fRef(opc_float)

    t1 = time.perf_counter()
    normals = opf.filter.bilateral_K3(a_ref,
                                      iterations=loops,
                                      sigma_length=sigma_length,
                                      sigma_angle=sigma_angle)
    t2 = time.perf_counter()
    logger.info("OPC Bilateral Filter Took (ms): %.2f", (t2 - t1) * 1000)
    normals_float_out = np.asarray(normals)
    normals_out = normals_float_out.astype(np.float64)

    time_elapsed = (t2 - t1) * 1000

    return normals_out, time_elapsed
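A usage sketch on synthetic data: smooth the implicit-mesh normals of a noisy planar organized point cloud.

import numpy as np

rng = np.random.default_rng(0)
xx, yy = np.meshgrid(np.linspace(0, 1, 50), np.linspace(0, 1, 50))
zz = 0.2 * xx + rng.normal(scale=0.005, size=xx.shape)  # noisy tilted plane
opc = np.stack([xx, yy, zz], axis=-1)

smoothed_normals, t_ms = bilateral_opc(opc, loops=5, sigma_length=0.1, sigma_angle=0.261)
print(smoothed_normals.shape, t_ms)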
Example #4
def filter_and_create_open3d_polygons(points, polygons, rm=None, line_radius=0.005,
                                      config_pp=dict(filter=dict(hole_area=dict(min=0.025, max=100.0), hole_vertices=dict(min=6), plane_area=dict(min=0.05)),
                                                     positive_buffer=0.00, negative_buffer=0.00, simplify=0.0), **kwargs):
    " Apply polygon filtering algorithm, return Open3D Mesh Lines "
    # config_pp = dict(filter=dict(hole_area=dict(min=0.00, max=100.0), hole_vertices=dict(min=6), plane_area=dict(min=0.0001)),
    #                  positive_buffer=0.00, negative_buffer=0.0, simplify=0.00)
    t1 = time.perf_counter()
    planes, obstacles = filter_planes_and_holes(polygons, points, config_pp, rm=rm)
    t2 = time.perf_counter()
    logger.info("Plane Filtering Took (ms): %.2f", (t2 - t1) * 1000)
    all_poly_lines = create_lines(planes, obstacles, line_radius=line_radius)
    return all_poly_lines, (t2 - t1) * 1000
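The config_pp dict drives filter_planes_and_holes; a sketch of overriding it to keep only larger planes. The values are illustrative, and points/polygons would come from Polylidar3D as in Example #13:

custom_pp = dict(filter=dict(hole_area=dict(min=0.05, max=100.0),
                             hole_vertices=dict(min=6),
                             plane_area=dict(min=0.1)),
                 positive_buffer=0.01, negative_buffer=0.01, simplify=0.02)
# all_poly_lines, t_ms = filter_and_create_open3d_polygons(points, polygons, config_pp=custom_pp)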
Example #5
def extract_all_dominant_plane_normals(tri_mesh, level=5, with_o3d=True, ga_=None, ico_chart_=None, **kwargs):

    # Reuse accumulator/chart objects if provided, otherwise construct them
    ga = ga_ if ga_ is not None else GaussianAccumulatorS2(level=level)
    ico_chart = ico_chart_ if ico_chart_ is not None else IcoCharts(level=level)

    triangle_normals = np.asarray(tri_mesh.triangle_normals)
    triangle_normals_ds = down_sample_normals(triangle_normals, **kwargs)

    triangle_normals_ds_mat = MatX3d(triangle_normals_ds)
    t1 = time.perf_counter()
    ga.integrate(triangle_normals_ds_mat)
    t2 = time.perf_counter()

    logger.info("Gaussian Accumulator - Normals Sampled: %d; Took (ms): %.2f",
                triangle_normals_ds.shape[0], (t2 - t1) * 1000)

    avg_peaks, pcd_all_peaks, arrow_avg_peaks, timings_dict = get_image_peaks(
        ico_chart, ga, level=level, with_o3d=with_o3d, **kwargs)

    gaussian_normals = np.asarray(ga.get_bucket_normals())
    accumulator_counts = np.asarray(ga.get_normalized_bucket_counts())

    # Create Open3D structures for visualization
    if with_o3d:
        # Visualize the Sphere
        refined_icosahedron_mesh = create_open_3d_mesh(np.asarray(ga.mesh.triangles), np.asarray(ga.mesh.vertices))
        color_counts = get_colors(accumulator_counts)[:, :3]
        colored_icosahedron = assign_vertex_colors(refined_icosahedron_mesh, color_counts)
    else:
        colored_icosahedron = None

    elapsed_time_fastga = (t2 - t1) * 1000
    elapsed_time_peak = timings_dict['fastga_peak']
    elapsed_time_total = elapsed_time_fastga + elapsed_time_peak

    timings = dict(fastga_total=elapsed_time_total, fastga_integrate=elapsed_time_fastga, fastga_peak=elapsed_time_peak)

    return avg_peaks, pcd_all_peaks, arrow_avg_peaks, colored_icosahedron, timings
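A sketch of the full call chain; "scene.pcd" is a hypothetical path, and load_pcd_and_meshes is defined in Example #6 below:

_, _, _, tri_mesh, _, _ = load_pcd_and_meshes("scene.pcd", stride=2, loops=5)
avg_peaks, _, _, _, timings = extract_all_dominant_plane_normals(tri_mesh, level=5, with_o3d=False)
print(avg_peaks)  # KX3 array of dominant unit normals
print(timings)    # fastga_total / fastga_integrate / fastga_peak in ms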
Example #6
def load_pcd_and_meshes(input_file, stride=2, loops=5, _lambda=0.5, loops_bilateral=0, kernel_size=3, **kwargs):
    """Load PCD and Meshes
    """
    pc_raw, pc_image = load_pcd_file(input_file, stride)

    # Get just the points, no intensity
    pc_points = np.ascontiguousarray(pc_raw[:, :3])
    # Create Open3D point cloud
    cmap = cc.cm.glasbey_bw
    pcd_raw = create_open_3d_pcd(pc_raw[:, :3], pc_raw[:, 3], cmap=cmap)
    tri_mesh, tri_mesh_o3d, timings = create_meshes_cuda_with_o3d(pc_image, loops_laplacian=loops, _lambda=_lambda,
                                                                  loops_bilateral=loops_bilateral, kernel_size=kernel_size, sigma_angle=0.1, **kwargs)

    logger.info("Visualizing Point Cloud - Size: %dX%d ; # Points: %d; # Triangles: %d",
                pc_image.shape[0], pc_image.shape[1], pc_raw.shape[0], np.asarray(tri_mesh.triangles).shape[0])

    return pc_raw, pcd_raw, pc_image, tri_mesh, tri_mesh_o3d, timings
Example #7
def polygons_all(ctx, variance, data, stride, loops, llambda):
    """Visualize Polygon Extraction from training/testing/gt set"""
    if int(variance) == 0:
        base_dir = SYNPEB_DIR_TRAIN_GT if data == "train" else SYNPEB_DIR_TEST_GT
    else:
        base_dir = SYNPEB_DIR_TRAIN_ALL[
            int(variance) -
            1] if data == "train" else SYNPEB_DIR_TEST_ALL[int(variance) - 1]

    all_fnames = SYNPEB_ALL_FNAMES
    if int(variance) != 0:
        all_fnames = all_fnames[0:10]

    for fname in all_fnames:
        fpath = str(base_dir / fname)
        logger.info("File: %s; stride=%d, loops=%d", fpath, stride, loops)
        ctx.invoke(polygons,
                   input_file=fpath,
                   stride=stride,
                   loops=loops,
                   llambda=llambda)
Example #8
def bilateral_opc_cuda(opc,
                       loops=5,
                       sigma_length=0.1,
                       sigma_angle=0.261,
                       **kwargs):
    """Performs bilateral normal smoothing on a mesh implicit from an organized point

    Arguments:
        opc {ndarray} -- Organized Point Cloud MXNX3, Assumed Float 64

    Keyword Arguments:
        loops {int} -- How many iterations of smoothing (default: {5})
        sigma_length {float} -- Saling factor for length (default: {0.1})
        sigma_angle {float} -- Scaling factor for angle (default: {0.261})

    Returns:
        ndarray -- MX3 Triangle Normal Array, Float 64
    """

    normals_opc, centroids_opc = compute_normals_and_centroids_opc(
        opc, convert_f64=False)
    assert normals_opc.dtype == np.float32
    assert centroids_opc.dtype == np.float32

    t1 = time.perf_counter()
    normals_float_out = opf_cuda.kernel.bilateral_K3_cuda(
        normals_opc,
        centroids_opc,
        loops=loops,
        sigma_length=sigma_length,
        sigma_angle=sigma_angle)
    t2 = time.perf_counter()

    normals_out = normals_float_out.astype(np.float64)

    time_elapsed = (t2 - t1) * 1000

    logger.info("OPC CUDA Bilateral Filter Took (ms): %.2f", (t2 - t1) * 1000)

    return normals_out, time_elapsed
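A sketch comparing the CPU and CUDA paths on the same synthetic cloud; this assumes a CUDA-enabled build of organizedpointfilters is installed:

import numpy as np

xx, yy = np.meshgrid(np.linspace(0, 1, 50), np.linspace(0, 1, 50))
opc = np.stack([xx, yy, 0.2 * xx], axis=-1)

normals_cpu, t_cpu = bilateral_opc(opc, loops=5)
normals_gpu, t_gpu = bilateral_opc_cuda(opc, loops=5)
print(normals_cpu.shape, normals_gpu.shape)         # per-triangle normals
print("cpu %.2f ms, gpu %.2f ms" % (t_cpu, t_gpu))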
Example #9
def convert_polygons_to_classified_point_cloud(all_polygons, tri_mesh, all_normals, gt_image=None, stride=2, filter_kwargs=dict()):
    logger.info("Total number of normals identified: %d", len(all_polygons))
    all_classified_planes = []
    class_index_counter = 1  # start class indices at 1

    # The ground truth ORIGINAL image size (gt_image is required despite its None default)
    window_size_out = gt_image.shape[0:2]
    # Our downsampled window size used
    window_size_in = (np.array(window_size_out) / stride).astype(int)
    # used to hold rasterization of polygons
    image_out = np.zeros(window_size_out, dtype=np.uint8)

    total_points = gt_image.shape[0] * gt_image.shape[1]
    # A flattened array of the point indices
    vertices = np.ascontiguousarray(gt_image[:, :, :3].reshape((total_points, 3)))
    gt_class = gt_image[:, :, 3].astype(np.uint8)

    for i, normal_polygons in enumerate(all_polygons):
        logger.debug("Number of Polygons for normal: %d", len(normal_polygons))
        normal = all_normals[i, :]
        for polygon in normal_polygons:
            polygon_shapely = convert_to_shapely_geometry_in_image_space(polygon, window_size_in, stride)
            polygon_shapely = polygon_shapely.buffer(3.0)
            polygon_shapely = polygon_shapely.simplify(1.0)  # simplify returns a new geometry
            rasterize_polygon(polygon_shapely, class_index_counter, image_out)
            point_indices = extract_image_coordinates(image_out, class_index_counter)
            points = np.ascontiguousarray(vertices[point_indices, :])
            normal = np.copy(normal)
            classified_plane = dict(triangles=None, point_indices=point_indices,
                                    points=points, class_id=class_index_counter, normal=normal)
            all_classified_planes.append(classified_plane)
            class_index_counter += 1
    logger.info("A total of %d planes are captured", len(all_classified_planes))
    return all_classified_planes
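A minimal, self-contained sketch of the label-image-to-point-indices pattern the loop above relies on (the real rasterize_polygon/extract_image_coordinates helpers are not shown in these examples):

import numpy as np

image_out = np.zeros((4, 4), dtype=np.uint8)
image_out[1:3, 1:3] = 7                        # pretend class 7 was rasterized here
flat_indices = np.flatnonzero(image_out.ravel() == 7)
vertices = np.arange(16 * 3).reshape(16, 3)    # stand-in for the flattened HxWx3 points
points = vertices[flat_indices, :]             # the points belonging to class 7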
Example #10
def laplacian_opc_cuda(opc, loops=5, _lambda=0.5, kernel_size=3, **kwargs):
    """Performs Laplacian Smoothing on an organized point cloud

    Arguments:
        opc {ndarray} -- Organized Point Cloud MXNX3, Assumed F64

    Keyword Arguments:
        loops {int} -- How many iterations of smoothing (default: {5})
        _lambda {float} -- Weight factor for update (default: {0.5})
        kernel_size {int} -- Kernel Size (How many neighbors to intregrate) (default: {3})

    Returns:
        ndarray -- Smoothed Point Cloud, MXNX3
    """

    opc_float = (np.ascontiguousarray(opc[:, :, :3])).astype(np.float32)

    t1 = time.perf_counter()
    if kernel_size == 3:
        opc_float_out = opf_cuda.kernel.laplacian_K3_cuda(opc_float,
                                                          loops=loops,
                                                          _lambda=_lambda,
                                                          **kwargs)
    else:
        opc_float_out = opf_cuda.kernel.laplacian_K5_cuda(opc_float,
                                                          loops=loops,
                                                          _lambda=_lambda,
                                                          **kwargs)
    t2 = time.perf_counter()

    logger.info("OPC CUDA Laplacian Mesh Smoothing Took (ms): %.2f",
                (t2 - t1) * 1000)

    # only for visualization purposes here
    opc_out = opc_float_out.astype(np.float64)
    time_elapsed = (t2 - t1) * 1000

    return opc_out, time_elapsed
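A sketch showing how kernel_size selects between the K3 and K5 CUDA kernels, again on synthetic data (requires a CUDA-enabled build):

import numpy as np

xx, yy = np.meshgrid(np.linspace(0, 1, 64), np.linspace(0, 1, 64))
opc = np.stack([xx, yy, np.sin(4 * xx)], axis=-1)

smoothed_k3, _ = laplacian_opc_cuda(opc, loops=5, kernel_size=3)
smoothed_k5, _ = laplacian_opc_cuda(opc, loops=5, kernel_size=5)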
Example #11
def laplacian_opc(opc, loops=5, _lambda=0.5, kernel_size=3, **kwargs):
    """Performs Laplacian Smoothing on an organized point cloud

    Arguments:
        opc {ndarray} -- Organized Point Cloud MXNX3, Assumed F64

    Keyword Arguments:
        loops {int} -- How many iterations of smoothing (default: {5})
        _lambda {float} -- Weight factor for update (default: {0.5})
        kernel_size {int} -- Kernel Size (How many neighbors to intregrate) (default: {3})

    Returns:
        ndarray -- Smoothed Point Cloud, MXNX3, F64
    """
    opc_float = (np.ascontiguousarray(opc[:, :, :3])).astype(np.float32)

    a_ref = Matrix3fRef(opc_float)

    t1 = time.perf_counter()
    if kernel_size == 3:
        b_cp = opf.filter.laplacian_K3(a_ref,
                                       _lambda=_lambda,
                                       iterations=loops,
                                       **kwargs)
    else:
        b_cp = opf.filter.laplacian_K5(a_ref,
                                       _lambda=_lambda,
                                       iterations=loops,
                                       **kwargs)
    t2 = time.perf_counter()
    logger.info("OPC Mesh Smoothing Took (ms): %.2f", (t2 - t1) * 1000)

    opc_float_out = np.asarray(b_cp)
    opc_out = opc_float_out.astype(np.float64)

    time_elapsed = (t2 - t1) * 1000

    return opc_out, time_elapsed
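A sketch of the effect of _lambda on the CPU path: a larger weight pulls each point further toward its neighborhood mean on every iteration (synthetic noisy plane):

import numpy as np

rng = np.random.default_rng(1)
xx, yy = np.meshgrid(np.linspace(0, 1, 64), np.linspace(0, 1, 64))
opc = np.stack([xx, yy, rng.normal(scale=0.01, size=xx.shape)], axis=-1)

gentle, _ = laplacian_opc(opc, loops=5, _lambda=0.25)
strong, _ = laplacian_opc(opc, loops=5, _lambda=0.9)
print(np.std(gentle[:, :, 2]), np.std(strong[:, :, 2]))  # expect the second to be smaller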
Example #12
def convert_planes_to_classified_point_cloud(all_planes, tri_mesh, all_normals, filter_kwargs=dict()):
    logger.info("Total number of normals identified: %d", len(all_planes))
    all_classified_planes = []

    # each classified plane: dict(triangles, point_indices, points, class_id, normal)
    plane_counter = 0
    vertices = np.asarray(tri_mesh.vertices)
    triangles = np.asarray(tri_mesh.triangles)
    for i, normal_planes in enumerate(all_planes):
        logger.debug("Number of Planes in normal: %d", len(normal_planes))
        normal = all_normals[i, :]
        for plane in normal_planes:
            logger.debug("Number of triangles in plane: %d", len(plane))
            plane_triangles = np.asarray(plane)
            point_indices = np.unique(triangles[plane_triangles, :].flatten())
            points = np.ascontiguousarray(vertices[point_indices, :])
            normal = np.copy(normal)
            classified_plane = dict(triangles=plane_triangles, point_indices=point_indices,
                                    points=points, class_id=plane_counter, normal=normal)
            all_classified_planes.append(classified_plane)
            plane_counter += 1
    logger.info("A total of %d planes are captured", len(all_classified_planes))
    return all_classified_planes
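A minimal sketch of the triangles-to-unique-point-indices mapping at the heart of the loop above:

import numpy as np

triangles = np.array([[0, 1, 2], [2, 1, 3]])   # two triangles sharing an edge
plane_triangles = np.array([0, 1])             # both assigned to one plane
point_indices = np.unique(triangles[plane_triangles, :].flatten())
print(point_indices)                           # [0 1 2 3]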
Example #13
def extract_planes_and_polygons_from_mesh(tri_mesh, avg_peaks,
                                          polylidar_kwargs=dict(alpha=0.0, lmax=0.1, min_triangles=2000,
                                                                z_thresh=0.1, norm_thresh=0.95, norm_thresh_min=0.95, min_hole_vertices=50, task_threads=4),
                                          filter_polygons=True, pl_=None, optimized=True, **kwargs):

    if pl_ is not None:
        pl = pl_
    else:
        pl = Polylidar3D(**polylidar_kwargs)

    avg_peaks_mat = MatrixDouble(avg_peaks)
    t0 = time.perf_counter()
    if optimized:
        all_planes, all_polygons = pl.extract_planes_and_polygons_optimized(tri_mesh, avg_peaks_mat)
    else:
        all_planes, all_polygons = pl.extract_planes_and_polygons(tri_mesh, avg_peaks_mat)

    t1 = time.perf_counter()

    polylidar_time = (t1 - t0) * 1000
    logger.info("Polygon Extraction - Took (ms): %.2f", polylidar_time)
    all_poly_lines = []
    if filter_polygons:
        vertices = np.asarray(tri_mesh.vertices)
        for i in range(avg_peaks.shape[0]):
            avg_peak = avg_peaks[i, :]
            rm, _ = R.align_vectors([[0, 0, 1]], [avg_peak])
            polygons_for_normal = all_polygons[i]
            if len(polygons_for_normal) > 0:
                poly_lines, _ = filter_and_create_open3d_polygons(vertices, polygons_for_normal, rm=rm, **kwargs)
                all_poly_lines.extend(poly_lines)

    timings = dict(polylidar=polylidar_time)

    return all_planes, all_polygons, all_poly_lines, timings
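A sketch of chaining the earlier helpers into polygon extraction ("scene.pcd" is a hypothetical path):

_, _, _, tri_mesh, _, _ = load_pcd_and_meshes("scene.pcd", stride=2)
avg_peaks, _, _, _, _ = extract_all_dominant_plane_normals(tri_mesh, with_o3d=False)
all_planes, all_polygons, all_poly_lines, timings = extract_planes_and_polygons_from_mesh(
    tri_mesh, avg_peaks, filter_polygons=False)
print(len(all_planes), timings)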
Example #14
def laplacian_then_bilateral_opc(opc,
                                 loops_laplacian=5,
                                 _lambda=0.5,
                                 kernel_size=3,
                                 loops_bilateral=0,
                                 sigma_length=0.1,
                                 sigma_angle=0.261,
                                 **kwargs):
    """Performs Laplacian Smoothing on Point Cloud and then performs Bilateral normal smoothing

    Arguments:
        opc {ndarray} -- Organized Point Cloud (MXNX3)

    Keyword Arguments:
        loops_laplacian {int} -- How many iterations of laplacian smoothing (default: {5})
        _lambda {float} -- Weigh factor for laplacian  (default: {0.5})
        kernel_size {int} -- Kernel Size for Laplacian (default: {3})
        loops_bilateral {int} -- How many iterations of bilateral smoothing (default: {0})
        sigma_length {float} -- Scaling factor for length bilateral (default: {0.1})
        sigma_angle {float} -- Scaling factor for angle bilateral (default: {0.261})

    Returns:
        tuple(ndarray, ndarray) -- Smoothed OPC MXNX3, Smoothed Normals M*NX3, normals arrays are flattened
    """
    opc_float = (np.ascontiguousarray(opc[:, :, :3])).astype(np.float32)

    a_ref = Matrix3fRef(opc_float)

    t1 = time.perf_counter()
    if kernel_size == 3:
        b_cp = opf.filter.laplacian_K3(a_ref,
                                       _lambda=_lambda,
                                       iterations=loops_laplacian,
                                       **kwargs)
    else:
        b_cp = opf.filter.laplacian_K5(a_ref,
                                       _lambda=_lambda,
                                       iterations=loops_laplacian,
                                       **kwargs)
    t2 = time.perf_counter()
    logger.info("OPC Mesh Smoothing Took (ms): %.2f", (t2 - t1) * 1000)

    opc_float_out = np.asarray(b_cp)
    b_ref = Matrix3fRef(opc_float_out)

    opc_normals_float = opf.filter.bilateral_K3(b_ref,
                                                iterations=loops_bilateral,
                                                sigma_length=sigma_length,
                                                sigma_angle=sigma_angle)
    t3 = time.perf_counter()
    logger.info("OPC Bilateral Normal Filter Took (ms): %.2f",
                (t3 - t2) * 1000)

    opc_normals_float_out = np.asarray(opc_normals_float)
    total_triangles = int(opc_normals_float_out.size / 3)
    opc_normals_out = opc_normals_float_out.astype(np.float64).reshape((total_triangles, 3))

    opc_out = opc_float_out.astype(np.float64)

    timings = dict(laplacian=(t2 - t1) * 1000, bilateral=(t3 - t2) * 1000)

    return opc_out, opc_normals_out, timings
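A usage sketch on synthetic data; note the returned normals come back flattened to (num_triangles)X3, as the docstring describes:

import numpy as np

rng = np.random.default_rng(2)
xx, yy = np.meshgrid(np.linspace(0, 1, 50), np.linspace(0, 1, 50))
opc = np.stack([xx, yy, 0.3 * yy + rng.normal(scale=0.005, size=xx.shape)], axis=-1)

opc_smooth, normals, timings = laplacian_then_bilateral_opc(
    opc, loops_laplacian=5, _lambda=0.5, loops_bilateral=2)
print(opc_smooth.shape, normals.shape, timings)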