Example #1
def vis(Cs, qs, ns, path='tmp/rays.ply'):
    M = len(Cs)
    verts = empty((4 * M, 3), Cs.dtype)

    verts[::4] = Cs
    verts[1::4] = Cs + 20 * qs

    verts[2::4] = Cs
    verts[3::4] = Cs + 0.5 * ns

    edges = empty((2 * M, 2), int)
    edges[:M, 0] = r_[0:2 * M:2]
    edges[:M, 1] = r_[1:2 * M:2]

    edges[M:, 0] = 2 * M + r_[0:2 * M:2]
    edges[M:, 1] = 2 * M + r_[1:2 * M:2]

    IO.save_point_cloud(path, verts, edges)
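A minimal usage sketch (assuming `empty` and `r_` come from numpy in the source module, and that `IO.save_point_cloud` writes a .ply from a vertex array plus an edge-index array). The arrays below are synthetic stand-ins for ray origins, directions, and normals:

import numpy as np

M = 8
Cs = np.random.rand(M, 3)                        # ray origins (e.g. camera centers)
qs = np.random.rand(M, 3) - 0.5                  # ray directions
qs /= np.linalg.norm(qs, axis=1, keepdims=True)
ns = np.random.rand(M, 3) - 0.5                  # per-ray normals
ns /= np.linalg.norm(ns, axis=1, keepdims=True)

# Writes 4*M vertices (ray start/end and normal start/end per point) and 2*M edges.
vis(Cs, qs, ns, path='tmp/rays.ply')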
Example #2
def write_clusters(outdir, lookup, P1, P2, indmap):
    Util.try_mkdir(outdir)
    verts_all = []
    edges_all = []
    E = 0
    for key, cluster in lookup.items():
        # path = os.path.join(outdir, 'set_{}.ply'.format(key))

        inds = indmap[cluster]
        verts = np.vstack((P1[inds], P2[inds]))
        V = len(verts)
        edges1 = np.r_[0:V].reshape(2, -1).T
        edges2 = np.c_[np.r_[0:V - 1], np.r_[1:V]]
        edges = np.vstack((edges1, edges2))

        verts_all.append(verts)
        edges_all.append(edges + E)

        E += len(verts)  # offset by vertex count, not edge count

    path = os.path.join(outdir, '_full.ply')
    IO.save_point_cloud(path, np.vstack(verts_all), np.vstack(edges_all))
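A self-contained sketch of the edge bookkeeping above, with toy data standing in for `P1`, `P2`, `indmap`, and `lookup`; the point to note is that per-cluster edge indices are offset by the running vertex count so they stay valid once everything is concatenated:

import numpy as np

P1 = np.random.rand(10, 3)
P2 = P1 + 0.1
indmap = np.arange(10)
lookup = {0: np.array([0, 1, 2]), 1: np.array([5, 6])}

verts_all, edges_all, offset = [], [], 0
for key, cluster in lookup.items():
    inds = indmap[cluster]
    verts = np.vstack((P1[inds], P2[inds]))
    V = len(verts)
    pair_edges = np.r_[0:V].reshape(2, -1).T         # connect P1[i] to P2[i]
    chain_edges = np.c_[np.r_[0:V - 1], np.r_[1:V]]  # polyline through the cluster
    verts_all.append(verts)
    edges_all.append(np.vstack((pair_edges, chain_edges)) + offset)
    offset += V                                      # advance by vertex count

verts, edges = np.vstack(verts_all), np.vstack(edges_all)
assert edges.max() < len(verts)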
Example #3
def reconstruct(seq, sub, cloud, config, scale=1):
    stat_path = os.path.join(seq, sub, 'segment_stats.npz')
    filter_path = os.path.join(seq, sub, 'reciprocal_filtered.npz')

    # --- Load data ---
    npz = np.load(stat_path)
    depths = npz['depths']
    score = npz['score']
    radii = npz['radii']
    inlier_frac = npz['inlier_frac']
    neighbor_count = npz['neighbor_count']
    reciprocated = npz['reciprocated']

    # --- Prepare vars ---
    N = cloud.N

    # Valid inds start at zero. Invalids are -1
    Cs, Qs, Ns = cloud.global_rays()
    planes = empty((N, 4), Qs.dtype)
    planes[:, :3] = Ns
    planes[:, 3] = -(Cs * Ns).sum(axis=1)

    # --- Filter inds ---
    abradii = np.abs(radii)
    min_score = config.min_score
    min_radius = config.min_radius * scale
    min_neighbors = config.min_neighbors
    min_inlier = config.min_inlier

    good = (score >= min_score) & (abradii < min_radius) & (
        neighbor_count > min_neighbors) & (inlier_frac > min_inlier)
    inds = np.where(good)[0]

    if not os.path.exists(filter_path):
        print("Filtering common")
        rec_filtered, rec_count = filter_common(inds,
                                                good,
                                                reciprocated,
                                                neighbor_count,
                                                min_common=20)
        print("Saving")
        np.savez(filter_path, reciprocated=rec_filtered, count=rec_count)
        print("Done")
    else:
        npz = np.load(filter_path)
        rec_filtered = npz['reciprocated']
        rec_count = npz['count']

    bbox_min, bbox_max = cloud.bbox_min, cloud.bbox_max
    print(bbox_min, bbox_max)

    keep = good
    inds2 = np.where(keep)[0]

    Pa = Cs[inds2] + depths[inds2, 0:1] * Qs[inds2]
    Pb = Cs[inds2] + depths[inds2, 1:2] * Qs[inds2 + 1]

    inside = inds2[((Pa > bbox_min).all(axis=1) & (Pa < bbox_max).all(axis=1) &
                    (Pb > bbox_min).all(axis=1) & (Pb < bbox_max).all(axis=1))]

    sel = inside

    print("Persistent edges:", len(sel))

    Pa = Cs[sel] + depths[sel, 0:1] * Qs[sel]
    Pb = Cs[sel] + depths[sel, 1:2] * Qs[sel + 1]

    rebuild_persistent_voxel(seq, sub, cloud, sel, Pa, Pb)

    verts = np.vstack((Pa, Pb))
    V = len(verts)
    edges = np.r_[0:V].reshape(2, -1).T

    cloud_out = os.path.join(seq, sub, 'persistent_cloud.ply')
    IO.save_point_cloud(cloud_out, verts, edges)

    pers_out = os.path.join(seq, sub, 'persistent_data.npz')
    np.savez(pers_out, verts=verts, edges=edges, inds=sel, P1=Pa, P2=Pb)

    mesh = Render.Mesh()
    mesh.verts = verts.astype(np.float32)
    mesh.edges = edges.astype(np.uint32)
    mesh.normals = Ns[sel].astype(np.float32)
    mesh_out = os.path.join(seq, 'persistent_mesh.npz')
    Render.save_mesh(mesh_out, mesh)
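A minimal sketch of the plane encoding used in `reconstruct`: each row stores [nx, ny, nz, d] with d = -C·N, so a point X lies on the plane exactly when X·N + d = 0 (the Cs/Ns here are synthetic stand-ins, not the ray-cloud data):

import numpy as np

Cs = np.random.rand(5, 3)                        # a known point on each plane
Ns = np.random.rand(5, 3) - 0.5                  # plane normals
Ns /= np.linalg.norm(Ns, axis=1, keepdims=True)

planes = np.empty((5, 4), Cs.dtype)
planes[:, :3] = Ns
planes[:, 3] = -(Cs * Ns).sum(axis=1)

# The defining points satisfy the plane equation to numerical precision.
residual = (Cs * planes[:, :3]).sum(axis=1) + planes[:, 3]
assert np.allclose(residual, 0)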
Example #4
def visualize_intersections(seq, sub, skip=20, imtype='png'):
    edge_dir = os.path.join(seq, 'edges', '*.' + imtype)
    paths = glob.glob(edge_dir)

    voxel_dir = os.path.join(seq, sub + '_voxel')

    # ---------- Set up the figure -----------
    fig = pynutmeg.figure('segments', 'figs/intersection.qml')
    fig.set_gui('figs/intersection_gui.qml')

    # Parameters
    sld_frame = fig.parameter('frame')
    sld_frameoffset = fig.parameter('frameoffset')
    sld_segment = fig.parameter('segment')
    sld_index = fig.parameter('index')
    sld_anglesupport = fig.parameter('anglesupport')
    sld_planarsupport = fig.parameter('planarsupport')
    sld_support = fig.parameter('framesupport')
    btn_cache = fig.parameter('cachebtn')
    btn_export = fig.parameter('exportbtn')
    btn_cache.wait_changed(5)
    btn_export.wait_changed(1)

    # ------------- Load in data -------------
    print("Loading rays")
    cloud = RayCloud.load(os.path.join(seq, sub))

    F = int(cloud.frames.max())
    sld_frame.set(maximumValue=F - 1, stepSize=skip)
    sld_support.set(maximumValue=1000, stepSize=skip)

    N = cloud.N
    Cs, Qs, Ns = cloud.global_rays()

    planes = empty((N, 4), Qs.dtype)
    planes[:, :3] = Ns
    planes[:, 3] = -(Cs * Ns).sum(axis=1)

    plucker = Geom.to_plucker(Cs, Qs)

    # Get a rough scale for closeness threshold based on
    # size of camera center bounding box
    print("Loading voxels")
    raygrid = RayVoxel.load(voxel_dir)

    # longest = (bbox_max - bbox_min).max()
    # eps = longest * 1e-2
    eps = 1 / cloud.cam[0]
    print("Eps:", eps)

    # Load the cam so we can offset the image properly
    fx, fy, cx, cy = cloud.cam
    # Make image show in homogeneous coords
    fig.set('ax.im',
            xOffset=-(cx + 0.5) / fx,
            yOffset=-(cy + 0.5) / fy,
            xScale=1 / fx,
            yScale=1 / fy)
    fig.set('fit', minX=0.4, maxX=1, minY=-1, maxY=1)

    # Make sure the figure's online
    pynutmeg.wait_for_nutmeg()
    pynutmeg.check_errors()

    # Init state vars
    frame = 0
    label = 1
    index = 0
    frame_offset = 0

    labels = empty(0, int)
    max_label = 1
    max_ind = 0
    s, e = 0, 0
    ray_ind = 0

    frame_changed = True

    cache = [[], [], []]
    validcache = False
    cachechanged = False

    cluster_sel = empty(0, int)

    hough = zeros((500, 1000), np.uint32)

    while True:
        # Check parameter update
        if sld_frame.changed:
            frame = max(0, sld_frame.read())
            frame_changed = True

        if sld_segment.changed:
            label = sld_segment.read()
            segment_changed = True

        if sld_index.changed:
            index = sld_index.read()
            index_changed = True

        # Apply updated values
        if frame_changed:
            E = IO.imread(paths[frame])
            fig.set('ax.im', binary=255 - E)

            s, e = cloud.frame_range[frame]
            labels = cloud.labels_frame[s:e]
            if len(labels) > 0:
                max_label = labels.max()
                sld_segment.set(maximumValue=int(max_label))
            else:
                sld_segment.set(maximumValue=0)

            label = 0
            segment_changed = True
            frame_changed = False

        if segment_changed:
            segment_inds = s + np.where(labels == label)[0]

            max_ind = max(0, len(segment_inds))
            sld_index.set(maximumValue=max_ind)
            if len(segment_inds) > 0:
                P_seg = cloud.local_rays[np.r_[segment_inds,
                                               segment_inds[-1] + 1]].T
                fig.set('ax.P0', x=P_seg[0], y=P_seg[1])
            else:
                fig.set('ax.P0', x=[], y=[])
                fig.set('ax.P1', x=[], y=[])
                fig.set('ax.rays', x=[], y=[])

            index = min(max_ind, index)

            index_changed = True
            segment_changed = False
            validcache = False

        # if sld_frameoffset.changed:
        #     print("Recalculation frame offset...")
        #     frame_offset = sld_frameoffset.read()

        #     # Slow, but don't care, atm...
        #     Cs, Qs, Ns = cloud.global_rays(frame_offset)
        #     planes = empty((N,4), Qs.dtype)
        #     planes[:,:3] = Ns
        #     planes[:,3] = -(Cs*Ns).sum(axis=1)
        #     plucker = Geom.to_plucker(Cs, Qs)
        #     print("Done")

        #     validcache = False

        if index_changed and index >= 0 and index < len(segment_inds):
            ray_ind = segment_inds[index]

            P_seg = cloud.local_rays[ray_ind:ray_ind + 2]
            P_ind = P_seg.mean(axis=0).reshape(-1, 1)
            fig.set('ax.P1', x=P_ind[0], y=P_ind[1])

            tx, ty, _ = P_seg[1] - P_seg[0]
            mag = sqrt(tx * tx + ty * ty)
            # nx, ny = Geom.project_normal(q, cloud.local_normals[ray_ind])
            nx, ny = -ty / mag * 3e-2, tx / mag * 3e-2
            L = empty((2, 2))
            L[:, 0] = P_ind[:2, 0]
            L[:, 1] = L[:, 0] + (nx, ny)
            fig.set('ax.rays', x=L[0], y=L[1])

        if (index_changed or sld_support.changed or sld_anglesupport.changed
                or sld_planarsupport.changed or cachechanged) and validcache:
            frame_support = max(sld_support.read(),
                                2 * sld_support.read() - frame)
            angle_support = sld_anglesupport.read() / 10000
            planarsupport = sld_planarsupport.read() / 1000
            cachechanged = False
            # print("Cache: {}, Index: {}".format(len(cache[0]), index))
            if len(cache[0]) > 0 and 0 <= index < len(cache[0]):
                frames = cache[3][index]
                df = frames - frame
                planar_angles = cache[7][index]
                keep = np.where((np.abs(df) <= frame_support)
                                | (np.abs(planar_angles) <= planarsupport))[0]

                P = cache[0][index][:, keep]
                deltas = cache[1][index][:, keep]
                depths = cache[2][index][keep]
                # angles = np.arctan(deltas[0]/deltas[1])
                angleres = np.deg2rad(5)
                centers = cache[4][index][:, keep]

                rays2d = cache[5][index][:, keep]
                rays2d /= norm(rays2d, axis=0)

                print("Sel:", len(cache[6][index]))

                print("Clustering")
                # cluster_inds, radius, depth, ray_angles, inlier_frac = ClassifyEdges.find_cluster_line3(
                #     deltas, rays2d, depths,
                #     angle_support, res=(angleres, 1e-2),
                #     thresh=(np.deg2rad(10), 4e-3))

                result = ClassifyEdges.find_cluster_line4(
                    centers,
                    rays2d,
                    depth_thresh=angle_support,
                    percentile=0.15)
                cluster_inds, radius, depth, dual_centers, dual_rays, inlier_frac = result
                print(".. Done")

                # np.savez('tmp/frame_cluster/ray_{}.npz'.format(ray_ind), frames=frames-frame, depths=depths, angles=angles, cluster_inds=cluster_inds)
                # print("Saved cluster", ray_ind)

                cluster_sel = cache[6][index][keep][cluster_inds]
                # fig.set('fit.P0', x=depths, y=ray_angles)
                # fig.set('fit.P1', x=depths[cluster_inds], y=ray_angles[cluster_inds])
                # fig.set('fit.P2', x=depths[line_cluster], y=ray_angles[line_cluster])

                x1, y1 = dual_centers - 10 * dual_rays
                x2, y2 = dual_centers + 10 * dual_rays
                fig.set('fit.rays', x=x1, y=y1, endX=x2, endY=y2)
                fig.set('fit.rays2',
                        x=x1[cluster_inds],
                        y=y1[cluster_inds],
                        endX=x2[cluster_inds],
                        endY=y2[cluster_inds])
                # fig.set('fit.rays3', x=x1[line_cluster], y=y1[line_cluster], endX=x2[line_cluster], endY=y2[line_cluster])

                print(cluster_sel.shape)
                # c_out = Cs[cluster_sel]
                # q_out = (Cs[ray_ind] + Qs[ray_ind] * depths[cluster_inds].reshape(-1,1)) - c_out
                # verts = np.vstack((c_out, c_out + 1.2*q_out))
                # edges = empty((len(verts)//2, 2), np.uint32)
                # edges[:] = np.r_[0:len(verts)].reshape(2,-1).T
                # IO.save_point_cloud("tmp/cluster_rays.ply", verts, edges)

                # fig.set('fit.P2', x=depths[init_inds], y=ray_space[init_inds])

                if len(cluster_inds) >= 10:
                    # hist *= (0.04/hist.max())
                    # fig.set('tangent.l1', x=histx, y=hist)

                    # fig.set('fit.P0', x=angles, y=depths)
                    # fig.set('fit.P1', x=angles[cluster_inds], y=depths[cluster_inds])

                    P = P[:, cluster_inds]
                    deltas = deltas[:, cluster_inds]

                    x1, y1 = P
                    x2, y2 = P + deltas

                    fig.set('tangent.rays', x=x1, y=y1, endX=x2, endY=y2)
                    fig.set('tangent.l0', x=[0.0, 1.5], y=[0.0, 0.0])
                    fig.set('tangent.P0', x=depths, y=zeros(len(depths)))

                    # Determine tangent angle
                    intersect_angles = np.arctan(-deltas[0] / deltas[1])
                    # Zero is vertical
                    alpha = np.median(intersect_angles)
                    qa = np.r_[-np.sin(alpha), np.cos(alpha)]
                    a1 = np.r_[depth, 0] + 0.05 * qa
                    a2 = np.r_[depth, 0] - 0.05 * qa
                    fig.set('tangent.l1', x=[a1[0], a2[0]], y=[a1[1], a2[1]])

                    # Draw the other axis
                    Q2 = 2 * rays2d[:, cluster_inds]
                    P2a = centers[:, cluster_inds]
                    P2b = P2a + Q2
                    fig.set('normal.rays',
                            x=P2a[0],
                            y=P2a[1],
                            endX=P2b[0],
                            endY=P2b[1])
                    fig.set('normal.l0', x=[0.0, 1.5], y=[0.0, 0.0])

                    # fig.set('fit.P0', x=angles2, y=depths)
                    # fig.set('fit.P1', x=angles2[cluster_inds], y=depths[cluster_inds])
                    # np.savez('tmp/circle3.npz', P=P2a, Q=Q2, eps=eps)

                    # depth_std = np.std(depths[cluster_inds])

                    # nearby = np.where( np.abs(ray_angles[cluster_inds]) < 1.5*angle_support )[0]

                    # maxangle = np.percentile(np.abs(ray_angles[cluster_inds]), 95)
                    print("Radius:", radius, depth)
                    # frac = len(cluster_inds)/len(depths)
                    print("Frac:", len(cluster_inds), inlier_frac)
                    # print("Nearby:", len(nearby))
                    # if angle_range > np.deg2rad(20):
                    #     print("Hard edge", len(cluster_inds))
                    if inlier_frac > 0.05 and len(cluster_inds) > 100:
                        if abs(radius) < 1e-2:
                            print("Hard edge", len(cluster_inds))
                        else:
                            print("Occlusion", len(cluster_inds))

                else:
                    print("Cluster too small:", len(cluster_inds))

        index_changed = False

        if btn_cache.read_changed() or not validcache:
            if len(segment_inds) > 0 and label != 0:
                print("Caching segment... Total indices: {}".format(
                    len(segment_inds)),
                      flush=True)
                cache = cache_segment(Cs,
                                      Qs,
                                      planes,
                                      plucker,
                                      cloud.frames,
                                      cloud.labels_frame,
                                      raygrid,
                                      segment_inds,
                                      eps=eps)
                validcache = True
                cachechanged = True
                print("Done")

                # TODO: Output cluster segments to .ply for blender visualization.......

        if btn_export.read_changed() and validcache and len(cluster_sel) > 0:
            export_triangles('tmp/cluster_tris.ply', Cs, Qs * 1.5, cluster_sel,
                             ray_ind)

            c_out = Cs[cluster_sel]
            q_out = (Cs[ray_ind] +
                     Qs[ray_ind] * depths[cluster_inds].reshape(-1, 1)) - c_out
            verts = np.vstack((c_out, c_out + 1.5 * q_out))
            edges = empty((len(verts) // 2, 2), np.uint32)
            edges[:] = np.r_[0:len(verts)].reshape(2, -1).T
            IO.save_point_cloud("tmp/cluster_rays.ply", verts, edges)
            print("Saved .ply")

        time.sleep(0.005)
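A small sketch of the pixel-to-normalized mapping implied by the `ax.im` offsets in `visualize_intersections`, assuming the figure applies value * scale + offset: pixel (u, v) then maps to roughly homogeneous image coordinates (u - cx - 0.5) / fx and (v - cy - 0.5) / fy. The intrinsic values below are hypothetical:

import numpy as np

fx, fy, cx, cy = 600.0, 600.0, 320.0, 240.0      # hypothetical camera intrinsics
u = np.array([0.5, 320.5, 640.5])                # pixel x coordinates
v = np.array([0.5, 240.5, 480.5])                # pixel y coordinates

# Same offset/scale form passed to fig.set('ax.im', ...) above.
x = u * (1 / fx) - (cx + 0.5) / fx
y = v * (1 / fy) - (cy + 0.5) / fy

assert np.allclose(x, (u - cx - 0.5) / fx)
assert np.allclose(y, (v - cy - 0.5) / fy)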