def meshify(points, n_surfaces=None):
    """Return vertex and normal coordinates for a 3D mesh model fit to an Nx3 array of points.

    Points are first outlier-filtered, then surface normals are estimated and
    clustered into planes; plane intersections give the mesh vertices.

    Args:
        points (Nx3 numpy array): Data to be fit to a model.
        n_surfaces (int, optional): Number of surfaces to fit. If None, models
            with different numbers of surfaces (4..14) are compared and the
            best is chosen.

    Returns:
        tuple: (vertices, normals) as produced by get_vertices_at_intersections().

    Raises:
        ValueError: If the fitted model leaves at least one surface with no
            assigned points (its offset would be NaN).
    """

    # Remove obviously bad points according to how far away from the main cluster they are.
    histmask = np.ones(points.shape[0], dtype=bool)  # Initializing mask with all True values
    for coord in range(3):
        histmask &= filters.hist_mask(points[:, coord], keep='middle')
    points_f = points[histmask, :]

    # Get the normals of the N-Neighborhood around each point, and filter out points with lowish planarity
    normals_f, explained_variances = normal_nearest_neighbors(points_f)

    # Histogram filter: take the 70% best-planar data to model.
    normfilter = filters.hist_mask(explained_variances[:, 2], threshold=.7, keep='lower')
    points_ff = points_f[normfilter, :]
    normals_ff = normals_f[normfilter, :]

    # Small margin above the highest point so the ceiling plane clears the data.
    ceiling_height = points_ff[:, 1].max() + .005

    # Fit the filtered normal data using a gaussian classifier.
    # Compare against None explicitly (not truthiness) so an explicit
    # n_surfaces=0 is not silently swapped for the 4..14 model search.
    if n_surfaces is not None:
        min_clusters, max_clusters = n_surfaces, n_surfaces + 1
    else:
        min_clusters, max_clusters = 4, 15
    model = cluster_normals(normals_ff, min_clusters=min_clusters, max_clusters=max_clusters)

    # Get normals from model means
    surface_normals = model.means_  # n_components x 3 normals array, giving mean normal for each surface.

    # Calculate mean offset of vertices for each wall
    ids = model.predict(normals_ff)  # index for each point, giving the wall id number (0:n_components)

    surface_offsets = np.zeros_like(surface_normals)
    for idx in range(len(surface_normals)):
        surface_offsets[idx, :] = np.mean(points_ff[ids == idx, :], axis=0)
    # Raise instead of assert: assert is stripped under `python -O`, and an
    # empty cluster would otherwise propagate NaNs into the vertex calculation.
    if np.isnan(surface_offsets.sum()):
        raise ValueError("Incorrect model: No Points found to assign to at least one wall for intersection calculation.")

    ## CALCULATE PLANE INTERSECTIONS TO GET VERTICES ##
    vertices, normals = get_vertices_at_intersections(surface_normals, surface_offsets, ceiling_height)
    return vertices, normals
        # If the experimenter needs to enter the room, give them a bit of time to get inside.
        if args.human_scan:
            time.sleep(5)

        # Collect random points for calibration.
        motive.load_project(args.motive_projectfile)
        hardware.motive_camera_vislight_configure()

        screenPos, pointPos = random_scan(window, scene, n_points=args.n_points)

        print("Size of Point Data: {}".format(pointPos.shape))

        # Remove Obviously Bad Points according to how far away from main cluster they are
        histmask = np.ones(pointPos.shape[0], dtype=bool)  # Initializing mask with all True values
        for coord in range(3):
            histmask &= filters.hist_mask(pointPos[:, coord], keep='middle')
        pointPos = pointPos[histmask, :]
        screenPos = screenPos[histmask, :]

        print("Size of Point Data after hist_mask: {}".format(pointPos.shape))

        # Project a few points that the experimenter can make rays from (by moving a piece of paper up and down along them.
        # Don't include human data, because its non-gaussian distribution can screw things up a bit.
        # TODO: Figure out how to properly get human-scanned projector calibration data in so OpenCV gets better estimate.
        if args.human_scan:
            ray_scan(window)

        # Close Window
        window.close()

        # Early Tests