Example #1
import os

import numpy as np
from tqdm import tqdm

# Boggle and Solver are the project's own board-generator and solver classes;
# their import is not shown in this excerpt.


def run(n,
        pool,
        board_id=None,
        processors=5,
        chunk_size=500,
        batch_size=2000,
        outname="2003super5x5",
        save=True):
    """
	Solves several generated boggle configs

	:param method: solving method
	:param n: number of boards to run
	:param board: name of board to use
	:return: n board layouts, n heatmaps by points, n lists of worded solutions
	"""

    if board_id is None:
        board = Boggle()
    else:
        board = Boggle(board=board_id)

    board.width = 5

    max_words_per_board = 1200  # store maximum number of valid entries per board

    solver = Solver()  # Load solver
    max_word_length = solver.d.max_length

    # produce array of batch sizes
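    # e.g. n=2500 with batch_size=2000 gives batches == [2000, 500]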
    batches = [batch_size] * (n // batch_size)
    if n % batch_size > 0:
        batches += [n % batch_size]

    # initialise data storage arrays
    layouts = np.empty((n, board.width, board.width),
                       dtype="<U1")  # list of each board
    solutions = np.empty(
        (n, max_words_per_board),
        dtype=f"|S{max_word_length}")  # list of valid words for each board
    points = np.zeros((n, 5, 5),
                      dtype=np.uint64)  # point distribution across each board

    with tqdm(total=n) as progress:
        for b, current_batch in enumerate(batches):
            batch_layouts = [board.gen() for _ in range(current_batch)]
            batch_sols = pool.map(solver.solve,
                                  batch_layouts,
                                  chunksize=chunk_size)

            # compute and save data
            for i_inbatch, sol in enumerate(batch_sols):
                i = b * batch_size + i_inbatch  # overall index (all earlier batches are full-sized)
                for word in sol:
                    val = len(word) - 3  # a word of length L is worth L - 3 points
                    all_coords = {
                        tuple(coord) for route in sol[word] for coord in route
                    }  # all UNIQUE coords across this word's routes
                    for coord in all_coords:
                        x, y = coord
                        points[i, y, x] += val

                layouts[i] = [list(r) for r in batch_layouts[i_inbatch]]
                solutions[i][:len(sol.keys())] = list(sol.keys())

            progress.update(current_batch)

    if save:
        out = dict(layouts=layouts, solutions=solutions, points=points)

        np.savez_compressed(os.path.join("results", outname),
                            width=5,
                            height=5,
                            **out)
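
A minimal driver sketch for run() above (hypothetical: it assumes run, Boggle and Solver are importable from the project's own modules, and solves 10,000 random boards with the default batching):

from multiprocessing import Pool

if __name__ == "__main__":
    with Pool(processes=5) as pool:  # pool size mirrors the processors default
        run(10_000, pool)
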
Example #2
                                # record this route as another way of forming new_string
                                all_words[new_string] = all_words.get(
                                    new_string, []) + [new_path]

        return all_words


if __name__ == "__main__":
    board = Boggle()
    s = Solver()

    N = 300
    t0 = perf_counter()
    ts = []

    for _ in range(N):
        s.solve(board.gen())
        ts.append(perf_counter() - t0)
        t0 = perf_counter()

    print(f"Time to solve: {sum(ts)/len(ts)*1000:.1f}ms")

    # SOLVE BEST - EXPECTED ANSWER - 2945
    t0 = perf_counter()
    sol = s.solve(["EASAN", "BLRME", "AUIEE", "STSNS", "NURIG"])
    print(sum(len(w) - 3 for w in sol),
          f"[{(perf_counter()-t0) * 1000:.1f}ms]")
    # group the solution words by length and report how many of each length were found
    by_length = {}
    for w in sol:
        by_length[len(w)] = by_length.get(len(w), []) + [w]
    print(", ".join(f"{length}: {len(words)}" for length, words in by_length.items()))
    print(by_length[11])  # the 11-letter words found on this board
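
For reference, the score printed above uses the same convention as Example #1's run(): each word of length L is worth L - 3 points (a 4-letter word scores 1, an 11-letter word scores 8), and the expected total of 2945 is that sum over every word found on this board.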