# Example #1
# 0
def run(n,
        pool,
        board_id=None,
        processors=5,
        chunk_size=500,
        batch_size=2000,
        outname="2003super5x5",
        save=True):
    """Solve several generated Boggle boards in parallel batches.

    Generates ``n`` random 5x5 board layouts, solves each with ``Solver``
    via the worker ``pool``, and accumulates a per-cell point heatmap.

    :param n: number of boards to generate and solve
    :param pool: multiprocessing-style pool providing ``map``
    :param board_id: name of a predefined board passed to ``Boggle``;
        a default ``Boggle()`` is used when ``None``
    :param processors: unused here; kept for interface compatibility
    :param chunk_size: ``chunksize`` forwarded to ``pool.map``
    :param batch_size: number of boards generated/solved per batch
    :param outname: basename of the compressed ``.npz`` written under
        ``results/`` when ``save`` is True
    :param save: when True, persist layouts, solutions and points to disk
    :return: None; results are written to ``results/<outname>.npz`` if saved
    """

    if board_id is None:
        board = Boggle()
    else:
        board = Boggle(board=board_id)

    board.width = 5  # force a 5x5 board to match the fixed-size arrays below

    max_words_per_board = 1200  # capacity of the stored word list per board

    solver = Solver()  # Load solver
    max_word_length = solver.d.max_length

    # produce array of batch sizes; the final batch holds any remainder
    batches = [batch_size] * (n // batch_size)
    if n % batch_size > 0:
        batches.append(n % batch_size)

    # initialise data storage arrays
    layouts = np.empty((n, board.width, board.width),
                       dtype="<U1")  # list of each board
    solutions = np.empty(
        (n, max_words_per_board),
        dtype=f"|S{max_word_length}")  # list of valid words for each board
    # use board.width here too so points stays consistent with layouts
    points = np.zeros((n, board.width, board.width),
                      dtype=np.uint64)  # point distribution across each board

    with tqdm(total=n) as progress:
        # BUGFIX: the original loop rebound `batch_size` to each batch's
        # size and computed i = b * batch_size + i_inbatch, so the final
        # (smaller) batch was indexed with the remainder size, corrupting
        # overall indices. Track an explicit running offset instead.
        offset = 0  # overall index of the first board in the current batch
        for current_batch in batches:
            batch_layouts = [board.gen() for _ in range(current_batch)]
            batch_sols = pool.map(solver.solve,
                                  batch_layouts,
                                  chunksize=chunk_size)

            # compute and save data
            for i_inbatch, sol in enumerate(batch_sols):
                i = offset + i_inbatch  # overall idx
                for word in sol:
                    val = len(word) - 3  # points awarded for this word
                    # all UNIQUE coords across the word's routes
                    all_coords = {
                        tuple(coord) for route in sol[word] for coord in route
                    }
                    for x, y in all_coords:
                        points[i, y, x] += val

                layouts[i] = [list(r) for r in batch_layouts[i_inbatch]]
                # truncate to capacity so an unusually wordy board cannot
                # raise a shape-mismatch error on assignment
                words = list(sol.keys())[:max_words_per_board]
                solutions[i][:len(words)] = words

            offset += current_batch
            progress.update(current_batch)

    if save:
        out = dict(layouts=layouts, solutions=solutions, points=points)

        np.savez_compressed(os.path.join("results", outname),
                            width=5,
                            height=5,
                            **out)