# NOTE(review): this chunk is whitespace-mangled (a whole script collapsed onto
# one line); below is a token-preserving reformat with comments added.
# Script preamble for a greedy decision-tree search over words. The trailing
# `for depth ...:` loop is truncated -- its body lies outside this chunk.
import itertools
import os
from pathlib import Path

from anytree import Node
from anytree.exporter import DotExporter
from tabulate import tabulate

from src.Util import Util

# Alphabet size / word length: words are N-tuples over range(N).
N = 6
MY_UTIL = Util(N, -1)  # project helper; constructor args presumably (n, r) -- TODO confirm

# Presumably filled by the (missing) search loop below:
# histogram of comparison counts, and words the search struggles with.
nr_comparisons_count = {}
difficult_words = []

# All N^N candidate words, produced lazily by itertools.product.
all_words = itertools.product(range(N), repeat=N)
nr_words = N ** N

# Root of the decision tree; `obj=(0, 1)` looks like the first comparison
# pair -- TODO confirm against the (missing) loop body.
decision_tree = [Node(0, obj=(0, 1))]
max_height = 8  # maximum tree depth explored by the loop below

# Output locations: Greedy/<N>.png (tree picture) and Greedy/<N>.txt.
base_dir = "Greedy"
Path(base_dir).mkdir(parents=True, exist_ok=True)
pic_filename = "{}.png".format(N)
txt_filename = "{}.txt".format(N)
pic_filepath = os.path.join(base_dir, pic_filename)
txt_filepath = os.path.join(base_dir, txt_filename)
start_index = 2  # meaning not visible in this chunk -- TODO confirm

# Truncated here: the loop body is not part of this chunk.
for depth in range(1, max_height + 1):
# NOTE(review): whitespace-mangled chunk. The first part is the TAIL of a
# function (its `def compute_fuzzier(word):` header is not visible here, nor
# are the definitions of `n`, `longest_streak`, `count`, `G`, `mp_candidates`),
# followed by a top-level driver script. Indentation below is reconstructed.

    # Drop the candidate derived from the longest streak, then run a
    # knock-out tournament on the remaining max-prefix candidates.
    mp_candidates.remove(n - longest_streak)
    while len(mp_candidates) > 1:
        # One comparison per round: pop two candidates, compare the letters
        # that follow each candidate position in `word`.
        count += 1
        i = mp_candidates.pop()
        j = mp_candidates.pop()
        if word[i + 1] > word[j + 1]:
            # i's successor is larger: record edge (j+1 -> i+1), i survives.
            G.add_edge(j + 1, i + 1)
            mp_candidates.append(i)
        elif word[i + 1] < word[j + 1]:
            # j's successor is larger: record edge (i+1 -> j+1), j survives.
            G.add_edge(i + 1, j + 1)
            mp_candidates.append(j)
        else:
            # Tie on the successor letters: answer immediately with j.
            return j, count
    # Exactly one survivor remains; it is the result.
    return mp_candidates.pop(), count


# --- driver: validate compute_fuzzier against Util's ground truth -----------
nr_comps = {}  # histogram: comparison count -> number of words
for r, words in enumerate(Util(6, -1).generate_all_words()):
    for word in words:
        (r_fuzzier, count) = compute_fuzzier(word)
        if r_fuzzier != r:
            print("Fuzzier Algorithm failed for {} [r={}, r_actual={}]".format(word, r_fuzzier, r))
        if count not in nr_comps:
            nr_comps[count] = 1
        else:
            nr_comps[count] += 1
        if count == 8:
            # Print the (presumably worst-case) words needing 8 comparisons.
            print(word)
# NOTE(review): `pprint` is not imported in the visible part of this chunk --
# confirm `from pprint import pprint` exists in the missing file header.
pprint(nr_comps)
# NOTE(review): whitespace-mangled chunk. The first part is the TAIL of a
# function (apparently `compute_max_prefix_sandwich`; its `def` header and the
# enclosing loop implied by the `break` statements are NOT visible here),
# followed by a top-level driver script. Indentation/dedents below are a
# best-effort reconstruction -- confirm against the original file.

    # Candidates are adjacent: i wins this round.
    if j == i + 1:
        r = i
        break
    # Advance two cursors in lockstep, k from i and l from j, comparing
    # letters of `word` position by position.
    k = i
    l = j
    while True:
        k += 1
        l += 1
        if l > len(word) - 1:
            # l ran off the end of the word: eliminate j, i wins.
            j -= 1
            r = i
            break
        if k == j:
            # k caught up with j: i is the final answer.
            return i
        if word[k] < word[l]:
            # i's side compares smaller: eliminate i, j wins this round.
            i += 1
            r = j
            break
        if word[k] > word[l]:
            # j's side compares smaller: eliminate j, i wins this round.
            j -= 1
            r = i
            break
    # Presumably reached after the (missing) enclosing loop exits -- TODO
    # confirm the dedent level in the original source.
    return r


# --- driver: validate the sandwich algorithm against Util's ground truth ----
for word, r in Util(7, -1).generate_all_words():
    r_sandwich = compute_max_prefix_sandwich(word)
    if r_sandwich != r:
        # NOTE(review): the format string is missing the closing `]` after
        # `r_actual={}` (compare the analogous message in the fuzzier driver),
        # so the printed diagnostic ends without a bracket. Left byte-identical
        # here; fix the literal to "...r_actual={}]" in a code change.
        print("Sandwich Algorithm failed for {} [r={}, r_actual={}".format(
            word, r_sandwich, r))
def __init__(self):
    """Initialize the client.

    Caches the request/response endpoint URLs and the API token from the
    shared ``Constants`` module, and creates the ``Util`` helper used by
    the instance methods.
    """
    # Endpoint configuration and credentials come from Constants.
    self.apiToken = Constants.TOKEN
    self.apiRequestUrl = Constants.API_REQUEST_URL
    self.apiResponseUrl = Constants.API_RESPONSE_URL
    # General-purpose project helper.
    self.util = Util()