Example no. 1
0
def write_out_maps(gn, import_map):
    """Merge *import_map* into the on-disk shard ``symbol_table/<gn>.json``.

    Keys already present in the shard keep their existing members and gain
    the new ones.  When the merge path is taken, ``import_map`` is drained
    as a side effect so the caller releases that memory.  A missing or
    unreadable shard is replaced wholesale by ``import_map``.
    """
    try:
        with open(f"symbol_table/{gn}.json", "r") as f:
            merged = load(f)
    except (FileNotFoundError, json.decoder.JSONDecodeError):
        # No usable shard on disk yet: start from the incoming mapping.
        merged = import_map
    else:
        # Fold every new entry into the existing shard, emptying import_map.
        while import_map:
            key, values = import_map.popitem()
            merged.setdefault(key, set()).update(values)
    # NOTE(review): assumes the custom dump/load pair round-trips sets —
    # plain json would not; confirm against the project's serializer.
    with open(f"symbol_table/{gn}.json", "w") as f:
        dump(merged, f)
Example no. 2
0
def read_sharded_dict():
    """Read every shard file in ``import_maps/`` and merge them into one dict.

    Returns the union of all shard mappings; on duplicate keys, whichever
    file is listed later wins (plain ``dict.update`` semantics).
    """
    merged = {}
    for name in os.listdir("import_maps"):
        # os.listdir yields bare filenames; join with the directory so the
        # open does not resolve against the CWD (the original did, and
        # raised FileNotFoundError for every shard).
        with open(os.path.join("import_maps", name)) as f:
            merged.update(load(f))
    return merged
Example no. 3
0
def read_sharded_dict():
    """Read every shard file in ``symbol_table/`` and merge them into one dict.

    Returns the union of all shard mappings; on duplicate keys, whichever
    file is listed later wins (plain ``dict.update`` semantics).
    """
    merged = {}
    for name in os.listdir("symbol_table"):
        # os.listdir yields bare filenames; join with the directory so the
        # open does not resolve against the CWD (the original did, and
        # raised FileNotFoundError for every shard).
        with open(os.path.join("symbol_table", name)) as f:
            merged.update(load(f))
    return merged
Example no. 4
0
            # Harvest this future's result: the imports found and the files
            # it indexed.  NOTE(review): `f`, `future`, `import_map`,
            # `clobbers`, and `tpe` are bound above this view — `f` appears
            # to be the filename associated with `future`; confirm upstream.
            imports, files = future.result()
            # Package name from a "<pkg>-<version>-<build>" style filename.
            pkg = f.rsplit("-", 2)[0]
            for impt in imports:
                import_map[impt].add(f)
                # A top-level import whose name does not match the package
                # "clobbers" the namespace, unless the package is explicitly
                # excepted.
                if (not impt.startswith(pkg.replace("-", "_"))
                        and pkg not in CLOBBER_EXCEPTIONS):
                    clobbers.add(pkg)

        os.makedirs("import_maps", exist_ok=True)
        # Shard the import map by a head-letters key.  NOTE(review): groupby
        # only groups contiguous runs, so the sort key (lowercased name) and
        # the group key (_get_head_letters) must order consistently — confirm
        # _get_head_letters is case-insensitive.
        sorted_imports = sorted(import_map.keys(), key=lambda x: x.lower())
        for gn, keys in tqdm(
                groupby(sorted_imports, lambda x: _get_head_letters(x))):
            # pop() drains import_map shard-by-shard, capping peak memory.
            sub_import_map = {k: import_map.pop(k) for k in keys}
            tpe.submit(write_out_maps, gn, sub_import_map)

    # Append the processed files so later runs can skip them.
    # NOTE(review): `files_indexed` is bound outside this view — confirm.
    with open(".indexed_files", "a") as f:
        for file in files_indexed:
            f.write(f"{file}\n")
    # Merge the newly found clobbering packages into the persisted set.
    try:
        with open("clobbering_pkgs.json", "r") as f:
            _clobbers = load(f)
    except FileNotFoundError:
        _clobbers = set()
    _clobbers.update(clobbers)

    # NOTE(review): assumes the custom dump/load pair round-trips sets —
    # plain json would not; confirm against the project's serializer.
    with open("clobbering_pkgs.json", "w") as f:
        dump(_clobbers, f)

    # Record the sharding parameter so readers regroup with the same key.
    with open("import_maps_meta.json", "w") as f:
        dump({"num_letters": NUM_LETTERS}, f)