def format_modules(modules: ModuleIRs) -> List[str]:
    """Return the pretty-printed IR of every function in all modules.

    Each function's formatted ops are followed by a single blank line
    so consecutive functions are visually separated in the output.
    """
    lines: List[str] = []
    for module_ir in modules.values():
        for func_ir in module_ir.functions:
            lines += format_func(func_ir)
            lines.append('')
    return lines
def write_cache(
    modules: ModuleIRs,
    result: BuildResult,
    group_map: Dict[str, Optional[str]],
    ctext: Dict[Optional[str], List[Tuple[str, str]]],
) -> None:
    """Write out the cache information for modules.

    Each module has the following cache information written (which is
    in addition to the cache information written by mypy itself):

    * A serialized version of its mypyc IR, minus the bodies of
      functions. This allows code that depends on it to use these
      serialized data structures when compiling against it instead of
      needing to recompile it. (Compiling against a module requires
      access to both its mypy and mypyc data structures.)

    * The hash of the mypy metadata cache file for the module. This
      is used to ensure that the mypyc cache and the mypy cache are
      in sync and refer to the same version of the code. This is
      particularly important if mypyc crashes/errors/is stopped after
      mypy has written its cache but before mypyc has.

    * The hashes of all of the source file outputs for the group the
      module is in. This is so that the module will be recompiled if
      the source outputs are missing.
    """
    # Precompute, per group, the hash of every generated source file,
    # so each module in a group shares the same src_hashes mapping.
    hashes = {}
    for name, files in ctext.items():
        hashes[name] = {file: compute_hash(data) for file, data in files}

    # Write out cache data
    for id, module in modules.items():
        st = result.graph[id]
        meta_path, _, _ = get_cache_names(id, st.xpath, result.manager.options)
        # If the metadata isn't there, skip writing the cache.
        # (meta_hash below must refer to a cache file that actually
        # exists, otherwise the mypyc cache could not be validated
        # against the mypy cache.)
        try:
            meta_data = result.manager.metastore.read(meta_path)
        except IOError:
            continue
        newpath = get_state_ir_cache_name(st)
        ir_data = {
            'ir': module.serialize(),
            'meta_hash': compute_hash(meta_data),
            # group_map[id] may be None for ungrouped modules; hashes
            # was keyed on the same Optional[str] group names above.
            'src_hashes': hashes[group_map[id]],
        }
        result.manager.metastore.write(newpath, json.dumps(ir_data))
    # Single commit after all per-module writes so the store is
    # flushed once, not once per module.
    result.manager.metastore.commit()