Example 1
def write_cache(
    modules: ModuleIRs,
    result: BuildResult,
    group_map: Dict[str, Optional[str]],
    ctext: Dict[Optional[str], List[Tuple[str, str]]],
) -> None:
    """Write out the cache information for modules.

    Each module has the following cache information written (which is
    in addition to the cache information written by mypy itself):
      * A serialized version of its mypyc IR, minus the bodies of
        functions. This allows code that depends on it to use
        these serialized data structures when compiling against it
        instead of needing to recompile it. (Compiling against a
        module requires access to both its mypy and mypyc data
        structures.)
      * The hash of the mypy metadata cache file for the module.
        This is used to ensure that the mypyc cache and the mypy
        cache are in sync and refer to the same version of the code.
        This is particularly important if mypyc crashes/errors/is
        stopped after mypy has written its cache but before mypyc has.
      * The hashes of all of the source file outputs for the group
        the module is in. This is so that the module will be
        recompiled if the source outputs are missing.
     """

    hashes = {}
    for name, files in ctext.items():
        hashes[name] = {file: compute_hash(data) for file, data in files}

    # Write out cache data
    for id, module in modules.items():
        st = result.graph[id]

        meta_path, _, _ = get_cache_names(id, st.xpath, result.manager.options)
        # If the metadata isn't there, skip writing the cache.
        try:
            meta_data = result.manager.metastore.read(meta_path)
        except IOError:
            continue

        newpath = get_state_ir_cache_name(st)
        ir_data = {
            'ir': module.serialize(),
            'meta_hash': compute_hash(meta_data),
            'src_hashes': hashes[group_map[id]],
        }

        result.manager.metastore.write(newpath, json.dumps(ir_data))

    result.manager.metastore.commit()
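
For reference, here is a minimal, self-contained sketch of the per-module entry that write_cache stores, using hashlib.sha256 as a stand-in for mypy's compute_hash helper (the real helper lives in mypy's utilities and may use a different digest); the paths and values below are hypothetical.

import hashlib
import json


def compute_hash_sketch(text: str) -> str:
    # Stand-in for mypy's compute_hash: a stable hash of the given text.
    return hashlib.sha256(text.encode("utf-8")).hexdigest()


# Shape of one entry written by write_cache for a module "pkg.mod":
meta_data = '{"mtime": 0, "hash": "..."}'          # contents of pkg/mod.meta.json
src_hashes = {"pkg/mod.c": compute_hash_sketch("/* generated C */")}

ir_entry = {
    "ir": {},                                      # module.serialize(), function bodies stripped
    "meta_hash": compute_hash_sketch(meta_data),   # ties the mypyc cache to the mypy cache
    "src_hashes": src_hashes,                      # hashes of the group's generated sources
}

print(json.dumps(ir_entry, indent=2))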
Example 2
    def report_config_data(
            self, ctx: ReportConfigContext
    ) -> Optional[Tuple[Optional[str], List[str]]]:
        # The config data we report is the group map entry for the module.
        # If the data is being used to check validity, we do additional checks
        # that the IR cache exists and matches the metadata cache and all
        # output source files exist and are up to date.

        id, path, is_check = ctx.id, ctx.path, ctx.is_check

        if id not in self.group_map:
            return None

        # If we aren't doing validity checks, just return the cache data
        if not is_check:
            return self.group_map[id]

        # Load the metadata and IR cache
        meta_path, _, _ = get_cache_names(id, path, self.options)
        ir_path = get_ir_cache_name(id, path, self.options)
        try:
            meta_json = self.metastore.read(meta_path)
            ir_json = self.metastore.read(ir_path)
        except FileNotFoundError:
            # This could happen if mypyc failed after mypy succeeded
            # in the previous run or if some cache files got
            # deleted. No big deal, just fail to load the cache.
            return None

        ir_data = json.loads(ir_json)

        # Check that the IR cache matches the metadata cache
        if compute_hash(meta_json) != ir_data['meta_hash']:
            return None

        # Check that all of the source files are present and as
        # expected. The main situation where this would come up is the
        # user deleting the build directory without deleting
        # .mypy_cache, which we should handle gracefully.
        for path, hash in ir_data['src_hashes'].items():
            try:
                with open(os.path.join(self.compiler_options.target_dir, path),
                          'rb') as f:
                    contents = f.read()
            except FileNotFoundError:
                return None
            real_hash = hash_digest(contents)
            if hash != real_hash:
                return None

        return self.group_map[id]
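
The validity check above can also be expressed outside the plugin API. Below is a standalone sketch of the same logic, again substituting hashlib.sha256 for mypy's hash helpers; the function name and file layout are assumptions for illustration only.

import hashlib
import json
import os


def ir_cache_is_valid(ir_cache_path: str, meta_path: str, target_dir: str) -> bool:
    """Sketch: is the mypyc IR cache usable for this module?

    Returns False when the IR cache is missing, out of sync with the mypy
    metadata cache, or when any generated source file is missing or stale.
    """
    try:
        with open(meta_path, "rb") as f:
            meta_bytes = f.read()
        with open(ir_cache_path, "r", encoding="utf-8") as f:
            ir_data = json.load(f)
    except FileNotFoundError:
        return False

    # The IR cache must refer to the same mypy metadata it was built against.
    if hashlib.sha256(meta_bytes).hexdigest() != ir_data["meta_hash"]:
        return False

    # Every generated source file must exist and match its recorded hash.
    for rel_path, expected in ir_data["src_hashes"].items():
        try:
            with open(os.path.join(target_dir, rel_path), "rb") as f:
                contents = f.read()
        except FileNotFoundError:
            return False
        if hashlib.sha256(contents).hexdigest() != expected:
            return False

    return True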
Example 3
def get_ir_cache_name(id: str, path: str, options: Options) -> str:
    meta_path, _, _ = get_cache_names(id, path, options)
    return meta_path.replace('.meta.json', '.ir.json')
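
For illustration, with a hypothetical metadata path under a typical .mypy_cache layout, the derived IR cache name is simply the metadata name with its suffix swapped:

# Hypothetical paths; only the suffix replacement matters here.
meta_path = ".mypy_cache/3.11/pkg/mod.meta.json"
ir_path = meta_path.replace(".meta.json", ".ir.json")
assert ir_path == ".mypy_cache/3.11/pkg/mod.ir.json"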