def _setup(cls): """NOT_RPYTHON""" lp = py.path.local import pypy, os p = lp(pypy.__file__).new(basename='_cache').ensure(dir=1) cls.cache_path = p ini = p.join('__init__.py') try: if not ini.check(): raise ImportError # don't import if only a .pyc file left!!! from pypy._cache import known_code, \ GI_VERSION_RENDERED except ImportError: GI_VERSION_RENDERED = 0 from pypy.translator.geninterplevel import GI_VERSION cls.seed = md5(str(GI_VERSION)).digest() if GI_VERSION != GI_VERSION_RENDERED or GI_VERSION is None: for pth in p.listdir(): if pth.check(file=1): try: pth.remove() except: pass f = file(get_tmp_file_name(str(ini)), "w") f.write("""\ # This folder acts as a cache for code snippets which have been # compiled by compile_as_module(). # It will get a new entry for every piece of code that has # not been seen, yet. # # Caution! Only the code snippet is checked. If something # is imported, changes are not detected. Also, changes # to geninterplevel or gateway are also not checked. # Exception: There is a checked version number in geninterplevel.py # # If in doubt, remove this file from time to time. GI_VERSION_RENDERED = %r known_code = {} # self-destruct on double-click: def harakiri(): import pypy._cache as _c import py lp = py.path.local for pth in lp(_c.__file__).dirpath().listdir(): try: pth.remove() except: pass if __name__ == "__main__": harakiri() del harakiri """ % GI_VERSION) f.close() rename_tmp_to_eventual_file_name(str(ini)) import pypy._cache cls.known_code = pypy._cache.known_code cls._setup_done = True
def build_applevelinterp_dict(cls, self, space):
    "NOT_RPYTHON"
    # N.B. 'self' is the ApplevelInterp; this is a class method,
    # just so that we have a convenient place to store the global state.
    if not cls._setup_done:
        cls._setup()

    from pypy.translator.geninterplevel import translate_as_module
    import marshal, os   # os is needed for the filename-based cache name below
    scramble = md5(cls.seed)
    scramble.update(marshal.dumps(self.source))
    key = scramble.hexdigest()
    initfunc = cls.known_code.get(key)
    if not initfunc:
        # try to get it from file
        name = key
        if self.filename:
            prename = os.path.splitext(os.path.basename(self.filename))[0]
        else:
            prename = 'zznoname'
        name = "%s_%s" % (prename, name)
        try:
            __import__("pypy._cache." + name)
        except ImportError, x:
            # print x
            pass
        else:
            initfunc = cls.known_code[key]
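# Hedged sketch of the cache-key scheme used above: the class-level seed is
# the md5 of the geninterplevel version, and each snippet's key extends that
# seed with the marshaled source text, so bumping GI_VERSION or editing the
# snippet both produce a fresh key. The "1.2" version string below is a
# hypothetical stand-in for GI_VERSION.
import marshal
from pypy.tool.compat import md5

def snippet_key(seed, source):
    scramble = md5(seed)
    scramble.update(marshal.dumps(source))
    return scramble.hexdigest()

seed = md5(str("1.2")).digest()
key1 = snippet_key(seed, "def f(): return 42\n")
key2 = snippet_key(seed, "def f(): return 43\n")
assert key1 != key2               # any source change yields a new cache entry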
def cache_file_path(c_files, eci, cachename): "Builds a filename to cache compilation data" # Import 'platform' every time, the compiler may have been changed from pypy.translator.platform import platform cache_dir = cache_dir_root.join(cachename).ensure(dir=1) filecontents = [c_file.read() for c_file in c_files] key = repr((filecontents, eci, platform.key())) hash = md5(key).hexdigest() return cache_dir.join(hash)
def md5digest(translator):
    """Return a dict mapping each graph name to the md5 digest of
    its operations (opname, result and arguments)."""
    from pypy.tool.compat import md5
    graph2digest = {}
    for graph in translator.graphs:
        m = md5()
        for op in graph_operations(graph):
            m.update(op.opname + str(op.result))
            for a in op.args:
                m.update(str(a))
        graph2digest[graph.name] = m.digest()
    return graph2digest
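# Hedged sketch: the per-graph digests make it cheap to tell which graphs
# changed between two translation runs; 'old' and 'new' would be two
# snapshots returned by md5digest().
def changed_graphs(old, new):
    return [name for name, digest in new.items()
            if old.get(name) != digest]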
def get_statistics(graph, translator, save_per_graph_details=None,
                   ignore_stack_checks=False):
    """Count the graphs, blocks, operations and mallocs reachable from
    'graph', following direct and indirect calls; optionally write the
    per-graph breakdown to 'save_per_graph_details'."""
    seen_graphs = {}
    stack = [graph]
    num_graphs = 0
    num_blocks = 0
    num_ops = 0
    num_mallocs = 0
    per_graph = {}
    while stack:
        graph = stack.pop()
        if graph in seen_graphs:
            continue
        seen_graphs[graph] = True
        num_graphs += 1
        old_num_blocks = num_blocks
        old_num_ops = num_ops
        old_num_mallocs = num_mallocs
        for block in graph.iterblocks():
            num_blocks += 1
            for op in block.operations:
                if op.opname == "direct_call":
                    called_graph = get_graph(op.args[0], translator)
                    if called_graph is not None and ignore_stack_checks:
                        if called_graph.name.startswith('ll_stack_check'):
                            continue
                    if called_graph is not None:
                        stack.append(called_graph)
                elif op.opname == "indirect_call":
                    called_graphs = op.args[-1].value
                    if called_graphs is not None:
                        stack.extend(called_graphs)
                elif op.opname.startswith("malloc"):
                    num_mallocs += 1
                num_ops += 1
        per_graph[graph] = (num_blocks - old_num_blocks,
                            num_ops - old_num_ops,
                            num_mallocs - old_num_mallocs)

    if save_per_graph_details:
        details = []
        for graph, (nblocks, nops, nmallocs) in per_graph.iteritems():
            try:
                code = graph.func.func_code.co_code
            except AttributeError:
                code = "None"
            hash = md5(code).hexdigest()
            details.append((hash, graph.name, nblocks, nops, nmallocs))
        details.sort()
        f = open(save_per_graph_details, "w")
        try:
            for hash, name, nblocks, nops, nmallocs in details:
                print >>f, hash, name, nblocks, nops, nmallocs
        finally:
            f.close()
    return num_graphs, num_blocks, num_ops, num_mallocs
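# Hedged usage sketch for get_statistics(): summarize everything reachable
# from the entry graph while skipping the ll_stack_check helpers. Taking
# translator.graphs[0] as the entry point is an assumption.
def print_statistics(translator):
    entry = translator.graphs[0]      # assumed entry graph
    num_graphs, num_blocks, num_ops, num_mallocs = get_statistics(
        entry, translator, ignore_stack_checks=True)
    print "%d graphs, %d blocks, %d ops, %d mallocs" % (
        num_graphs, num_blocks, num_ops, num_mallocs)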