def test_func_simple():
    """Build by hand the flow graph of  f(x): return x+1  , register the
    function pointer (and then a GcStruct containing it) in a fresh
    LowLevelDatabase, and dump the generated C sources to stdout."""
    # -------------------- flowgraph building --------------------
    #     def f(x):
    #         return x+1
    x = Variable("x")
    x.concretetype = Signed
    result = Variable("result")
    result.concretetype = Signed
    one = Constant(1)
    one.concretetype = Signed
    op = SpaceOperation("int_add", [x, one], result)
    block = Block([x])
    graph = FunctionGraph("f", block)
    block.operations.append(op)
    # single exit: return the result of the addition
    block.closeblock(Link([result], graph.returnblock))
    graph.getreturnvar().concretetype = Signed
    # -------------------- end --------------------
    F = FuncType([Signed], Signed)
    f = functionptr(F, "f", graph=graph)
    # first: the bare function pointer
    db = LowLevelDatabase()
    db.get(f)
    db.complete()
    dump_on_stdout(db)
    # second: a GC structure holding the same function pointer
    S = GcStruct('testing', ('fptr', Ptr(F)))
    s = malloc(S)
    s.fptr = f
    db = LowLevelDatabase()
    db.get(s)
    db.complete()
    dump_on_stdout(db)
def inputconst(reqtype, value):
    """Return a Constant with the given value, of the requested type,
    which can be a Repr instance or a low-level type.
    """
    if isinstance(reqtype, Repr):
        # a Repr knows how to convert the value to its low-level form
        value = reqtype.convert_const(value)
        lltype = reqtype.lowleveltype
    elif isinstance(reqtype, LowLevelType):
        lltype = reqtype
    else:
        raise TypeError(repr(reqtype))
    # Void Constants can hold any value;
    # non-Void Constants must hold a correctly ll-typed value
    if lltype is not Void:
        try:
            realtype = typeOf(value)
        except (AssertionError, AttributeError):
            # typeOf can fail on odd prebuilt objects; report '???'
            realtype = '???'
        if not isCompatibleType(realtype, lltype):
            raise TyperError("inputconst(reqtype = %s, value = %s):\n"
                             "expected a %r,\n"
                             " got a %r" % (reqtype, value,
                                            lltype, realtype))
    c = Constant(value)
    c.concretetype = lltype
    return c
def flatten(self, S):
    """Recursively flatten the structure S into per-field entries.

    For each non-container field, record a Constant holding the field's
    default value in self.flatconstants; fields whose address is taken
    (accessed_substructs) instead get an independently-mallocable
    'wrapper' GcStruct recorded in self.needsubmallocs.
    """
    start = 0
    if S._names and self.equivalent_substruct(S, S._names[0]):
        # the first field is equivalent to a cast of the whole struct
        SUBTYPE = S._flds[S._names[0]]
        if isinstance(SUBTYPE, lltype.Struct):
            # inline substruct: flatten it in place of field 0
            self.flatten(SUBTYPE)
            start = 1
        else:
            # NOTE(review): non-struct case registers a direct_fieldptr
            # key but does not skip the field below — presumably
            # intentional; confirm against callers
            ARRAY = lltype.FixedSizeArray(SUBTYPE, 1)
            self.direct_fieldptr_key[ARRAY, 'item0'] = S, S._names[0]
    for name in S._names[start:]:
        key = S, name
        FIELDTYPE = S._flds[name]
        if key in self.accessed_substructs:
            # field's address escapes: keep it in a separate GC wrapper
            self.needsubmallocs.append(key)
            self.flatnames.append(key)
            self.newvarstype[key] = lltype.Ptr(
                lltype.GcStruct('wrapper', ('data', FIELDTYPE)))
        elif not isinstance(FIELDTYPE, lltype.ContainerType):
            # plain field: remember its default value as a Constant
            example = FIELDTYPE._defl()
            constant = Constant(example)
            constant.concretetype = FIELDTYPE
            self.flatconstants[key] = constant
            self.flatnames.append(key)
            self.newvarstype[key] = FIELDTYPE
def make_const_rt_result(self, v_result, value):
    """Bind v_result to a fresh RuntimeSpecNode and map that node to a
    Constant holding 'value' (typed like v_result) in self.renamings."""
    rtnode = RuntimeSpecNode(v_result, v_result.concretetype)
    self.setnode(v_result, rtnode)
    # a Void result may carry any placeholder; otherwise the value's
    # low-level type must match the variable's
    if v_result.concretetype is not lltype.Void:
        assert v_result.concretetype == lltype.typeOf(value)
    const = Constant(value)
    const.concretetype = v_result.concretetype
    self.renamings[rtnode] = const
def flatten(self, TYPE):
    """Record one flattened entry per field of TYPE: a Constant with the
    field's default value, plus the access key and low-level type."""
    fields = self._get_fields(TYPE)
    for fieldname, (FIELDTYPE, default) in fields.iteritems():
        access_key = self.key_for_field_access(TYPE, fieldname)
        c_default = Constant(default)
        c_default.concretetype = FIELDTYPE
        self.flatconstants[access_key] = c_default
        self.flatnames.append(access_key)
        self.newvarstype[access_key] = FIELDTYPE
def inputdesc(reqtype, desc):
    """Return a Constant for the given desc, converted through the
    requested type, which can only be a Repr instance."""
    assert isinstance(reqtype, Repr)
    const = Constant(reqtype.convert_desc(desc))
    const.concretetype = reqtype.lowleveltype
    return const
def handle_op_malloc(self, op):
    """If this malloc is the one we were asked to expand, replace it by
    a VirtualSpecNode whose fields are renamed to their default-value
    Constants; otherwise fall back to default handling."""
    if op.result is self.v_expand_malloc:
        MALLOCTYPE = op.result.concretetype.TO
        typedesc = self.graphbuilder.mallocv.getmalloctypedesc(MALLOCTYPE)
        virtualnode = VirtualSpecNode(typedesc, [])
        self.setnode(op.result, virtualnode)
        # every field starts out holding its type's default value
        for name, FIELDTYPE in typedesc.names_and_types:
            fieldnode = RuntimeSpecNode(name, FIELDTYPE)
            virtualnode.fields.append(fieldnode)
            c = Constant(FIELDTYPE._defl())
            c.concretetype = FIELDTYPE
            self.renamings[fieldnode] = c
        self.v_expand_malloc = None      # done
        return []    # the malloc operation itself disappears
    else:
        return self.handle_default(op)
def get_exc_reconstruction_block(self, typedesc):
    """Build and return a block that re-materializes a virtualized
    exception instance: it mallocs a fresh MALLOCTYPE, copies every
    field from the incoming exception value, and jumps to the graph's
    exceptblock with the proper vtable constant."""
    exceptblock = self.graph.exceptblock
    self.mallocv.fixup_except_block(exceptblock)
    TEXC = exceptblock.inputargs[0].concretetype
    TVAL = exceptblock.inputargs[1].concretetype
    #
    v_ignored_type = varoftype(TEXC)
    v_incoming_value = varoftype(TVAL)
    block = Block([v_ignored_type, v_incoming_value])
    #
    # malloc a new instance of the exception's concrete type
    c_EXCTYPE = Constant(typedesc.MALLOCTYPE, lltype.Void)
    v = varoftype(lltype.Ptr(typedesc.MALLOCTYPE))
    c_flavor = Constant({'flavor': 'gc'}, lltype.Void)
    op = SpaceOperation('malloc', [c_EXCTYPE, c_flavor], v)
    block.operations.append(op)
    #
    # copy each field: cast incoming value and fresh malloc to the exact
    # substructure pointer, then getfield/setfield
    for name, FIELDTYPE in typedesc.names_and_types:
        EXACTPTR = lltype.Ptr(typedesc.name2subtype[name])
        c_name = Constant(name)
        c_name.concretetype = lltype.Void
        #
        v_in = varoftype(EXACTPTR)
        op = SpaceOperation('cast_pointer', [v_incoming_value], v_in)
        block.operations.append(op)
        #
        v_field = varoftype(FIELDTYPE)
        op = SpaceOperation('getfield', [v_in, c_name], v_field)
        block.operations.append(op)
        #
        v_out = varoftype(EXACTPTR)
        op = SpaceOperation('cast_pointer', [v], v_out)
        block.operations.append(op)
        #
        v0 = varoftype(lltype.Void)
        op = SpaceOperation('setfield', [v_out, c_name, v_field], v0)
        block.operations.append(op)
    #
    # cast the fresh instance back to the generic exception value type
    v_exc_value = varoftype(TVAL)
    op = SpaceOperation('cast_pointer', [v], v_exc_value)
    block.operations.append(op)
    #
    # the exception type is a compile-time constant (the vtable)
    exc_type = self.mallocv.EXCTYPE_to_vtable[typedesc.MALLOCTYPE]
    c_exc_type = Constant(exc_type, TEXC)
    block.closeblock(Link([c_exc_type, v_exc_value], exceptblock))
    return block
def generic_exception_matching(self, afterblock, copiedexceptblock):
    """Insert a chain of blocks that perform run-time exception matching
    (via the rtyper's fn_exception_match) for the exit links of
    'afterblock' where direct matching did not work; the last block in
    the chain becomes an unconditional fallthrough."""
    #XXXXX don't look: insert blocks that do exception matching
    #for the cases where direct matching did not work
    exc_match = Constant(
        self.translator.rtyper.getexceptiondata().fn_exception_match)
    exc_match.concretetype = typeOf(exc_match.value)
    blocks = []
    for i, link in enumerate(afterblock.exits[1:]):
        # one matching block per exceptional exit link
        etype = copyvar(None, copiedexceptblock.inputargs[0])
        evalue = copyvar(None, copiedexceptblock.inputargs[1])
        passon_vars = self.passon_vars(i)
        block = Block([etype, evalue] + passon_vars)
        res = Variable()
        res.concretetype = Bool
        cexitcase = Constant(link.llexitcase)
        cexitcase.concretetype = typeOf(cexitcase.value)
        # res = fn_exception_match(etype, cexitcase)
        args = [exc_match, etype, cexitcase]
        block.operations.append(SpaceOperation("direct_call", args, res))
        block.exitswitch = res
        linkargs = self.find_args_in_exceptional_case(link, link.target,
                                                      etype, evalue,
                                                      afterblock,
                                                      passon_vars)
        # True exit: the exception matched, go to the original target
        l = Link(linkargs, link.target)
        l.prevblock = block
        l.exitcase = True
        l.llexitcase = True
        block.closeblock(l)
        if i > 0:
            # chain: the previous block's False exit falls into this one
            l = Link(blocks[-1].inputargs, block)
            l.exitcase = False
            l.llexitcase = False
            blocks[-1].recloseblock(l, *blocks[-1].exits)
        blocks.append(block)
    # the last block matches unconditionally: strip its switch and
    # its matching operation, keep only the first exit
    blocks[-1].recloseblock(*blocks[-1].exits[:1])
    blocks[-1].operations = []
    blocks[-1].exitswitch = None
    blocks[-1].exits[0].exitcase = None
    del blocks[-1].exits[0].llexitcase
    linkargs = copiedexceptblock.inputargs
    copiedexceptblock.recloseblock(Link(linkargs, blocks[0]))
    copiedexceptblock.operations += self.generate_keepalive(linkargs)
def adjust_shape(hop2, s_shape):
    """Rebuild the constant call shape with its positional-argument
    count bumped by one, and reinsert it as the first argument of hop2.
    's_shape' is an annotation whose .const is the shape tuple."""
    new_shape = (s_shape.const[0] + 1, ) + s_shape.const[1:]
    c_shape = Constant(new_shape)
    s_shape = hop2.rtyper.annotator.bookkeeper.immutablevalue(new_shape)
    hop2.v_s_insertfirstarg(c_shape, s_shape)   # reinsert adjusted shape
def render(self, generator, op):
    # Emit a load of this instance's prebuilt value as a typed ootype
    # constant; 'op' is unused here.
    generator.load(Constant(self.value, ootype.typeOf(self.value)))
def normalize_calltable_row_signature(annotator, shape, row):
    """Make all graphs of one call-table row accept calls of the given
    'shape' (positional count + keyword names): prepend a new start
    block that reorders arguments and fills in defaults.  Returns True
    if any graph was modified."""
    graphs = row.values()
    assert graphs, "no graph??"
    sig0 = graphs[0].signature
    defaults0 = graphs[0].defaults
    for graph in graphs[1:]:
        if graph.signature != sig0:
            break
        if graph.defaults != defaults0:
            break
    else:
        return False   # nothing to do, all signatures already match
    shape_cnt, shape_keys, shape_star, shape_stst = shape
    assert not shape_star, "XXX not implemented"
    assert not shape_stst, "XXX not implemented"
    # for the first 'shape_cnt' arguments we need to generalize to
    # a common type
    call_nbargs = shape_cnt + len(shape_keys)
    did_something = False
    NODEFAULT = object()    # sentinel: argument has no default value
    for graph in graphs:
        argnames, varargname, kwargname = graph.signature
        assert not varargname, "XXX not implemented"
        assert not kwargname, "XXX not implemented" # ?
        inputargs_s = [annotator.binding(v) for v in graph.getargs()]
        # argorder[k] = index in the graph's own signature of the k-th
        # argument as passed by the call shape
        argorder = range(shape_cnt)
        for key in shape_keys:
            i = list(argnames).index(key)
            assert i not in argorder
            argorder.append(i)
        need_reordering = (argorder != range(call_nbargs))
        if need_reordering or len(graph.getargs()) != call_nbargs:
            oldblock = graph.startblock
            inlist = []
            defaults = graph.defaults or ()
            num_nondefaults = len(inputargs_s) - len(defaults)
            defaults = [NODEFAULT] * num_nondefaults + list(defaults)
            newdefaults = []
            for j in argorder:
                v = Variable(graph.getargs()[j])
                annotator.setbinding(v, inputargs_s[j])
                inlist.append(v)
                newdefaults.append(defaults[j])
            newblock = Block(inlist)
            # prepare the output args of newblock:
            # 1. collect the positional arguments
            outlist = inlist[:shape_cnt]
            # 2. add defaults and keywords
            for j in range(shape_cnt, len(inputargs_s)):
                try:
                    i = argorder.index(j)
                    v = inlist[i]
                except ValueError:
                    default = defaults[j]
                    if default is NODEFAULT:
                        raise TyperError(
                            "call pattern has %d positional arguments, "
                            "but %r takes at least %d arguments" % (
                                shape_cnt, graph.name, num_nondefaults))
                    v = Constant(default)
                outlist.append(v)
            newblock.closeblock(Link(outlist, oldblock))
            graph.startblock = newblock
            # trim leading NODEFAULT entries off the new default list
            for i in range(len(newdefaults)-1,-1,-1):
                if newdefaults[i] is NODEFAULT:
                    newdefaults = newdefaults[i:]
                    break
            graph.defaults = tuple(newdefaults)
            graph.signature = Signature([argnames[j] for j in argorder],
                                        None, None)
            # finished
            checkgraph(graph)
            annotator.annotated[newblock] = annotator.annotated[oldblock]
            did_something = True
    return did_something
def inittime_helper(self, ll_helper, ll_args, ll_result, inline=True):
    """Annotate the given low-level helper and return its function
    pointer wrapped in a typed Constant."""
    helper_ptr = self.annotate_helper(ll_helper, ll_args, ll_result,
                                      inline=inline)
    PTR_TYPE = lltype.typeOf(helper_ptr)
    return Constant(helper_ptr, PTR_TYPE)
def immutablevalue(self, x, need_const=True):
    """The most precise SomeValue instance that contains the
    immutable value x.

    With need_const=True (the default) the annotation is a true
    constant: mutable containers are memoized in self.immutable_cache
    and the result carries a .const attribute.  With need_const=False,
    containers only contribute their generalized item annotations.
    """
    # convert unbound methods to the underlying function
    if hasattr(x, 'im_self') and x.im_self is None:
        x = x.im_func
        assert not hasattr(x, 'im_self')
    if x is sys: # special case constant sys to someobject
        return SomeObject()
    tp = type(x)
    if issubclass(tp, Symbolic): # symbolic constants support
        result = x.annotation()
        result.const_box = Constant(x)
        return result
    if tp is bool:
        result = SomeBool()
    elif tp is int:
        result = SomeInteger(nonneg=x >= 0)
    elif tp is long:
        # longs are only accepted if they fit into a machine int
        if -sys.maxint - 1 <= x <= sys.maxint:
            x = int(x)
            result = SomeInteger(nonneg=x >= 0)
        else:
            raise Exception("seeing a prebuilt long (value %s)" % hex(x))
    elif issubclass(tp, str): # py.lib uses annotated str subclasses
        if len(x) == 1:
            result = SomeChar()
        else:
            result = SomeString()
    elif tp is unicode:
        if len(x) == 1:
            result = SomeUnicodeCodePoint()
        else:
            result = SomeUnicodeString()
    elif tp is tuple:
        result = SomeTuple(
            items=[self.immutablevalue(e, need_const) for e in x])
    elif tp is float:
        result = SomeFloat()
    elif tp is list:
        if need_const:
            key = Constant(x)
            try:
                return self.immutable_cache[key]
            except KeyError:
                # insert into the cache *before* generalizing the items,
                # so that recursive lists terminate
                result = SomeList(ListDef(self, s_ImpossibleValue))
                self.immutable_cache[key] = result
                for e in x:
                    result.listdef.generalize(self.immutablevalue(e))
                result.const_box = key
                return result
        else:
            listdef = ListDef(self, s_ImpossibleValue)
            for e in x:
                listdef.generalize(self.immutablevalue(e, False))
            result = SomeList(listdef)
    elif tp is dict or tp is r_dict:
        if need_const:
            key = Constant(x)
            try:
                return self.immutable_cache[key]
            except KeyError:
                result = SomeDict(DictDef(self,
                                          s_ImpossibleValue,
                                          s_ImpossibleValue,
                                          is_r_dict=tp is r_dict))
                self.immutable_cache[key] = result
                if tp is r_dict:
                    s_eqfn = self.immutablevalue(x.key_eq)
                    s_hashfn = self.immutablevalue(x.key_hash)
                    result.dictdef.dictkey.update_rdict_annotations(
                        s_eqfn, s_hashfn)
                seen_elements = 0
                while seen_elements != len(x):
                    items = x.items()
                    for ek, ev in items:
                        result.dictdef.generalize_key(
                            self.immutablevalue(ek))
                        result.dictdef.generalize_value(
                            self.immutablevalue(ev))
                        result.dictdef.seen_prebuilt_key(ek)
                    seen_elements = len(items)
                    # if the dictionary grew during the iteration,
                    # start over again
                result.const_box = key
                return result
        else:
            dictdef = DictDef(self,
                              s_ImpossibleValue,
                              s_ImpossibleValue,
                              is_r_dict=tp is r_dict)
            if tp is r_dict:
                s_eqfn = self.immutablevalue(x.key_eq)
                s_hashfn = self.immutablevalue(x.key_hash)
                dictdef.dictkey.update_rdict_annotations(s_eqfn, s_hashfn)
            for ek, ev in x.iteritems():
                dictdef.generalize_key(self.immutablevalue(ek, False))
                dictdef.generalize_value(self.immutablevalue(ev, False))
                dictdef.seen_prebuilt_key(ek)
            result = SomeDict(dictdef)
    elif tp is weakref.ReferenceType:
        x1 = x()
        if x1 is None:
            result = SomeWeakRef(None) # dead weakref
        else:
            s1 = self.immutablevalue(x1)
            assert isinstance(s1, SomeInstance)
            result = SomeWeakRef(s1.classdef)
    elif ishashable(x) and x in BUILTIN_ANALYZERS:
        _module = getattr(x, "__module__", "unknown")
        result = SomeBuiltin(BUILTIN_ANALYZERS[x],
                             methodname="%s.%s" % (_module, x.__name__))
    elif extregistry.is_registered(x, self.policy):
        entry = extregistry.lookup(x, self.policy)
        result = entry.compute_annotation_bk(self)
    elif isinstance(x, lltype._ptr):
        result = SomePtr(lltype.typeOf(x))
    elif isinstance(x, llmemory.fakeaddress):
        result = SomeAddress()
    elif isinstance(x, ootype._static_meth):
        result = SomeOOStaticMeth(ootype.typeOf(x))
    elif isinstance(x, ootype._class):
        result = SomeOOClass(x._INSTANCE)   # NB. can be None
    elif isinstance(x, ootype.instance_impl): # XXX
        result = SomeOOInstance(ootype.typeOf(x))
    elif isinstance(x, (ootype._record, ootype._string)):
        result = SomeOOInstance(ootype.typeOf(x))
    elif isinstance(x, (ootype._object)):
        result = SomeOOObject()
    elif callable(x):
        if hasattr(x, 'im_self') and hasattr(x, 'im_func'):
            # on top of PyPy, for cases like 'l.append' where 'l' is a
            # global constant list, the find_method() returns non-None
            s_self = self.immutablevalue(x.im_self, need_const)
            result = s_self.find_method(x.im_func.__name__)
        elif hasattr(x, '__self__') and x.__self__ is not None:
            # for cases like 'l.append' where 'l' is a global constant list
            s_self = self.immutablevalue(x.__self__, need_const)
            result = s_self.find_method(x.__name__)
            if result is None:
                result = SomeObject()
        else:
            result = None
        if result is None:
            if (self.annotator.policy.allow_someobjects
                and getattr(x, '__module__', None) == '__builtin__'
                # XXX note that the print support functions are __builtin__
                and tp not in (types.FunctionType, types.MethodType)):
                result = SomeObject()
                result.knowntype = tp # at least for types this needs to be correct
            else:
                result = SomePBC([self.getdesc(x)])
    elif hasattr(x, '_freeze_') and x._freeze_():
        # user-defined classes can define a method _freeze_(), which
        # is called when a prebuilt instance is found.  If the method
        # returns True, the instance is considered immutable and becomes
        # a SomePBC().  Otherwise it's just SomeInstance().
        result = SomePBC([self.getdesc(x)])
    elif hasattr(x, '__class__') \
         and x.__class__.__module__ != '__builtin__':
        self.see_mutable(x)
        result = SomeInstance(self.getuniqueclassdef(x.__class__))
    elif x is None:
        return s_None
    else:
        result = SomeObject()
    if need_const:
        result.const = x
    return result
def detect_list_comprehension(graph):
    """Look for the pattern:            Replace it with marker operations:

                                            v0 = newlist()
        v2 = newlist()                      v1 = hint(v0, iterable, {'maxlength'})
        loop start                          loop start
        ...                                 ...
        exactly one append per loop         v1.append(..)
        and nothing else done with v2
        ...                                 ...
        loop end                            v2 = hint(v1, {'fence'})
    """
    # NB. this assumes RPythonicity: we can only iterate over something
    # that has a len(), and this len() cannot change as long as we are
    # using the iterator.
    if simplify_disabled(graph):
        return
    from pypy.translator.backendopt.ssa import DataFlowFamilyBuilder
    builder = DataFlowFamilyBuilder(graph)
    variable_families = builder.get_variable_families()
    c_append = Constant('append')
    newlist_v = {}          # rep(list var) -> block that created it
    iter_v = {}             # rep(iter var) -> block that created it
    append_v = []           # (vlist, vmeth, block) candidate appends
    loopnextblocks = []
    # collect relevant operations based on the family of their result
    for block in graph.iterblocks():
        if (len(block.operations) == 1 and
                block.operations[0].opname == 'next' and
                block.exitswitch == c_last_exception and
                len(block.exits) >= 2):
            cases = [link.exitcase for link in block.exits]
            if None in cases and StopIteration in cases:
                # it's a straightforward loop start block
                loopnextblocks.append((block, block.operations[0].args[0]))
                continue
        for op in block.operations:
            if op.opname == 'newlist' and not op.args:
                vlist = variable_families.find_rep(op.result)
                newlist_v[vlist] = block
            if op.opname == 'iter':
                viter = variable_families.find_rep(op.result)
                iter_v[viter] = block
    loops = []
    for block, viter in loopnextblocks:
        viterfamily = variable_families.find_rep(viter)
        if viterfamily in iter_v:
            # we have a next(viter) operation where viter comes from a
            # single known iter() operation.  Check that the iter()
            # operation is in the block just before.
            iterblock = iter_v[viterfamily]
            if (len(iterblock.exits) == 1 and
                    iterblock.exitswitch is None and
                    iterblock.exits[0].target is block):
                # yes - simple case.
                loops.append((block, iterblock, viterfamily))
    if not newlist_v or not loops:
        return
    # XXX works with Python >= 2.4 only: find calls to append encoded as
    # getattr/simple_call pairs, as produced by the LIST_APPEND bytecode.
    for block in graph.iterblocks():
        for i in range(len(block.operations) - 1):
            op = block.operations[i]
            if op.opname == 'getattr' and op.args[1] == c_append:
                vlist = variable_families.find_rep(op.args[0])
                if vlist in newlist_v:
                    op2 = block.operations[i + 1]
                    if (op2.opname == 'simple_call' and
                            len(op2.args) == 2 and
                            op2.args[0] is op.result):
                        append_v.append((op.args[0], op.result, block))
    if not append_v:
        return
    detector = ListComprehensionDetector(graph, loops, newlist_v,
                                         variable_families)
    graphmutated = False
    for location in append_v:
        if graphmutated:
            # new variables introduced, must restart the whole process
            return detect_list_comprehension(graph)
        try:
            detector.run(*location)
        except DetectorFailed:
            pass
        else:
            graphmutated = True
def flowin_op(self, op, vars, newvarsmap):
    """Rewrite one operation on a removed-malloc structure in terms of
    the flattened variables held in 'newvarsmap'.  'vars' is the set of
    variables known to alias the removed structure."""
    if op.opname in ("getfield", "getarrayitem"):
        S = op.args[0].concretetype.TO
        fldname = op.args[1].value
        key = self.key_for_field_access(S, fldname)
        if key in self.accessed_substructs:
            # field lives in its own 'wrapper' GcStruct: read its 'data'
            c_name = Constant('data', lltype.Void)
            newop = SpaceOperation("getfield",
                                   [newvarsmap[key], c_name],
                                   op.result)
        else:
            newop = SpaceOperation("same_as",
                                   [newvarsmap[key]],
                                   op.result)
        self.newops.append(newop)
        self.last_removed_access = len(self.newops)
    elif op.opname in ("setfield", "setarrayitem"):
        S = op.args[0].concretetype.TO
        fldname = op.args[1].value
        key = self.key_for_field_access(S, fldname)
        assert key in newvarsmap
        if key in self.accessed_substructs:
            c_name = Constant('data', lltype.Void)
            newop = SpaceOperation("setfield",
                                   [newvarsmap[key], c_name, op.args[2]],
                                   op.result)
            self.newops.append(newop)
        else:
            # plain field: the store becomes a rebinding in the map
            newvarsmap[key] = op.args[2]
            self.last_removed_access = len(self.newops)
    elif op.opname in ("same_as", "cast_pointer"):
        assert op.result not in vars
        vars[op.result] = True
        # Consider the two pointers (input and result) as
        # equivalent.  We can, and indeed must, use the same
        # flattened list of variables for both, as a "setfield"
        # via one pointer must be reflected in the other.
    elif op.opname == 'keepalive':
        self.last_removed_access = len(self.newops)
    elif op.opname in ("getsubstruct", "getarraysubstruct",
                       "direct_fieldptr"):
        S = op.args[0].concretetype.TO
        fldname = op.args[1].value
        if op.opname == "getarraysubstruct":
            fldname = 'item%d' % fldname
        equiv = self.equivalent_substruct(S, fldname)
        if equiv:
            # exactly like a cast_pointer
            assert op.result not in vars
            vars[op.result] = True
        else:
            # do it with a getsubstruct on the independently
            # malloc'ed GcStruct
            if op.opname == "direct_fieldptr":
                opname = "direct_fieldptr"
            else:
                opname = "getsubstruct"
            v = newvarsmap[S, fldname]
            cname = Constant('data', lltype.Void)
            newop = SpaceOperation(opname,
                                   [v, cname],
                                   op.result)
            self.newops.append(newop)
    elif op.opname in ("ptr_iszero", "ptr_nonzero"):
        # we know the pointer is not NULL if it comes from
        # a successful malloc
        c = Constant(op.opname == "ptr_nonzero", lltype.Bool)
        newop = SpaceOperation('same_as', [c], op.result)
        self.newops.append(newop)
    else:
        raise AssertionError, op.opname
def make_closure(fullfuncname):
    # Build a helper function pointer that forwards its single argument
    # to the named method of self.state, and return it as a Constant of
    # type PTR_SET_PARAM_FUNCTYPE.  'state' is captured locally so the
    # closure does not hold onto 'self'.
    state = self.state
    def closure(i):
        getattr(state, fullfuncname)(i)
    funcptr = self.helper_func(PTR_SET_PARAM_FUNCTYPE, closure)
    return Constant(funcptr, PTR_SET_PARAM_FUNCTYPE)
def insert_exits(self, block):
    """Emit the jitcode instructions for the exits of 'block':
    fall-through, exception dispatch, boolean switch, or general
    (integer) switch."""
    if len(block.exits) == 1:
        # A single link, fall-through
        link = block.exits[0]
        assert link.exitcase in (None, False, True)
        # the cases False or True should not really occur, but can show
        # up in the manually hacked graphs for generators...
        self.make_link(link)
    #
    elif block.exitswitch is c_last_exception:
        # An exception block. See test_exc_exitswitch in test_flatten.py
        # for an example of what kind of code this makes.
        index = -1
        while True:
            lastopname = block.operations[index].opname
            if lastopname != '-live-':
                break
            index -= 1
        assert block.exits[0].exitcase is None # is this always True?
        #
        if not self._include_all_exc_links:
            if index == -1:
                # cannot raise: the last instruction is not
                # actually a '-live-'
                self.make_link(block.exits[0])
                return
        #
        self.emitline('catch_exception', TLabel(block.exits[0]))
        self.make_link(block.exits[0])
        self.emitline(Label(block.exits[0]))
        for link in block.exits[1:]:
            if (link.exitcase is Exception or
                (link.exitcase is OverflowError and
                 lastopname.startswith('int_') and
                 lastopname.endswith('_ovf'))):
                # this link captures all exceptions
                self.make_exception_link(link)
                break
            self.emitline('goto_if_exception_mismatch',
                          Constant(link.llexitcase,
                                   lltype.typeOf(link.llexitcase)),
                          TLabel(link))
            self.make_exception_link(link)
            self.emitline(Label(link))
        else:
            # no link captures all exceptions, so we have to put a reraise
            # for the other exceptions
            self.emitline("reraise")
            self.emitline("---")
    #
    elif len(block.exits) == 2 and (
            isinstance(block.exitswitch, tuple) or
            block.exitswitch.concretetype == lltype.Bool):
        # Two exit links with a boolean condition
        linkfalse, linktrue = block.exits
        if linkfalse.llexitcase == True:
            linkfalse, linktrue = linktrue, linkfalse
        opname = 'goto_if_not'
        livebefore = False
        if isinstance(block.exitswitch, tuple):
            # special case produced by jtransform.optimize_goto_if_not()
            opname = 'goto_if_not_' + block.exitswitch[0]
            opargs = block.exitswitch[1:]
            if opargs[-1] == '-live-before':
                livebefore = True
                opargs = opargs[:-1]
        else:
            assert block.exitswitch.concretetype == lltype.Bool
            opargs = [block.exitswitch]
        #
        lst = self.flatten_list(opargs) + [TLabel(linkfalse)]
        if livebefore:
            self.emitline('-live-')
        self.emitline(opname, *lst)
        if not livebefore:
            self.emitline('-live-', TLabel(linkfalse))
        # true path:
        self.make_link(linktrue)
        # false path:
        self.emitline(Label(linkfalse))
        self.make_link(linkfalse)
    #
    else:
        # A switch.
        #
        def emitdefaultpath():
            if block.exits[-1].exitcase == 'default':
                self.make_link(block.exits[-1])
            else:
                self.emitline("unreachable")
                self.emitline("---")
        #
        self.emitline('-live-')
        switches = [link for link in block.exits
                    if link.exitcase != 'default']
        switches.sort(key=lambda link: link.llexitcase)
        kind = getkind(block.exitswitch.concretetype)
        if len(switches) >= 5 and kind == 'int':
            # A large switch on an integer, implementable efficiently
            # with the help of a SwitchDictDescr
            from pypy.jit.codewriter.jitcode import SwitchDictDescr
            switchdict = SwitchDictDescr()
            switchdict._labels = []
            self.emitline('switch', self.getcolor(block.exitswitch),
                          switchdict)
            emitdefaultpath()
            #
            for switch in switches:
                key = lltype.cast_primitive(lltype.Signed,
                                            switch.llexitcase)
                switchdict._labels.append((key, TLabel(switch)))
                # emit code for that path
                self.emitline(Label(switch))
                self.make_link(switch)
            #
        else:
            # A switch with several possible answers, though not too
            # many of them -- a chain of int_eq comparisons is fine
            assert kind == 'int'    # XXX
            color = self.getcolor(block.exitswitch)
            self.emitline('int_guard_value', color)
            for switch in switches:
                # make the case described by 'switch'
                self.emitline('goto_if_not_int_eq',
                              color,
                              Constant(switch.llexitcase,
                                       block.exitswitch.concretetype),
                              TLabel(switch))
                # emit code for the "taken" path
                self.make_link(switch)
                # finally, emit the label for the "non-taken" path
                self.emitline(Label(switch))
            #
            emitdefaultpath()
def constant_func(self, name, inputtypes, rettype, graph, **kwds):
    """Wrap 'graph' as an ootype static method and return it as a
    Constant of the matching StaticMethod type."""
    METH_TYPE = ootype.StaticMethod(inputtypes, rettype)
    meth = ootype.static_meth(METH_TYPE, name, graph=graph, **kwds)
    return Constant(meth, METH_TYPE)
# XXX a bit ugly sticking if vinfo is not None: self.cpu.index_of_virtualizable = (vinfo.index_of_virtualizable - self.num_green_args) else: self.cpu.index_of_virtualizable = -1 # ____________________________________________________________ # Now mutate origportalgraph to end with a call to portal_runner_ptr # _, origblock, origindex = self.jit_merge_point_pos op = origblock.operations[origindex] assert op.opname == 'jit_marker' assert op.args[0].value == 'jit_merge_point' greens_v, reds_v = decode_hp_hint_args(op) vlist = [Constant(self.portal_runner_ptr, self.PTR_PORTAL_FUNCTYPE)] vlist += greens_v vlist += reds_v v_result = Variable() v_result.concretetype = PORTALFUNC.RESULT newop = SpaceOperation('direct_call', vlist, v_result) del origblock.operations[origindex:] origblock.operations.append(newop) origblock.exitswitch = None origblock.recloseblock(Link([v_result], origportalgraph.returnblock)) checkgraph(origportalgraph) def add_finish(self): def finish(): if self.metainterp_sd.profiler.initialized: self.metainterp_sd.profiler.finish()
def constant_value(llvalue):
    """Wrap a low-level value in a Constant typed with its own lltype."""
    TYPE = lltype.typeOf(llvalue)
    return Constant(llvalue, TYPE)
def constant_func(self, name, inputtypes, rettype, graph, **kwds):
    """Wrap 'graph' as a low-level function and return a Constant
    holding a pointer to it."""
    FUNC = lltype.FuncType(inputtypes, rettype)
    funcptr = lltype.functionptr(FUNC, name, graph=graph, **kwds)
    return Constant(funcptr, lltype.Ptr(FUNC))
def error_constant(T):
    """Return the canonical error value of low-level type T, wrapped in
    a Constant of that type."""
    errvalue = error_value(T)
    return Constant(errvalue, T)
def rtype_is_true(self, hop):
    # Truth-testing a value of this repr is constant-folded to False
    # (presumably this repr only ever holds an empty/None-like value —
    # confirm against the enclosing Repr class).
    return Constant(False, Bool)
def handle_call_with_close_stack(self, hop):
    """Rewrite a direct call so that it goes through pypy_asm_stackwalk:
    the call's arguments are spilled into prebuilt raw containers, a
    copy of the callee graph is made that starts by reloading them, and
    the copy is invoked via an indirect_call through the helper."""
    fnptr = hop.spaceop.args[0].value
    # We cannot easily pass variable amount of arguments of the call
    # across the call to the pypy_asm_stackwalk helper.  So we store
    # them away and restore them.  We need to make a new graph
    # that starts with restoring the arguments.
    if self._asmgcc_save_restore_arguments is None:
        self._asmgcc_save_restore_arguments = {}
    sradict = self._asmgcc_save_restore_arguments
    sra = []     # list of pointers to raw-malloced containers for args
    seen = {}
    FUNC1 = lltype.typeOf(fnptr).TO
    for TYPE in FUNC1.ARGS:
        if isinstance(TYPE, lltype.Ptr):
            # pointers travel as raw addresses through the containers
            TYPE = llmemory.Address
        num = seen.get(TYPE, 0)
        seen[TYPE] = num + 1
        key = (TYPE, num)
        if key not in sradict:
            CONTAINER = lltype.FixedSizeArray(TYPE, 1)
            p = lltype.malloc(CONTAINER, flavor='raw', zero=True)
            sradict[key] = Constant(p, lltype.Ptr(CONTAINER))
        sra.append(sradict[key])
    #
    # store the value of the arguments
    livevars = self.push_roots(hop)
    c_item0 = Constant('item0', lltype.Void)
    for v_arg, c_p in zip(hop.spaceop.args[1:], sra):
        if isinstance(v_arg.concretetype, lltype.Ptr):
            v_arg = hop.genop("cast_ptr_to_adr", [v_arg],
                              resulttype=llmemory.Address)
        hop.genop("bare_setfield", [c_p, c_item0, v_arg])
    #
    # make a copy of the graph that will reload the values
    graph2 = copygraph(fnptr._obj.graph)
    block2 = graph2.startblock
    block2.isstartblock = False
    block1 = Block([])
    reloadedvars = []
    for v, c_p in zip(block2.inputargs, sra):
        v = copyvar(None, v)
        if isinstance(v.concretetype, lltype.Ptr):
            # reload as an address, then cast back to the pointer type
            w = Variable('tmp')
            w.concretetype = llmemory.Address
        else:
            w = v
        block1.operations.append(SpaceOperation('getfield',
                                                [c_p, c_item0], w))
        if w is not v:
            block1.operations.append(SpaceOperation('cast_adr_to_ptr',
                                                    [w], v))
        reloadedvars.append(v)
    block1.closeblock(Link(reloadedvars, block2))
    block1.isstartblock = True
    graph2.startblock = block1
    FUNC2 = lltype.FuncType([], FUNC1.RESULT)
    fnptr2 = lltype.functionptr(FUNC2,
                                fnptr._obj._name + '_reload',
                                graph=graph2)
    c_fnptr2 = Constant(fnptr2, lltype.Ptr(FUNC2))
    HELPERFUNC = lltype.FuncType([lltype.Ptr(FUNC2)], FUNC1.RESULT)
    #
    v_asm_stackwalk = hop.genop("cast_pointer", [c_asm_stackwalk],
                                resulttype=lltype.Ptr(HELPERFUNC))
    hop.genop("indirect_call",
              [v_asm_stackwalk, c_fnptr2, Constant(None, lltype.Void)],
              resultvar=hop.spaceop.result)
    self.pop_roots(hop, livevars)
def run(self, vlist, vmeth, appendblock):
    """Check that (vlist, vmeth, appendblock) is a genuine
    list-comprehension-style loop; if so patch the graph with
    'hint' marker operations ({'maxlength'} before the loop and
    {'fence'} on every loop exit).  Raises DetectorFailed otherwise."""
    # first check that the 'append' method object doesn't escape
    for op in appendblock.operations:
        if op.opname == 'simple_call' and op.args[0] is vmeth:
            pass
        elif vmeth in op.args:
            raise DetectorFailed      # used in another operation
    for link in appendblock.exits:
        if vmeth in link.args:
            raise DetectorFailed      # escapes to a next block
    self.vmeth = vmeth
    self.vlistfamily = self.variable_families.find_rep(vlist)
    newlistblock = self.newlist_v[self.vlistfamily]
    self.vlistcone = {newlistblock: True}
    self.escapes = {self.graph.returnblock: True,
                    self.graph.exceptblock: True}
    # in which loop are we?
    for loopnextblock, iterblock, viterfamily in self.loops:
        # check that the vlist is alive across the loop head block,
        # which ensures that we have a whole loop where the vlist
        # doesn't change
        if not self.vlist_alive(loopnextblock):
            continue      # no - unrelated loop
        # check that we cannot go from 'newlist' to 'append' without
        # going through the 'iter' of our loop (and the following 'next').
        # This ensures that the lifetime of vlist is cleanly divided in
        # "before" and "after" the loop...
        if self.reachable(newlistblock, appendblock, avoid=iterblock):
            continue
        # ... with the possible exception of links from the loop
        # body jumping back to the loop prologue, between 'newlist' and
        # 'iter', which we must forbid too:
        if self.reachable(loopnextblock, iterblock, avoid=newlistblock):
            continue
        # there must not be a larger number of calls to 'append' than
        # the number of elements that 'next' returns, so we must ensure
        # that we cannot go from 'append' to 'append' again without
        # passing 'next'...
        if self.reachable(appendblock, appendblock, avoid=loopnextblock):
            continue
        # ... and when the iterator is exhausted, we should no longer
        # reach 'append' at all.
        stopblocks = [link.target for link in loopnextblock.exits
                      if link.exitcase is not None]
        accepted = True
        for stopblock1 in stopblocks:
            if self.reachable(stopblock1, appendblock, avoid=newlistblock):
                accepted = False
        if not accepted:
            continue
        # now explicitly find the "loop body" blocks: they are the ones
        # from which we can reach 'append' without going through 'iter'.
        # (XXX inefficient)
        loopbody = {}
        for block in self.graph.iterblocks():
            if (self.vlist_alive(block) and
                    self.reachable(block, appendblock, iterblock)):
                loopbody[block] = True
        # if the 'append' is actually after a 'break' or on a path that
        # can only end up in a 'break', then it won't be recorded as part
        # of the loop body at all.  This is a strange case where we have
        # basically proved that the list will be of length 1...  too
        # uncommon to worry about, I suspect
        if appendblock not in loopbody:
            continue
        # This candidate loop is acceptable if the list is not escaping
        # too early, i.e. in the loop header or in the loop body.
        loopheader = list(self.enum_blocks_with_vlist_from(
            newlistblock, avoid=loopnextblock))
        assert loopheader[0] is newlistblock
        escapes = False
        for block in loopheader + loopbody.keys():
            assert self.vlist_alive(block)
            if self.vlist_escapes(block):
                escapes = True
                break
        if not escapes:
            break      # accept this loop!
    else:
        raise DetectorFailed      # no suitable loop
    # Found a suitable loop, let's patch the graph:
    assert iterblock not in loopbody
    assert loopnextblock in loopbody
    for stopblock1 in stopblocks:
        assert stopblock1 not in loopbody
    # at StopIteration, the new list is exactly of the same length as
    # the one we iterate over if it's not possible to skip the appendblock
    # in the body:
    exactlength = not self.reachable_within(loopnextblock, loopnextblock,
                                            avoid=appendblock,
                                            stay_within=loopbody)
    # - add a hint(vlist, iterable, {'maxlength'}) in the iterblock,
    #   where we can compute the known maximum length
    link = iterblock.exits[0]
    vlist = self.contains_vlist(link.args)
    assert vlist
    for op in iterblock.operations:
        res = self.variable_families.find_rep(op.result)
        if res is viterfamily:
            break
    else:
        raise AssertionError("lost 'iter' operation")
    vlength = Variable('maxlength')
    vlist2 = Variable(vlist)
    chint = Constant({'maxlength': True})
    iterblock.operations += [
        SpaceOperation('hint', [vlist, op.args[0], chint], vlist2)]
    link.args = list(link.args)
    for i in range(len(link.args)):
        if link.args[i] is vlist:
            link.args[i] = vlist2
    # - wherever the list exits the loop body, add a 'hint({fence})'
    from pypy.translator.unsimplify import insert_empty_block
    for block in loopbody:
        for link in block.exits:
            if link.target not in loopbody:
                vlist = self.contains_vlist(link.args)
                if vlist is None:
                    continue  # list not passed along this link anyway
                hints = {'fence': True}
                if (exactlength and block is loopnextblock and
                        link.target in stopblocks):
                    hints['exactlength'] = True
                chints = Constant(hints)
                newblock = insert_empty_block(None, link)
                index = link.args.index(vlist)
                vlist2 = newblock.inputargs[index]
                vlist3 = Variable(vlist2)
                newblock.inputargs[index] = vlist3
                newblock.operations.append(
                    SpaceOperation('hint', [vlist3, chints], vlist2))
def specialize_call(self, hop):
    """Fold the call away at rtyping time: convert the (constant) first
    argument with the result repr and return it as a typed Constant."""
    hop.exception_cannot_occur()
    result_repr = hop.r_result
    const = Constant(result_repr.convert_const(hop.args_v[0].value))
    const.concretetype = result_repr.lowleveltype
    return const
def transform_ovfcheck(graph):
    """The special function calls ovfcheck and ovfcheck_lshift need to
    be translated into primitive operations.  ovfcheck is called
    directly after an operation that should be turned into an
    overflow-checked version.  It is considered a syntax error if the
    resulting <op>-ovf is not defined in baseobjspace.py.
    ovfcheck_lshift is special because there is no preceding operation;
    instead, it is replaced by an OP_LSHIFT_OVF operation.

    The exception handling of the original operation is completely
    ignored.  Only exception handlers for the ovfcheck function call
    are taken into account.  This gives us the best possible control
    over situations where we want exact control over certain
    operations.  Example:

        try:
            array1[idx-1] = ovfcheck(array1[idx-1] + array2[idx+1])
        except OverflowError:
            ...

    assuming two integer arrays, we are only checking the element
    addition for overflows, but the indexing is not checked.
    """
    # General assumptions:
    # - empty blocks have already been eliminated;
    # - ovfcheck can appear in the same block as its operation (the
    #   case where no exception handling was provided), or we have a
    #   block ending in the operation followed by a block with a
    #   single ovfcheck call (the "paired" case).
    if simplify_disabled(graph): return
    from pypy.rlib.rarithmetic import ovfcheck, ovfcheck_lshift
    from pypy.objspace.flow.objspace import op_appendices
    from pypy.objspace.flow.objspace import implicit_exceptions
    covf = Constant(ovfcheck)
    covfls = Constant(ovfcheck_lshift)
    appendix = op_appendices[OverflowError]
    renaming = {}            # maps a removed ovfcheck's result to its argument
    seen_ovfblocks = {}      # ovfcheck blocks already consumed (syntax check)

    # collect all blocks of the graph
    blocks = {}
    def visit(block):
        if isinstance(block, Block):
            blocks[block] = True
    traverse(visit, graph)

    def is_ovfcheck(bl):
        # does this block end in a call to ovfcheck?
        ops = bl.operations
        return (ops and ops[-1].opname == "simple_call"
                and ops[-1].args[0] == covf)

    def is_ovfshiftcheck(bl):
        # does this block end in a call to ovfcheck_lshift?
        ops = bl.operations
        return (ops and ops[-1].opname == "simple_call"
                and ops[-1].args[0] == covfls)

    def is_single(bl):
        # ovfcheck sits in the same block as the checked operation
        return is_ovfcheck(bl) and len(bl.operations) > 1

    def is_paired(bl):
        # bl ends in the checked operation and its first exit leads to
        # a block containing nothing but the ovfcheck call
        if bl.exits:
            ovfblock = bl.exits[0].target
        return (bl.exits and is_ovfcheck(ovfblock) and
                len(ovfblock.operations) == 1)

    def rename(v):
        return renaming.get(v, v)

    def remove_last_op(bl):
        # drop the trailing ovfcheck call; alias its result to the
        # checked operation's result and propagate along the exits
        delop = bl.operations.pop()
        assert delop.opname == "simple_call"
        assert len(delop.args) == 2
        renaming[delop.result] = rename(delop.args[1])
        for exit in bl.exits:
            exit.args = [rename(a) for a in exit.args]

    def check_syntax(ovfblock, block=None):
        """check whether ovfblock is reachable more than once or if
        they cheated about the argument"""
        if block:
            # paired case: carry the renaming across the link first
            link = block.exits[0]
            for lprev, ltarg in zip(link.args, ovfblock.inputargs):
                renaming[ltarg] = rename(lprev)
            arg = ovfblock.operations[0].args[-1]
            res = block.operations[-1].result
            opname = block.operations[-1].opname
        else:
            # single case: operation and ovfcheck in the same block
            arg = ovfblock.operations[-1].args[-1]
            res = ovfblock.operations[-2].result
            opname = ovfblock.operations[-2].opname
        if rename(arg) != rename(res) or ovfblock in seen_ovfblocks:
            raise SyntaxError("ovfcheck in %s: The checked operation %s"
                              " is misplaced" % (graph.name, opname))
        exlis = implicit_exceptions.get("%s_%s" % (opname, appendix), [])
        if OverflowError not in exlis:
            raise SyntaxError("ovfcheck in %s: Operation %s has no"
                              " overflow variant" % (graph.name, opname))

    blocks_to_join = False
    for block in blocks:
        if is_ovfshiftcheck(block):
            # ovfcheck_lshift:
            # simply rewrite the operation in place
            op = block.operations[-1]
            op.opname = "lshift"        # augmented later
            op.args = op.args[1:]
        elif is_single(block):
            # remove the call to ovfcheck and keep the exceptions
            check_syntax(block)
            remove_last_op(block)
            seen_ovfblocks[block] = True
        elif is_paired(block):
            # remove the block's exception links
            link = block.exits[0]
            ovfblock = link.target
            check_syntax(ovfblock, block)
            block.recloseblock(link)
            block.exitswitch = None
            # remove the ovfcheck call from the None target
            remove_last_op(ovfblock)
            seen_ovfblocks[ovfblock] = True
            blocks_to_join = True
        else:
            continue
        # turn the checked operation into its -ovf variant
        op = block.operations[-1]
        op.opname = "%s_%s" % (op.opname, appendix)
    if blocks_to_join:
        join_blocks(graph)
def only_raise_AttributeError(link):
    # Record every link into the exception block, insisting that the
    # raised exception class is exactly AttributeError.
    if not isinstance(link, Link):
        return
    if link.target is not x.exceptblock:
        return
    assert link.args[0] == Constant(AttributeError)
    found_AttributeError.append(link)
def recreate_malloc(self, c, v):
    """Rebuild a malloc operation for type-constant `c` storing into
    variable `v`, with the gc flavor flag attached."""
    flags = Constant({'flavor': 'gc'}, lltype.Void)
    return SpaceOperation(self.MALLOC_OP, [c, flags], v)
def constfunc(self, ll_function, args_s, s_result):
    """Wrap a (delayed) low-level function pointer as a typed Constant."""
    fnptr = self.delayedfunction(ll_function, args_s, s_result)
    return Constant(fnptr, lltype.typeOf(fnptr))
if len(args_w) > 3: w_frm = args_w[3] if not isinstance(w_loc, Constant): # import * in a function gives us the locals as Variable # we always forbid it as a SyntaxError raise SyntaxError, "RPython: import * is not allowed in functions" if space.do_imports_immediately: name, glob, loc, frm = (space.unwrap(w_name), space.unwrap(w_glob), space.unwrap(w_loc), space.unwrap(w_frm)) try: mod = __import__(name, glob, loc, frm) except ImportError, e: raise OperationError(space.w_ImportError, space.wrap(str(e))) return space.wrap(mod) # redirect it, but avoid exposing the globals w_glob = Constant({}) return space.do_operation('simple_call', Constant(__import__), w_name, w_glob, w_loc, w_frm) def sc_operator(space, fn, args): args_w, kwds_w = args.unpack() assert kwds_w == {}, "should not call %r with keyword arguments" % (fn, ) opname = OperationName[fn] if len(args_w) != Arity[opname]: if opname == 'pow' and len(args_w) == 2: args_w = args_w + [Constant(None)] elif opname == 'getattr' and len(args_w) == 3: return space.do_operation('simple_call', Constant(getattr), *args_w) else:
def graph2const(self, graph):
    """Return a typed Constant holding the (delayed) function pointer
    for the given flow graph."""
    fnptr = self.graph2delayed(graph)
    return Constant(fnptr, lltype.typeOf(fnptr))
from pypy.rlib import jit from pypy.objspace.flow.model import copygraph, SpaceOperation, Constant from pypy.objspace.flow.model import Variable, Block, Link, FunctionGraph from pypy.annotation import model as annmodel from pypy.rpython.lltypesystem import lltype, lloperation from pypy.rpython.ootypesystem import ootype from pypy.tool.algo.unionfind import UnionFind from pypy.translator.backendopt import graphanalyze from pypy.translator.unsimplify import copyvar TLS = tlsobject() log = py.log.Producer("hintannotate") py.log.setconsumer("hintannotate", ansi_log) TIMESHIFTMAP = {Constant(jit._we_are_jitted): Constant(1, lltype.Signed)} class GraphDesc(object): def __init__(self, bookkeeper, origgraph): self.bookkeeper = bookkeeper self.origgraph = origgraph self._cache = {} def specialize(self, input_args_hs, key=None, alt_name=None): # get the specialized graph -- for now, no specialization graph = self.cachedgraph(key, alt_name) # modify input_args_hs in-place to change their origin for i in range(len(input_args_hs)):
def __getitem__(self, key):
    """Build a (Signed, Signed) -> Signed function pointer named after
    key[0] and return it as a Constant together with its result type."""
    functype = lltype.FuncType([lltype.Signed, lltype.Signed],
                               lltype.Signed)
    funcptr = lltype.functionptr(functype, key[0])
    return Constant(funcptr, lltype.typeOf(funcptr)), lltype.Signed
ASM_CALLBACK_PTR = lltype.Ptr(lltype.FuncType([], lltype.Void)) # used internally by walk_stack_from() WALKFRAME = lltype.Struct('WALKFRAME', ('regs_stored_at', # address of where the registers have been saved lltype.FixedSizeArray(llmemory.Address, CALLEE_SAVED_REGS)), ('frame_address', llmemory.Address), ) pypy_asm_stackwalk = rffi.llexternal('pypy_asm_stackwalk', [ASM_CALLBACK_PTR], lltype.Signed, sandboxsafe=True, _nowrapper=True) c_asm_stackwalk = Constant(pypy_asm_stackwalk, lltype.typeOf(pypy_asm_stackwalk)) pypy_asm_gcroot = rffi.llexternal('pypy_asm_gcroot', [llmemory.Address], llmemory.Address, sandboxsafe=True, _nowrapper=True) c_asm_gcroot = Constant(pypy_asm_gcroot, lltype.typeOf(pypy_asm_gcroot)) QSORT_CALLBACK_PTR = lltype.Ptr(lltype.FuncType([llmemory.Address, llmemory.Address], rffi.INT)) qsort = rffi.llexternal('qsort', [llmemory.Address, rffi.SIZE_T, rffi.SIZE_T, QSORT_CALLBACK_PTR],
def immutablevalue(self, value):
    """Wrap a low-level value in a typed Constant and return its
    immutable-constant annotation."""
    const = Constant(value, lltype.typeOf(value))
    return self.immutableconstant(const)
def create_new_attribute(self, name, value):
    """Install `value` (wrapped as a Constant) under `name`, refusing
    to overwrite an existing class-level attribute."""
    classdict = self.classdict
    assert name not in classdict, "name clash: %r" % (name,)
    classdict[name] = Constant(value)
def assign(mangled_name, value):
    # Unwrap a staticmethod Constant to the bare underlying function
    # before converting and storing it in the vtable slot.
    if isinstance(value, Constant) and isinstance(value.value, staticmethod):
        value = Constant(value.value.__get__(42))
    setattr(vtable, mangled_name, r.convert_desc_or_const(value))