def test_flattenrec():
    """FlattenRecursion must support a 10000-deep recursive call chain
    without hitting the interpreter's recursion limit, visiting every
    depth value exactly once."""
    flattener = FlattenRecursion()
    visited = set()

    def descend(depth):
        if depth > 0:
            flattener(descend, depth - 1)
        visited.add(depth)

    descend(10000)
    assert visited == set(range(10001))
class InstanceRepr(Repr):
    """Repr for instances of a user-defined RPython class.

    Maps an annotator ClassDef to a low-level structure type: each class
    level contributes a struct whose first field 'super' embeds the parent
    class's struct, with the root level holding the '__class__' typeptr.
    'gcflavor' selects the struct/pointer flavor ('gc' vs raw) via the
    OBJECT_BY_FLAVOR / STRUCT_BY_FLAVOR tables.
    """

    def __init__(self, rtyper, classdef, gcflavor='gc'):
        # classdef is None for the root 'object' level; its struct type is
        # the prebuilt OBJECT for this flavor.  Otherwise, use a forward
        # reference, resolved later in _setup_repr() via become().
        self.rtyper = rtyper
        self.classdef = classdef
        if classdef is None:
            self.object_type = OBJECT_BY_FLAVOR[LLFLAVOR[gcflavor]]
        else:
            ForwardRef = lltype.FORWARDREF_BY_FLAVOR[LLFLAVOR[gcflavor]]
            self.object_type = ForwardRef()
        self.iprebuiltinstances = identity_dict()
        self.lowleveltype = Ptr(self.object_type)
        self.gcflavor = gcflavor

    def has_special_memory_pressure(self, tp):
        # True if 'tp' or any of its embedded 'super' structs declares the
        # 'special_memory_pressure' field.
        if 'special_memory_pressure' in tp._flds:
            return True
        if 'super' in tp._flds:
            return self.has_special_memory_pressure(tp._flds['super'])
        return False

    def _setup_repr(self, llfields=None, hints=None, adtmeths=None):
        # Build the low-level struct for this class level and resolve the
        # forward reference created in __init__.
        # NOTE: don't store mutable objects like the dicts below on 'self'
        # before they are fully built, to avoid strange bugs in case
        # of recursion where other code would uses these
        # partially-initialized dicts.
        if self.classdef is None:
            self.immutable_field_set = set()
        self.rclass = getclassrepr(self.rtyper, self.classdef)
        fields = {}
        allinstancefields = {}
        if self.classdef is None:
            # root level: only the vtable pointer
            fields['__class__'] = 'typeptr', get_type_repr(self.rtyper)
        else:
            # instance attributes
            attrs = self.classdef.attrs.items()
            attrs.sort()
            myllfields = []
            for name, attrdef in attrs:
                if not attrdef.readonly:
                    r = self.rtyper.getrepr(attrdef.s_value)
                    mangled_name = 'inst_' + name
                    fields[name] = mangled_name, r
                    myllfields.append((mangled_name, r.lowleveltype))

            # sort to reduce struct padding (largest-ish first, see
            # attr_reverse_size)
            myllfields.sort(key=attr_reverse_size)
            if llfields is None:
                llfields = myllfields
            else:
                llfields = llfields + myllfields

            # recursively set up the parent level first
            self.rbase = getinstancerepr(self.rtyper, self.classdef.basedef,
                                         self.gcflavor)
            self.rbase.setup()

            MkStruct = lltype.STRUCT_BY_FLAVOR[LLFLAVOR[self.gcflavor]]
            if adtmeths is None:
                adtmeths = {}
            if hints is None:
                hints = {}
            hints = self._check_for_immutable_hints(hints)
            if self.classdef.classdesc.get_param('_rpython_never_allocate_'):
                hints['never_allocate'] = True

            kwds = {}
            if self.gcflavor == 'gc':
                kwds['rtti'] = True

            # quasi-immutable fields get an extra 'mutate_<name>' slot
            for name, attrdef in attrs:
                if not attrdef.readonly and self.is_quasi_immutable(name):
                    llfields.append(('mutate_' + name, OBJECTPTR))

            bookkeeper = self.rtyper.annotator.bookkeeper
            if self.classdef in bookkeeper.memory_pressure_types:
                # we don't need to add it if it's already there for some of
                # the parent type
                if not self.has_special_memory_pressure(
                        self.rbase.object_type):
                    llfields.append(('special_memory_pressure',
                                     lltype.Signed))
                    fields['special_memory_pressure'] = (
                        'special_memory_pressure',
                        self.rtyper.getrepr(lltype_to_annotation(
                            lltype.Signed)))

            object_type = MkStruct(self.classdef.name,
                                   ('super', self.rbase.object_type),
                                   hints=hints,
                                   adtmeths=adtmeths,
                                   *llfields,
                                   **kwds)
            self.object_type.become(object_type)
            allinstancefields.update(self.rbase.allinstancefields)
        allinstancefields.update(fields)
        self.fields = fields
        self.allinstancefields = allinstancefields

    def _check_for_immutable_hints(self, hints):
        # Interpret the class's '_immutable_' declaration and translate it
        # into struct hints; returns a fresh hints dict.
        hints = hints.copy()
        classdesc = self.classdef.classdesc
        immut = classdesc.get_param('_immutable_', inherit=False)
        if immut is None:
            if classdesc.get_param('_immutable_', inherit=True):
                raise ImmutableConflictError(
                    "class %r inherits from its parent _immutable_=True, "
                    "so it should also declare _immutable_=True" % (
                        self.classdef, ))
        elif immut is not True:
            raise TyperError(
                "class %r: _immutable_ = something else than True" % (
                    self.classdef, ))
        else:
            hints['immutable'] = True
        self.immutable_field_set = classdesc.immutable_fields
        if (classdesc.immutable_fields or
                'immutable_fields' in self.rbase.object_type._hints):
            # accessor is filled in later by _setup_immutable_field_list()
            accessor = FieldListAccessor()
            hints['immutable_fields'] = accessor
        return hints

    def __repr__(self):
        if self.classdef is None:
            clsname = 'object'
        else:
            clsname = self.classdef.name
        return '<InstanceRepr for %s>' % (clsname, )

    def compact_repr(self):
        if self.classdef is None:
            clsname = 'object'
        else:
            clsname = self.classdef.name
        return 'InstanceR %s' % (clsname, )

    def _setup_repr_final(self):
        # Final setup pass: immutability checks, __del__ handling and
        # runtime-type-info attachment (gc flavor only).
        self._setup_immutable_field_list()
        self._check_for_immutable_conflicts()
        if self.gcflavor == 'gc':
            if (self.classdef is not None and
                    self.classdef.classdesc.lookup('__del__') is not None):
                # the class has an RPython-level __del__: turn its (unique)
                # graph into a destructor function pointer
                s_func = self.classdef.classdesc.s_read_attribute('__del__')
                source_desc = self.classdef.classdesc.lookup('__del__')
                source_classdef = source_desc.getclassdef(None)
                source_repr = getinstancerepr(self.rtyper, source_classdef)
                assert len(s_func.descriptions) == 1
                funcdesc, = s_func.descriptions
                graph = funcdesc.getuniquegraph()
                self.check_graph_of_del_does_not_call_too_much(self.rtyper,
                                                               graph)
                FUNCTYPE = FuncType([Ptr(source_repr.object_type)], Void)
                destrptr = functionptr(FUNCTYPE, graph.name,
                                       graph=graph,
                                       _callable=graph.func)
            else:
                destrptr = None
            self.rtyper.call_all_setups()  # compute ForwardReferences now
            args_s = [SomePtr(Ptr(OBJECT))]
            graph = self.rtyper.annotate_helper(ll_runtime_type_info, args_s)
            s = self.rtyper.annotation(graph.getreturnvar())
            if (not isinstance(s, SomePtr) or
                    s.ll_ptrtype != Ptr(RuntimeTypeInfo)):
                raise TyperError("runtime type info function returns %r, "
                                 "expected Ptr(RuntimeTypeInfo)" % (s))
            funcptr = self.rtyper.getcallable(graph)
            attachRuntimeTypeInfo(self.object_type, funcptr, destrptr)
            vtable = self.rclass.getvtable()
            self.rtyper.set_type_for_typeptr(vtable, self.lowleveltype.TO)

    def _setup_immutable_field_list(self):
        # Fill in the FieldListAccessor (created in _check_for_immutable_hints)
        # with the union of the immutable field sets of this class and all
        # its parents; idempotent thanks to the hasattr() check.
        hints = self.object_type._hints
        if "immutable_fields" in hints:
            accessor = hints["immutable_fields"]
            if not hasattr(accessor, 'fields'):
                immutable_fields = set()
                rbase = self
                while rbase.classdef is not None:
                    immutable_fields.update(rbase.immutable_field_set)
                    rbase = rbase.rbase
                self._parse_field_list(immutable_fields, accessor, hints)

    def _parse_field_list(self, fields, accessor, hints):
        # Parse the '_immutable_fields_' name suffixes ('?', '[*]', '?[*]')
        # into IR_* immutability ranks keyed by mangled field name.
        ranking = {}
        for fullname in fields:
            name = fullname
            quasi = False
            if name.endswith('?[*]'):   # a quasi-immutable field pointing to
                name = name[:-4]        # an immutable array
                rank = IR_QUASIIMMUTABLE_ARRAY
                quasi = True
            elif name.endswith('[*]'):  # for virtualizables' lists
                name = name[:-3]
                rank = IR_IMMUTABLE_ARRAY
            elif name.endswith('?'):    # a quasi-immutable field
                name = name[:-1]
                rank = IR_QUASIIMMUTABLE
                quasi = True
            else:                       # a regular immutable/green field
                rank = IR_IMMUTABLE
            try:
                mangled_name, r = self._get_field(name)
            except KeyError:
                # field declared immutable but not actually present at this
                # level: ignore it here
                continue
            if quasi and hints.get("immutable"):
                raise TyperError(
                    "can't have _immutable_ = True and a quasi-immutable field "
                    "%s in class %s" % (name, self.classdef))
            if rank in (IR_QUASIIMMUTABLE_ARRAY, IR_IMMUTABLE_ARRAY):
                from rpython.rtyper.rlist import AbstractBaseListRepr
                if not isinstance(r, AbstractBaseListRepr):
                    raise TyperError(
                        "_immutable_fields_ = [%r] in %r, but %r is not a list "
                        "(got %r)" % (fullname, self, name, r))
            ranking[mangled_name] = rank
        accessor.initialize(self.object_type, ranking)
        return ranking

    def _check_for_immutable_conflicts(self):
        # check for conflicts, i.e. a field that is defined normally as
        # mutable in some parent class but that is now declared immutable
        is_self_immutable = "immutable" in self.object_type._hints
        base = self
        while base.classdef is not None:
            base = base.rbase
            for fieldname in base.fields:
                if fieldname == 'special_memory_pressure':
                    continue
                try:
                    mangled, r = base._get_field(fieldname)
                except KeyError:
                    continue
                if r.lowleveltype == Void:
                    continue
                base._setup_immutable_field_list()
                if base.object_type._immutable_field(mangled):
                    continue
                # 'fieldname' is a mutable, non-Void field in the parent
                if is_self_immutable:
                    raise ImmutableConflictError(
                        "class %r has _immutable_=True, but parent class %r "
                        "defines (at least) the mutable field %r" % (
                            self, base, fieldname))
                if (fieldname in self.immutable_field_set or
                        (fieldname + '?') in self.immutable_field_set):
                    raise ImmutableConflictError(
                        "field %r is defined mutable in class %r, but "
                        "listed in _immutable_fields_ in subclass %r" % (
                            fieldname, base, self))

    def hook_access_field(self, vinst, cname, llops, flags):
        pass        # for virtualizables; see rvirtualizable.py

    def hook_setfield(self, vinst, fieldname, llops):
        # writing a quasi-immutable field must notify the JIT
        if self.is_quasi_immutable(fieldname):
            c_fieldname = inputconst(Void, 'mutate_' + fieldname)
            llops.genop('jit_force_quasi_immutable', [vinst, c_fieldname])

    def is_quasi_immutable(self, fieldname):
        # True if 'fieldname' is listed with a '?' or '?[*]' suffix in the
        # _immutable_fields_ of this class or any parent.
        search1 = fieldname + '?'
        search2 = fieldname + '?[*]'
        rbase = self
        while rbase.classdef is not None:
            if (search1 in rbase.immutable_field_set or
                    search2 in rbase.immutable_field_set):
                return True
            rbase = rbase.rbase
        return False

    def new_instance(self, llops, classcallhop=None, nonmovable=False):
        """Build a new instance, without calling __init__."""
        flavor = self.gcflavor
        flags = {'flavor': flavor}
        if nonmovable:
            flags['nonmovable'] = True
        ctype = inputconst(Void, self.object_type)
        cflags = inputconst(Void, flags)
        vlist = [ctype, cflags]
        vptr = llops.genop('malloc', vlist,
                           resulttype=Ptr(self.object_type))
        ctypeptr = inputconst(CLASSTYPE, self.rclass.getvtable())
        self.setfield(vptr, '__class__', ctypeptr, llops)
        if self.has_special_memory_pressure(self.object_type):
            self.setfield(vptr, 'special_memory_pressure',
                          inputconst(lltype.Signed, 0), llops)
        # initialize instance attributes from their defaults from the class
        if self.classdef is not None:
            flds = self.allinstancefields.keys()
            flds.sort()
            for fldname in flds:
                if fldname == '__class__':
                    continue
                mangled_name, r = self.allinstancefields[fldname]
                if r.lowleveltype is Void:
                    continue
                value = self.classdef.classdesc.read_attribute(fldname, None)
                if value is not None:
                    ll_value = r.convert_desc_or_const(value)
                    # don't write NULL GC pointers: we know that the malloc
                    # done above initialized at least the GC Ptr fields to
                    # NULL already, and that's true for all our GCs
                    if (isinstance(r.lowleveltype, Ptr) and
                            r.lowleveltype.TO._gckind == 'gc' and
                            not ll_value):
                        continue
                    cvalue = inputconst(r.lowleveltype, ll_value)
                    self.setfield(vptr, fldname, cvalue, llops,
                                  flags={'access_directly': True})
        return vptr

    def convert_const(self, value):
        # Convert a prebuilt Python instance into its low-level equivalent,
        # delegating to the exact subclass's repr when needed.
        if value is None:
            return self.null_instance()
        if isinstance(value, types.MethodType):
            value = value.im_self   # bound method -> instance
        bk = self.rtyper.annotator.bookkeeper
        try:
            classdef = bk.getuniqueclassdef(value.__class__)
        except KeyError:
            raise TyperError("no classdef: %r" % (value.__class__, ))
        if classdef != self.classdef:
            # if the class does not match exactly, check that 'value' is an
            # instance of a subclass and delegate to that InstanceRepr
            if classdef.commonbase(self.classdef) != self.classdef:
                raise TyperError("not an instance of %r: %r" % (
                    self.classdef.name, value))
            rinstance = getinstancerepr(self.rtyper, classdef)
            result = rinstance.convert_const(value)
            return self.upcast(result)
        # common case
        return self.convert_const_exact(value)

    def convert_const_exact(self, value):
        # Cached conversion; the cache entry is inserted *before* filling
        # the data to tolerate recursive/circular prebuilt instances.
        try:
            return self.iprebuiltinstances[value]
        except KeyError:
            self.setup()
            result = self.create_instance()
            self.iprebuiltinstances[value] = result
            self.initialize_prebuilt_instance(value, self.classdef, result)
            return result

    def get_reusable_prebuilt_instance(self):
        "Get a dummy prebuilt instance. Multiple calls reuse the same one."
        try:
            return self._reusable_prebuilt_instance
        except AttributeError:
            self.setup()
            result = self.create_instance()
            self._reusable_prebuilt_instance = result
            self.initialize_prebuilt_data(Ellipsis, self.classdef, result)
            return result

    _initialize_data_flattenrec = FlattenRecursion()

    def initialize_prebuilt_instance(self, value, classdef, result):
        # must fill in the hash cache before the other ones
        # (see test_circular_hash_initialization)
        self._initialize_data_flattenrec(self.initialize_prebuilt_data,
                                         value, classdef, result)

    def get_ll_hash_function(self):
        return ll_inst_hash

    get_ll_fasthash_function = get_ll_hash_function

    def rtype_type(self, hop):
        # rtype 'type(x)': read the typeptr, with a null check if the
        # annotation says the instance can be None
        if hop.s_result.is_constant():
            return hop.inputconst(hop.r_result, hop.s_result.const)
        instance_repr = self.common_repr()
        vinst, = hop.inputargs(instance_repr)
        if hop.args_s[0].can_be_none():
            return hop.gendirectcall(ll_inst_type, vinst)
        else:
            return instance_repr.getfield(vinst, '__class__', hop.llops)

    def rtype_getattr(self, hop):
        # rtype 'x.attr': instance field, method, or class-level attribute
        if hop.s_result.is_constant():
            return hop.inputconst(hop.r_result, hop.s_result.const)
        attr = hop.args_s[1].const
        vinst, vattr = hop.inputargs(self, Void)
        if attr == '__class__' and hop.r_result.lowleveltype is Void:
            # special case for when the result of '.__class__' is a constant
            [desc] = hop.s_result.descriptions
            return hop.inputconst(Void, desc.pyobj)
        if attr in self.allinstancefields:
            return self.getfield(vinst, attr, hop.llops,
                                 flags=hop.args_s[0].flags)
        elif attr in self.rclass.allmethods:
            # special case for methods: represented as their 'self' only
            # (see MethodsPBCRepr)
            return hop.r_result.get_method_from_instance(self, vinst,
                                                         hop.llops)
        else:
            # class attribute: go through the vtable
            vcls = self.getfield(vinst, '__class__', hop.llops)
            return self.rclass.getclsfield(vcls, attr, hop.llops)

    def rtype_setattr(self, hop):
        attr = hop.args_s[1].const
        r_value = self.getfieldrepr(attr)
        vinst, vattr, vvalue = hop.inputargs(self, Void, r_value)
        self.setfield(vinst, attr, vvalue, hop.llops,
                      flags=hop.args_s[0].flags)

    def rtype_bool(self, hop):
        # truth value of an instance == pointer is non-null
        vinst, = hop.inputargs(self)
        return hop.genop('ptr_nonzero', [vinst], resulttype=Bool)

    def ll_str(self, i):  # doesn't work for non-gc classes!
        from rpython.rtyper.lltypesystem.ll_str import ll_int2hex
        from rpython.rlib.rarithmetic import r_uint
        if not i:
            return rstr.conststr("NULL")
        instance = cast_pointer(OBJECTPTR, i)
        # Two choices: the first gives a fast answer but it can change
        # (typically only once) during the life of the object.
        #uid = r_uint(cast_ptr_to_int(i))
        uid = r_uint(llop.gc_id(lltype.Signed, i))
        #
        res = rstr.conststr("<")
        res = rstr.ll_strconcat(res, instance.typeptr.name)
        res = rstr.ll_strconcat(res, rstr.conststr(" object at 0x"))
        res = rstr.ll_strconcat(res, ll_int2hex(uid, False))
        res = rstr.ll_strconcat(res, rstr.conststr(">"))
        return res

    def get_ll_eq_function(self):
        return None    # defaults to compare by identity ('==' on pointers)

    def can_ll_be_null(self, s_value):
        return s_value.can_be_none()

    @staticmethod
    def check_graph_of_del_does_not_call_too_much(rtyper, graph):
        # RPython-level __del__() methods should not do "too much".
        # In the PyPy Python interpreter, they usually do simple things
        # like file.__del__() closing the file descriptor; or if they
        # want to do more like call an app-level __del__() method, they
        # enqueue the object instead, and the actual call is done later.
        #
        # Here, as a quick way to check "not doing too much", we check
        # that from no RPython-level __del__() method we can reach a
        # JitDriver.
        #
        # XXX wrong complexity, but good enough because the set of
        # reachable graphs should be small
        callgraph = rtyper.annotator.translator.callgraph.values()
        # 'seen' maps each reached graph to the caller it was reached from,
        # so a call chain can be reconstructed for the error message
        seen = {graph: None}
        while True:
            oldlength = len(seen)
            for caller, callee in callgraph:
                if caller in seen and callee not in seen:
                    func = getattr(callee, 'func', None)
                    if getattr(func, '_dont_reach_me_in_del_', False):
                        lst = [str(callee)]
                        g = caller
                        while g:
                            lst.append(str(g))
                            g = seen.get(g)
                        lst.append('')
                        raise TyperError("the RPython-level __del__() method "
                                         "in %r calls:%s" % (
                                             graph, '\n\t'.join(lst[::-1])))
                    if getattr(func, '_cannot_really_call_random_things_',
                               False):
                        continue
                    seen[callee] = caller
            if len(seen) == oldlength:
                # fixpoint reached: no new graphs reachable
                break

    def common_repr(self):  # -> object or nongcobject reprs
        return getinstancerepr(self.rtyper, None, self.gcflavor)

    def _get_field(self, attr):
        # raises KeyError if 'attr' is not stored at this exact level
        return self.fields[attr]

    def null_instance(self):
        return nullptr(self.object_type)

    def upcast(self, result):
        return cast_pointer(self.lowleveltype, result)

    def create_instance(self):
        # prebuilt instances are immortal by construction
        return malloc(self.object_type, flavor=self.gcflavor, immortal=True)

    def initialize_prebuilt_data(self, value, classdef, result):
        # Fill 'result' (a low-level struct) from the prebuilt Python
        # instance 'value'; 'value' may be Ellipsis when called from
        # get_reusable_prebuilt_instance(), in which case defaults are used.
        if self.classdef is not None:
            # recursively build the parent part of the instance
            self.rbase.initialize_prebuilt_data(value, classdef, result.super)
            # then add instance attributes from this level
            for name, (mangled_name, r) in self.fields.items():
                if r.lowleveltype is Void:
                    llattrvalue = None
                else:
                    try:
                        attrvalue = getattr(value, name)
                    except AttributeError:
                        attrvalue = self.classdef.classdesc.read_attribute(
                            name, None)
                        if attrvalue is None:
                            # Ellipsis from get_reusable_prebuilt_instance()
                            #if value is not Ellipsis:
                                #warning("prebuilt instance %r has no "
                                #        "attribute %r" % (value, name))
                            llattrvalue = r.lowleveltype._defl()
                        else:
                            llattrvalue = r.convert_desc_or_const(attrvalue)
                    else:
                        llattrvalue = r.convert_const(attrvalue)
                setattr(result, mangled_name, llattrvalue)
        else:
            # OBJECT part
            rclass = getclassrepr(self.rtyper, classdef)
            result.typeptr = rclass.getvtable()

    def getfieldrepr(self, attr):
        """Return the repr used for the given attribute."""
        if attr in self.fields:
            mangled_name, r = self.fields[attr]
            return r
        else:
            if self.classdef is None:
                raise MissingRTypeAttribute(attr)
            return self.rbase.getfieldrepr(attr)

    def getfield(self, vinst, attr, llops, force_cast=False, flags={}):
        """Read the given attribute (or __class__ for the type) of 'vinst'."""
        if attr in self.fields:
            mangled_name, r = self.fields[attr]
            cname = inputconst(Void, mangled_name)
            if force_cast:
                # cast down from the subclass pointer we were handed
                vinst = llops.genop('cast_pointer', [vinst], resulttype=self)
            self.hook_access_field(vinst, cname, llops, flags)
            return llops.genop('getfield', [vinst, cname], resulttype=r)
        else:
            # not stored at this level: delegate upward
            if self.classdef is None:
                raise MissingRTypeAttribute(attr)
            return self.rbase.getfield(vinst, attr, llops, force_cast=True,
                                       flags=flags)

    def setfield(self, vinst, attr, vvalue, llops, force_cast=False,
                 flags={}):
        """Write the given attribute (or __class__ for the type) of 'vinst'."""
        if attr in self.fields:
            mangled_name, r = self.fields[attr]
            cname = inputconst(Void, mangled_name)
            if force_cast:
                vinst = llops.genop('cast_pointer', [vinst], resulttype=self)
            self.hook_access_field(vinst, cname, llops, flags)
            self.hook_setfield(vinst, attr, llops)
            llops.genop('setfield', [vinst, cname, vvalue])
        else:
            # not stored at this level: delegate upward
            if self.classdef is None:
                raise MissingRTypeAttribute(attr)
            self.rbase.setfield(vinst, attr, vvalue, llops, force_cast=True,
                                flags=flags)

    def rtype_isinstance(self, hop):
        # rtype 'isinstance(x, cls)'; specializes when 'cls' is a constant
        class_repr = get_type_repr(hop.rtyper)
        instance_repr = self.common_repr()

        v_obj, v_cls = hop.inputargs(instance_repr, class_repr)
        if isinstance(v_cls, Constant):
            cls = v_cls.value
            llf, llf_nonnull = make_ll_isinstance(self.rtyper, cls)
            if hop.args_s[0].can_be_None:
                return hop.gendirectcall(llf, v_obj)
            else:
                return hop.gendirectcall(llf_nonnull, v_obj)
        else:
            return hop.gendirectcall(ll_isinstance, v_obj, v_cls)
class ClassDef(object):
    """Wraps a user class for the annotator.

    Tracks the Attribute objects of the class, the sources of (supposedly
    constant) class-level attributes, the subclass tree, and the read
    positions that must be re-flowed when new information shows up.
    """

    def __init__(self, bookkeeper, classdesc):
        self.bookkeeper = bookkeeper
        self.attrs = {}          # {name: Attribute}
        self.classdesc = classdesc
        self.name = self.classdesc.name
        self.shortname = self.name.split('.')[-1]
        self.subdefs = []
        self.attr_sources = {}   # {name: list-of-sources}
        self.read_locations_of__class__ = {}
        self.repr = None
        self.extra_access_sets = {}
        self.instances_seen = set()

        if classdesc.basedesc:
            self.basedef = classdesc.basedesc.getuniqueclassdef()
            self.basedef.subdefs.append(self)
            self.basedef.see_new_subclass(self)
        else:
            self.basedef = None

        self.parentdefs = dict.fromkeys(self.getmro())

    def setup(self, sources):
        # collect the (supposed constant) class attributes
        for name, source in sources.items():
            self.add_source_for_attribute(name, source)
        if self.bookkeeper:
            self.bookkeeper.event('classdef_setup', self)

    def s_getattr(self, attrname, flags):
        # annotation of reading 'attrname' on an instance of this class
        attrdef = self.find_attribute(attrname)
        s_result = attrdef.s_value
        # hack: if s_result is a set of methods, discard the ones
        # that can't possibly apply to an instance of self.
        # XXX do it more nicely
        if isinstance(s_result, SomePBC):
            s_result = self.lookup_filter(s_result, attrname, flags)
        elif isinstance(s_result, SomeImpossibleValue):
            self.check_missing_attribute_update(attrname)
            # blocking is harmless if the attribute is explicitly listed
            # in the class or a parent class.
            for basedef in self.getmro():
                if basedef.classdesc.all_enforced_attrs is not None:
                    if attrname in basedef.classdesc.all_enforced_attrs:
                        raise HarmlesslyBlocked("get enforced attr")
        elif isinstance(s_result, SomeList):
            s_result = self.classdesc.maybe_return_immutable_list(
                attrname, s_result)
        return s_result

    def add_source_for_attribute(self, attr, source):
        """Adds information about a constant source for an attribute.
        """
        for cdef in self.getmro():
            if attr in cdef.attrs:
                # the Attribute() exists already for this class (or a parent)
                attrdef = cdef.attrs[attr]
                s_prev_value = attrdef.s_value
                attrdef.add_constant_source(self, source)
                # we should reflow from all the reader's position,
                # but as an optimization we try to see if the attribute
                # has really been generalized
                if attrdef.s_value != s_prev_value:
                    self.bookkeeper.update_attr(cdef, attrdef)
                return
        else:
            # remember the source in self.attr_sources
            sources = self.attr_sources.setdefault(attr, [])
            sources.append(source)
            # register the source in any Attribute found in subclasses,
            # to restore invariant (III)
            # NB. add_constant_source() may discover new subdefs but the
            # right thing will happen to them because self.attr_sources
            # was already updated
            if not source.instance_level:
                for subdef in self.getallsubdefs():
                    if attr in subdef.attrs:
                        attrdef = subdef.attrs[attr]
                        s_prev_value = attrdef.s_value
                        attrdef.add_constant_source(self, source)
                        if attrdef.s_value != s_prev_value:
                            self.bookkeeper.update_attr(subdef, attrdef)

    def get_owner(self, attrname):
        """Return the classdef owning the attribute `attrname`."""
        for cdef in self.getmro():
            if attrname in cdef.attrs:
                return cdef
        else:
            return None

    def locate_attribute(self, attr):
        # like get_owner(), but creates the Attribute here if nobody owns it
        cdef = self.get_owner(attr)
        if cdef:
            return cdef
        else:
            self._generalize_attr(attr, s_value=None)
            return self

    def find_attribute(self, attr):
        return self.locate_attribute(attr).attrs[attr]

    def __repr__(self):
        return "<ClassDef '%s'>" % (self.name, )

    def has_no_attrs(self):
        for clsdef in self.getmro():
            if clsdef.attrs:
                return False
        return True

    def commonbase(self, other):
        # closest ancestor of 'other' that is a superclass of self (or None)
        while other is not None and not self.issubclass(other):
            other = other.basedef
        return other

    def getmro(self):
        # generator yielding self, then each base up to the root
        while self is not None:
            yield self
            self = self.basedef

    def issubclass(self, other):
        return self.classdesc.issubclass(other.classdesc)

    def getallsubdefs(self):
        # generator over self and all (transitive) subclasses; 'pending'
        # grows while being iterated, which is intentional
        pending = [self]
        seen = {}
        for clsdef in pending:
            yield clsdef
            for sub in clsdef.subdefs:
                if sub not in seen:
                    pending.append(sub)
                    seen[sub] = True

    def _generalize_attr(self, attr, s_value):
        # create the Attribute and do the generalization asked for
        newattr = Attribute(attr)
        if s_value:
            newattr.s_value = s_value
        # remove the attribute from subclasses -- including us!
        # invariant (I)
        constant_sources = []    # [(classdef-of-origin, source)]
        for subdef in self.getallsubdefs():
            if attr in subdef.attrs:
                subattr = subdef.attrs[attr]
                newattr.merge(subattr, classdef=self)
                del subdef.attrs[attr]
            if attr in subdef.attr_sources:
                # accumulate attr_sources for this attribute from all subclasses
                lst = subdef.attr_sources[attr]
                for source in lst:
                    constant_sources.append((subdef, source))
                del lst[:]    # invariant (II)

        # accumulate attr_sources for this attribute from all parents, too
        # invariant (III)
        for superdef in self.getmro():
            if attr in superdef.attr_sources:
                for source in superdef.attr_sources[attr]:
                    if not source.instance_level:
                        constant_sources.append((superdef, source))

        # store this new Attribute, generalizing the previous ones from
        # subclasses -- invariant (A)
        self.attrs[attr] = newattr

        # add the values of the pending constant attributes
        # completes invariants (II) and (III)
        for origin_classdef, source in constant_sources:
            newattr.add_constant_source(origin_classdef, source)

        # reflow from all read positions
        self.bookkeeper.update_attr(self, newattr)

    def generalize_attr(self, attr, s_value=None):
        # if the attribute exists in a superclass, generalize there,
        # as imposed by invariant (I)
        clsdef = self.get_owner(attr)
        if clsdef:
            clsdef._generalize_attr(attr, s_value)
        else:
            self._generalize_attr(attr, s_value)

    def about_attribute(self, name):
        """This is the interface for the code generators to ask about
        the annotation given to a attribute."""
        for cdef in self.getmro():
            if name in cdef.attrs:
                s_result = cdef.attrs[name].s_value
                if s_result != s_ImpossibleValue:
                    return s_result
                else:
                    return None
        return None

    def lookup_filter(self, pbc, name=None, flags={}):
        """Selects the methods in the pbc that could possibly be seen by
        a lookup performed on an instance of 'self', removing the ones
        that cannot appear.
        """
        d = []
        uplookup = None
        updesc = None
        for desc in pbc.descriptions:
            # pick methods but ignore already-bound methods, which can come
            # from an instance attribute
            if (isinstance(desc, MethodDesc) and desc.selfclassdef is None):
                methclassdef = desc.originclassdef
                if methclassdef is not self and \
                        methclassdef.issubclass(self):
                    pass  # subclasses methods are always candidates
                elif self.issubclass(methclassdef):
                    # upward consider only the best match
                    if uplookup is None or methclassdef.issubclass(uplookup):
                        uplookup = methclassdef
                        updesc = desc
                    continue
                    # for clsdef1 >= clsdef2, we guarantee that
                    # clsdef1.lookup_filter(pbc) includes
                    # clsdef2.lookup_filter(pbc) (see formal proof...)
                else:
                    continue  # not matching
                # bind the method by giving it a selfclassdef.  Use the
                # more precise subclass that it's coming from.
                desc = desc.bind_self(methclassdef, flags)
            d.append(desc)
        if uplookup is not None:
            d.append(updesc.bind_self(self, flags))

        if d:
            return SomePBC(d, can_be_None=pbc.can_be_None)
        elif pbc.can_be_None:
            return s_None
        else:
            return s_ImpossibleValue

    def check_missing_attribute_update(self, name):
        # haaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaack
        # sometimes, new methods can show up on classes, added
        # e.g. by W_TypeObject._freeze_() -- the multimethod
        # implementations. Check that here...
        found = False
        parents = list(self.getmro())
        parents.reverse()
        for base in parents:
            if base.check_attr_here(name):
                found = True
        return found

    def check_attr_here(self, name):
        source = self.classdesc.find_source_for(name)
        if source is not None:
            # oups! new attribute showed up
            self.add_source_for_attribute(name, source)
            # maybe it also showed up in some subclass?
            for subdef in self.getallsubdefs():
                if subdef is not self:
                    subdef.check_attr_here(name)
            return True
        else:
            return False

    _see_instance_flattenrec = FlattenRecursion()

    def see_instance(self, x):
        # record a prebuilt instance of this class; the attribute-source
        # registration is flattened to avoid deep recursion
        assert isinstance(x, self.classdesc.pyobj)
        key = Hashable(x)
        if key in self.instances_seen:
            return
        self.instances_seen.add(key)
        self.bookkeeper.event('mutable', x)
        source = InstanceSource(self.bookkeeper, x)
        def delayed():
            for attr in source.all_instance_attributes():
                self.add_source_for_attribute(attr, source)
                # ^^^ can trigger reflowing
        self._see_instance_flattenrec(delayed)

    def see_new_subclass(self, classdef):
        # a new subclass invalidates every recorded '__class__' read
        for position in self.read_locations_of__class__:
            self.bookkeeper.annotator.reflowfromposition(position)
        if self.basedef is not None:
            self.basedef.see_new_subclass(classdef)

    def read_attr__class__(self):
        # record the current position so it can be re-flowed when new
        # subclasses appear (see see_new_subclass)
        position = self.bookkeeper.position_key
        self.read_locations_of__class__[position] = True
        return SomePBC([subdef.classdesc for subdef in self.getallsubdefs()])

    def _freeze_(self):
        raise Exception("ClassDefs are used as knowntype for instances but "
                        "cannot be used as immutablevalue arguments directly")
class Bookkeeper(object):
    """The log of choices that have been made while analysing the operations.
    It ensures that the same 'choice objects' will be returned if we ask
    again during reflowing.  Like ExecutionContext, there is an implicit
    Bookkeeper that can be obtained from a thread-local variable.

    Currently used for factories and user-defined classes."""

    def __setstate__(self, dic):
        # restore after unpickling, then re-register the builtin
        # exception classes (their descs are not pickled)
        self.__dict__.update(dic)  # normal action
        self.register_builtins()

    def __init__(self, annotator):
        self.annotator = annotator
        self.policy = annotator.policy
        self.descs = {}          # map Python objects to their XxxDesc wrappers
        self.methoddescs = {}    # map (funcdesc, classdef) to the MethodDesc
        self.classdefs = []      # list of all ClassDefs
        self.seen_mutable = {}   # set-like dict keyed by (class, instance)
        self.listdefs = {}       # map position_keys to ListDefs
        self.dictdefs = {}       # map position_keys to DictDefs
        self.immutable_cache = {}  # map Constant(x) to its annotation

        self.classpbc_attr_families = {}  # {'attr': UnionFind(ClassAttrFamily)}
        self.frozenpbc_attr_families = UnionFind(description.FrozenAttrFamily)
        self.pbc_maximal_call_families = UnionFind(description.CallFamily)

        self.emulated_pbc_calls = {}   # map unique_key to (pbc, args_s)
        self.all_specializations = {}  # {FuncDesc: specialization-info}
        self.pending_specializations = []  # list of callbacks
        self.external_class_cache = {}  # cache of ExternalType classes

        self.needs_generic_instantiate = {}
        self.thread_local_fields = set()

        self.register_builtins()

    def register_builtins(self):
        # make sure the standard exception classes have ClassDefs
        import rpython.annotator.builtin  # for side-effects
        from rpython.annotator.exception import standardexceptions
        for cls in standardexceptions:
            self.getuniqueclassdef(cls)

    def enter(self, position_key):
        """Start of an operation.
        The operation is uniquely identified by the given key."""
        assert not hasattr(self, 'position_key'), "don't call enter() nestedly"
        self.position_key = position_key
        TLS.bookkeeper = self

    def leave(self):
        """End of an operation."""
        del TLS.bookkeeper
        del self.position_key

    def compute_at_fixpoint(self):
        """Run once the annotator reached a fixpoint: (re)consider every
        call site in the newly annotated blocks, plus the emulated pbc
        calls accumulated so far."""
        # getbookkeeper() needs to work during this function, so provide
        # one with a dummy position
        self.enter(None)
        try:
            def call_sites():
                newblocks = self.annotator.added_blocks
                if newblocks is None:
                    newblocks = self.annotator.annotated  # all of them
                annotation = self.annotator.annotation
                for block in newblocks:
                    for op in block.operations:
                        if op.opname in ('simple_call', 'call_args'):
                            yield op
                        # some blocks are partially annotated
                        if annotation(op.result) is None:
                            break  # ignore the unannotated part

            for call_op in call_sites():
                self.consider_call_site(call_op)

            for pbc, args_s in self.emulated_pbc_calls.itervalues():
                args = simple_args(args_s)
                pbc.consider_call_site(args, s_ImpossibleValue, None)
            self.emulated_pbc_calls = {}
        finally:
            self.leave()

    def check_no_flags_on_instances(self):
        # sanity check: no flags attached to heap stored instances
        seen = set()

        def check_no_flags(s_value_or_def):
            # recursively inspect annotations/defs reachable from the heap
            if isinstance(s_value_or_def, SomeInstance):
                assert not s_value_or_def.flags, "instance annotation with flags escaped to the heap"
                check_no_flags(s_value_or_def.classdef)
            elif isinstance(s_value_or_def, SomeList):
                check_no_flags(s_value_or_def.listdef.listitem)
            elif isinstance(s_value_or_def, SomeDict):
                check_no_flags(s_value_or_def.dictdef.dictkey)
                check_no_flags(s_value_or_def.dictdef.dictvalue)
            elif isinstance(s_value_or_def, SomeTuple):
                for s_item in s_value_or_def.items:
                    check_no_flags(s_item)
            elif isinstance(s_value_or_def, ClassDef):
                if s_value_or_def in seen:
                    return
                seen.add(s_value_or_def)
                for attr in s_value_or_def.attrs.itervalues():
                    s_attr = attr.s_value
                    check_no_flags(s_attr)
            elif isinstance(s_value_or_def, ListItem):
                if s_value_or_def in seen:
                    return
                seen.add(s_value_or_def)
                check_no_flags(s_value_or_def.s_value)

        for clsdef in self.classdefs:
            check_no_flags(clsdef)

    def consider_call_site(self, call_op):
        """Annotate one 'simple_call'/'call_args' operation, translating
        low-level ADT methods into plain callables first."""
        from rpython.rtyper.llannotation import SomeLLADTMeth, lltype_to_annotation
        annotation = self.annotator.annotation
        s_callable = annotation(call_op.args[0])
        args_s = [annotation(arg) for arg in call_op.args[1:]]
        if isinstance(s_callable, SomeLLADTMeth):
            adtmeth = s_callable
            s_callable = self.immutablevalue(adtmeth.func)
            # prepend the ll pointer as the implicit first argument
            args_s = [lltype_to_annotation(adtmeth.ll_ptrtype)] + args_s
        if isinstance(s_callable, SomePBC):
            s_result = annotation(call_op.result)
            if s_result is None:
                s_result = s_ImpossibleValue
            args = call_op.build_args(args_s)
            s_callable.consider_call_site(args, s_result, call_op)

    def getuniqueclassdef(self, cls):
        """Get the ClassDef associated with the given user cls.
        Avoid using this!  It breaks for classes that must be specialized.
        """
        assert cls is not object
        desc = self.getdesc(cls)
        return desc.getuniqueclassdef()

    def getlistdef(self, **flags_if_new):
        """Get the ListDef associated with the current position."""
        try:
            listdef = self.listdefs[self.position_key]
        except KeyError:
            listdef = self.listdefs[self.position_key] = ListDef(self)
            # flags only apply when the ListDef is first created here
            listdef.listitem.__dict__.update(flags_if_new)
        return listdef

    def newlist(self, *s_values, **flags):
        """Make a SomeList associated with the current position, general
        enough to contain the s_values as items."""
        listdef = self.getlistdef(**flags)
        for s_value in s_values:
            listdef.generalize(s_value)
        if flags:
            # 'range_step' is the only supported flag here
            assert flags.keys() == ['range_step']
            listdef.generalize_range_step(flags['range_step'])
        return SomeList(listdef)

    def getdictdef(self, is_r_dict=False, force_non_null=False):
        """Get the DictDef associated with the current position."""
        try:
            dictdef = self.dictdefs[self.position_key]
        except KeyError:
            dictdef = DictDef(self, is_r_dict=is_r_dict,
                              force_non_null=force_non_null)
            self.dictdefs[self.position_key] = dictdef
        return dictdef

    def newdict(self):
        """Make a so-far empty SomeDict associated with the current
        position."""
        return SomeDict(self.getdictdef())

    def immutablevalue(self, x):
        """The most precise SomeValue instance that contains the
        immutable value x."""
        # convert unbound methods to the underlying function
        if hasattr(x, 'im_self') and x.im_self is None:
            x = x.im_func
            assert not hasattr(x, 'im_self')
        tp = type(x)
        if issubclass(tp, Symbolic):  # symbolic constants support
            result = x.annotation()
            result.const_box = Constant(x)
            return result
        if tp is bool:
            result = SomeBool()
        elif tp is int:
            result = SomeInteger(nonneg=x >= 0)
        elif tp is long:
            # accept longs only if they fit into a machine int
            if -sys.maxint - 1 <= x <= sys.maxint:
                x = int(x)
                result = SomeInteger(nonneg=x >= 0)
            else:
                raise Exception("seeing a prebuilt long (value %s)" % hex(x))
        elif issubclass(tp, str):  # py.lib uses annotated str subclasses
            no_nul = not '\x00' in x
            if len(x) == 1:
                result = SomeChar(no_nul=no_nul)
            else:
                result = SomeString(no_nul=no_nul)
        elif tp is unicode:
            if len(x) == 1:
                result = SomeUnicodeCodePoint()
            else:
                result = SomeUnicodeString()
        elif tp is bytearray:
            result = SomeByteArray()
        elif tp is tuple:
            result = SomeTuple(items=[self.immutablevalue(e) for e in x])
        elif tp is float:
            result = SomeFloat()
        elif tp is list:
            # prebuilt lists are cached by identity (Constant wraps x)
            key = Constant(x)
            try:
                return self.immutable_cache[key]
            except KeyError:
                result = SomeList(ListDef(self, s_ImpossibleValue))
                # cache before generalizing, to handle recursive lists
                self.immutable_cache[key] = result
                for e in x:
                    result.listdef.generalize(self.immutablevalue(e))
                result.const_box = key
                return result
        elif (tp is dict or tp is r_dict or
              tp is SomeOrderedDict.knowntype or tp is r_ordereddict):
            key = Constant(x)
            try:
                return self.immutable_cache[key]
            except KeyError:
                if tp is SomeOrderedDict.knowntype or tp is r_ordereddict:
                    cls = SomeOrderedDict
                else:
                    cls = SomeDict
                is_r_dict = issubclass(tp, r_dict)
                result = cls(DictDef(self,
                                     s_ImpossibleValue,
                                     s_ImpossibleValue,
                                     is_r_dict=is_r_dict))
                # cache before generalizing, to handle recursive dicts
                self.immutable_cache[key] = result
                if is_r_dict:
                    s_eqfn = self.immutablevalue(x.key_eq)
                    s_hashfn = self.immutablevalue(x.key_hash)
                    result.dictdef.dictkey.update_rdict_annotations(s_eqfn,
                                                                    s_hashfn)
                seen_elements = 0
                while seen_elements != len(x):
                    items = x.items()
                    for ek, ev in items:
                        result.dictdef.generalize_key(self.immutablevalue(ek))
                        result.dictdef.generalize_value(self.immutablevalue(ev))
                        result.dictdef.seen_prebuilt_key(ek)
                    seen_elements = len(items)
                    # if the dictionary grew during the iteration,
                    # start over again
                result.const_box = key
                return result
        elif tp is weakref.ReferenceType:
            x1 = x()
            if x1 is None:
                result = SomeWeakRef(None)  # dead weakref
            else:
                s1 = self.immutablevalue(x1)
                assert isinstance(s1, SomeInstance)
                result = SomeWeakRef(s1.classdef)
        elif tp is property:
            return SomeProperty(x)
        elif ishashable(x) and x in BUILTIN_ANALYZERS:
            _module = getattr(x, "__module__", "unknown")
            result = SomeBuiltin(BUILTIN_ANALYZERS[x],
                                 methodname="%s.%s" % (_module, x.__name__))
        elif extregistry.is_registered(x):
            entry = extregistry.lookup(x)
            result = entry.compute_annotation_bk(self)
        elif tp is type:
            result = SomeConstantType(x, self)
        elif callable(x):
            if hasattr(x, 'im_self') and hasattr(x, 'im_func'):
                # on top of PyPy, for cases like 'l.append' where 'l' is a
                # global constant list, the find_method() returns non-None
                s_self = self.immutablevalue(x.im_self)
                result = s_self.find_method(x.im_func.__name__)
            elif hasattr(x, '__self__') and x.__self__ is not None:
                # for cases like 'l.append' where 'l' is a global constant list
                s_self = self.immutablevalue(x.__self__)
                result = s_self.find_method(x.__name__)
                assert result is not None
            else:
                result = None
            if result is None:
                result = SomePBC([self.getdesc(x)])
        elif hasattr(x, '_freeze_'):
            assert x._freeze_() is True
            # user-defined classes can define a method _freeze_(), which
            # is called when a prebuilt instance is found.  If the method
            # returns True, the instance is considered immutable and becomes
            # a SomePBC().  Otherwise it's just SomeInstance().
            result = SomePBC([self.getdesc(x)])
        elif hasattr(x, '__class__') \
                 and x.__class__.__module__ != '__builtin__':
            if hasattr(x, '_cleanup_'):
                x._cleanup_()
            self.see_mutable(x)
            result = SomeInstance(self.getuniqueclassdef(x.__class__))
        elif x is None:
            return s_None
        else:
            raise Exception("Don't know how to represent %r" % (x, ))
        result.const = x
        return result

    def getdesc(self, pyobj):
        # get the XxxDesc wrapper for the given Python object, which must be
        # one of:
        #  * a user-defined Python function
        #  * a Python type or class (but not a built-in one like 'int')
        #  * a user-defined bound or unbound method object
        #  * a frozen pre-built constant (with _freeze_() == True)
        #  * a bound method of a frozen pre-built constant
        try:
            return self.descs[pyobj]
        except KeyError:
            if isinstance(pyobj, types.FunctionType):
                result = description.FunctionDesc(self, pyobj)
            elif isinstance(pyobj, (type, types.ClassType)):
                if pyobj is object:
                    raise Exception("ClassDesc for object not supported")
                if pyobj.__module__ == '__builtin__':
                    # avoid making classdefs for builtin types
                    result = self.getfrozen(pyobj)
                else:
                    result = description.ClassDesc(self, pyobj)
            elif isinstance(pyobj, types.MethodType):
                if pyobj.im_self is None:  # unbound
                    return self.getdesc(pyobj.im_func)
                if hasattr(pyobj.im_self, '_cleanup_'):
                    pyobj.im_self._cleanup_()
                if hasattr(pyobj.im_self, '_freeze_'):  # method of frozen
                    assert pyobj.im_self._freeze_() is True
                    result = description.MethodOfFrozenDesc(self,
                        self.getdesc(pyobj.im_func),            # funcdesc
                        self.getdesc(pyobj.im_self))            # frozendesc
                else:  # regular method
                    origincls, name = origin_of_meth(pyobj)
                    self.see_mutable(pyobj.im_self)
                    assert pyobj == getattr(pyobj.im_self, name), (
                        "%r is not %s.%s ??" % (pyobj, pyobj.im_self, name))
                    # emulate a getattr to make sure it's on the classdef
                    classdef = self.getuniqueclassdef(pyobj.im_class)
                    classdef.find_attribute(name)
                    result = self.getmethoddesc(
                        self.getdesc(pyobj.im_func),            # funcdesc
                        self.getuniqueclassdef(origincls),      # originclassdef
                        classdef,                               # selfclassdef
                        name)
            else:
                # must be a frozen pre-built constant, but let's check
                if hasattr(pyobj, '_freeze_'):
                    assert pyobj._freeze_() is True
                else:
                    if hasattr(pyobj, '__call__'):
                        msg = "object with a __call__ is not RPython"
                    else:
                        msg = "unexpected prebuilt constant"
                    raise Exception("%s: %r" % (msg, pyobj))
                result = self.getfrozen(pyobj)
            self.descs[pyobj] = result
            return result

    def have_seen(self, x):
        # this might need to expand some more.
        if x in self.descs:
            return True
        elif (x.__class__, x) in self.seen_mutable:
            return True
        else:
            return False

    def getfrozen(self, pyobj):
        # wrap a frozen prebuilt constant in a FrozenDesc
        return description.FrozenDesc(self, pyobj)

    def getmethoddesc(self, funcdesc, originclassdef, selfclassdef, name,
                      flags={}):
        """Get (or create and cache) the MethodDesc for the given
        (funcdesc, originclassdef, selfclassdef, name, flags) combination."""
        flagskey = flags.items()
        flagskey.sort()  # make the key deterministic
        key = funcdesc, originclassdef, selfclassdef, name, tuple(flagskey)
        try:
            return self.methoddescs[key]
        except KeyError:
            result = description.MethodDesc(self, funcdesc, originclassdef,
                                            selfclassdef, name, flags)
            self.methoddescs[key] = result
            return result

    # shared recursion-flattener so deeply nested see_mutable() calls do
    # not blow the stack (see FlattenRecursion)
    _see_mutable_flattenrec = FlattenRecursion()

    def see_mutable(self, x):
        """Record the mutable prebuilt instance 'x': register every
        instance attribute of 'x' as a source for the corresponding
        attribute of its ClassDef (which can trigger reflowing)."""
        key = (x.__class__, x)
        if key in self.seen_mutable:
            return
        clsdef = self.getuniqueclassdef(x.__class__)
        self.seen_mutable[key] = True
        self.event('mutable', x)
        source = InstanceSource(self, x)
        def delayed():
            for attr in source.all_instance_attributes():
                clsdef.add_source_for_attribute(attr, source)
                # ^^^ can trigger reflowing
        self._see_mutable_flattenrec(delayed)

    def valueoftype(self, t):
        # the annotation corresponding to "any instance of type t"
        return annotationoftype(t, self)

    def get_classpbc_attr_families(self, attrname):
        """Return the UnionFind for the ClassAttrFamilies corresponding to
        attributes of the given name.
        """
        map = self.classpbc_attr_families
        try:
            access_sets = map[attrname]
        except KeyError:
            access_sets = map[attrname] = UnionFind(
                description.ClassAttrFamily)
        return access_sets

    def pbc_getattr(self, pbc, s_attr):
        """Annotate a getattr on a SomePBC with the constant attribute
        name s_attr; merges attr families and reflows readers when the
        family's value generalizes."""
        assert s_attr.is_constant()
        attr = s_attr.const

        descs = list(pbc.descriptions)
        first = descs[0]
        if len(descs) == 1:
            return first.s_read_attribute(attr)

        change = first.mergeattrfamilies(descs[1:], attr)
        attrfamily = first.getattrfamily(attr)

        position = self.position_key
        attrfamily.read_locations[position] = True

        actuals = []
        for desc in descs:
            actuals.append(desc.s_read_attribute(attr))
        s_result = unionof(*actuals)

        s_oldvalue = attrfamily.get_s_value(attr)
        attrfamily.set_s_value(attr, unionof(s_result, s_oldvalue))

        if change:
            for position in attrfamily.read_locations:
                self.annotator.reflowfromposition(position)

        if isinstance(s_result, SomeImpossibleValue):
            for desc in descs:
                try:
                    attrs = desc.read_attribute('_attrs_')
                except AttributeError:
                    continue
                if isinstance(attrs, Constant):
                    attrs = attrs.value
                if attr in attrs:
                    # attribute declared via _attrs_ but never written:
                    # block harmlessly instead of failing
                    raise HarmlesslyBlocked("getattr on enforced attr")

        return s_result

    def pbc_call(self, pbc, args, emulated=None):
        """Analyse a call to a SomePBC() with the given args (list of
        annotations).
        """
        descs = list(pbc.descriptions)
        first = descs[0]
        first.mergecallfamilies(*descs[1:])

        if emulated is None:
            whence = self.position_key
            # fish the existing annotation for the result variable,
            # needed by some kinds of specialization.
            fn, block, i = self.position_key
            op = block.operations[i]
            s_previous_result = self.annotator.annotation(op.result)
            if s_previous_result is None:
                s_previous_result = s_ImpossibleValue
        else:
            if emulated is True:
                whence = None
            else:
                whence = emulated  # callback case
            op = None
            s_previous_result = s_ImpossibleValue

        def schedule(graph, inputcells):
            return self.annotator.recursivecall(graph, whence, inputcells)

        results = []
        for desc in descs:
            results.append(desc.pycall(schedule, args, s_previous_result, op))
        s_result = unionof(*results)
        return s_result

    def emulate_pbc_call(self, unique_key, pbc, args_s, replace=[],
                         callback=None):
        """For annotating some operation that causes indirectly a Python
        function to be called.  The annotation of the function is "pbc",
        and the list of annotations of arguments is "args_s".

        Can be called in various contexts, but from compute_annotation()
        or compute_result_annotation() of an ExtRegistryEntry, call it
        with both "unique_key" and "callback" set to
        "self.bookkeeper.position_key".  If there are several calls from
        the same operation, they need their own "unique_key", like
        (position_key, "first") and (position_key, "second").

        In general, "unique_key" should somehow uniquely identify where
        the call is in the source code, and "callback" can be either a
        position_key to reflow from when we see more general results,
        or a real callback function that will be called with arguments
        "(annotator, called_graph)" whenever the result is generalized.

        "replace" can be set to a list of old unique_key values to
        forget now, because the given "unique_key" replaces them.
        """
        emulate_enter = not hasattr(self, 'position_key')
        if emulate_enter:
            self.enter(None)
        try:
            emulated_pbc_calls = self.emulated_pbc_calls
            # drop the replaced keys (and any previous entry for our key)
            prev = [unique_key]
            prev.extend(replace)
            for other_key in prev:
                if other_key in emulated_pbc_calls:
                    del emulated_pbc_calls[other_key]
            emulated_pbc_calls[unique_key] = pbc, args_s

            args = simple_args(args_s)
            if callback is None:
                emulated = True
            else:
                emulated = callback
            return self.pbc_call(pbc, args, emulated=emulated)
        finally:
            if emulate_enter:
                self.leave()

    def _find_current_op(self, opname=None, arity=None, pos=None, s_type=None):
        """ Find operation that is currently being annotated. Do some
        sanity checks to see whether the correct op was found."""
        # XXX XXX HACK HACK HACK
        fn, block, i = self.position_key
        op = block.operations[i]
        if opname is not None:
            assert op.opname == opname
        if arity is not None:
            assert len(op.args) == arity
        if pos is not None:
            assert self.annotator.binding(op.args[pos]) == s_type
        return op

    def whereami(self):
        # human-readable description of the current position
        return self.annotator.whereami(self.position_key)

    def event(self, what, x):
        # forward to the annotation policy's event hook
        return self.annotator.policy.event(self, what, x)

    def warning(self, msg):
        return self.annotator.warning(msg)