def initialize_prebuilt_data(self, value, classdef, result):
    """Fill the prebuilt low-level instance 'result' with the attribute
    values taken from the prebuilt Python object 'value'.

    Iterates over every mangled field of this instance's low-level type
    and stores the converted value into 'result' via setattr.
    """
    # then add instance attributes from this level
    classrepr = getclassrepr(self.rtyper, self.classdef)
    for mangled, (oot, default) in self.lowleveltype._allfields().items():
        if oot is ootype.Void:
            # Void fields carry no run-time data; store None.
            llattrvalue = None
        elif mangled == 'meta':
            # Special field: filled with the class's meta-instance.
            llattrvalue = classrepr.get_meta_instance()
        else:
            name = unmangle(mangled, self.rtyper.getconfig())
            try:
                attrvalue = getattr(value, name)
            except AttributeError:
                # Not set on the instance: fall back to the class-level
                # attribute, if any.
                attrvalue = self.classdef.classdesc.read_attribute(
                    name, None)
                if attrvalue is None:
                    warning("prebuilt instance %r has no attribute %r" % (value, name))
                    continue  # field keeps whatever default it already has
                llattrvalue = self.allfields[
                    mangled].convert_desc_or_const(attrvalue)
            else:
                llattrvalue = self.allfields[mangled].convert_const(
                    attrvalue)
        setattr(result, mangled, llattrvalue)
def initialize_prebuilt_data(self, value, classdef, result):
    """Fill the prebuilt low-level instance 'result' from the prebuilt
    Python object 'value', recursing into the parent ('super') part of
    the structure first, then converting this level's own fields.
    """
    if self.classdef is not None:
        # recursively build the parent part of the instance
        self.rbase.initialize_prebuilt_data(value, classdef, result.super)
        # then add instance attributes from this level
        for name, (mangled_name, r) in self.fields.items():
            if r.lowleveltype is Void:
                # Void fields carry no run-time data.
                llattrvalue = None
            else:
                try:
                    attrvalue = getattr(value, name)
                except AttributeError:
                    # Not on the instance: try the class-level attribute.
                    attrvalue = self.classdef.classdesc.read_attribute(name, None)
                    if attrvalue is None:
                        # Ellipsis from get_reusable_prebuilt_instance()
                        if value is not Ellipsis:
                            warning("prebuilt instance %r has no "
                                    "attribute %r" % (value, name))
                        # Fall back to the field's low-level default value.
                        llattrvalue = r.lowleveltype._defl()
                    else:
                        llattrvalue = r.convert_desc_or_const(attrvalue)
                else:
                    llattrvalue = r.convert_const(attrvalue)
            setattr(result, mangled_name, llattrvalue)
    else:
        # OBJECT part: the root of the hierarchy only stores the vtable.
        rclass = getclassrepr(self.rtyper, classdef)
        result.typeptr = rclass.getvtable()
def initialize_prebuilt_data(self, value, classdef, result):
    """Populate the prebuilt low-level instance 'result' with data from
    the prebuilt Python object 'value'.  The parent ('super') part is
    filled recursively before this level's own fields are converted.
    """
    if self.classdef is None:
        # OBJECT part: the root level only needs its vtable pointer.
        rclass = getclassrepr(self.rtyper, classdef)
        result.typeptr = rclass.getvtable()
        return
    # recursively build the parent part of the instance
    self.rbase.initialize_prebuilt_data(value, classdef, result.super)
    # then add instance attributes from this level
    for attrname, (mangled, attr_repr) in self.fields.items():
        if attr_repr.lowleveltype is Void:
            # Void fields hold no run-time data.
            setattr(result, mangled, None)
            continue
        try:
            pyvalue = getattr(value, attrname)
        except AttributeError:
            # Missing on the instance: look for a class-level attribute.
            pyvalue = self.classdef.classdesc.read_attribute(
                attrname, None)
            if pyvalue is not None:
                llvalue = attr_repr.convert_desc_or_const(pyvalue)
            else:
                # Ellipsis from get_reusable_prebuilt_instance()
                if value is not Ellipsis:
                    warning("prebuilt instance %r has no "
                            "attribute %r" % (value, attrname))
                llvalue = attr_repr.lowleveltype._defl()
        else:
            llvalue = attr_repr.convert_const(pyvalue)
        setattr(result, mangled, llvalue)
def convert_desc(self, frozendesc):
    """Return the low-level PBC instance for 'frozendesc', building and
    caching it on first use.

    Raises TyperError if an access set is configured and 'frozendesc'
    is not part of it.
    """
    if self.access_set is not None and frozendesc not in self.access_set.descs:
        raise TyperError("not found in PBC access set: %r" % (frozendesc,))
    try:
        return self.pbc_cache[frozendesc]
    except KeyError:
        self.setup()
        result = self.create_instance()
        # NOTE(review): cached before the fields are filled in --
        # presumably so that recursive references find the instance;
        # confirm against callers.
        self.pbc_cache[frozendesc] = result
        for attr, (mangled_name, r_value) in self.fieldmap.items():
            if r_value.lowleveltype is Void:
                # Void fields carry no run-time data.
                continue
            try:
                thisattrvalue = frozendesc.attrcache[attr]
            except KeyError:
                if frozendesc.warn_missing_attribute(attr):
                    warning("Desc %r has no attribute %r" % (frozendesc, attr))
                continue
            llvalue = r_value.convert_const(thisattrvalue)
            setattr(result, mangled_name, llvalue)
        return result
def initialize_prebuilt_data(self, value, classdef, result):
    """Copy the attributes of the prebuilt Python object 'value' into
    the prebuilt low-level instance 'result', converting each field
    according to its repr.
    """
    classrepr = getclassrepr(self.rtyper, self.classdef)
    for mangled, (oot, default) in self.lowleveltype._allfields().items():
        if oot is ootype.Void:
            # Void fields carry no run-time data.
            setattr(result, mangled, None)
            continue
        if mangled == 'meta':
            # Special field holding the class's meta-instance.
            setattr(result, mangled, classrepr.get_meta_instance())
            continue
        name = unmangle(mangled, self.rtyper.getconfig())
        try:
            attrvalue = getattr(value, name)
        except AttributeError:
            # Not set on the instance: fall back to the class attribute.
            attrvalue = self.classdef.classdesc.read_attribute(name, None)
            if attrvalue is None:
                warning("prebuilt instance %r has no attribute %r" % (
                    value, name))
                continue  # leave the field untouched
            llattrvalue = self.allfields[mangled].convert_desc_or_const(attrvalue)
        else:
            llattrvalue = self.allfields[mangled].convert_const(attrvalue)
        setattr(result, mangled, llattrvalue)
def convert_desc(self, frozendesc):
    """Build (or fetch from cache) the low-level PBC instance that
    represents 'frozendesc', converting each non-Void field from the
    desc's attribute cache.

    Raises TyperError when an access set is configured and the desc is
    not a member of it.
    """
    if (self.access_set is not None
            and frozendesc not in self.access_set.descs):
        raise TyperError("not found in PBC access set: %r" % (frozendesc,))
    try:
        return self.pbc_cache[frozendesc]
    except KeyError:
        pass
    self.setup()
    instance = self.create_instance()
    # Cache first, then fill in the fields.
    self.pbc_cache[frozendesc] = instance
    for attr, (mangled, value_repr) in self.fieldmap.items():
        if value_repr.lowleveltype is Void:
            continue  # no run-time data for Void fields
        try:
            attrvalue = frozendesc.attrcache[attr]
        except KeyError:
            if frozendesc.warn_missing_attribute(attr):
                warning("Desc %r has no attribute %r" % (frozendesc, attr))
            continue
        setattr(instance, mangled, value_repr.convert_const(attrvalue))
    return instance
def cast_object_to_ptr(PTR, object):
    """NOT_RPYTHON: hack. The object may be disguised as a PTR now.
    Limited to casting a given object to a single type.
    """
    if hasattr(object, '_freeze_'):
        warning("Trying to cast a frozen object to pointer")
    if isinstance(PTR, lltype.Ptr):
        TO = PTR.TO
    else:
        TO = PTR
    if not hasattr(object, '_carry_around_for_tests'):
        if object is None:
            # NOTE(review): uses PTR.TO directly, which would raise
            # AttributeError when PTR is not an lltype.Ptr -- presumably
            # None only ever reaches here with Ptr types; confirm before
            # relying on the non-Ptr path.
            return lltype.nullptr(PTR.TO)
        # Tag the object once with its low-level type; a second cast to
        # a different type is rejected by the assert in the else branch.
        assert not hasattr(object, '_TYPE')
        object._carry_around_for_tests = True
        object._TYPE = TO
    else:
        assert object._TYPE == TO
    #
    if isinstance(PTR, lltype.Ptr):
        return lltype._ptr(PTR, object, True)
    else:
        raise NotImplementedError("cast_object_to_ptr(%r, ...)" % PTR)
def _setup_repr(self):
    """Set up the low-level representation of the dict.

    Computes the key/value reprs (lazily, only if not already cached in
    the instance __dict__), decides how entry liveness and deletion are
    encoded (NULL markers, dummy objects, or explicit boolean flags),
    and builds the DICTENTRY / DICTENTRYARRAY / DICT lltype structures.
    """
    if 'key_repr' not in self.__dict__:
        key_repr = self._key_repr_computer()
        self.external_key_repr, self.key_repr = self.pickkeyrepr(key_repr)
    if 'value_repr' not in self.__dict__:
        self.external_value_repr, self.value_repr = self.pickrepr(self._value_repr_computer())
    if isinstance(self.DICT, lltype.GcForwardReference):
        self.DICTKEY = self.key_repr.lowleveltype
        self.DICTVALUE = self.value_repr.lowleveltype

        # compute the shape of the DICTENTRY structure
        entryfields = []
        entrymeths = {
            'allocate': lltype.typeMethod(_ll_malloc_entries),
            'delete': _ll_free_entries,
            'must_clear_key': (isinstance(self.DICTKEY, lltype.Ptr)
                               and self.DICTKEY._needsgc()),
            'must_clear_value': (isinstance(self.DICTVALUE, lltype.Ptr)
                                 and self.DICTVALUE._needsgc()),
            }

        # * the key
        entryfields.append(("key", self.DICTKEY))

        # * if NULL is not a valid ll value for the key or the value
        #   field of the entry, it can be used as a marker for
        #   never-used entries.  Otherwise, we need an explicit flag.
        s_key = self.dictkey.s_value
        s_value = self.dictvalue.s_value
        nullkeymarker = not self.key_repr.can_ll_be_null(s_key)
        nullvaluemarker = not self.value_repr.can_ll_be_null(s_value)
        if self.force_non_null:
            # Caller promises keys/values are never NULL; warn if the
            # repr disagrees, then trust the promise.
            if not nullkeymarker:
                rmodel.warning("%s can be null, but forcing non-null in dict key" % s_key)
                nullkeymarker = True
            if not nullvaluemarker:
                rmodel.warning("%s can be null, but forcing non-null in dict value" % s_value)
                nullvaluemarker = True
        dummykeyobj = self.key_repr.get_ll_dummyval_obj(self.rtyper, s_key)
        dummyvalueobj = self.value_repr.get_ll_dummyval_obj(self.rtyper,
                                                            s_value)

        # * the state of the entry - trying to encode it as dummy objects
        if nullkeymarker and dummykeyobj:
            # all the state can be encoded in the key
            entrymeths['everused'] = ll_everused_from_key
            entrymeths['dummy_obj'] = dummykeyobj
            entrymeths['valid'] = ll_valid_from_key
            entrymeths['mark_deleted'] = ll_mark_deleted_in_key
            # the key is overwritten by 'dummy' when the entry is deleted
            entrymeths['must_clear_key'] = False
        elif nullvaluemarker and dummyvalueobj:
            # all the state can be encoded in the value
            entrymeths['everused'] = ll_everused_from_value
            entrymeths['dummy_obj'] = dummyvalueobj
            entrymeths['valid'] = ll_valid_from_value
            entrymeths['mark_deleted'] = ll_mark_deleted_in_value
            # value is overwritten by 'dummy' when entry is deleted
            entrymeths['must_clear_value'] = False
        else:
            # we need a flag to know if the entry was ever used
            # (we cannot use a NULL as a marker for this, because
            # the key and value will be reset to NULL to clear their
            # reference)
            entryfields.append(("f_everused", lltype.Bool))
            entrymeths['everused'] = ll_everused_from_flag

            # can we still rely on a dummy obj to mark deleted entries?
            if dummykeyobj:
                entrymeths['dummy_obj'] = dummykeyobj
                entrymeths['valid'] = ll_valid_from_key
                entrymeths['mark_deleted'] = ll_mark_deleted_in_key
                # key is overwritten by 'dummy' when entry is deleted
                entrymeths['must_clear_key'] = False
            elif dummyvalueobj:
                entrymeths['dummy_obj'] = dummyvalueobj
                entrymeths['valid'] = ll_valid_from_value
                entrymeths['mark_deleted'] = ll_mark_deleted_in_value
                # value is overwritten by 'dummy' when entry is deleted
                entrymeths['must_clear_value'] = False
            else:
                # worst case: a second explicit flag for validity
                entryfields.append(("f_valid", lltype.Bool))
                entrymeths['valid'] = ll_valid_from_flag
                entrymeths['mark_deleted'] = ll_mark_deleted_in_flag

        # * the value
        entryfields.append(("value", self.DICTVALUE))

        # * the hash, if needed
        if self.custom_eq_hash:
            fasthashfn = None
        else:
            fasthashfn = self.key_repr.get_ll_fasthash_function()
            if getattr(self.key_repr.get_ll_eq_function(),
                       'no_direct_compare', False):
                entrymeths['no_direct_compare'] = True
        if fasthashfn is None:
            # no fast hash: cache the hash in an extra entry field
            entryfields.append(("f_hash", lltype.Signed))
            entrymeths['hash'] = ll_hash_from_cache
        else:
            entrymeths['hash'] = ll_hash_recomputed
            entrymeths['fasthashfn'] = fasthashfn

        # Build the lltype data structures
        self.DICTENTRY = lltype.Struct("dictentry", *entryfields)
        self.DICTENTRYARRAY = lltype.GcArray(self.DICTENTRY,
                                             adtmeths=entrymeths)
        fields = [("num_items", lltype.Signed),
                  ("resize_counter", lltype.Signed),
                  ("entries", lltype.Ptr(self.DICTENTRYARRAY))]
        if self.custom_eq_hash:
            self.r_rdict_eqfn, self.r_rdict_hashfn = self._custom_eq_hash_repr()
            fields.extend([("fnkeyeq", self.r_rdict_eqfn.lowleveltype),
                           ("fnkeyhash", self.r_rdict_hashfn.lowleveltype)])
            adtmeths = {
                'keyhash': ll_keyhash_custom,
                'keyeq': ll_keyeq_custom,
                'r_rdict_eqfn': self.r_rdict_eqfn,
                'r_rdict_hashfn': self.r_rdict_hashfn,
                'paranoia': True,
                }
        else:
            # figure out which functions must be used to hash and compare
            ll_keyhash = self.key_repr.get_ll_hash_function()
            ll_keyeq = self.key_repr.get_ll_eq_function()  # can be None
            ll_keyhash = lltype.staticAdtMethod(ll_keyhash)
            if ll_keyeq is not None:
                ll_keyeq = lltype.staticAdtMethod(ll_keyeq)
            adtmeths = {
                'keyhash': ll_keyhash,
                'keyeq': ll_keyeq,
                'paranoia': False,
                }
        adtmeths['KEY'] = self.DICTKEY
        adtmeths['VALUE'] = self.DICTVALUE
        adtmeths['allocate'] = lltype.typeMethod(_ll_malloc_dict)
        self.DICT.become(lltype.GcStruct("dicttable", adtmeths=adtmeths,
                                         *fields))
def _setup_repr(self):
    """Set up the low-level representation of the dict.

    Computes the key/value reprs (lazily, only if not already cached in
    the instance __dict__), decides how entry liveness and deletion are
    encoded (NULL markers, dummy objects, or explicit boolean flags),
    and builds the DICTENTRY / DICTENTRYARRAY / DICT lltype structures.

    Fix: propagate the eq function's 'no_direct_compare' attribute into
    'entrymeths', consistently with the other _setup_repr in this file;
    this variant previously dropped that flag.
    """
    if 'key_repr' not in self.__dict__:
        key_repr = self._key_repr_computer()
        self.external_key_repr, self.key_repr = self.pickkeyrepr(key_repr)
    if 'value_repr' not in self.__dict__:
        self.external_value_repr, self.value_repr = self.pickrepr(self._value_repr_computer())
    if isinstance(self.DICT, lltype.GcForwardReference):
        self.DICTKEY = self.key_repr.lowleveltype
        self.DICTVALUE = self.value_repr.lowleveltype

        # compute the shape of the DICTENTRY structure
        entryfields = []
        entrymeths = {
            'allocate': lltype.typeMethod(_ll_malloc_entries),
            'delete': _ll_free_entries,
            'must_clear_key': (isinstance(self.DICTKEY, lltype.Ptr)
                               and self.DICTKEY._needsgc()),
            'must_clear_value': (isinstance(self.DICTVALUE, lltype.Ptr)
                                 and self.DICTVALUE._needsgc()),
            }

        # * the key
        entryfields.append(("key", self.DICTKEY))

        # * if NULL is not a valid ll value for the key or the value
        #   field of the entry, it can be used as a marker for
        #   never-used entries.  Otherwise, we need an explicit flag.
        s_key = self.dictkey.s_value
        s_value = self.dictvalue.s_value
        nullkeymarker = not self.key_repr.can_ll_be_null(s_key)
        nullvaluemarker = not self.value_repr.can_ll_be_null(s_value)
        if self.force_non_null:
            # Caller promises keys/values are never NULL; warn if the
            # repr disagrees, then trust the promise.
            if not nullkeymarker:
                rmodel.warning("%s can be null, but forcing non-null in dict key" % s_key)
                nullkeymarker = True
            if not nullvaluemarker:
                rmodel.warning("%s can be null, but forcing non-null in dict value" % s_value)
                nullvaluemarker = True
        dummykeyobj = self.key_repr.get_ll_dummyval_obj(self.rtyper, s_key)
        dummyvalueobj = self.value_repr.get_ll_dummyval_obj(self.rtyper,
                                                            s_value)

        # * the state of the entry - trying to encode it as dummy objects
        if nullkeymarker and dummykeyobj:
            # all the state can be encoded in the key
            entrymeths['everused'] = ll_everused_from_key
            entrymeths['dummy_obj'] = dummykeyobj
            entrymeths['valid'] = ll_valid_from_key
            entrymeths['mark_deleted'] = ll_mark_deleted_in_key
            # the key is overwritten by 'dummy' when the entry is deleted
            entrymeths['must_clear_key'] = False
        elif nullvaluemarker and dummyvalueobj:
            # all the state can be encoded in the value
            entrymeths['everused'] = ll_everused_from_value
            entrymeths['dummy_obj'] = dummyvalueobj
            entrymeths['valid'] = ll_valid_from_value
            entrymeths['mark_deleted'] = ll_mark_deleted_in_value
            # value is overwritten by 'dummy' when entry is deleted
            entrymeths['must_clear_value'] = False
        else:
            # we need a flag to know if the entry was ever used
            # (we cannot use a NULL as a marker for this, because
            # the key and value will be reset to NULL to clear their
            # reference)
            entryfields.append(("f_everused", lltype.Bool))
            entrymeths['everused'] = ll_everused_from_flag

            # can we still rely on a dummy obj to mark deleted entries?
            if dummykeyobj:
                entrymeths['dummy_obj'] = dummykeyobj
                entrymeths['valid'] = ll_valid_from_key
                entrymeths['mark_deleted'] = ll_mark_deleted_in_key
                # key is overwritten by 'dummy' when entry is deleted
                entrymeths['must_clear_key'] = False
            elif dummyvalueobj:
                entrymeths['dummy_obj'] = dummyvalueobj
                entrymeths['valid'] = ll_valid_from_value
                entrymeths['mark_deleted'] = ll_mark_deleted_in_value
                # value is overwritten by 'dummy' when entry is deleted
                entrymeths['must_clear_value'] = False
            else:
                # worst case: a second explicit flag for validity
                entryfields.append(("f_valid", lltype.Bool))
                entrymeths['valid'] = ll_valid_from_flag
                entrymeths['mark_deleted'] = ll_mark_deleted_in_flag

        # * the value
        entryfields.append(("value", self.DICTVALUE))

        # * the hash, if needed
        if self.custom_eq_hash:
            fasthashfn = None
        else:
            fasthashfn = self.key_repr.get_ll_fasthash_function()
            # Consistency fix: mirror the sibling _setup_repr -- an eq
            # function marked 'no_direct_compare' must advertise that
            # property on the entry ADT methods.
            if getattr(self.key_repr.get_ll_eq_function(),
                       'no_direct_compare', False):
                entrymeths['no_direct_compare'] = True
        if fasthashfn is None:
            # no fast hash: cache the hash in an extra entry field
            entryfields.append(("f_hash", lltype.Signed))
            entrymeths['hash'] = ll_hash_from_cache
        else:
            entrymeths['hash'] = ll_hash_recomputed
            entrymeths['fasthashfn'] = fasthashfn

        # Build the lltype data structures
        self.DICTENTRY = lltype.Struct("dictentry", *entryfields)
        self.DICTENTRYARRAY = lltype.GcArray(self.DICTENTRY,
                                             adtmeths=entrymeths)
        fields = [("num_items", lltype.Signed),
                  ("resize_counter", lltype.Signed),
                  ("entries", lltype.Ptr(self.DICTENTRYARRAY))]
        if self.custom_eq_hash:
            self.r_rdict_eqfn, self.r_rdict_hashfn = self._custom_eq_hash_repr()
            fields.extend([("fnkeyeq", self.r_rdict_eqfn.lowleveltype),
                           ("fnkeyhash", self.r_rdict_hashfn.lowleveltype)])
            adtmeths = {
                'keyhash': ll_keyhash_custom,
                'keyeq': ll_keyeq_custom,
                'r_rdict_eqfn': self.r_rdict_eqfn,
                'r_rdict_hashfn': self.r_rdict_hashfn,
                'paranoia': True,
                }
        else:
            # figure out which functions must be used to hash and compare
            ll_keyhash = self.key_repr.get_ll_hash_function()
            ll_keyeq = self.key_repr.get_ll_eq_function()  # can be None
            ll_keyhash = lltype.staticAdtMethod(ll_keyhash)
            if ll_keyeq is not None:
                ll_keyeq = lltype.staticAdtMethod(ll_keyeq)
            adtmeths = {
                'keyhash': ll_keyhash,
                'keyeq': ll_keyeq,
                'paranoia': False,
                }
        adtmeths['KEY'] = self.DICTKEY
        adtmeths['VALUE'] = self.DICTVALUE
        adtmeths['allocate'] = lltype.typeMethod(_ll_malloc_dict)
        self.DICT.become(lltype.GcStruct("dicttable", adtmeths=adtmeths,
                                         *fields))