def test_gc_pointers_inside():
    # Check that gc_pointers_inside() yields the addresses of the GC
    # pointer fields of a structure, and that with mutable_only=True it
    # skips fields declared immutable via type hints.
    from rpython.rtyper import rclass
    PT = lltype.Ptr(lltype.GcStruct('T'))
    S1 = lltype.GcStruct('S', ('x', PT), ('y', PT))
    S2 = lltype.GcStruct('S', ('x', PT), ('y', PT),
                         hints={'immutable': True})
    accessor = rclass.FieldListAccessor()
    S3 = lltype.GcStruct('S', ('x', PT), ('y', PT),
                         hints={'immutable_fields': accessor})
    accessor.initialize(S3, {'x': IR_IMMUTABLE, 'y': IR_QUASIIMMUTABLE})
    #
    # S1: both fields are mutable GC pointers -> both offsets reported
    # (the iteration order over struct fields is not guaranteed)
    s1 = lltype.malloc(S1)
    adr = llmemory.cast_ptr_to_adr(s1)
    lst = list(gc_pointers_inside(s1._obj, adr, mutable_only=True))
    expected = [adr + llmemory.offsetof(S1, 'x'),
                adr + llmemory.offsetof(S1, 'y')]
    assert lst == expected or lst == expected[::-1]
    #
    # S2: the whole struct is hinted immutable -> nothing reported
    s2 = lltype.malloc(S2)
    adr = llmemory.cast_ptr_to_adr(s2)
    lst = list(gc_pointers_inside(s2._obj, adr, mutable_only=True))
    assert lst == []
    #
    # S3: 'x' is fully immutable (skipped); 'y' is only quasi-immutable,
    # so it is still reported
    s3 = lltype.malloc(S3)
    adr = llmemory.cast_ptr_to_adr(s3)
    lst = list(gc_pointers_inside(s3._obj, adr, mutable_only=True))
    assert lst == [adr + llmemory.offsetof(S3, 'y')]
def gc_pointers_inside(v, adr, mutable_only=False):
    """Generator: yield the addresses of all GC pointers contained
    (recursively) in the lltype value 'v', whose own address is 'adr'.

    If 'mutable_only' is True, pointers living in structs/arrays hinted
    as immutable are skipped, as are fields listed in the struct's
    'immutable_fields' hint.
    """
    t = lltype.typeOf(v)
    if isinstance(t, lltype.Struct):
        skip = ()
        if mutable_only:
            if t._hints.get('immutable'):
                return          # whole struct is immutable: nothing to yield
            if 'immutable_fields' in t._hints:
                # only some fields are immutable: skip exactly those
                skip = t._hints['immutable_fields'].all_immutable_fields()
        for n, t2 in t._flds.iteritems():
            if isinstance(t2, lltype.Ptr) and t2.TO._gckind == 'gc':
                if n not in skip:
                    yield adr + llmemory.offsetof(t, n)
            elif isinstance(t2, (lltype.Array, lltype.Struct)):
                # recurse into inlined substructures and subarrays
                for a in gc_pointers_inside(getattr(v, n),
                                            adr + llmemory.offsetof(t, n),
                                            mutable_only):
                    yield a
    elif isinstance(t, lltype.Array):
        if mutable_only and t._hints.get('immutable'):
            return
        if isinstance(t.OF, lltype.Ptr) and t.OF.TO._gckind == 'gc':
            # array of GC pointers: yield the address of every item
            for i in range(len(v.items)):
                yield adr + llmemory.itemoffsetof(t, i)
        elif isinstance(t.OF, lltype.Struct):
            # array of structs: recurse into every item
            for i in range(len(v.items)):
                for a in gc_pointers_inside(v.items[i],
                                            adr + llmemory.itemoffsetof(t, i),
                                            mutable_only):
                    yield a
def test_offsetof():
    # Read and write struct fields through address arithmetic with
    # llmemory.offsetof(), and check the behavior after compilation.
    STRUCT = lltype.GcStruct("s", ("x", lltype.Signed), ("y", lltype.Signed))
    offsetx = llmemory.offsetof(STRUCT, 'x')
    offsety = llmemory.offsetof(STRUCT, 'y')
    def f():
        s = lltype.malloc(STRUCT)
        s.x = 1
        adr = llmemory.cast_ptr_to_adr(s)
        result = (adr + offsetx).signed[0]      # reads s.x
        (adr + offsety).signed[0] = 2           # writes s.y
        return result * 10 + s.y
    fn = compile(f, [])
    res = fn()
    assert res == 12
def test_look_inside_object():
    # this code is also used in translation tests below
    myarenasize = 50
    a = arena_malloc(myarenasize, False)
    b = a + round_up_for_allocation(llmemory.sizeof(lltype.Char))
    arena_reserve(b, precomputed_size)
    # write the field 'x' via raw address arithmetic, then read it back
    # through an object pointer, and vice versa
    (b + llmemory.offsetof(SX, 'x')).signed[0] = 123
    assert llmemory.cast_adr_to_ptr(b, SPTR).x == 123
    llmemory.cast_adr_to_ptr(b, SPTR).x += 1
    assert (b + llmemory.offsetof(SX, 'x')).signed[0] == 124
    # zero-fill the whole arena and re-reserve: the object must now
    # read back as zeroed
    arena_reset(a, myarenasize, True)
    arena_reserve(b, round_up_for_allocation(llmemory.sizeof(SX)))
    assert llmemory.cast_adr_to_ptr(b, SPTR).x == 0
    arena_free(a)
    return 42
def offsets_to_gc_pointers(TYPE):
    """Return the list of offsets, inside a value of type TYPE, at which
    GC pointers are found (recursing into inlined substructures).
    Offsets may be plain integers or symbolic offset objects.
    """
    offsets = []
    if isinstance(TYPE, lltype.Struct):
        for name in TYPE._names:
            FIELD = getattr(TYPE, name)
            if isinstance(FIELD, lltype.Array):
                continue    # skip inlined array
            baseofs = llmemory.offsetof(TYPE, name)
            suboffsets = offsets_to_gc_pointers(FIELD)
            for s in suboffsets:
                try:
                    knownzero = s == 0
                except TypeError:
                    # 's' is a symbolic offset: cannot be compared with 0
                    knownzero = False
                if knownzero:
                    # avoid building 'baseofs + 0' composite offsets
                    offsets.append(baseofs)
                else:
                    offsets.append(baseofs + s)
        # sanity check
        #ex = lltype.Ptr(TYPE)._example()
        #adr = llmemory.cast_ptr_to_adr(ex)
        #for off in offsets:
        #    (adr + off)
    elif isinstance(TYPE, lltype.Ptr) and TYPE.TO._gckind == 'gc':
        # a GC pointer itself: it lives at offset 0 within itself
        offsets.append(0)
    return offsets
def get_field_token(STRUCT, fieldname, translate_support_code):
    """Return the (offset, size) pair describing field 'fieldname' of
    the low-level structure type STRUCT."""
    if not translate_support_code:
        # Untranslated: probe the equivalent ctypes structure for the
        # concrete numeric offset and size.
        ctypes_struct = ll2ctypes.get_ctypes_type(STRUCT)
        ctypes_field = getattr(ctypes_struct, fieldname)
        return (ctypes_field.offset, ctypes_field.size)
    # Translated: use the symbolic offset computed by llmemory.
    FIELDTYPE = getattr(STRUCT, fieldname)
    return (llmemory.offsetof(STRUCT, fieldname), get_size(FIELDTYPE, True))
def test_address_order():
    # Addresses within one arena must compare consistently with their
    # position; NULL compares below every reserved address.
    a = arena_malloc(24, False)
    assert eq(a, a)
    assert lt(a, a+1)
    assert lt(a+5, a+20)
    # a second arena: order the two arenas so that 'a' is the lower one
    b = arena_malloc(24, False)
    if a > b:
        a, b = b, a
    assert lt(a, b)
    assert lt(a+19, b)
    assert lt(a, b+19)
    c = b + round_up_for_allocation(llmemory.sizeof(lltype.Char))
    arena_reserve(c, precomputed_size)
    assert lt(b, c)
    assert lt(a, c)
    assert lt(llmemory.NULL, c)
    # an interior-field address inside the reserved object
    d = c + llmemory.offsetof(SX, 'x')
    assert lt(c, d)
    assert lt(b, d)
    assert lt(a, d)
    assert lt(llmemory.NULL, d)
    # the address just past the reserved object
    e = c + precomputed_size
    assert lt(d, e)
    assert lt(c, e)
    assert lt(b, e)
    assert lt(a, e)
    assert lt(llmemory.NULL, e)
def ll_string2list(RESLIST, src):
    """Copy the characters of the low-level string 'src' into a freshly
    allocated list of type RESLIST and return it."""
    length = len(src.chars)
    lst = RESLIST.ll_newlist(length)
    dst = lst.ll_items()
    SRC = typeOf(src).TO     # STR or UNICODE
    DST = typeOf(dst).TO     # GcArray
    assert DST.OF is SRC.chars.OF
    #
    # If the 'split_gc_address_space' option is set, we must copy
    # manually, character-by-character
    if rgc.must_split_gc_address_space():
        i = 0
        while i < length:
            dst[i] = src.chars[i]
            i += 1
        return lst
    #
    # from here, no GC operations can happen
    asrc = llmemory.cast_ptr_to_adr(src) + (llmemory.offsetof(
        SRC, 'chars') + llmemory.itemoffsetof(SRC.chars, 0))
    adst = llmemory.cast_ptr_to_adr(dst) + llmemory.itemoffsetof(DST, 0)
    llmemory.raw_memcopy(asrc, adst, llmemory.sizeof(DST.OF) * length)
    # end of "no GC" section
    keepalive_until_here(src)
    keepalive_until_here(dst)
    return lst
def define_custom_trace(cls):
    # Define a test where an S keeps a T alive only through the raw
    # Address field 'x'; the GC can find that reference thanks to a
    # custom trace hook registered at runtime.
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address))
    T = lltype.GcStruct('T', ('z', lltype.Signed))
    offset_of_x = llmemory.offsetof(S, 'x')
    def customtrace(gc, obj, callback, arg):
        # report the single GC pointer stored at obj + offset_of_x
        gc._trace_callback(callback, arg, obj + offset_of_x)
    lambda_customtrace = lambda: customtrace
    #
    def setup():
        rgc.register_custom_trace_hook(S, lambda_customtrace)
        tx = lltype.malloc(T)
        tx.z = 4243
        s1 = lltype.malloc(S)
        s1.x = llmemory.cast_ptr_to_adr(tx)
        return s1
    def f():
        s1 = setup()
        llop.gc__collect(lltype.Void)
        # 'tx' must have survived the collection via the custom trace
        return llmemory.cast_adr_to_ptr(s1.x, lltype.Ptr(T)).z
    return f
def f():
    # Build a low-level string, compute the raw address of its first
    # character, and read the third character through a void pointer.
    lls = llstr("xyz")
    chars_ofs = llmemory.offsetof(STR, 'chars')
    item0_ofs = llmemory.itemoffsetof(STR.chars, 0)
    first_char_adr = llmemory.cast_ptr_to_adr(lls) + chars_ofs + item0_ofs
    raw = rffi.cast(rffi.VOIDP, first_char_adr)
    return raw[2]
def get_array_token(T, translate_support_code):
    """Return (basesize, itemsize, ofs_length) describing the layout of
    the array type T, which can be a plain Array or a var-sized Struct
    (like STR).  'ofs_length' is -1 for arrays without a length field.
    """
    # T can be an array or a var-sized structure
    if translate_support_code:
        # translated: all results are symbolic offsets
        basesize = llmemory.sizeof(T, 0)    # this includes +1 for STR
        if isinstance(T, lltype.Struct):
            SUBARRAY = getattr(T, T._arrayfld)
            itemsize = llmemory.sizeof(SUBARRAY.OF)
            ofs_length = (llmemory.offsetof(T, T._arrayfld) +
                          llmemory.ArrayLengthOffset(SUBARRAY))
        else:
            if T._hints.get('nolength', None):
                ofs_length = -1
            else:
                ofs_length = llmemory.ArrayLengthOffset(T)
            itemsize = llmemory.sizeof(T.OF)
    else:
        # untranslated: measure the concrete layout via ctypes
        if isinstance(T, lltype.Struct):
            assert T._arrayfld is not None, "%r is not variable-sized" % (T,)
            cstruct = ll2ctypes.get_ctypes_type(T)
            cfield = getattr(cstruct, T._arrayfld)
            before_array_part = cfield.offset
            # from here on, operate on the inlined array part only
            T = getattr(T, T._arrayfld)
        else:
            before_array_part = 0
        carray = ll2ctypes.get_ctypes_type(T)
        if T._hints.get('nolength', None):
            ofs_length = -1
        else:
            assert carray.length.size == WORD
            ofs_length = before_array_part + carray.length.offset
        basesize = before_array_part + carray.items.offset
        basesize += T._hints.get('extra_item_after_alloc', 0)  # +1 for STR
        carrayitem = ll2ctypes.get_ctypes_type(T.OF)
        itemsize = ctypes.sizeof(carrayitem)
    return basesize, itemsize, ofs_length
def ll_shrink_array(p, smallerlength):
    """Return a version of the var-sized object 'p' whose variable part
    is truncated to 'smallerlength' items: either 'p' itself if the GC
    can shrink it in place, or a fresh copy otherwise."""
    from rpython.rtyper.lltypesystem.lloperation import llop
    from rpython.rlib.objectmodel import keepalive_until_here
    if llop.shrink_array(lltype.Bool, p, smallerlength):
        return p    # done by the GC
    # XXX we assume for now that the type of p is GcStruct containing a
    # variable array, with no further pointers anywhere, and exactly one
    # field in the fixed part -- like STR and UNICODE.
    TP = lltype.typeOf(p).TO
    newp = lltype.malloc(TP, smallerlength)
    assert len(TP._names) == 2
    # copy the single fixed-part field
    field = getattr(p, TP._names[0])
    setattr(newp, TP._names[0], field)
    # raw-copy the first 'smallerlength' items of the variable part
    ARRAY = getattr(TP, TP._arrayfld)
    offset = (llmemory.offsetof(TP, TP._arrayfld) +
              llmemory.itemoffsetof(ARRAY, 0))
    source_addr = llmemory.cast_ptr_to_adr(p) + offset
    dest_addr = llmemory.cast_ptr_to_adr(newp) + offset
    llmemory.raw_memcopy(source_addr, dest_addr,
                         llmemory.sizeof(ARRAY.OF) * smallerlength)
    # keep both objects alive until after the raw memcopy above
    keepalive_until_here(p)
    keepalive_until_here(newp)
    return newp
def test_custom_trace(self):
    # Register a custom trace hook for S that traces only field 'x':
    # after a collection, the T reachable via 'x' survives, while the T
    # reachable only via the untraced field 'y' must not be accessible.
    from rpython.rtyper.lltypesystem import llmemory
    from rpython.rtyper.lltypesystem.llarena import ArenaError
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address),
                             ('y', llmemory.Address))
    T = lltype.GcStruct('T', ('z', lltype.Signed))
    offset_of_x = llmemory.offsetof(S, 'x')
    def customtrace(gc, obj, callback, arg):
        # report only the pointer stored in field 'x'
        gc._trace_callback(callback, arg, obj + offset_of_x)
    lambda_customtrace = lambda: customtrace
    #
    for attrname in ['x', 'y']:
        def setup():
            rgc.register_custom_trace_hook(S, lambda_customtrace)
            s1 = lltype.malloc(S)
            tx = lltype.malloc(T)
            tx.z = 42
            ty = lltype.malloc(T)
            s1.x = llmemory.cast_ptr_to_adr(tx)
            s1.y = llmemory.cast_ptr_to_adr(ty)
            return s1
        def f():
            s1 = setup()
            llop.gc__collect(lltype.Void)
            return llmemory.cast_adr_to_ptr(getattr(s1, attrname),
                                            lltype.Ptr(T))
        if attrname == 'x':
            res = self.interpret(f, [])
            assert res.z == 42
        else:
            # 'y' was not traced: dereferencing it must fail
            py.test.raises((RuntimeError, ArenaError),
                           self.interpret, f, [])
def define_custom_trace(cls): from rpython.rtyper.annlowlevel import llhelper # S = lltype.GcStruct('S', ('x', llmemory.Address), rtti=True) offset_of_x = llmemory.offsetof(S, 'x') def customtrace(obj, prev): if not prev: return obj + offset_of_x else: return llmemory.NULL CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address], llmemory.Address) customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace) lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr) # def setup(): s = lltype.nullptr(S) for i in range(10000): t = lltype.malloc(S) t.x = llmemory.cast_ptr_to_adr(s) s = t return s def measure_length(s): res = 0 while s: res += 1 s = llmemory.cast_adr_to_ptr(s.x, lltype.Ptr(S)) return res def f(n): s1 = setup() llop.gc__collect(lltype.Void) return measure_length(s1) return f
def ll_shrink_array(p, smallerlength):
    """Return a version of the var-sized object 'p' whose variable part
    is truncated to 'smallerlength' items: either 'p' itself if the GC
    can shrink it in place, or a fresh copy otherwise."""
    from rpython.rtyper.lltypesystem.lloperation import llop
    from rpython.rlib.objectmodel import keepalive_until_here
    if llop.shrink_array(lltype.Bool, p, smallerlength):
        return p    # done by the GC
    # XXX we assume for now that the type of p is GcStruct containing a
    # variable array, with no further pointers anywhere, and exactly one
    # field in the fixed part -- like STR and UNICODE.
    TP = lltype.typeOf(p).TO
    newp = lltype.malloc(TP, smallerlength)
    assert len(TP._names) == 2
    # copy the single fixed-part field
    field = getattr(p, TP._names[0])
    setattr(newp, TP._names[0], field)
    # raw-copy the first 'smallerlength' items of the variable part
    ARRAY = getattr(TP, TP._arrayfld)
    offset = llmemory.offsetof(TP, TP._arrayfld) + llmemory.itemoffsetof(ARRAY, 0)
    source_addr = llmemory.cast_ptr_to_adr(p) + offset
    dest_addr = llmemory.cast_ptr_to_adr(newp) + offset
    llmemory.raw_memcopy(source_addr, dest_addr,
                         llmemory.sizeof(ARRAY.OF) * smallerlength)
    # keep both objects alive until after the raw memcopy above
    keepalive_until_here(p)
    keepalive_until_here(newp)
    return newp
def define_custom_trace(cls):
    # Variant of the custom-trace test using the old-style trace
    # function attached via RTTI: an S keeps a T alive only through its
    # raw Address field 'x'.
    from rpython.rtyper.annlowlevel import llhelper
    from rpython.rtyper.lltypesystem import llmemory
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address), rtti=True)
    T = lltype.GcStruct('T', ('z', lltype.Signed))
    offset_of_x = llmemory.offsetof(S, 'x')
    def customtrace(obj, prev):
        # old-style protocol: return the first interior pointer when
        # 'prev' is NULL, and NULL when there are no more pointers
        if not prev:
            return obj + offset_of_x
        else:
            return llmemory.NULL
    CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address],
                                      llmemory.Address)
    customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace)
    lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr)
    #
    def setup():
        s1 = lltype.malloc(S)
        tx = lltype.malloc(T)
        tx.z = 4243
        s1.x = llmemory.cast_ptr_to_adr(tx)
        return s1
    def f():
        s1 = setup()
        llop.gc__collect(lltype.Void)
        # 'tx' must have survived the collection via the custom trace
        return llmemory.cast_adr_to_ptr(s1.x, lltype.Ptr(T)).z
    return f
def get_memory_pressure_ofs(TYPE):
    """Walk up the 'super' chain of the struct TYPE and return the
    offset of the 'special_memory_pressure' field found there."""
    CUR = TYPE
    while 'special_memory_pressure' not in CUR._flds:
        # keep climbing; running out of 'super' links is a logic error
        assert 'super' in CUR._flds, "get_ and has_memory_pressure disagree"
        CUR = CUR._flds['super']
    return llmemory.offsetof(CUR, 'special_memory_pressure')
def str_storage_getitem(TP, s, byte_offset):
    """Read a value of type TP out of the character data of the string
    's', starting 'byte_offset' bytes into the chars array.

    WARNING: the 'byte_offset' is, as its name says, measured in bytes;
    however, it should be aligned for TP, otherwise on some platforms
    this code will crash!
    """
    chars_base = (llmemory.offsetof(STR, 'chars') +
                  llmemory.itemoffsetof(STR.chars, 0))
    char_size = llmemory.sizeof(lltype.Char)
    return llop.gc_load_indexed(TP, llstr(s), byte_offset,
                                char_size, chars_base)
def encode_type_shape(builder, info, TYPE, index):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    infobits = index
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    if len(offsets) > 0:
        infobits |= T_HAS_GCPTR
    #
    # record destructor/finalizer pointers, if any
    fptrs = builder.special_funcptr_for_type(TYPE)
    if fptrs:
        if "destructor" in fptrs:
            info.customfunc = fptrs["destructor"]
        if "old_style_finalizer" in fptrs:
            info.customfunc = fptrs["old_style_finalizer"]
            infobits |= T_HAS_OLDSTYLE_FINALIZER
    #
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        infobits |= T_IS_VARSIZE
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # var-sized struct: locate the inlined array part
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if (isinstance(ARRAY.OF, lltype.Ptr)
                and ARRAY.OF.TO._gckind == 'gc'):
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        # GC pointers inside each variable-sized item
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE | T_HAS_GCPTR
        varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if builder.is_weakref_type(TYPE):
        infobits |= T_IS_WEAKREF
    if is_subclass_of_object(TYPE):
        infobits |= T_IS_RPYTHON_INSTANCE
    info.infobits = infobits | T_KEY_VALUE
def getinneraddr(self, obj, *offsets):
    """Follow 'offsets' (field names for structs, integer indexes for
    arrays) inside 'obj'; return the (address, TYPE) pair of the inner
    location reached."""
    CURTYPE = lltype.typeOf(obj).TO
    cur_addr = llmemory.cast_ptr_to_adr(obj)
    for step in offsets:
        if isinstance(step, str):
            # a field name: descend into that struct field
            cur_addr += llmemory.offsetof(CURTYPE, step)
            CURTYPE = getattr(CURTYPE, step)
        else:
            # an index: descend into that array item
            cur_addr += llmemory.itemoffsetof(CURTYPE, step)
            CURTYPE = CURTYPE.OF
    return cur_addr, CURTYPE
def encode_type_shape(builder, info, TYPE, index):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    infobits = index
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    if len(offsets) > 0:
        infobits |= T_HAS_GCPTR
    #
    # record finalizer pointers, if any
    fptrs = builder.special_funcptr_for_type(TYPE)
    if fptrs:
        if "finalizer" in fptrs:
            info.finalizer = fptrs["finalizer"]
        if "light_finalizer" in fptrs:
            info.finalizer = fptrs["light_finalizer"]
            infobits |= T_HAS_LIGHTWEIGHT_FINALIZER
    #
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        infobits |= T_IS_VARSIZE
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # var-sized struct: locate the inlined array part
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if (isinstance(ARRAY.OF, lltype.Ptr)
                and ARRAY.OF.TO._gckind == 'gc'):
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        # GC pointers inside each variable-sized item
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE | T_HAS_GCPTR
        varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if builder.is_weakref_type(TYPE):
        infobits |= T_IS_WEAKREF
    if is_subclass_of_object(TYPE):
        infobits |= T_IS_RPYTHON_INSTANCE
    info.infobits = infobits | T_KEY_VALUE
def offsetof(STRUCT, fieldname):
    """Like llmemory.offsetof(), but tries hard to return a concrete
    integer instead of a symbolic value."""
    # The 'fieldoffsets' hint is present in structures probed by
    # rffi_platform; it gives the numeric offset of every field.
    known_offsets = STRUCT._hints.get('fieldoffsets')
    if known_offsets is not None:
        for i, name in enumerate(STRUCT._names):
            if name == fieldname:
                return known_offsets[i]   # numeric result when known
    # fall back to a symbolic offset
    return llmemory.offsetof(STRUCT, fieldname)
def ll_write_final_null_char(s):
    """'s' is a low-level STR; writes a terminating NULL character after
    the other characters in 's'.  Warning, this only works because of
    the 'extra_item_after_alloc' hack inside the definition of STR.
    """
    from rpython.rtyper.lltypesystem import rffi
    PSTR = lltype.typeOf(s)
    assert has_final_null_char(PSTR) == 1
    # byte offset of the first character inside the STR object...
    n = llmemory.offsetof(PSTR.TO, 'chars')
    n += llmemory.itemoffsetof(PSTR.TO.chars, 0)
    n = llmemory.raw_malloc_usage(n)
    # ...plus the length, i.e. the position just past the last character
    n += len(s.chars)
    # no GC operation from here!
    ptr = rffi.cast(rffi.CCHARP, s)
    ptr[n] = '\x00'
def ll_string2list(RESLIST, src):
    """Copy the characters of the low-level string 'src' into a freshly
    allocated list of type RESLIST and return it."""
    length = len(src.chars)
    lst = RESLIST.ll_newlist(length)
    dst = lst.ll_items()
    SRC = typeOf(src).TO     # STR or UNICODE
    DST = typeOf(dst).TO     # GcArray
    assert DST.OF is SRC.chars.OF
    # from here, no GC operations can happen
    asrc = llmemory.cast_ptr_to_adr(src) + (llmemory.offsetof(
        SRC, 'chars') + llmemory.itemoffsetof(SRC.chars, 0))
    adst = llmemory.cast_ptr_to_adr(dst) + llmemory.itemoffsetof(DST, 0)
    llmemory.raw_memcopy(asrc, adst, llmemory.sizeof(DST.OF) * length)
    # end of "no GC" section
    keepalive_until_here(src)
    keepalive_until_here(dst)
    return lst
def test_sizeof_constsize_struct():
    # Check that sizeof/offsetof of a raw (non-GC) struct behave
    # consistently through raw_malloc, address arithmetic and compilation.
    # _not_ a GcStruct, since we want to raw_malloc it
    STRUCT = lltype.Struct("s", ("x", lltype.Signed), ("y", lltype.Signed))
    STRUCTPTR = lltype.Ptr(STRUCT)
    sizeofs = llmemory.sizeof(STRUCT)
    offsety = llmemory.offsetof(STRUCT, 'y')
    def f():
        adr = llmemory.raw_malloc(sizeofs)
        s = llmemory.cast_adr_to_ptr(adr, STRUCTPTR)
        s.y = 5    # does not crash
        result = (adr + offsety).signed[0] * 10 + int(offsety < sizeofs)
        llmemory.raw_free(adr)
        return result
    fn = compile(f, [])
    res = fn()
    assert res == 51
def ll_string2list(RESLIST, src):
    """Copy the characters of the low-level string 'src' into a freshly
    allocated list of type RESLIST and return it."""
    length = len(src.chars)
    lst = RESLIST.ll_newlist(length)
    dst = lst.ll_items()
    SRC = typeOf(src).TO     # STR or UNICODE
    DST = typeOf(dst).TO     # GcArray
    assert DST.OF is SRC.chars.OF
    # from here, no GC operations can happen
    asrc = llmemory.cast_ptr_to_adr(src) + (
        llmemory.offsetof(SRC, 'chars') +
        llmemory.itemoffsetof(SRC.chars, 0))
    adst = llmemory.cast_ptr_to_adr(dst) + llmemory.itemoffsetof(DST, 0)
    llmemory.raw_memcopy(asrc, adst, llmemory.sizeof(DST.OF) * length)
    # end of "no GC" section
    keepalive_until_here(src)
    keepalive_until_here(dst)
    return lst
def test_partial_arena_reset():
    # Check that arena_reset() over a sub-range of an arena frees (or
    # zero-fills) only the objects in that range, leaving the rest alive.
    a = arena_malloc(72, False)
    def reserve(i):
        b = a + i * llmemory.raw_malloc_usage(precomputed_size)
        arena_reserve(b, precomputed_size)
        return b
    blist = []
    plist = []
    for i in range(4):
        b = reserve(i)
        (b + llmemory.offsetof(SX, 'x')).signed[0] = 100 + i
        blist.append(b)
        plist.append(llmemory.cast_adr_to_ptr(b, SPTR))
    # clear blist[1] and blist[2] but not blist[0] nor blist[3]
    arena_reset(blist[1],
                llmemory.raw_malloc_usage(precomputed_size) * 2, False)
    py.test.raises(RuntimeError, "plist[1].x")     # marked as freed
    py.test.raises(RuntimeError, "plist[2].x")     # marked as freed
    # re-reserve object at index 1 and 2
    blist[1] = reserve(1)
    blist[2] = reserve(2)
    # check via object pointers
    assert plist[0].x == 100
    assert plist[3].x == 103
    py.test.raises(RuntimeError, "plist[1].x")     # marked as freed
    py.test.raises(RuntimeError, "plist[2].x")     # marked as freed
    # but we can still cast the old ptrs to addresses, which compare
    # equal to the new ones we got
    assert llmemory.cast_ptr_to_adr(plist[1]) == blist[1]
    assert llmemory.cast_ptr_to_adr(plist[2]) == blist[2]
    # check via addresses
    assert (blist[0] + llmemory.offsetof(SX, 'x')).signed[0] == 100
    assert (blist[3] + llmemory.offsetof(SX, 'x')).signed[0] == 103
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[1] + llmemory.offsetof(SX, 'x')).signed[0]")
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[2] + llmemory.offsetof(SX, 'x')).signed[0]")
    # clear and zero-fill the area over blist[0] and blist[1]
    arena_reset(blist[0],
                llmemory.raw_malloc_usage(precomputed_size) * 2, True)
    # re-reserve and check it's zero
    blist[0] = reserve(0)
    blist[1] = reserve(1)
    assert (blist[0] + llmemory.offsetof(SX, 'x')).signed[0] == 0
    assert (blist[1] + llmemory.offsetof(SX, 'x')).signed[0] == 0
    assert (blist[3] + llmemory.offsetof(SX, 'x')).signed[0] == 103
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[2] + llmemory.offsetof(SX, 'x')).signed[0]")
def test_custom_trace(self):
    # Attach an old-style custom trace function (via RTTI) for S that
    # traces only field 'x': after a collection, the T reachable via
    # 'x' survives, while the T reachable only via the untraced field
    # 'y' must not be accessible.
    from rpython.rtyper.annlowlevel import llhelper
    from rpython.rtyper.lltypesystem import llmemory
    from rpython.rtyper.lltypesystem.llarena import ArenaError
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address),
                             ('y', llmemory.Address),
                        rtti=True)
    T = lltype.GcStruct('T', ('z', lltype.Signed))
    offset_of_x = llmemory.offsetof(S, 'x')
    def customtrace(obj, prev):
        # old-style protocol: return the first interior pointer when
        # 'prev' is NULL, and NULL when there are no more pointers
        if not prev:
            return obj + offset_of_x
        else:
            return llmemory.NULL
    CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address],
                                      llmemory.Address)
    customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace)
    lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr)
    #
    for attrname in ['x', 'y']:
        def setup():
            s1 = lltype.malloc(S)
            tx = lltype.malloc(T)
            tx.z = 42
            ty = lltype.malloc(T)
            s1.x = llmemory.cast_ptr_to_adr(tx)
            s1.y = llmemory.cast_ptr_to_adr(ty)
            return s1
        def f():
            s1 = setup()
            llop.gc__collect(lltype.Void)
            return llmemory.cast_adr_to_ptr(getattr(s1, attrname),
                                            lltype.Ptr(T))
        if attrname == 'x':
            res = self.interpret(f, [])
            assert res.z == 42
        else:
            # 'y' was not traced: dereferencing it must fail
            py.test.raises((RuntimeError, ArenaError),
                           self.interpret, f, [])
def test_partial_arena_reset():
    # Check that arena_reset() over a sub-range of an arena frees (or
    # zero-fills) only the objects in that range, leaving the rest alive.
    a = arena_malloc(72, False)
    def reserve(i):
        b = a + i * llmemory.raw_malloc_usage(precomputed_size)
        arena_reserve(b, precomputed_size)
        return b
    blist = []
    plist = []
    for i in range(4):
        b = reserve(i)
        (b + llmemory.offsetof(SX, 'x')).signed[0] = 100 + i
        blist.append(b)
        plist.append(llmemory.cast_adr_to_ptr(b, SPTR))
    # clear blist[1] and blist[2] but not blist[0] nor blist[3]
    arena_reset(blist[1], llmemory.raw_malloc_usage(precomputed_size)*2,
                False)
    py.test.raises(RuntimeError, "plist[1].x")     # marked as freed
    py.test.raises(RuntimeError, "plist[2].x")     # marked as freed
    # re-reserve object at index 1 and 2
    blist[1] = reserve(1)
    blist[2] = reserve(2)
    # check via object pointers
    assert plist[0].x == 100
    assert plist[3].x == 103
    py.test.raises(RuntimeError, "plist[1].x")     # marked as freed
    py.test.raises(RuntimeError, "plist[2].x")     # marked as freed
    # but we can still cast the old ptrs to addresses, which compare
    # equal to the new ones we got
    assert llmemory.cast_ptr_to_adr(plist[1]) == blist[1]
    assert llmemory.cast_ptr_to_adr(plist[2]) == blist[2]
    # check via addresses
    assert (blist[0] + llmemory.offsetof(SX, 'x')).signed[0] == 100
    assert (blist[3] + llmemory.offsetof(SX, 'x')).signed[0] == 103
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[1] + llmemory.offsetof(SX, 'x')).signed[0]")
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[2] + llmemory.offsetof(SX, 'x')).signed[0]")
    # clear and zero-fill the area over blist[0] and blist[1]
    arena_reset(blist[0], llmemory.raw_malloc_usage(precomputed_size)*2,
                True)
    # re-reserve and check it's zero
    blist[0] = reserve(0)
    blist[1] = reserve(1)
    assert (blist[0] + llmemory.offsetof(SX, 'x')).signed[0] == 0
    assert (blist[1] + llmemory.offsetof(SX, 'x')).signed[0] == 0
    assert (blist[3] + llmemory.offsetof(SX, 'x')).signed[0] == 103
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[2] + llmemory.offsetof(SX, 'x')).signed[0]")
def rpyexc_get_exc_value_addr():
    """Address of the 'exc_value' field inside the global exception
    data structure."""
    base = llmemory.cast_ptr_to_adr(exc_data)
    return base + llmemory.offsetof(EXCDATA, 'exc_value')
def _str_ofs(TP, item):
    # Offset from the start of a string of type TP to its character
    # number 'item'.
    first_char = (llmemory.offsetof(TP, 'chars') +
                  llmemory.itemoffsetof(TP.chars, 0))
    return first_char + llmemory.sizeof(CHAR_TP) * item
def rpyexc_get_exception_addr():
    """Address of the 'exc_type' field inside the global exception
    data structure."""
    base = llmemory.cast_ptr_to_adr(exc_data)
    return base + llmemory.offsetof(EXCDATA, 'exc_type')
def setfield(self, obj, fieldname, fieldvalue):
    """Store 'fieldvalue' into the field 'fieldname' of 'obj', going
    through setinterior() with the field's computed address."""
    STRUCT = lltype.typeOf(obj).TO
    field_addr = (llmemory.cast_ptr_to_adr(obj) +
                  llmemory.offsetof(STRUCT, fieldname))
    FIELDTYPE = getattr(STRUCT, fieldname)
    self.setinterior(obj, field_addr, FIELDTYPE, fieldvalue)
def rtype_offsetof(hop):
    # Fold offsetof(TYPE, field) into a Signed constant at rtyping time.
    TYPE, field = hop.inputargs(lltype.Void, lltype.Void)
    hop.exception_cannot_occur()
    ofs = llmemory.offsetof(TYPE.value, field.value)
    return hop.inputconst(lltype.Signed, ofs)
def _get_gc_data_offset():
    # Offset from the start of a STR object to its first character.
    chars_field = llmemory.offsetof(STR, 'chars')
    first_item = llmemory.itemoffsetof(STR.chars, 0)
    return chars_field + first_item
def is_subclass_of_object(TYPE):
    # True if TYPE is rclass.OBJECT or inherits from it (walking the
    # chain of first inlined substructures).
    while isinstance(TYPE, lltype.GcStruct):
        if TYPE is rclass.OBJECT:
            return True
        _, TYPE = TYPE._first_struct()
    return False

########## weakrefs ##########
# framework: weakref objects are small structures containing only an address

WEAKREF = lltype.GcStruct("weakref", ("weakptr", llmemory.Address))
WEAKREFPTR = lltype.Ptr(WEAKREF)
sizeof_weakref = llmemory.sizeof(WEAKREF)
# shared prebuilt weakref with a NULL target, for dead/null references
empty_weakref = lltype.malloc(WEAKREF, immortal=True)
empty_weakref.weakptr = llmemory.NULL
weakptr_offset = llmemory.offsetof(WEAKREF, "weakptr")

def ll_weakref_deref(wref):
    # Return the address stored in the weakref (NULL if dead).
    wref = llmemory.cast_weakrefptr_to_ptr(WEAKREFPTR, wref)
    return wref.weakptr

def convert_weakref_to(targetptr):
    # Prebuilt weakrefs don't really need to be weak at all,
    # but we need to emulate the structure expected by ll_weakref_deref().
    if not targetptr:
        return empty_weakref
    else:
        link = lltype.malloc(WEAKREF, immortal=True)
        link.weakptr = llmemory.cast_ptr_to_adr(targetptr)
        return link
def str_gc_load(TYPE, buf, offset):
    """Read a value of type TYPE out of the character data of the
    string 'buf', 'offset' bytes past the first character."""
    lls = llstr(buf)
    chars_start = (llmemory.offsetof(STR, 'chars') +
                   llmemory.itemoffsetof(STR.chars, 0))
    return llop.gc_load_indexed(TYPE, lls, offset,
                                llmemory.sizeof(lltype.Char), chars_start)
return ArrayBuffer(self, readonly) def astype(self, space, dtype): strides, backstrides = calc_strides(self.get_shape(), dtype, self.order) impl = ConcreteArray(self.get_shape(), dtype, self.order, strides, backstrides) loop.setslice(space, impl.get_shape(), impl, self) return impl OBJECTSTORE = lltype.GcStruct('ObjectStore', ('length', lltype.Signed), ('step', lltype.Signed), ('storage', llmemory.Address), rtti=True) offset_of_storage = llmemory.offsetof(OBJECTSTORE, 'storage') offset_of_length = llmemory.offsetof(OBJECTSTORE, 'length') offset_of_step = llmemory.offsetof(OBJECTSTORE, 'step') V_OBJECTSTORE = lltype.nullptr(OBJECTSTORE) def customtrace(gc, obj, callback, arg): #debug_print('in customtrace w/obj', obj) length = (obj + offset_of_length).signed[0] step = (obj + offset_of_step).signed[0] storage = (obj + offset_of_storage).address[0] #debug_print('tracing', length, 'objects in ndarray.storage') i = 0 while i < length: gc._trace_callback(callback, arg, storage)
while isinstance(TYPE, lltype.GcStruct): if TYPE is rclass.OBJECT: return True _, TYPE = TYPE._first_struct() return False ########## weakrefs ########## # framework: weakref objects are small structures containing only an address WEAKREF = lltype.GcStruct("weakref", ("weakptr", llmemory.Address)) WEAKREFPTR = lltype.Ptr(WEAKREF) sizeof_weakref = llmemory.sizeof(WEAKREF) empty_weakref = lltype.malloc(WEAKREF, immortal=True) empty_weakref.weakptr = llmemory.NULL weakptr_offset = llmemory.offsetof(WEAKREF, "weakptr") def ll_weakref_deref(wref): wref = llmemory.cast_weakrefptr_to_ptr(WEAKREFPTR, wref) return wref.weakptr def convert_weakref_to(targetptr): # Prebuilt weakrefs don't really need to be weak at all, # but we need to emulate the structure expected by ll_weakref_deref(). if not targetptr: return empty_weakref else: link = lltype.malloc(WEAKREF, immortal=True) link.weakptr = llmemory.cast_ptr_to_adr(targetptr)
def getofs(name):
    # Shorthand: byte offset of the field 'name' inside a JITFRAME.
    return llmemory.offsetof(JITFRAME, name)