def test_gc_pointers_inside():
    """Exercise gc_pointers_inside() on a plain GcStruct, a fully
    immutable one, and one with an immutable_fields accessor."""
    from pypy.rpython import rclass
    PT = lltype.Ptr(lltype.GcStruct('T'))
    S1 = lltype.GcStruct('S', ('x', PT), ('y', PT))
    S2 = lltype.GcStruct('S', ('x', PT), ('y', PT),
                         hints={'immutable': True})
    accessor = rclass.FieldListAccessor()
    S3 = lltype.GcStruct('S', ('x', PT), ('y', PT),
                         hints={'immutable_fields': accessor})
    accessor.initialize(S3, ['x'])
    #
    # a mutable struct: both fields must be reported
    s1 = lltype.malloc(S1)
    adr = llmemory.cast_ptr_to_adr(s1)
    lst = list(gc_pointers_inside(s1._obj, adr, mutable_only=True))
    expected = [adr + llmemory.offsetof(S1, 'x'),
                adr + llmemory.offsetof(S1, 'y')]
    # field iteration order is unspecified, accept either ordering
    assert lst == expected or lst == expected[::-1]
    #
    # a fully immutable struct: nothing reported
    s2 = lltype.malloc(S2)
    adr = llmemory.cast_ptr_to_adr(s2)
    lst = list(gc_pointers_inside(s2._obj, adr, mutable_only=True))
    assert lst == []
    #
    # 'x' is immutable, so only 'y' is reported
    s3 = lltype.malloc(S3)
    adr = llmemory.cast_ptr_to_adr(s3)
    lst = list(gc_pointers_inside(s3._obj, adr, mutable_only=True))
    assert lst == [adr + llmemory.offsetof(S3, 'y')]
def gc_pointers_inside(v, adr, mutable_only=False):
    """Yield the addresses of the GC pointers stored inside the lltype
    value 'v', which lives at address 'adr'.

    With mutable_only=True, any struct or array whose type carries the
    'immutable' hint is skipped entirely.
    """
    T = lltype.typeOf(v)
    if isinstance(T, lltype.Struct):
        if mutable_only and T._hints.get('immutable'):
            return
        for name, FIELD in T._flds.iteritems():
            if isinstance(FIELD, lltype.Ptr) and FIELD.TO._gckind == 'gc':
                yield adr + llmemory.offsetof(T, name)
            elif isinstance(FIELD, (lltype.Array, lltype.Struct)):
                # recurse into inlined substructures/subarrays
                for inner in gc_pointers_inside(getattr(v, name),
                                                adr + llmemory.offsetof(T, name),
                                                mutable_only):
                    yield inner
    elif isinstance(T, lltype.Array):
        if mutable_only and T._hints.get('immutable'):
            return
        if isinstance(T.OF, lltype.Ptr) and T.OF.TO._gckind == 'gc':
            for i in range(len(v.items)):
                yield adr + llmemory.itemoffsetof(T, i)
        elif isinstance(T.OF, lltype.Struct):
            for i in range(len(v.items)):
                for inner in gc_pointers_inside(v.items[i],
                                                adr + llmemory.itemoffsetof(T, i),
                                                mutable_only):
                    yield inner
def test_gc_pointers_inside():
    """Same scenario as the list-based variant, but the accessor is
    initialized with the IR_IMMUTABLE marker dict."""
    from pypy.rpython import rclass
    PT = lltype.Ptr(lltype.GcStruct('T'))
    S1 = lltype.GcStruct('S', ('x', PT), ('y', PT))
    S2 = lltype.GcStruct('S', ('x', PT), ('y', PT),
                         hints={'immutable': True})
    accessor = rclass.FieldListAccessor()
    S3 = lltype.GcStruct('S', ('x', PT), ('y', PT),
                         hints={'immutable_fields': accessor})
    accessor.initialize(S3, {'x': IR_IMMUTABLE})
    #
    # mutable struct: both pointer fields show up, in either order
    s1 = lltype.malloc(S1)
    adr = llmemory.cast_ptr_to_adr(s1)
    lst = list(gc_pointers_inside(s1._obj, adr, mutable_only=True))
    expected = [adr + llmemory.offsetof(S1, 'x'),
                adr + llmemory.offsetof(S1, 'y')]
    assert lst == expected or lst == expected[::-1]
    #
    # fully immutable struct: nothing shows up
    s2 = lltype.malloc(S2)
    adr = llmemory.cast_ptr_to_adr(s2)
    lst = list(gc_pointers_inside(s2._obj, adr, mutable_only=True))
    assert lst == []
    #
    # only the mutable field 'y' shows up
    s3 = lltype.malloc(S3)
    adr = llmemory.cast_ptr_to_adr(s3)
    lst = list(gc_pointers_inside(s3._obj, adr, mutable_only=True))
    assert lst == [adr + llmemory.offsetof(S3, 'y')]
def gc_pointers_inside(v, adr, mutable_only=False):
    """Yield the addresses of the GC pointers inside the lltype value
    'v' located at address 'adr'.

    With mutable_only=True, immutable structs/arrays are skipped, and
    fields named by the type's 'immutable_fields' accessor are skipped
    individually.
    """
    T = lltype.typeOf(v)
    if isinstance(T, lltype.Struct):
        skip = ()
        if mutable_only:
            if T._hints.get("immutable"):
                return
            if "immutable_fields" in T._hints:
                skip = T._hints["immutable_fields"].all_immutable_fields()
        for name, FIELD in T._flds.iteritems():
            if isinstance(FIELD, lltype.Ptr) and FIELD.TO._gckind == "gc":
                if name not in skip:
                    yield adr + llmemory.offsetof(T, name)
            elif isinstance(FIELD, (lltype.Array, lltype.Struct)):
                # recurse into inlined substructures/subarrays
                for inner in gc_pointers_inside(getattr(v, name),
                                                adr + llmemory.offsetof(T, name),
                                                mutable_only):
                    yield inner
    elif isinstance(T, lltype.Array):
        if mutable_only and T._hints.get("immutable"):
            return
        if isinstance(T.OF, lltype.Ptr) and T.OF.TO._gckind == "gc":
            for i in range(len(v.items)):
                yield adr + llmemory.itemoffsetof(T, i)
        elif isinstance(T.OF, lltype.Struct):
            for i in range(len(v.items)):
                for inner in gc_pointers_inside(v.items[i],
                                                adr + llmemory.itemoffsetof(T, i),
                                                mutable_only):
                    yield inner
def test_add_offsetofs(self):
    """An offsetof() value must behave like an integer: usable in
    address arithmetic and storable into raw memory."""
    from pypy.rpython.lltypesystem.llmemory import offsetof
    S = lltype.GcStruct("struct", ('a', lltype.Signed), ('b', lltype.Signed))
    addr = raw_malloc(100)
    (addr + offsetof(S, 'b')).signed[0] = 42
    assert (addr + offsetof(S, 'b')).signed[0] == 42
    # round-trip the offset through raw memory
    addr.signed[5] = offsetof(S, 'b')
    offset = addr.signed[5]
    assert (addr + offset).signed[0] == 42
def offsets_to_gc_pointers(TYPE):
    """Return the list of offsets, relative to the start of TYPE, at
    which GC pointers are stored (recursing into inlined substructs).
    Non-struct types contribute no offsets."""
    if not isinstance(TYPE, lltype.Struct):
        return []
    offsets = []
    for name in TYPE._names:
        FIELD = getattr(TYPE, name)
        if isinstance(FIELD, lltype.Ptr) and FIELD.TO._gckind == 'gc':
            offsets.append(llmemory.offsetof(TYPE, name))
        elif isinstance(FIELD, lltype.Struct):
            base = llmemory.offsetof(TYPE, name)
            offsets += [base + sub for sub in offsets_to_gc_pointers(FIELD)]
    return offsets
def test_offsetof():
    """Read and write struct fields through precomputed offsets in a
    compiled function."""
    STRUCT = lltype.GcStruct("s", ("x", lltype.Signed), ("y", lltype.Signed))
    offsetx = llmemory.offsetof(STRUCT, 'x')
    offsety = llmemory.offsetof(STRUCT, 'y')

    def f():
        s = lltype.malloc(STRUCT)
        s.x = 1
        adr = llmemory.cast_ptr_to_adr(s)
        result = (adr + offsetx).signed[0]   # read s.x via its offset
        (adr + offsety).signed[0] = 2        # write s.y via its offset
        return result * 10 + s.y

    fn = compile_function(f, [])
    res = fn()
    assert res == 12
def test_offsetof():
    """Same offset-based field access test, compiled via getcompiled()."""
    STRUCT = lltype.GcStruct("s", ("x", lltype.Signed), ("y", lltype.Signed))
    offsetx = llmemory.offsetof(STRUCT, 'x')
    offsety = llmemory.offsetof(STRUCT, 'y')

    def f():
        s = lltype.malloc(STRUCT)
        s.x = 1
        adr = llmemory.cast_ptr_to_adr(s)
        result = (adr + offsetx).signed[0]   # read s.x via its offset
        (adr + offsety).signed[0] = 2        # write s.y via its offset
        return result * 10 + s.y

    fn, t = getcompiled(f, [])
    res = fn()
    assert res == 12
def test_look_inside_object():
    # this code is also used in translation tests below
    myarenasize = 50
    a = arena_malloc(myarenasize, False)
    b = a + round_up_for_allocation(llmemory.sizeof(lltype.Char))
    arena_reserve(b, precomputed_size)
    # write through the address, read back through a typed pointer
    (b + llmemory.offsetof(SX, 'x')).signed[0] = 123
    assert llmemory.cast_adr_to_ptr(b, SPTR).x == 123
    llmemory.cast_adr_to_ptr(b, SPTR).x += 1
    assert (b + llmemory.offsetof(SX, 'x')).signed[0] == 124
    # zero-fill the arena, reserve again, and check the field is zeroed
    arena_reset(a, myarenasize, True)
    arena_reserve(b, round_up_for_allocation(llmemory.sizeof(SX)))
    assert llmemory.cast_adr_to_ptr(b, SPTR).x == 0
    arena_free(a)
    return 42
def encode_type_shape(builder, info, TYPE):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    info.finalizer = builder.make_finalizer_funcptr_for_type(TYPE)
    info.weakptrofs = weakpointer_offset(TYPE)
    if not TYPE._is_varsize():
        #info.isvarsize = False
        #info.gcptrinvarsize = False
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE))
        info.ofstolength = -1
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        #info.isvarsize = True
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # var-sized struct: the array part is the last field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            info.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            info.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            ARRAY = TYPE
            info.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            info.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        info.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        info.varitemsize = llmemory.sizeof(ARRAY.OF)
def test_gc_offsets():
    """Pack five symbolic GC offsets/sizes into one integer in compiled
    code and sanity-check their concrete values."""
    STRUCT = lltype.GcStruct('S1', ('x', lltype.Signed), ('y', lltype.Char))
    ARRAY = lltype.GcArray(lltype.Signed)
    s1 = llarena.round_up_for_allocation(llmemory.sizeof(STRUCT))
    s2 = llmemory.offsetof(STRUCT, 'x')
    s3 = llmemory.ArrayLengthOffset(ARRAY)
    s4 = llmemory.sizeof(ARRAY, 0)
    s5 = llmemory.ArrayItemsOffset(ARRAY)

    def fn():
        return (s1 * 100000000 + s2 * 1000000 + s3 * 10000
                + s4 * 100 + s5)

    mod, f = compile_test(fn, [], gcpolicy="semispace")
    res = f()
    # unpack the five two-digit fields
    i1 = (res // 100000000) % 100
    i2 = (res // 1000000) % 100
    i3 = (res // 10000) % 100
    i4 = (res // 100) % 100
    i5 = (res // 1) % 100
    assert i1 % 4 == 0
    assert 12 <= i1 <= 24
    assert 4 <= i2 <= i1 - 8
    assert 4 <= i3 <= 12
    assert i4 == i5
    assert i3 + 4 <= i5
def str_from_buffer(raw_buf, gc_buf, allocated_size, needed_size):
    """
    Converts from a pair returned by alloc_buffer to a high-level string.
    The returned string will be truncated to needed_size.
    """
    assert allocated_size >= needed_size
    if gc_buf and (allocated_size == needed_size):
        # fast path: the GC buffer already has exactly the right size
        return hlstrtype(gc_buf)
    new_buf = lltype.malloc(STRTYPE, needed_size)
    try:
        str_chars_offset = (offsetof(STRTYPE, 'chars') +
                            itemoffsetof(STRTYPE.chars, 0))
        if gc_buf:
            src = cast_ptr_to_adr(gc_buf) + str_chars_offset
        else:
            src = cast_ptr_to_adr(raw_buf) + itemoffsetof(TYPEP.TO, 0)
        dest = cast_ptr_to_adr(new_buf) + str_chars_offset
        ## FIXME: This is bad, because dest could potentially move
        ## if there are threads involved.
        raw_memcopy(src, dest,
                    llmemory.sizeof(ll_char_type) * needed_size)
        return hlstrtype(new_buf)
    finally:
        keepalive_until_here(new_buf)
def define_custom_trace(cls):
    """Build a test function whose struct S uses a custom trace hook
    reporting only the single address field 'x'."""
    from pypy.rpython.annlowlevel import llhelper
    from pypy.rpython.lltypesystem import llmemory
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address), rtti=True)
    T = lltype.GcStruct('T', ('z', lltype.Signed))
    offset_of_x = llmemory.offsetof(S, 'x')

    def customtrace(obj, prev):
        # first call yields the address of 'x'; second call ends the trace
        if not prev:
            return obj + offset_of_x
        else:
            return llmemory.NULL

    CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address],
                                      llmemory.Address)
    customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace)
    lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr)
    #
    def setup():
        s1 = lltype.malloc(S)
        tx = lltype.malloc(T)
        tx.z = 4243
        s1.x = llmemory.cast_ptr_to_adr(tx)
        return s1

    def f():
        s1 = setup()
        llop.gc__collect(lltype.Void)
        # 'tx' must have survived the collection via the custom trace
        return llmemory.cast_adr_to_ptr(s1.x, lltype.Ptr(T)).z

    return f
def ll_shrink_array(p, smallerlength):
    """Shrink the var-sized GC structure 'p' to 'smallerlength' items,
    either in place (if the GC supports it) or by copying."""
    from pypy.rpython.lltypesystem.lloperation import llop
    from pypy.rlib.objectmodel import keepalive_until_here
    if llop.shrink_array(lltype.Bool, p, smallerlength):
        return p                    # done by the GC
    # XXX we assume for now that the type of p is GcStruct containing a
    # variable array, with no further pointers anywhere, and exactly one
    # field in the fixed part -- like STR and UNICODE.
    TP = lltype.typeOf(p).TO
    newp = lltype.malloc(TP, smallerlength)

    assert len(TP._names) == 2
    field = getattr(p, TP._names[0])
    setattr(newp, TP._names[0], field)

    # copy the variable part by raw memory copy
    ARRAY = getattr(TP, TP._arrayfld)
    offset = (llmemory.offsetof(TP, TP._arrayfld) +
              llmemory.itemoffsetof(ARRAY, 0))
    source_addr = llmemory.cast_ptr_to_adr(p) + offset
    dest_addr = llmemory.cast_ptr_to_adr(newp) + offset
    llmemory.raw_memcopy(source_addr, dest_addr,
                         llmemory.sizeof(ARRAY.OF) * smallerlength)

    keepalive_until_here(p)
    keepalive_until_here(newp)
    return newp
def test_gc_offsets():
    """Pack five symbolic GC offsets/sizes into one integer in compiled
    code and sanity-check their concrete values."""
    STRUCT = lltype.GcStruct("S1", ("x", lltype.Signed), ("y", lltype.Char))
    ARRAY = lltype.GcArray(lltype.Signed)
    s1 = llarena.round_up_for_allocation(llmemory.sizeof(STRUCT))
    s2 = llmemory.offsetof(STRUCT, "x")
    s3 = llmemory.ArrayLengthOffset(ARRAY)
    s4 = llmemory.sizeof(ARRAY, 0)
    s5 = llmemory.ArrayItemsOffset(ARRAY)

    def fn():
        return s1 * 100000000 + s2 * 1000000 + s3 * 10000 + s4 * 100 + s5

    mod, f = compile_test(fn, [], gcpolicy="semispace")
    res = f()
    # unpack the five two-digit fields
    i1 = (res // 100000000) % 100
    i2 = (res // 1000000) % 100
    i3 = (res // 10000) % 100
    i4 = (res // 100) % 100
    i5 = (res // 1) % 100
    assert i1 % 4 == 0
    assert 12 <= i1 <= 24
    assert 4 <= i2 <= i1 - 8
    assert 4 <= i3 <= 12
    assert i4 == i5
    assert i3 + 4 <= i5
def _gct_resize_buffer_no_realloc(self, hop, v_lgt):
    """Grow a var-sized buffer without realloc: malloc a fresh varsized
    object and raw_memcopy the old array part into it."""
    op = hop.spaceop
    meth = self.gct_fv_gc_malloc_varsize
    flags = {'flavor':'gc', 'varsize': True, 'keep_current_args': True}
    self.varsize_malloc_helper(hop, flags, meth, [])
    # fish resvar
    v_newbuf = hop.llops[-1].result
    v_src = op.args[0]
    TYPE = v_src.concretetype.TO
    c_fldname = rmodel.inputconst(lltype.Void, TYPE._arrayfld)
    # compute raw addresses of source and destination objects
    v_adrsrc = hop.genop('cast_ptr_to_adr', [v_src],
                         resulttype=llmemory.Address)
    v_adrnewbuf = hop.genop('cast_ptr_to_adr', [v_newbuf],
                            resulttype=llmemory.Address)
    # offset to the first item of the inlined array part
    ofs = (llmemory.offsetof(TYPE, TYPE._arrayfld) +
           llmemory.itemoffsetof(getattr(TYPE, TYPE._arrayfld), 0))
    v_ofs = rmodel.inputconst(lltype.Signed, ofs)
    v_adrsrc = hop.genop('adr_add', [v_adrsrc, v_ofs],
                         resulttype=llmemory.Address)
    v_adrnewbuf = hop.genop('adr_add', [v_adrnewbuf, v_ofs],
                            resulttype=llmemory.Address)
    # total number of bytes to copy = itemsize * length
    size = llmemory.sizeof(getattr(TYPE, TYPE._arrayfld).OF)
    c_size = rmodel.inputconst(lltype.Signed, size)
    v_lgtsym = hop.genop('int_mul', [c_size, v_lgt],
                         resulttype=lltype.Signed)
    vlist = [v_adrsrc, v_adrnewbuf, v_lgtsym]
    hop.genop('raw_memcopy', vlist)
def encode_type_shape(builder, info, TYPE):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    info.finalizer = builder.make_finalizer_funcptr_for_type(TYPE)
    info.weakptrofs = weakpointer_offset(TYPE)
    if not TYPE._is_varsize():
        #info.isvarsize = False
        #info.gcptrinvarsize = False
        info.fixedsize = llarena.round_up_for_allocation(llmemory.sizeof(TYPE))
        info.ofstolength = -1
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        #info.isvarsize = True
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # var-sized struct: the array part is the last field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            info.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            info.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            ARRAY = TYPE
            info.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            info.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        info.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        info.varitemsize = llmemory.sizeof(ARRAY.OF)
def offsets_to_gc_pointers(TYPE):
    """Return the list of offsets at which TYPE stores GC pointers.

    A gc Ptr contributes offset 0; structs recurse into their fields
    (inlined arrays are skipped).  Offsets that are provably zero are
    appended as the bare base offset to keep the symbolic sum small.
    """
    offsets = []
    if isinstance(TYPE, lltype.Struct):
        for name in TYPE._names:
            FIELD = getattr(TYPE, name)
            if isinstance(FIELD, lltype.Array):
                continue    # skip inlined array
            baseofs = llmemory.offsetof(TYPE, name)
            suboffsets = offsets_to_gc_pointers(FIELD)
            for s in suboffsets:
                try:
                    knownzero = s == 0
                except TypeError:
                    # symbolic offset: cannot compare with 0
                    knownzero = False
                if knownzero:
                    offsets.append(baseofs)
                else:
                    offsets.append(baseofs + s)
        # sanity check
        #ex = lltype.Ptr(TYPE)._example()
        #adr = llmemory.cast_ptr_to_adr(ex)
        #for off in offsets:
        #    (adr + off)
    elif isinstance(TYPE, lltype.Ptr) and TYPE.TO._gckind == 'gc':
        offsets.append(0)
    return offsets
def get_array_token(T, translate_support_code):
    """Return (basesize, itemsize, ofs_length) for T, which can be an
    array or a var-sized structure.

    Symbolic offsets are used when translating; otherwise concrete
    ctypes layouts are probed.  Arrays with the 'nolength' hint get
    ofs_length == -1.
    """
    # T can be an array or a var-sized structure
    if translate_support_code:
        basesize = llmemory.sizeof(T, 0)
        if isinstance(T, lltype.Struct):
            SUBARRAY = getattr(T, T._arrayfld)
            itemsize = llmemory.sizeof(SUBARRAY.OF)
            ofs_length = (llmemory.offsetof(T, T._arrayfld) +
                          llmemory.ArrayLengthOffset(SUBARRAY))
        else:
            if T._hints.get('nolength', None):
                ofs_length = -1
            else:
                ofs_length = llmemory.ArrayLengthOffset(T)
            itemsize = llmemory.sizeof(T.OF)
    else:
        if isinstance(T, lltype.Struct):
            assert T._arrayfld is not None, "%r is not variable-sized" % (T,)
            cstruct = ll2ctypes.get_ctypes_type(T)
            cfield = getattr(cstruct, T._arrayfld)
            before_array_part = cfield.offset
            T = getattr(T, T._arrayfld)
        else:
            before_array_part = 0
        carray = ll2ctypes.get_ctypes_type(T)
        if T._hints.get('nolength', None):
            ofs_length = -1
        else:
            assert carray.length.size == WORD
            ofs_length = before_array_part + carray.length.offset
        basesize = before_array_part + carray.items.offset
        carrayitem = ll2ctypes.get_ctypes_type(T.OF)
        itemsize = ctypes.sizeof(carrayitem)
    return basesize, itemsize, ofs_length
def test_address_order():
    """Addresses inside and across arenas must compare consistently."""
    a = arena_malloc(20, False)
    assert eq(a, a)
    assert lt(a, a + 1)
    assert lt(a + 5, a + 20)
    b = arena_malloc(20, False)
    if a > b:
        a, b = b, a          # normalize so that a < b
    assert lt(a, b)
    assert lt(a + 19, b)
    assert lt(a, b + 19)
    c = b + round_up_for_allocation(llmemory.sizeof(lltype.Char))
    arena_reserve(c, precomputed_size)
    assert lt(b, c)
    assert lt(a, c)
    assert lt(llmemory.NULL, c)
    d = c + llmemory.offsetof(SX, 'x')
    assert lt(c, d)
    assert lt(b, d)
    assert lt(a, d)
    assert lt(llmemory.NULL, d)
    e = c + precomputed_size
    assert lt(d, e)
    assert lt(c, e)
    assert lt(b, e)
    assert lt(a, e)
    assert lt(llmemory.NULL, e)
def get_array_token(T, translate_support_code):
    """Return (basesize, itemsize, ofs_length) for T, which can be an
    array or a var-sized structure.

    Symbolic offsets are used when translating; otherwise concrete
    ctypes layouts are probed.
    """
    # T can be an array or a var-sized structure
    if translate_support_code:
        basesize = llmemory.sizeof(T, 0)
        if isinstance(T, lltype.Struct):
            SUBARRAY = getattr(T, T._arrayfld)
            itemsize = llmemory.sizeof(SUBARRAY.OF)
            ofs_length = (llmemory.offsetof(T, T._arrayfld) +
                          llmemory.ArrayLengthOffset(SUBARRAY))
        else:
            itemsize = llmemory.sizeof(T.OF)
            ofs_length = llmemory.ArrayLengthOffset(T)
    else:
        if isinstance(T, lltype.Struct):
            assert T._arrayfld is not None, "%r is not variable-sized" % (T,)
            cstruct = ll2ctypes.get_ctypes_type(T)
            cfield = getattr(cstruct, T._arrayfld)
            before_array_part = cfield.offset
            T = getattr(T, T._arrayfld)
        else:
            before_array_part = 0
        carray = ll2ctypes.get_ctypes_type(T)
        assert carray.length.size == WORD
        ofs_length = before_array_part + carray.length.offset
        basesize = before_array_part + carray.items.offset
        carrayitem = ll2ctypes.get_ctypes_type(T.OF)
        itemsize = ctypes.sizeof(carrayitem)
    return basesize, itemsize, ofs_length
def fieldToken(T, name):
    """Return (offset, fieldsize) for field 'name' of struct type T."""
    FIELD = getattr(T, name)
    if isinstance(FIELD, lltype.ContainerType):
        fieldsize = 0       # not useful for getsubstruct
    else:
        fieldsize = llmemory.sizeof(FIELD)
    return (llmemory.offsetof(T, name), fieldsize)
def get_field_token(STRUCT, fieldname, translate_support_code):
    """Return (offset, size) of 'fieldname' in STRUCT: symbolic when
    translating, concrete (via ctypes) otherwise."""
    if translate_support_code:
        return (llmemory.offsetof(STRUCT, fieldname),
                get_size(getattr(STRUCT, fieldname), True))
    cstruct = ll2ctypes.get_ctypes_type(STRUCT)
    cfield = getattr(cstruct, fieldname)
    return (cfield.offset, cfield.size)
def define_custom_trace(cls):
    """Build a test function: a 10000-long linked list of S structs
    chained through the Address field 'x', traced by a custom hook."""
    from pypy.rpython.annlowlevel import llhelper
    from pypy.rpython.lltypesystem import llmemory
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address), rtti=True)
    offset_of_x = llmemory.offsetof(S, 'x')

    def customtrace(obj, prev):
        # first call yields the address of 'x'; second call ends the trace
        if not prev:
            return obj + offset_of_x
        else:
            return llmemory.NULL

    CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address],
                                      llmemory.Address)
    customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace)
    lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr)
    #
    def setup():
        s = lltype.nullptr(S)
        for i in range(10000):
            t = lltype.malloc(S)
            t.x = llmemory.cast_ptr_to_adr(s)
            s = t
        return s

    def measure_length(s):
        res = 0
        while s:
            res += 1
            s = llmemory.cast_adr_to_ptr(s.x, lltype.Ptr(S))
        return res

    def f(n):
        s1 = setup()
        llop.gc__collect(lltype.Void)
        # the whole chain must survive the collection
        return measure_length(s1)

    return f
def _gct_resize_buffer_no_realloc(self, hop, v_lgt):
    """Grow a var-sized buffer without realloc: malloc a fresh varsized
    object and raw_memcopy the old array part into it."""
    op = hop.spaceop
    meth = self.gct_fv_gc_malloc_varsize
    flags = {'flavor': 'gc', 'varsize': True, 'keep_current_args': True}
    self.varsize_malloc_helper(hop, flags, meth, [])
    # fish resvar
    v_newbuf = hop.llops[-1].result
    v_src = op.args[0]
    TYPE = v_src.concretetype.TO
    c_fldname = rmodel.inputconst(lltype.Void, TYPE._arrayfld)
    # compute raw addresses of source and destination objects
    v_adrsrc = hop.genop('cast_ptr_to_adr', [v_src],
                         resulttype=llmemory.Address)
    v_adrnewbuf = hop.genop('cast_ptr_to_adr', [v_newbuf],
                            resulttype=llmemory.Address)
    # offset to the first item of the inlined array part
    ofs = (llmemory.offsetof(TYPE, TYPE._arrayfld) +
           llmemory.itemoffsetof(getattr(TYPE, TYPE._arrayfld), 0))
    v_ofs = rmodel.inputconst(lltype.Signed, ofs)
    v_adrsrc = hop.genop('adr_add', [v_adrsrc, v_ofs],
                         resulttype=llmemory.Address)
    v_adrnewbuf = hop.genop('adr_add', [v_adrnewbuf, v_ofs],
                            resulttype=llmemory.Address)
    # total number of bytes to copy = itemsize * length
    size = llmemory.sizeof(getattr(TYPE, TYPE._arrayfld).OF)
    c_size = rmodel.inputconst(lltype.Signed, size)
    v_lgtsym = hop.genop('int_mul', [c_size, v_lgt],
                         resulttype=lltype.Signed)
    vlist = [v_adrsrc, v_adrnewbuf, v_lgtsym]
    hop.genop('raw_memcopy', vlist)
def test_address_order():
    """Addresses inside and across arenas must compare consistently."""
    a = arena_malloc(24, False)
    assert eq(a, a)
    assert lt(a, a + 1)
    assert lt(a + 5, a + 20)
    b = arena_malloc(24, False)
    if a > b:
        a, b = b, a          # normalize so that a < b
    assert lt(a, b)
    assert lt(a + 19, b)
    assert lt(a, b + 19)
    c = b + round_up_for_allocation(llmemory.sizeof(lltype.Char))
    arena_reserve(c, precomputed_size)
    assert lt(b, c)
    assert lt(a, c)
    assert lt(llmemory.NULL, c)
    d = c + llmemory.offsetof(SX, 'x')
    assert lt(c, d)
    assert lt(b, d)
    assert lt(a, d)
    assert lt(llmemory.NULL, d)
    e = c + precomputed_size
    assert lt(d, e)
    assert lt(c, e)
    assert lt(b, e)
    assert lt(a, e)
    assert lt(llmemory.NULL, e)
def fieldToken(T, name):
    """Return (offset, kind-token) for field 'name' of struct type T."""
    FIELD = getattr(T, name)
    if isinstance(FIELD, lltype.ContainerType):
        fieldtype = pi8     # not useful for getsubstruct
    else:
        fieldtype = RLLVMGenOp.kindToken(FIELD)
    return (llmemory.offsetof(T, name), fieldtype)
def f():
    a = llstr("xyz")
    # address of the first character of the string data
    b = (llmemory.cast_ptr_to_adr(a) + llmemory.offsetof(STR, 'chars')
         + llmemory.itemoffsetof(STR.chars, 0))
    buf = rffi.cast(rffi.VOIDP, b)
    return buf[2]
def gc_pointers_inside(v, adr):
    """Yield the addresses of all GC pointers contained inside the
    lltype value 'v', which lives at address 'adr'.

    Structs are scanned field by field (recursing into inlined
    substructures and subarrays); arrays of gc pointers yield one
    address per item, and arrays of structs recurse per item.
    """
    t = lltype.typeOf(v)
    if isinstance(t, lltype.Struct):
        for n, t2 in t._flds.iteritems():
            if isinstance(t2, lltype.Ptr) and t2.TO._gckind == 'gc':
                yield adr + llmemory.offsetof(t, n)
            elif isinstance(t2, (lltype.Array, lltype.Struct)):
                for a in gc_pointers_inside(getattr(v, n),
                                            adr + llmemory.offsetof(t, n)):
                    yield a
    elif isinstance(t, lltype.Array):
        # BUG FIX: the original tested "t2._needsgc()" here, but 't2'
        # is the stale loop variable from the Struct branch -- it is
        # unbound when the value is a plain array, and wrong otherwise.
        # The item type itself must be checked, as the sibling
        # implementations of this function do.
        if isinstance(t.OF, lltype.Ptr) and t.OF.TO._gckind == 'gc':
            for i in range(len(v.items)):
                yield adr + llmemory.itemoffsetof(t, i)
        elif isinstance(t.OF, lltype.Struct):
            for i in range(len(v.items)):
                for a in gc_pointers_inside(v.items[i],
                                            adr + llmemory.itemoffsetof(t, i)):
                    yield a
def encode_type_shape(builder, info, TYPE, index):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    infobits = index
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    #
    kind_and_fptr = builder.special_funcptr_for_type(TYPE)
    if kind_and_fptr is not None:
        kind, fptr = kind_and_fptr
        info.finalizer_or_customtrace = fptr
        if kind == "finalizer":
            infobits |= T_HAS_FINALIZER
        elif kind == 'light_finalizer':
            infobits |= T_HAS_FINALIZER | T_HAS_LIGHTWEIGHT_FINALIZER
        elif kind == "custom_trace":
            infobits |= T_HAS_CUSTOM_TRACE
        else:
            assert 0, kind
    #
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        infobits |= T_IS_VARSIZE
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # var-sized struct: the array part is the last field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if (isinstance(ARRAY.OF, lltype.Ptr)
                    and ARRAY.OF.TO._gckind == 'gc'):
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE
        varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if builder.is_weakref_type(TYPE):
        infobits |= T_IS_WEAKREF
    if is_subclass_of_object(TYPE):
        infobits |= T_IS_RPYTHON_INSTANCE
    info.infobits = infobits | T_KEY_VALUE
def test_offset_addition(self):
    """Sums of offsetof() values must annotate as SomeInteger, both as
    an in-function expression and as a prebuilt constant."""
    from pypy.rpython.lltypesystem import lltype
    from pypy.rpython.lltypesystem.llmemory import offsetof
    S = lltype.Struct('S', ('x', lltype.Bool), ('y', lltype.Signed))
    T = lltype.GcStruct('T', ('r', lltype.Float), ('s1', S), ('s2', S))

    def f():
        return offsetof(T, 's1') + offsetof(S, 'x')

    f()     # sanity-check it runs untranslated
    a = RPythonAnnotator()
    s = a.build_types(f, [])
    assert isinstance(s, annmodel.SomeInteger)

    # same, but with the sum precomputed outside the function
    coff = offsetof(T, 's2') + offsetof(S, 'y')

    def f():
        return coff

    f()
    a = RPythonAnnotator()
    s = a.build_types(f, [])
    assert isinstance(s, annmodel.SomeInteger)
def encode_type_shape(builder, info, TYPE, index):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    infobits = index
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    #
    kind_and_fptr = builder.special_funcptr_for_type(TYPE)
    if kind_and_fptr is not None:
        kind, fptr = kind_and_fptr
        info.finalizer_or_customtrace = fptr
        if kind == "finalizer":
            infobits |= T_HAS_FINALIZER
        elif kind == "light_finalizer":
            infobits |= T_HAS_FINALIZER | T_HAS_LIGHTWEIGHT_FINALIZER
        elif kind == "custom_trace":
            infobits |= T_HAS_CUSTOM_TRACE
        else:
            assert 0, kind
    #
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        infobits |= T_IS_VARSIZE
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # var-sized struct: the array part is the last field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if (isinstance(ARRAY.OF, lltype.Ptr)
                    and ARRAY.OF.TO._gckind == "gc"):
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE
        varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if builder.is_weakref_type(TYPE):
        infobits |= T_IS_WEAKREF
    if is_subclass_of_object(TYPE):
        infobits |= T_IS_RPYTHON_INSTANCE
    info.infobits = infobits | T_KEY_VALUE
def getinneraddr(self, obj, *offsets):
    """Follow a chain of field names (str) and array indices (int)
    starting from 'obj'; return (address, lltype) of the final spot."""
    TYPE = lltype.typeOf(obj).TO
    addr = llmemory.cast_ptr_to_adr(obj)
    for o in offsets:
        if isinstance(o, str):
            # struct field access
            addr += llmemory.offsetof(TYPE, o)
            TYPE = getattr(TYPE, o)
        else:
            # array item access
            addr += llmemory.itemoffsetof(TYPE, o)
            TYPE = TYPE.OF
    return addr, TYPE
def test_vararray():
    """Access item n of the inlined var-array of a GcStruct through
    precomputed symbolic offsets."""
    S1 = lltype.Struct('s1', ('s', lltype.Signed))
    A = lltype.Array(S1)
    S2 = lltype.GcStruct('s2', ('b', lltype.Signed), ('a', A))
    S1PTR = lltype.Ptr(S1)
    # offset of s.a[21].s, and of the struct s.a[21] itself
    offset1 = (llmemory.offsetof(S2, 'a') + llmemory.ArrayItemsOffset(A)
               + llmemory.ItemOffset(S1, 21) + llmemory.offsetof(S1, 's'))
    offset2 = (llmemory.offsetof(S2, 'a') + llmemory.ArrayItemsOffset(A)
               + llmemory.ItemOffset(S1, 21))

    def vararray(n):
        s = lltype.malloc(S2, n)
        adr = llmemory.cast_ptr_to_adr(s)
        s.a[n].s = n
        s1 = llmemory.cast_adr_to_ptr(adr + offset2, S1PTR)
        return (adr + offset1).signed[0] + s1.s

    fn = compile_function(vararray, [int])
    assert fn(21) == 42
def varsizeAllocToken(T):
    """Allocation token for a var-sized T: delegate for plain arrays,
    otherwise shift the inner array's token by the array-field offset."""
    if isinstance(T, lltype.Array):
        return RPPCGenOp.arrayToken(T)
    else:
        # var-sized structs
        arrayfield = T._arrayfld
        ARRAYFIELD = getattr(T, arrayfield)
        arraytoken = RPPCGenOp.arrayToken(ARRAYFIELD)
        length_offset, items_offset, item_size = arraytoken
        arrayfield_offset = llmemory.offsetof(T, arrayfield)
        return (arrayfield_offset + length_offset,
                arrayfield_offset + items_offset,
                item_size)
def test_offset_addition(self):
    """Sums of offsetof() values must annotate and rtype without error,
    both as an in-function expression and as a prebuilt constant."""
    from pypy.rpython.lltypesystem import lltype
    from pypy.rpython.lltypesystem.llmemory import offsetof
    S = lltype.Struct('S', ('x', lltype.Bool), ('y', lltype.Signed))
    T = lltype.GcStruct('T', ('r', lltype.Float), ('s1', S), ('s2', S))

    def f():
        return offsetof(T, 's1') + offsetof(S, 'x')

    f()     # sanity-check it runs untranslated
    a = RPythonAnnotator()
    s = a.build_types(f, [])
    rtyper = RPythonTyper(a)
    rtyper.specialize()     #does not raise

    # same, but with the sum precomputed outside the function
    coff = offsetof(T, 's2') + offsetof(S, 'y')

    def f():
        return coff

    f()
    a = RPythonAnnotator()
    s = a.build_types(f, [])
    rtyper = RPythonTyper(a)
    rtyper.specialize()     #does not raise
def offsetof(STRUCT, fieldname):
    """Similar to llmemory.offsetof() but tries hard to return a
    integer instead of a symbolic value.
    """
    # the hint is present in structures probed by rffi_platform.
    fieldoffsets = STRUCT._hints.get('fieldoffsets')
    if fieldoffsets is not None:
        # a numeric result when known
        for index, name in enumerate(STRUCT._names):
            if name == fieldname:
                return fieldoffsets[index]
    # a symbolic result as a fallback
    return llmemory.offsetof(STRUCT, fieldname)
def varsizeAllocToken(T):
    """Allocation token for a var-sized T: delegate for plain arrays,
    otherwise shift the inner array's token by the array-field offset."""
    if isinstance(T, lltype.Array):
        return RI386GenOp.arrayToken(T)
    else:
        # var-sized structs
        arrayfield = T._arrayfld
        ARRAYFIELD = getattr(T, arrayfield)
        arraytoken = RI386GenOp.arrayToken(ARRAYFIELD)
        (lengthoffset, lengthsize), itemsoffset, itemsize = arraytoken
        arrayfield_offset = llmemory.offsetof(T, arrayfield)
        return ((arrayfield_offset + lengthoffset, lengthsize),
                arrayfield_offset + itemsoffset,
                itemsize)
def varsizeAllocToken(T):
    #XXX TODO
    """Allocation token for a var-sized T: delegate for plain arrays,
    otherwise shift the inner array's token by the array-field offset."""
    if isinstance(T, lltype.Array):
        return RLLVMGenOp.arrayToken(T)
    else:
        # var-sized structs
        arrayfield = T._arrayfld
        ARRAYFIELD = getattr(T, arrayfield)
        arraytoken = RLLVMGenOp.arrayToken(ARRAYFIELD)
        length_offset, items_offset, item_size, item_type = arraytoken
        arrayfield_offset = llmemory.offsetof(T, arrayfield)
        return (arrayfield_offset + length_offset,
                arrayfield_offset + items_offset,
                item_size,
                item_type)
def get_type_id(self, TYPE):
    """Return the type_id for TYPE, building and caching a new
    type-description dict the first time TYPE is seen."""
    try:
        return self.id_of_type[TYPE]
    except KeyError:
        assert not self.finished_tables
        assert isinstance(TYPE, (lltype.GcStruct, lltype.GcArray))
        # Record the new type_id description as a small dict for now.
        # It will be turned into a Struct("type_info") in finish()
        type_id = len(self.type_info_list)
        info = {}
        self.type_info_list.append(info)
        self.id_of_type[TYPE] = type_id
        offsets = offsets_to_gc_pointers(TYPE)
        info["ofstoptrs"] = self.offsets2table(offsets, TYPE)
        # NOTE(review): "finalyzer" is misspelled, but it is a runtime
        # dict key -- presumably read elsewhere under the same spelling,
        # so it is deliberately kept as-is.
        info["finalyzer"] = self.finalizer_funcptr_for_type(TYPE)
        if not TYPE._is_varsize():
            info["isvarsize"] = False
            info["fixedsize"] = llmemory.sizeof(TYPE)
            info["ofstolength"] = -1
        else:
            info["isvarsize"] = True
            info["fixedsize"] = llmemory.sizeof(TYPE, 0)
            if isinstance(TYPE, lltype.Struct):
                # var-sized struct: the array part is the last field
                ARRAY = TYPE._flds[TYPE._arrayfld]
                ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
                info["ofstolength"] = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
                if ARRAY.OF != lltype.Void:
                    info["ofstovar"] = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
                else:
                    info["fixedsize"] = ofs1 + llmemory.sizeof(lltype.Signed)
                if ARRAY._hints.get('isrpystring'):
                    # account for the extra trailing NUL character
                    info["fixedsize"] = llmemory.sizeof(TYPE, 1)
            else:
                ARRAY = TYPE
                info["ofstolength"] = llmemory.ArrayLengthOffset(ARRAY)
                if ARRAY.OF != lltype.Void:
                    info["ofstovar"] = llmemory.itemoffsetof(TYPE, 0)
                else:
                    info["fixedsize"] = (llmemory.ArrayLengthOffset(ARRAY) +
                                         llmemory.sizeof(lltype.Signed))
            assert isinstance(ARRAY, lltype.Array)
            if ARRAY.OF != lltype.Void:
                offsets = offsets_to_gc_pointers(ARRAY.OF)
                info["varofstoptrs"] = self.offsets2table(offsets, ARRAY.OF)
                info["varitemsize"] = llmemory.sizeof(ARRAY.OF)
            else:
                info["varofstoptrs"] = self.offsets2table((), lltype.Void)
                info["varitemsize"] = llmemory.sizeof(ARRAY.OF)
        return type_id
def test_sizeof_constsize_struct():
    # _not_ a GcStruct, since we want to raw_malloc it
    STRUCT = lltype.Struct("s", ("x", lltype.Signed), ("y", lltype.Signed))
    STRUCTPTR = lltype.Ptr(STRUCT)
    sizeofs = llmemory.sizeof(STRUCT)
    offsety = llmemory.offsetof(STRUCT, 'y')

    def f():
        adr = llmemory.raw_malloc(sizeofs)
        s = llmemory.cast_adr_to_ptr(adr, STRUCTPTR)
        s.y = 5     # does not crash
        result = (adr + offsety).signed[0] * 10 + int(offsety < sizeofs)
        llmemory.raw_free(adr)
        return result

    fn, t = getcompiled(f, [])
    res = fn()
    assert res == 51
def get_nonmovingbuffer(data):
    """
    Either returns a non-moving copy or performs neccessary pointer
    arithmetic to return a pointer to the characters of a string if the
    string is already nonmovable.  Must be followed by a
    free_nonmovingbuffer call.
    """
    if rgc.can_move(data):
        # movable string: copy the characters into a raw buffer
        count = len(data)
        buf = lltype.malloc(TYPEP.TO, count, flavor='raw')
        for i in range(count):
            buf[i] = data[i]
        return buf
    else:
        # nonmovable string: point straight at its character data
        data_start = (cast_ptr_to_adr(llstrtype(data)) +
                      offsetof(STRTYPE, 'chars') +
                      itemoffsetof(STRTYPE.chars, 0))
        return cast(TYPEP, data_start)
def alloc_buffer(count):
    """Allocate a (raw_buffer, gc_buffer) pair of 'count' bytes.

    The raw_buffer can safely be passed to native code that expects a
    pointer that does not move.  Pass both returned values to
    str_from_buffer() to obtain a safe high-level string; when the GC
    cooperates this avoids an extra copy.  Always call
    keep_buffer_alive_until_here() on the returned values.
    """
    gc_buf = rgc.malloc_nonmovable(STRTYPE, count)
    if gc_buf:
        # got a non-movable GC string: its chars array doubles as the
        # raw buffer
        chars_adr = (cast_ptr_to_adr(gc_buf) +
                     offsetof(STRTYPE, 'chars') +
                     itemoffsetof(STRTYPE.chars, 0))
        return cast(TYPEP, chars_adr), gc_buf
    # fall back to a plain raw allocation with no GC counterpart
    return (lltype.malloc(TYPEP.TO, count, flavor='raw'),
            lltype.nullptr(STRTYPE))
def test_partial_arena_reset():
    """Check arena_reset() on a sub-range of an arena: cleared objects
    become inaccessible while their neighbours survive, and a zero-fill
    reset really zeroes re-reserved memory."""
    a = arena_malloc(72, False)
    def reserve(i):
        # address of the i-th object slot in the arena
        b = a + i * llmemory.raw_malloc_usage(precomputed_size)
        arena_reserve(b, precomputed_size)
        return b
    blist = []
    plist = []
    for i in range(4):
        b = reserve(i)
        (b + llmemory.offsetof(SX, 'x')).signed[0] = 100 + i
        blist.append(b)
        plist.append(llmemory.cast_adr_to_ptr(b, SPTR))
    # clear blist[1] and blist[2] but not blist[0] nor blist[3]
    arena_reset(blist[1],
                llmemory.raw_malloc_usage(precomputed_size) * 2, False)
    py.test.raises(RuntimeError, "plist[1].x")  # marked as freed
    py.test.raises(RuntimeError, "plist[2].x")  # marked as freed
    # re-reserve object at index 1 and 2
    blist[1] = reserve(1)
    blist[2] = reserve(2)
    # check via object pointers
    assert plist[0].x == 100
    assert plist[3].x == 103
    # the old pointers stay invalid even after re-reserving the slots
    py.test.raises(RuntimeError, "plist[1].x")  # marked as freed
    py.test.raises(RuntimeError, "plist[2].x")  # marked as freed
    # but we can still cast the old ptrs to addresses, which compare equal
    # to the new ones we got
    assert llmemory.cast_ptr_to_adr(plist[1]) == blist[1]
    assert llmemory.cast_ptr_to_adr(plist[2]) == blist[2]
    # check via addresses
    assert (blist[0] + llmemory.offsetof(SX, 'x')).signed[0] == 100
    assert (blist[3] + llmemory.offsetof(SX, 'x')).signed[0] == 103
    # re-reserved but not yet written slots are uninitialized
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[1] + llmemory.offsetof(SX, 'x')).signed[0]")
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[2] + llmemory.offsetof(SX, 'x')).signed[0]")
    # clear and zero-fill the area over blist[0] and blist[1]
    arena_reset(blist[0],
                llmemory.raw_malloc_usage(precomputed_size) * 2, True)
    # re-reserve and check it's zero
    blist[0] = reserve(0)
    blist[1] = reserve(1)
    assert (blist[0] + llmemory.offsetof(SX, 'x')).signed[0] == 0
    assert (blist[1] + llmemory.offsetof(SX, 'x')).signed[0] == 0
    # blist[3] was outside the reset range and keeps its value
    assert (blist[3] + llmemory.offsetof(SX, 'x')).signed[0] == 103
    # blist[2] was reset without zero-fill earlier: still uninitialized
    py.test.raises(lltype.UninitializedMemoryAccess,
                   "(blist[2] + llmemory.offsetof(SX, 'x')).signed[0]")
def encode_type_shape(builder, info, TYPE):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    # offsets of all GC pointer fields in the fixed part of TYPE
    offsets = offsets_to_gc_pointers(TYPE)
    infobits = 0
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    info.finalizer = builder.make_finalizer_funcptr_for_type(TYPE)
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        infobits |= T_IS_VARSIZE
        # 'varinfo' aliases 'info' viewed as the larger varsize record
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        # size of the fixed part only (varsize with length 0)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # varsized struct: the inlined array is its last field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if (isinstance(ARRAY.OF, lltype.Ptr)
                and ARRAY.OF.TO._gckind == 'gc'):
                # array whose items are themselves single GC pointers
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            # offsets of GC pointers inside one variable-part item
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE
        varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if TYPE == WEAKREF:
        infobits |= T_IS_WEAKREF
    info.infobits = infobits
def test_custom_trace(self):
    """A custom trace function that reports only field 'x' to the GC:
    the object reachable through 'x' must survive a collection, while
    reading back 'y' afterwards must fail."""
    from pypy.rpython.annlowlevel import llhelper
    from pypy.rpython.lltypesystem import llmemory
    from pypy.rpython.lltypesystem.llarena import ArenaError
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address),
                             ('y', llmemory.Address), rtti=True)
    T = lltype.GcStruct('T', ('z', lltype.Signed))
    offset_of_x = llmemory.offsetof(S, 'x')
    def customtrace(obj, prev):
        # enumerate the GC pointers inside 'obj': yield the address of
        # the 'x' field first, then NULL to signal the end --- the 'y'
        # field is deliberately never reported
        if not prev:
            return obj + offset_of_x
        else:
            return llmemory.NULL
    CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address],
                                      llmemory.Address)
    customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace)
    lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr)
    #
    for attrname in ['x', 'y']:
        def setup():
            # build an S whose 'x' and 'y' hold addresses of two T's
            s1 = lltype.malloc(S)
            tx = lltype.malloc(T)
            tx.z = 42
            ty = lltype.malloc(T)
            s1.x = llmemory.cast_ptr_to_adr(tx)
            s1.y = llmemory.cast_ptr_to_adr(ty)
            return s1
        def f():
            s1 = setup()
            llop.gc__collect(lltype.Void)
            # read back the address stored in 'attrname' after the GC ran
            return llmemory.cast_adr_to_ptr(getattr(s1, attrname),
                                            lltype.Ptr(T))
        if attrname == 'x':
            # 'x' was traced, so its target survived the collection
            res = self.interpret(f, [])
            assert res.z == 42
        else:
            # 'y' was not traced: its target is gone, so the access fails
            py.test.raises((RuntimeError, ArenaError),
                           self.interpret, f, [])
def weakpointer_offset(TYPE):
    """Return the offset of the 'weakptr' field if TYPE is the WEAKREF
    structure, and -1 for any other type."""
    if TYPE != WEAKREF:
        return -1
    return llmemory.offsetof(WEAKREF, "weakptr")