def __init__(self, chunk_size=DEFAULT_CHUNK_SIZE, start_heap_size=4096): self.heap_usage = 0 # at the end of the latest collection self.bytes_malloced = 0 # since the latest collection self.bytes_malloced_threshold = start_heap_size self.total_collection_time = 0.0 self.AddressStack = get_address_stack(chunk_size) self.malloced_objects = lltype.nullptr(self.HDR) self.malloced_objects_with_finalizer = lltype.nullptr(self.HDR) # these are usually only the small bits of memory that make a # weakref object self.objects_with_weak_pointers = lltype.nullptr(self.HDR) self.gcheaderbuilder = GCHeaderBuilder(self.HDR) # pools, for x_swap_pool(): # 'curpool' is the current pool, lazily allocated (i.e. NULL means # the current POOL object is not yet malloc'ed). POOL objects are # usually at the start of a linked list of objects, via the HDRs. # The exception is 'curpool' whose linked list of objects is in # 'self.malloced_objects' instead of in the header of 'curpool'. # POOL objects are never in the middle of a linked list themselves. # XXX a likely cause for the current problems with pools is: # not all objects live in malloced_objects, some also live in # malloced_objects_with_finalizer and objects_with_weak_pointers self.curpool = lltype.nullptr(self.POOL) # 'poolnodes' is a linked list of all such linked lists. Each # linked list will usually start with a POOL object, but it can # also contain only normal objects if the POOL object at the head # was already freed. The objects in 'malloced_objects' are not # found via 'poolnodes'. self.poolnodes = lltype.nullptr(self.POOLNODE) self.collect_in_progress = False self.prev_collect_end_time = 0.0
def __init__(self, chunk_size=DEFAULT_CHUNK_SIZE, space_size=4096,
             max_space_size=sys.maxint//2+1):
    """Initialize the moving collector.

    chunk_size: chunk size for the AddressStack/AddressDeque classes.
    space_size: initial space size, in bytes.
    max_space_size: hard upper bound on the space size; the default is
        the largest Signed value that is still safe to double.
    """
    MovingGCBase.__init__(self)
    self.space_size = space_size
    self.max_space_size = max_space_size
    self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
    self.AddressStack = get_address_stack(chunk_size)
    self.AddressDeque = get_address_deque(chunk_size)
    # nonzero while finalizers are being run; blocks reentrant runs
    self.finalizer_lock_count = 0
    # counts how close the heap is to its limit (see subclass usage --
    # NOTE(review): semantics not visible in this chunk, confirm)
    self.red_zone = 0
def raw_free(adr):
    """Release the raw object located at 'adr'.

    If 'adr' turns out to be the address of a GC header, the payload
    object attached to that header is freed first (recursively), so
    that header + object go away together.
    """
    from pypy.rpython.memory.gcheader import GCHeaderBuilder
    try:
        containerptr = GCHeaderBuilder.object_from_header(adr.ptr)
    except KeyError:
        pass            # 'adr' is not a header address: nothing extra
    else:
        raw_free(cast_ptr_to_adr(containerptr))
    assert isinstance(adr.ref()._obj, lltype._parentable)
    adr.ptr._as_obj()._free()
def _setup_gcclass(self):
    """Fish the GC class chosen by the layout builder and precompute
    the GC constants that the JIT backend needs from it."""
    from pypy.rpython.memory.gcheader import GCHeaderBuilder
    self.GCClass = self.layoutbuilder.GCClass
    self.moving_gc = self.GCClass.moving_gc
    self.HDRPTR = lltype.Ptr(self.GCClass.HDR)
    self.gcheaderbuilder = GCHeaderBuilder(self.HDRPTR.TO)
    self.max_size_of_young_obj = self.GCClass.JIT_max_size_of_young_obj()
    self.minimal_size_in_nursery = self.GCClass.JIT_minimal_size_in_nursery()
    # for the fast path of mallocs, the following must be true, at least
    assert self.GCClass.inline_simple_malloc
    assert self.GCClass.inline_simple_malloc_varsize
def test_shrink_obj():
    """arena_shrink_obj() lets a reserved varsized object be shrunk in
    place, after which the freed tail can be reset."""
    from pypy.rpython.memory.gcheader import GCHeaderBuilder
    HDR = lltype.Struct('HDR', ('h', lltype.Signed))
    size_gc_header = GCHeaderBuilder(HDR).size_gc_header
    S = lltype.GcStruct('S', ('x', lltype.Signed),
                        ('a', lltype.Array(lltype.Signed)))
    arena = arena_malloc(200, False)
    # reserve room for a 10-item S, then shrink it down to 5 items
    arena_reserve(arena, size_gc_header + llmemory.sizeof(S, 10))
    arena_shrink_obj(arena, size_gc_header + llmemory.sizeof(S, 5))
    arena_reset(arena, size_gc_header + llmemory.sizeof(S, 5), False)
def __init__(self, config, chunk_size=DEFAULT_CHUNK_SIZE,
             translated_to_c=True):
    """Shared GC base initialization.

    config: the translation config object (gives access to GC options).
    chunk_size: chunk size for the address container classes.
    translated_to_c: True after translation to C, False when running
        on top of the llinterp.
    """
    self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
    self.AddressStack = get_address_stack(chunk_size)
    self.AddressDeque = get_address_deque(chunk_size)
    self.AddressDict = AddressDict
    self.null_address_dict = null_address_dict
    self.config = config
    assert isinstance(translated_to_c, bool)
    self.translated_to_c = translated_to_c
def test_raw_free_with_hdr():
    """raw_free() on the header address (object address minus the GC
    header size) must free both the header and the object, for arrays,
    structs, inlined GC substructs and inlined non-GC substructs.
    After freeing, any access through the stale pointers must raise."""
    from pypy.rpython.memory.gcheader import GCHeaderBuilder
    HDR = lltype.Struct('h', ('t', lltype.Signed))
    gh = GCHeaderBuilder(HDR).size_gc_header

    # GcArray with a header in front
    A = lltype.GcArray(lltype.Signed)
    adr = raw_malloc(gh + sizeof(A, 10))
    p_a = cast_adr_to_ptr(adr + gh, lltype.Ptr(A))
    p_a[0] = 1
    adr = cast_ptr_to_adr(p_a) - gh
    raw_free(adr)
    py.test.raises(RuntimeError, "p_a[0]")
    py.test.raises(RuntimeError, "p_a[0] = 2")
    repr(adr)       # must not crash on a freed address
    str(p_a)        # must not crash on a dead pointer

    # plain GcStruct
    S = lltype.GcStruct('S', ('x', lltype.Signed))
    adr = raw_malloc(gh + sizeof(S))
    p_s = cast_adr_to_ptr(adr + gh, lltype.Ptr(S))
    p_s.x = 1
    adr = cast_ptr_to_adr(p_s) - gh
    raw_free(adr)
    py.test.raises(RuntimeError, "p_s.x")
    py.test.raises(RuntimeError, "p_s.x = 2")
    repr(adr)
    str(p_s)

    # freeing via a pointer to the first inlined (GC) substructure
    T = lltype.GcStruct('T', ('s', S))
    adr = raw_malloc(gh + sizeof(T))
    p_s = cast_adr_to_ptr(adr + gh, lltype.Ptr(S))
    p_s.x = 1
    adr = cast_ptr_to_adr(p_s) - gh
    raw_free(adr)
    py.test.raises(RuntimeError, "p_s.x")
    py.test.raises(RuntimeError, "p_s.x = 2")
    repr(adr)
    str(p_s)

    # a non-GC struct inlined in a GcStruct dies with its container
    U = lltype.Struct('U', ('y', lltype.Signed))
    T = lltype.GcStruct('T', ('x', lltype.Signed), ('u', U))
    adr = raw_malloc(gh + sizeof(T))
    p_t = cast_adr_to_ptr(adr + gh, lltype.Ptr(T))
    p_u = p_t.u
    p_u.y = 1
    adr = cast_ptr_to_adr(p_t) - gh
    raw_free(adr)
    py.test.raises(RuntimeError, "p_u.y")
    py.test.raises(RuntimeError, "p_u.y = 2")
    repr(adr)
    str(p_u)
def test_replace_object_with_stub():
    """An object reserved in an arena can be reset and replaced in
    place by a smaller HDR+STUB, and the new contents stay reachable
    both through the original 'hdraddr' and through a recomputed
    (now-invalid) pointer to the old object."""
    from pypy.rpython.memory.gcheader import GCHeaderBuilder
    HDR = lltype.Struct('HDR', ('x', lltype.Signed))
    S = lltype.GcStruct('S', ('y', lltype.Signed), ('z', lltype.Signed))
    STUB = lltype.GcStruct('STUB', ('t', lltype.Char))
    gcheaderbuilder = GCHeaderBuilder(HDR)
    size_gc_header = gcheaderbuilder.size_gc_header

    ssize = llmemory.raw_malloc_usage(llmemory.sizeof(S))
    a = arena_malloc(13 * ssize, True)
    hdraddr = a + 3 * ssize
    arena_reserve(hdraddr, size_gc_header + llmemory.sizeof(S))
    hdr = llmemory.cast_adr_to_ptr(hdraddr, lltype.Ptr(HDR))
    hdr.x = 42
    obj = llmemory.cast_adr_to_ptr(hdraddr + size_gc_header, lltype.Ptr(S))
    obj.y = -5
    obj.z = -6

    # replace the reserved HDR+S with a smaller HDR+STUB
    hdraddr = llmemory.cast_ptr_to_adr(obj) - size_gc_header
    arena_reset(hdraddr, size_gc_header + llmemory.sizeof(S), False)
    arena_reserve(hdraddr, size_gc_header + llmemory.sizeof(STUB))

    # check that it possible to reach the newly reserved HDR+STUB
    # via the header of the old 'obj' pointer, both via the existing
    # 'hdraddr':
    hdr = llmemory.cast_adr_to_ptr(hdraddr, lltype.Ptr(HDR))
    hdr.x = 46
    stub = llmemory.cast_adr_to_ptr(hdraddr + size_gc_header,
                                    lltype.Ptr(STUB))
    stub.t = '!'

    # and via a (now-invalid) pointer to the old 'obj': (this is needed
    # because during a garbage collection there are still pointers to
    # the old 'obj' around to be fixed)
    hdraddr = llmemory.cast_ptr_to_adr(obj) - size_gc_header
    hdr = llmemory.cast_adr_to_ptr(hdraddr, lltype.Ptr(HDR))
    assert hdr.x == 46
    stub = llmemory.cast_adr_to_ptr(hdraddr + size_gc_header,
                                    lltype.Ptr(STUB))
    assert stub.t == '!'
def __init__(self, config, chunk_size=DEFAULT_CHUNK_SIZE):
    """Shared GC base initialization.

    config: the translation config object (gives access to GC options).
    chunk_size: chunk size for the address container classes.
    """
    self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
    self.AddressStack = get_address_stack(chunk_size)
    self.AddressDeque = get_address_deque(chunk_size)
    self.AddressDict = AddressDict
    self.config = config
def __init__(self, translator):
    """Build the incref/decref/dealloc/malloc helper graphs that the
    refcounting transformation inserts calls to.

    translator: the translator driving the transformation; when None,
        the helper graphs are not built (e.g. in some tests).
    """
    super(RefcountingGCTransformer, self).__init__(translator, inline=True)
    self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
    gc_header_offset = self.gcheaderbuilder.size_gc_header
    self.deallocator_graphs_needing_transforming = []
    # create incref, etc graph
    # NOTE(review): 'memoryError' looks unused in this method -- confirm
    memoryError = MemoryError()
    HDRPTR = lltype.Ptr(self.HDR)

    def ll_incref(adr):
        if adr:
            gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset, HDRPTR)
            gcheader.refcount = gcheader.refcount + 1

    def ll_decref(adr, dealloc):
        if adr:
            gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset, HDRPTR)
            refcount = gcheader.refcount - 1
            gcheader.refcount = refcount
            if refcount == 0:
                dealloc(adr)

    def ll_decref_simple(adr):
        # decref for objects needing no custom deallocator
        if adr:
            gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset, HDRPTR)
            refcount = gcheader.refcount - 1
            if refcount == 0:
                llop.gc_free(lltype.Void, adr)
            else:
                gcheader.refcount = refcount

    def ll_no_pointer_dealloc(adr):
        llop.gc_free(lltype.Void, adr)

    mh = mallocHelpers()
    mh.allocate = llmemory.raw_malloc

    def ll_malloc_fixedsize(size):
        # allocate header + object, zero it, return the object address
        size = gc_header_offset + size
        result = mh._ll_malloc_fixedsize(size)
        llmemory.raw_memclear(result, size)
        result += gc_header_offset
        return result

    def ll_malloc_varsize_no_length(length, size, itemsize):
        try:
            fixsize = gc_header_offset + size
            varsize = ovfcheck(itemsize * length)
            tot_size = ovfcheck(fixsize + varsize)
        except OverflowError:
            raise MemoryError()
        result = mh._ll_malloc_fixedsize(tot_size)
        llmemory.raw_memclear(result, tot_size)
        result += gc_header_offset
        return result

    mh.ll_malloc_varsize_no_length = ll_malloc_varsize_no_length
    ll_malloc_varsize = mh.ll_malloc_varsize

    def ll_identityhash(addr):
        # lazily computed hash, cached in the header
        obj = llmemory.cast_adr_to_ptr(addr, HDRPTR)
        h = obj.hash
        if h == 0:
            obj.hash = h = llmemory.cast_adr_to_int(addr)
        return h

    if self.translator:
        self.increfptr = self.inittime_helper(ll_incref, [llmemory.Address], lltype.Void)
        self.decref_ptr = self.inittime_helper(
            ll_decref, [llmemory.Address, lltype.Ptr(ADDRESS_VOID_FUNC)], lltype.Void
        )
        self.decref_simple_ptr = self.inittime_helper(ll_decref_simple, [llmemory.Address], lltype.Void)
        self.no_pointer_dealloc_ptr = self.inittime_helper(ll_no_pointer_dealloc, [llmemory.Address], lltype.Void)
        self.malloc_fixedsize_ptr = self.inittime_helper(ll_malloc_fixedsize, [lltype.Signed], llmemory.Address)
        self.malloc_varsize_no_length_ptr = self.inittime_helper(
            ll_malloc_varsize_no_length, [lltype.Signed] * 3, llmemory.Address
        )
        self.malloc_varsize_ptr = self.inittime_helper(ll_malloc_varsize, [lltype.Signed] * 4, llmemory.Address)
        self.identityhash_ptr = self.inittime_helper(
            ll_identityhash, [llmemory.Address], lltype.Signed, inline=False
        )
        self.mixlevelannotator.finish()
        self.mixlevelannotator.backend_optimize()
    # cache graphs:
    self.decref_funcptrs = {}
    self.static_deallocator_funcptrs = {}
    self.dynamic_deallocator_funcptrs = {}
    self.queryptr2dynamic_deallocator_funcptr = {}
class RefcountingGCTransformer(GCTransformer):
    """GC transformer implementing naive reference counting: every
    pointer store is bracketed by incref/decref calls, and objects are
    deallocated as soon as their refcount drops to zero."""

    malloc_zero_filled = True

    # per-object header: the refcount plus a cached identity hash
    HDR = lltype.Struct("header", ("refcount", lltype.Signed), ("hash", lltype.Signed))

    def __init__(self, translator):
        """Build the incref/decref/dealloc/malloc helper graphs."""
        super(RefcountingGCTransformer, self).__init__(translator, inline=True)
        self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
        gc_header_offset = self.gcheaderbuilder.size_gc_header
        self.deallocator_graphs_needing_transforming = []
        # create incref, etc graph
        # NOTE(review): 'memoryError' looks unused here -- confirm
        memoryError = MemoryError()
        HDRPTR = lltype.Ptr(self.HDR)

        def ll_incref(adr):
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset, HDRPTR)
                gcheader.refcount = gcheader.refcount + 1

        def ll_decref(adr, dealloc):
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset, HDRPTR)
                refcount = gcheader.refcount - 1
                gcheader.refcount = refcount
                if refcount == 0:
                    dealloc(adr)

        def ll_decref_simple(adr):
            # fast decref for objects needing no custom deallocator
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset, HDRPTR)
                refcount = gcheader.refcount - 1
                if refcount == 0:
                    llop.gc_free(lltype.Void, adr)
                else:
                    gcheader.refcount = refcount

        def ll_no_pointer_dealloc(adr):
            llop.gc_free(lltype.Void, adr)

        mh = mallocHelpers()
        mh.allocate = llmemory.raw_malloc

        def ll_malloc_fixedsize(size):
            # allocate header + object, zero both, return object address
            size = gc_header_offset + size
            result = mh._ll_malloc_fixedsize(size)
            llmemory.raw_memclear(result, size)
            result += gc_header_offset
            return result

        def ll_malloc_varsize_no_length(length, size, itemsize):
            try:
                fixsize = gc_header_offset + size
                varsize = ovfcheck(itemsize * length)
                tot_size = ovfcheck(fixsize + varsize)
            except OverflowError:
                raise MemoryError()
            result = mh._ll_malloc_fixedsize(tot_size)
            llmemory.raw_memclear(result, tot_size)
            result += gc_header_offset
            return result

        mh.ll_malloc_varsize_no_length = ll_malloc_varsize_no_length
        ll_malloc_varsize = mh.ll_malloc_varsize

        def ll_identityhash(addr):
            # lazily computed identity hash, cached in the header
            obj = llmemory.cast_adr_to_ptr(addr, HDRPTR)
            h = obj.hash
            if h == 0:
                obj.hash = h = llmemory.cast_adr_to_int(addr)
            return h

        if self.translator:
            self.increfptr = self.inittime_helper(ll_incref, [llmemory.Address], lltype.Void)
            self.decref_ptr = self.inittime_helper(
                ll_decref, [llmemory.Address, lltype.Ptr(ADDRESS_VOID_FUNC)], lltype.Void
            )
            self.decref_simple_ptr = self.inittime_helper(ll_decref_simple, [llmemory.Address], lltype.Void)
            self.no_pointer_dealloc_ptr = self.inittime_helper(ll_no_pointer_dealloc, [llmemory.Address], lltype.Void)
            self.malloc_fixedsize_ptr = self.inittime_helper(ll_malloc_fixedsize, [lltype.Signed], llmemory.Address)
            self.malloc_varsize_no_length_ptr = self.inittime_helper(
                ll_malloc_varsize_no_length, [lltype.Signed] * 3, llmemory.Address
            )
            self.malloc_varsize_ptr = self.inittime_helper(ll_malloc_varsize, [lltype.Signed] * 4, llmemory.Address)
            self.identityhash_ptr = self.inittime_helper(
                ll_identityhash, [llmemory.Address], lltype.Signed, inline=False
            )
            self.mixlevelannotator.finish()
            self.mixlevelannotator.backend_optimize()
        # cache graphs:
        self.decref_funcptrs = {}
        self.static_deallocator_funcptrs = {}
        self.dynamic_deallocator_funcptrs = {}
        self.queryptr2dynamic_deallocator_funcptr = {}

    def finish_helpers(self, **kwds):
        """After the helpers are finished, run the malloc-removal
        optimization over all the static deallocator graphs."""
        GCTransformer.finish_helpers(self, **kwds)
        from pypy.translator.backendopt.malloc import remove_mallocs
        seen = {}
        graphs = []
        for fptr in self.static_deallocator_funcptrs.itervalues():
            graph = fptr._obj.graph
            if graph in seen:
                continue
            seen[graph] = True
            graphs.append(graph)
        remove_mallocs(self.translator, graphs)

    def var_needs_set_transform(self, var):
        return var_needsgc(var)

    def push_alive_nopyobj(self, var, llops):
        """Emit an incref for 'var'."""
        v_adr = gen_cast(llops, llmemory.Address, var)
        llops.genop("direct_call", [self.increfptr, v_adr])

    def pop_alive_nopyobj(self, var, llops):
        """Emit a decref for 'var', choosing the simple decref when the
        type needs no custom deallocator."""
        PTRTYPE = var.concretetype
        v_adr = gen_cast(llops, llmemory.Address, var)
        dealloc_fptr = self.dynamic_deallocation_funcptr_for_type(PTRTYPE.TO)
        if dealloc_fptr is self.no_pointer_dealloc_ptr.value:
            # simple case
            llops.genop("direct_call", [self.decref_simple_ptr, v_adr])
        else:
            cdealloc_fptr = rmodel.inputconst(lltype.typeOf(dealloc_fptr), dealloc_fptr)
            llops.genop("direct_call", [self.decref_ptr, v_adr, cdealloc_fptr])

    def gct_fv_gc_malloc(self, hop, flags, TYPE, c_size):
        v_raw = hop.genop("direct_call", [self.malloc_fixedsize_ptr, c_size], resulttype=llmemory.Address)
        return v_raw

    def gct_fv_gc_malloc_varsize(self, hop, flags, TYPE, v_length, c_const_size, c_item_size, c_offset_to_length):
        """Emit a varsize malloc, with or without a stored length field."""
        if c_offset_to_length is None:
            v_raw = hop.genop(
                "direct_call",
                [self.malloc_varsize_no_length_ptr, v_length, c_const_size, c_item_size],
                resulttype=llmemory.Address,
            )
        else:
            v_raw = hop.genop(
                "direct_call",
                [self.malloc_varsize_ptr, v_length, c_const_size, c_item_size, c_offset_to_length],
                resulttype=llmemory.Address,
            )
        return v_raw

    def gct_gc_deallocate(self, hop):
        TYPE = hop.spaceop.args[0].value
        v_addr = hop.spaceop.args[1]
        dealloc_fptr = self.dynamic_deallocation_funcptr_for_type(TYPE)
        cdealloc_fptr = rmodel.inputconst(lltype.typeOf(dealloc_fptr), dealloc_fptr)
        hop.genop("direct_call", [cdealloc_fptr, v_addr])

    def consider_constant(self, TYPE, value):
        """Give prebuilt constants a header with an 'infinite' refcount
        so they are never deallocated."""
        if value is not lltype.top_container(value):
            return
        if isinstance(TYPE, (lltype.GcStruct, lltype.GcArray)):
            p = value._as_ptr()
            if not self.gcheaderbuilder.get_header(p):
                hdr = self.gcheaderbuilder.new_header(p)
                hdr.refcount = sys.maxint // 2
                hdr.hash = lltype.identityhash_nocache(p)

    def static_deallocation_funcptr_for_type(self, TYPE):
        """Return (building and caching on first use) the deallocator
        for the statically-known type TYPE; the deallocator pops the
        refs held by the object's fields and optionally runs the
        RPython destructor."""
        if TYPE in self.static_deallocator_funcptrs:
            return self.static_deallocator_funcptrs[TYPE]
        # print_call_chain(self)
        if TYPE._gckind == "cpy":
            return  # you don't really have an RPython deallocator for PyObjects
        rtti = get_rtti(TYPE)
        if rtti is not None and hasattr(rtti._obj, "destructor_funcptr"):
            destrptr = rtti._obj.destructor_funcptr
            DESTR_ARG = lltype.typeOf(destrptr).TO.ARGS[0]
        else:
            destrptr = None
            DESTR_ARG = None
        if destrptr is None and not find_gc_ptrs_in_type(TYPE):
            # no destructor and no GC pointers inside: trivial dealloc
            p = self.no_pointer_dealloc_ptr.value
            self.static_deallocator_funcptrs[TYPE] = p
            return p
        if destrptr is not None:
            # the deallocator source is generated and exec'd; the
            # destructor runs with the refcount temporarily bumped to 1
            # so that a resurrecting destructor does not free the object
            body = "\n".join(_static_deallocator_body_for_type("v", TYPE, 3))
            src = """
def ll_deallocator(addr):
    exc_instance = llop.gc_fetch_exception(EXC_INSTANCE_TYPE)
    try:
        v = cast_adr_to_ptr(addr, PTR_TYPE)
        gcheader = cast_adr_to_ptr(addr - gc_header_offset, HDRPTR)
        # refcount is at zero, temporarily bump it to 1:
        gcheader.refcount = 1
        destr_v = cast_pointer(DESTR_ARG, v)
        ll_call_destructor(destrptr, destr_v)
        refcount = gcheader.refcount - 1
        gcheader.refcount = refcount
        if refcount == 0:
%s
            llop.%s_free(lltype.Void, addr)
    except:
        pass
    llop.gc_restore_exception(lltype.Void, exc_instance)
    pop_alive(exc_instance)
    # XXX layering of exceptiontransform versus gcpolicy
""" % (
                body,
                TYPE._gckind,
            )
        else:
            call_del = None
            body = "\n".join(_static_deallocator_body_for_type("v", TYPE))
            src = (
                "def ll_deallocator(addr):\n    v = cast_adr_to_ptr(addr, PTR_TYPE)\n"
                + body
                + "\n    llop.%s_free(lltype.Void, addr)\n" % (TYPE._gckind,)
            )
        d = {
            "pop_alive": LLTransformerOp(self.pop_alive),
            "llop": llop,
            "lltype": lltype,
            "destrptr": destrptr,
            "gc_header_offset": self.gcheaderbuilder.size_gc_header,
            "cast_adr_to_ptr": llmemory.cast_adr_to_ptr,
            "cast_pointer": lltype.cast_pointer,
            "PTR_TYPE": lltype.Ptr(TYPE),
            "DESTR_ARG": DESTR_ARG,
            "EXC_INSTANCE_TYPE": self.translator.rtyper.exceptiondata.lltype_of_exception_value,
            "ll_call_destructor": ll_call_destructor,
            "HDRPTR": lltype.Ptr(self.HDR),
        }
        exec src in d
        this = d["ll_deallocator"]
        fptr = self.annotate_finalizer(this, [llmemory.Address], lltype.Void)
        self.static_deallocator_funcptrs[TYPE] = fptr
        # make sure deallocators for all reachable GC types exist too
        for p in find_gc_ptrs_in_type(TYPE):
            self.static_deallocation_funcptr_for_type(p.TO)
        return fptr

    def dynamic_deallocation_funcptr_for_type(self, TYPE):
        """Return the deallocator for TYPE when the runtime type must be
        queried through the RTTI (instances of subclasses)."""
        assert TYPE._gckind != "cpy"
        if TYPE in self.dynamic_deallocator_funcptrs:
            return self.dynamic_deallocator_funcptrs[TYPE]
        # print_call_chain(self)
        rtti = get_rtti(TYPE)
        if rtti is None:
            # no RTTI: the static deallocator is enough
            p = self.static_deallocation_funcptr_for_type(TYPE)
            self.dynamic_deallocator_funcptrs[TYPE] = p
            return p
        queryptr = rtti._obj.query_funcptr
        if queryptr._obj in self.queryptr2dynamic_deallocator_funcptr:
            return self.queryptr2dynamic_deallocator_funcptr[queryptr._obj]
        RTTI_PTR = lltype.Ptr(lltype.RuntimeTypeInfo)
        QUERY_ARG_TYPE = lltype.typeOf(queryptr).TO.ARGS[0]
        gc_header_offset = self.gcheaderbuilder.size_gc_header
        HDRPTR = lltype.Ptr(self.HDR)

        def ll_dealloc(addr):
            # bump refcount to 1
            gcheader = llmemory.cast_adr_to_ptr(addr - gc_header_offset, HDRPTR)
            gcheader.refcount = 1
            v = llmemory.cast_adr_to_ptr(addr, QUERY_ARG_TYPE)
            rtti = queryptr(v)
            gcheader.refcount = 0
            llop.gc_call_rtti_destructor(lltype.Void, rtti, addr)

        fptr = self.annotate_helper(ll_dealloc, [llmemory.Address], lltype.Void)
        self.dynamic_deallocator_funcptrs[TYPE] = fptr
        self.queryptr2dynamic_deallocator_funcptr[queryptr._obj] = fptr
        return fptr

    def gct_gc_identityhash(self, hop):
        v_obj = hop.spaceop.args[0]
        v_adr = hop.genop("cast_ptr_to_adr", [v_obj], resulttype=llmemory.Address)
        hop.genop("direct_call", [self.identityhash_ptr, v_adr], resultvar=hop.spaceop.result)
def __init__(self, translator):
    """Build the incref/decref/dealloc/malloc helper graphs that the
    refcounting transformation inserts calls to.

    translator: the translator driving the transformation; when None,
        the helper graphs are not built (e.g. in some tests).
    """
    super(RefcountingGCTransformer, self).__init__(translator, inline=True)
    self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
    gc_header_offset = self.gcheaderbuilder.size_gc_header
    self.deallocator_graphs_needing_transforming = []
    # create incref, etc graph
    # NOTE(review): 'memoryError' looks unused in this method -- confirm
    memoryError = MemoryError()
    HDRPTR = lltype.Ptr(self.HDR)

    def ll_incref(adr):
        if adr:
            gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                HDRPTR)
            gcheader.refcount = gcheader.refcount + 1

    def ll_decref(adr, dealloc):
        if adr:
            gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                HDRPTR)
            refcount = gcheader.refcount - 1
            gcheader.refcount = refcount
            if refcount == 0:
                dealloc(adr)

    def ll_decref_simple(adr):
        # decref for objects needing no custom deallocator
        if adr:
            gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                HDRPTR)
            refcount = gcheader.refcount - 1
            if refcount == 0:
                llop.gc_free(lltype.Void, adr)
            else:
                gcheader.refcount = refcount

    def ll_no_pointer_dealloc(adr):
        llop.gc_free(lltype.Void, adr)

    mh = mallocHelpers()
    mh.allocate = llmemory.raw_malloc

    def ll_malloc_fixedsize(size):
        # allocate header + object, zero it, return the object address
        size = gc_header_offset + size
        result = mh._ll_malloc_fixedsize(size)
        llmemory.raw_memclear(result, size)
        result += gc_header_offset
        return result

    def ll_malloc_varsize_no_length(length, size, itemsize):
        try:
            fixsize = gc_header_offset + size
            varsize = ovfcheck(itemsize * length)
            tot_size = ovfcheck(fixsize + varsize)
        except OverflowError:
            raise MemoryError()
        result = mh._ll_malloc_fixedsize(tot_size)
        llmemory.raw_memclear(result, tot_size)
        result += gc_header_offset
        return result

    mh.ll_malloc_varsize_no_length = ll_malloc_varsize_no_length
    ll_malloc_varsize = mh.ll_malloc_varsize

    def ll_identityhash(addr):
        # lazily computed hash, cached in the header
        obj = llmemory.cast_adr_to_ptr(addr, HDRPTR)
        h = obj.hash
        if h == 0:
            obj.hash = h = llmemory.cast_adr_to_int(addr)
        return h

    if self.translator:
        self.increfptr = self.inittime_helper(
            ll_incref, [llmemory.Address], lltype.Void)
        self.decref_ptr = self.inittime_helper(
            ll_decref, [llmemory.Address, lltype.Ptr(ADDRESS_VOID_FUNC)],
            lltype.Void)
        self.decref_simple_ptr = self.inittime_helper(
            ll_decref_simple, [llmemory.Address], lltype.Void)
        self.no_pointer_dealloc_ptr = self.inittime_helper(
            ll_no_pointer_dealloc, [llmemory.Address], lltype.Void)
        self.malloc_fixedsize_ptr = self.inittime_helper(
            ll_malloc_fixedsize, [lltype.Signed], llmemory.Address)
        self.malloc_varsize_no_length_ptr = self.inittime_helper(
            ll_malloc_varsize_no_length, [lltype.Signed]*3, llmemory.Address)
        self.malloc_varsize_ptr = self.inittime_helper(
            ll_malloc_varsize, [lltype.Signed]*4, llmemory.Address)
        self.identityhash_ptr = self.inittime_helper(
            ll_identityhash, [llmemory.Address], lltype.Signed,
            inline=False)
        self.mixlevelannotator.finish()
        self.mixlevelannotator.backend_optimize()
    # cache graphs:
    self.decref_funcptrs = {}
    self.static_deallocator_funcptrs = {}
    self.dynamic_deallocator_funcptrs = {}
    self.queryptr2dynamic_deallocator_funcptr = {}
class RefcountingGCTransformer(GCTransformer):
    """GC transformer implementing naive reference counting: every
    pointer store is bracketed by incref/decref calls, and objects are
    deallocated as soon as their refcount drops to zero."""

    malloc_zero_filled = True

    # per-object header: the refcount plus a cached identity hash
    HDR = lltype.Struct("header", ("refcount", lltype.Signed),
                        ("hash", lltype.Signed))

    def __init__(self, translator):
        """Build the incref/decref/dealloc/malloc helper graphs."""
        super(RefcountingGCTransformer, self).__init__(translator,
                                                       inline=True)
        self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
        gc_header_offset = self.gcheaderbuilder.size_gc_header
        self.deallocator_graphs_needing_transforming = []
        # create incref, etc graph
        # NOTE(review): 'memoryError' looks unused here -- confirm
        memoryError = MemoryError()
        HDRPTR = lltype.Ptr(self.HDR)

        def ll_incref(adr):
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                    HDRPTR)
                gcheader.refcount = gcheader.refcount + 1

        def ll_decref(adr, dealloc):
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                    HDRPTR)
                refcount = gcheader.refcount - 1
                gcheader.refcount = refcount
                if refcount == 0:
                    dealloc(adr)

        def ll_decref_simple(adr):
            # fast decref for objects needing no custom deallocator
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                    HDRPTR)
                refcount = gcheader.refcount - 1
                if refcount == 0:
                    llop.gc_free(lltype.Void, adr)
                else:
                    gcheader.refcount = refcount

        def ll_no_pointer_dealloc(adr):
            llop.gc_free(lltype.Void, adr)

        mh = mallocHelpers()
        mh.allocate = llmemory.raw_malloc

        def ll_malloc_fixedsize(size):
            # allocate header + object, zero both, return object address
            size = gc_header_offset + size
            result = mh._ll_malloc_fixedsize(size)
            llmemory.raw_memclear(result, size)
            result += gc_header_offset
            return result

        def ll_malloc_varsize_no_length(length, size, itemsize):
            try:
                fixsize = gc_header_offset + size
                varsize = ovfcheck(itemsize * length)
                tot_size = ovfcheck(fixsize + varsize)
            except OverflowError:
                raise MemoryError()
            result = mh._ll_malloc_fixedsize(tot_size)
            llmemory.raw_memclear(result, tot_size)
            result += gc_header_offset
            return result

        mh.ll_malloc_varsize_no_length = ll_malloc_varsize_no_length
        ll_malloc_varsize = mh.ll_malloc_varsize

        def ll_identityhash(addr):
            # lazily computed identity hash, cached in the header
            obj = llmemory.cast_adr_to_ptr(addr, HDRPTR)
            h = obj.hash
            if h == 0:
                obj.hash = h = llmemory.cast_adr_to_int(addr)
            return h

        if self.translator:
            self.increfptr = self.inittime_helper(
                ll_incref, [llmemory.Address], lltype.Void)
            self.decref_ptr = self.inittime_helper(
                ll_decref, [llmemory.Address, lltype.Ptr(ADDRESS_VOID_FUNC)],
                lltype.Void)
            self.decref_simple_ptr = self.inittime_helper(
                ll_decref_simple, [llmemory.Address], lltype.Void)
            self.no_pointer_dealloc_ptr = self.inittime_helper(
                ll_no_pointer_dealloc, [llmemory.Address], lltype.Void)
            self.malloc_fixedsize_ptr = self.inittime_helper(
                ll_malloc_fixedsize, [lltype.Signed], llmemory.Address)
            self.malloc_varsize_no_length_ptr = self.inittime_helper(
                ll_malloc_varsize_no_length, [lltype.Signed]*3,
                llmemory.Address)
            self.malloc_varsize_ptr = self.inittime_helper(
                ll_malloc_varsize, [lltype.Signed]*4, llmemory.Address)
            self.identityhash_ptr = self.inittime_helper(
                ll_identityhash, [llmemory.Address], lltype.Signed,
                inline=False)
            self.mixlevelannotator.finish()
            self.mixlevelannotator.backend_optimize()
        # cache graphs:
        self.decref_funcptrs = {}
        self.static_deallocator_funcptrs = {}
        self.dynamic_deallocator_funcptrs = {}
        self.queryptr2dynamic_deallocator_funcptr = {}

    def finish_helpers(self, **kwds):
        """After the helpers are finished, run the malloc-removal
        optimization over all the static deallocator graphs."""
        GCTransformer.finish_helpers(self, **kwds)
        from pypy.translator.backendopt.malloc import remove_mallocs
        seen = {}
        graphs = []
        for fptr in self.static_deallocator_funcptrs.itervalues():
            graph = fptr._obj.graph
            if graph in seen:
                continue
            seen[graph] = True
            graphs.append(graph)
        remove_mallocs(self.translator, graphs)

    def var_needs_set_transform(self, var):
        return var_needsgc(var)

    def push_alive_nopyobj(self, var, llops):
        """Emit an incref for 'var'."""
        v_adr = gen_cast(llops, llmemory.Address, var)
        llops.genop("direct_call", [self.increfptr, v_adr])

    def pop_alive_nopyobj(self, var, llops):
        """Emit a decref for 'var', choosing the simple decref when the
        type needs no custom deallocator."""
        PTRTYPE = var.concretetype
        v_adr = gen_cast(llops, llmemory.Address, var)
        dealloc_fptr = self.dynamic_deallocation_funcptr_for_type(PTRTYPE.TO)
        if dealloc_fptr is self.no_pointer_dealloc_ptr.value:
            # simple case
            llops.genop("direct_call", [self.decref_simple_ptr, v_adr])
        else:
            cdealloc_fptr = rmodel.inputconst(
                lltype.typeOf(dealloc_fptr), dealloc_fptr)
            llops.genop("direct_call",
                        [self.decref_ptr, v_adr, cdealloc_fptr])

    def gct_fv_gc_malloc(self, hop, flags, TYPE, c_size):
        v_raw = hop.genop("direct_call",
                          [self.malloc_fixedsize_ptr, c_size],
                          resulttype=llmemory.Address)
        return v_raw

    def gct_fv_gc_malloc_varsize(self, hop, flags, TYPE, v_length,
                                 c_const_size, c_item_size,
                                 c_offset_to_length):
        """Emit a varsize malloc, with or without a stored length field."""
        if c_offset_to_length is None:
            v_raw = hop.genop("direct_call",
                              [self.malloc_varsize_no_length_ptr, v_length,
                               c_const_size, c_item_size],
                              resulttype=llmemory.Address)
        else:
            v_raw = hop.genop("direct_call",
                              [self.malloc_varsize_ptr, v_length,
                               c_const_size, c_item_size,
                               c_offset_to_length],
                              resulttype=llmemory.Address)
        return v_raw

    def gct_gc_deallocate(self, hop):
        TYPE = hop.spaceop.args[0].value
        v_addr = hop.spaceop.args[1]
        dealloc_fptr = self.dynamic_deallocation_funcptr_for_type(TYPE)
        cdealloc_fptr = rmodel.inputconst(
            lltype.typeOf(dealloc_fptr), dealloc_fptr)
        hop.genop("direct_call", [cdealloc_fptr, v_addr])

    def consider_constant(self, TYPE, value):
        """Give prebuilt constants a header with an 'infinite' refcount
        so they are never deallocated."""
        if value is not lltype.top_container(value):
            return
        if isinstance(TYPE, (lltype.GcStruct, lltype.GcArray)):
            p = value._as_ptr()
            if not self.gcheaderbuilder.get_header(p):
                hdr = self.gcheaderbuilder.new_header(p)
                hdr.refcount = sys.maxint // 2
                hdr.hash = lltype.identityhash_nocache(p)

    def static_deallocation_funcptr_for_type(self, TYPE):
        """Return (building and caching on first use) the deallocator
        for the statically-known type TYPE; the deallocator pops the
        refs held by the object's fields and optionally runs the
        RPython destructor."""
        if TYPE in self.static_deallocator_funcptrs:
            return self.static_deallocator_funcptrs[TYPE]
        #print_call_chain(self)
        if TYPE._gckind == 'cpy':
            return # you don't really have an RPython deallocator for PyObjects
        rtti = get_rtti(TYPE)
        if rtti is not None and hasattr(rtti._obj, 'destructor_funcptr'):
            destrptr = rtti._obj.destructor_funcptr
            DESTR_ARG = lltype.typeOf(destrptr).TO.ARGS[0]
        else:
            destrptr = None
            DESTR_ARG = None
        if destrptr is None and not find_gc_ptrs_in_type(TYPE):
            # no destructor and no GC pointers inside: trivial dealloc
            p = self.no_pointer_dealloc_ptr.value
            self.static_deallocator_funcptrs[TYPE] = p
            return p
        if destrptr is not None:
            # the deallocator source is generated and exec'd; the
            # destructor runs with the refcount temporarily bumped to 1
            # so that a resurrecting destructor does not free the object
            body = '\n'.join(_static_deallocator_body_for_type('v', TYPE, 3))
            src = """
def ll_deallocator(addr):
    exc_instance = llop.gc_fetch_exception(EXC_INSTANCE_TYPE)
    try:
        v = cast_adr_to_ptr(addr, PTR_TYPE)
        gcheader = cast_adr_to_ptr(addr - gc_header_offset, HDRPTR)
        # refcount is at zero, temporarily bump it to 1:
        gcheader.refcount = 1
        destr_v = cast_pointer(DESTR_ARG, v)
        ll_call_destructor(destrptr, destr_v)
        refcount = gcheader.refcount - 1
        gcheader.refcount = refcount
        if refcount == 0:
%s
            llop.%s_free(lltype.Void, addr)
    except:
        pass
    llop.gc_restore_exception(lltype.Void, exc_instance)
    pop_alive(exc_instance)
    # XXX layering of exceptiontransform versus gcpolicy
""" % (body, TYPE._gckind)
        else:
            call_del = None
            body = '\n'.join(_static_deallocator_body_for_type('v', TYPE))
            src = ('def ll_deallocator(addr):\n    v = cast_adr_to_ptr(addr, PTR_TYPE)\n' +
                   body +
                   '\n    llop.%s_free(lltype.Void, addr)\n' % (TYPE._gckind,))
        d = {'pop_alive': LLTransformerOp(self.pop_alive),
             'llop': llop,
             'lltype': lltype,
             'destrptr': destrptr,
             'gc_header_offset': self.gcheaderbuilder.size_gc_header,
             'cast_adr_to_ptr': llmemory.cast_adr_to_ptr,
             'cast_pointer': lltype.cast_pointer,
             'PTR_TYPE': lltype.Ptr(TYPE),
             'DESTR_ARG': DESTR_ARG,
             'EXC_INSTANCE_TYPE': self.translator.rtyper.exceptiondata.lltype_of_exception_value,
             'll_call_destructor': ll_call_destructor,
             'HDRPTR': lltype.Ptr(self.HDR)}
        exec src in d
        this = d['ll_deallocator']
        fptr = self.annotate_finalizer(this, [llmemory.Address], lltype.Void)
        self.static_deallocator_funcptrs[TYPE] = fptr
        # make sure deallocators for all reachable GC types exist too
        for p in find_gc_ptrs_in_type(TYPE):
            self.static_deallocation_funcptr_for_type(p.TO)
        return fptr

    def dynamic_deallocation_funcptr_for_type(self, TYPE):
        """Return the deallocator for TYPE when the runtime type must be
        queried through the RTTI (instances of subclasses)."""
        assert TYPE._gckind != 'cpy'
        if TYPE in self.dynamic_deallocator_funcptrs:
            return self.dynamic_deallocator_funcptrs[TYPE]
        #print_call_chain(self)
        rtti = get_rtti(TYPE)
        if rtti is None:
            # no RTTI: the static deallocator is enough
            p = self.static_deallocation_funcptr_for_type(TYPE)
            self.dynamic_deallocator_funcptrs[TYPE] = p
            return p
        queryptr = rtti._obj.query_funcptr
        if queryptr._obj in self.queryptr2dynamic_deallocator_funcptr:
            return self.queryptr2dynamic_deallocator_funcptr[queryptr._obj]
        RTTI_PTR = lltype.Ptr(lltype.RuntimeTypeInfo)
        QUERY_ARG_TYPE = lltype.typeOf(queryptr).TO.ARGS[0]
        gc_header_offset = self.gcheaderbuilder.size_gc_header
        HDRPTR = lltype.Ptr(self.HDR)

        def ll_dealloc(addr):
            # bump refcount to 1
            gcheader = llmemory.cast_adr_to_ptr(addr - gc_header_offset,
                                                HDRPTR)
            gcheader.refcount = 1
            v = llmemory.cast_adr_to_ptr(addr, QUERY_ARG_TYPE)
            rtti = queryptr(v)
            gcheader.refcount = 0
            llop.gc_call_rtti_destructor(lltype.Void, rtti, addr)

        fptr = self.annotate_helper(ll_dealloc, [llmemory.Address],
                                    lltype.Void)
        self.dynamic_deallocator_funcptrs[TYPE] = fptr
        self.queryptr2dynamic_deallocator_funcptr[queryptr._obj] = fptr
        return fptr

    def gct_gc_identityhash(self, hop):
        v_obj = hop.spaceop.args[0]
        v_adr = hop.genop("cast_ptr_to_adr", [v_obj],
                          resulttype=llmemory.Address)
        hop.genop("direct_call", [self.identityhash_ptr, v_adr],
                  resultvar=hop.spaceop.result)
    def __init__(self, gcdescr, translator, llop1=llop):
        """Set up the JIT's view of the framework GC.

        Builds the GC root map, the layout builder shared with the
        FrameworkGCTransformer, and the low-level malloc entry points
        (fixed-size, array, str/unicode, and the fixed-size slow path)
        that JITted code calls directly.

        :param gcdescr: translation GC descriptor (config is read from it)
        :param translator: the translation context
        :param llop1: low-level op namespace; a parameter so tests can
                      substitute a fake implementation
        """
        from pypy.rpython.memory.gctypelayout import _check_typeid
        from pypy.rpython.memory.gcheader import GCHeaderBuilder
        from pypy.rpython.memory.gctransform import framework
        GcLLDescription.__init__(self, gcdescr, translator)
        assert self.translate_support_code, "required with the framework GC"
        self.translator = translator
        self.llop1 = llop1

        # we need the hybrid GC for GcRefList.alloc_gcref_list() to work
        if gcdescr.config.translation.gc != 'hybrid':
            raise NotImplementedError("--gc=%s not implemented with the JIT"
                                      % (gcdescr.config.translation.gc, ))

        # to find roots in the assembler, make a GcRootMap
        name = gcdescr.config.translation.gcrootfinder
        try:
            cls = globals()['GcRootMap_' + name]
        except KeyError:
            raise NotImplementedError("--gcrootfinder=%s not implemented"
                                      " with the JIT" % (name, ))
        gcrootmap = cls()
        self.gcrootmap = gcrootmap
        self.gcrefs = GcRefList()
        self.single_gcref_descr = GcPtrFieldDescr(0)

        # make a TransformerLayoutBuilder and save it on the translator
        # where it can be fished and reused by the FrameworkGCTransformer
        self.layoutbuilder = framework.TransformerLayoutBuilder(translator)
        self.layoutbuilder.delay_encoding()
        self.translator._jit2gc = {
            'layoutbuilder': self.layoutbuilder,
            'gcmapstart': lambda: gcrootmap.gcmapstart(),
            'gcmapend': lambda: gcrootmap.gcmapend(),
            }
        self.GCClass = self.layoutbuilder.GCClass
        self.moving_gc = self.GCClass.moving_gc
        self.HDRPTR = lltype.Ptr(self.GCClass.HDR)
        self.gcheaderbuilder = GCHeaderBuilder(self.HDRPTR.TO)
        (self.array_basesize, _, self.array_length_ofs) = \
             symbolic.get_array_token(lltype.GcArray(lltype.Signed), True)
        min_ns = self.GCClass.TRANSLATION_PARAMS['min_nursery_size']
        self.max_size_of_young_obj = self.GCClass.get_young_fixedsize(min_ns)

        # make a malloc function, with three arguments
        def malloc_basic(size, tid):
            # 'tid' packs the type id in the low bits and a has-finalizer
            # flag in bit 16 (see the (1 << 16) test below).
            type_id = llop.extract_ushort(rffi.USHORT, tid)
            has_finalizer = bool(tid & (1 << 16))
            _check_typeid(type_id)
            try:
                res = llop1.do_malloc_fixedsize_clear(llmemory.GCREF,
                                                      type_id, size, True,
                                                      has_finalizer, False)
            except MemoryError:
                # fatalerror aborts; the nullptr assignment keeps the
                # function well-typed for the annotator.
                fatalerror("out of memory (from JITted code)")
                res = lltype.nullptr(llmemory.GCREF.TO)
            #llop.debug_print(lltype.Void, "\tmalloc_basic", size, type_id,
            #                 "-->", res)
            return res
        self.malloc_basic = malloc_basic
        self.GC_MALLOC_BASIC = lltype.Ptr(lltype.FuncType(
            [lltype.Signed, lltype.Signed], llmemory.GCREF))
        self.WB_FUNCPTR = lltype.Ptr(lltype.FuncType(
            [llmemory.Address, llmemory.Address], lltype.Void))
        self.write_barrier_descr = WriteBarrierDescr(self)
        #
        def malloc_array(itemsize, tid, num_elem):
            # Variable-sized allocation; base size and length offset were
            # precomputed above for GcArray(Signed).
            type_id = llop.extract_ushort(rffi.USHORT, tid)
            _check_typeid(type_id)
            try:
                return llop1.do_malloc_varsize_clear(
                    llmemory.GCREF,
                    type_id, num_elem, self.array_basesize, itemsize,
                    self.array_length_ofs, True)
            except MemoryError:
                fatalerror("out of memory (from JITted code)")
                return lltype.nullptr(llmemory.GCREF.TO)
        self.malloc_array = malloc_array
        self.GC_MALLOC_ARRAY = lltype.Ptr(lltype.FuncType(
            [lltype.Signed] * 3, llmemory.GCREF))
        #
        # Layout tokens and type ids for the two string types, captured by
        # the closures below.
        (str_basesize, str_itemsize, str_ofs_length
         ) = symbolic.get_array_token(rstr.STR, True)
        (unicode_basesize, unicode_itemsize, unicode_ofs_length
         ) = symbolic.get_array_token(rstr.UNICODE, True)
        str_type_id = self.layoutbuilder.get_type_id(rstr.STR)
        unicode_type_id = self.layoutbuilder.get_type_id(rstr.UNICODE)
        #
        def malloc_str(length):
            try:
                return llop1.do_malloc_varsize_clear(
                    llmemory.GCREF,
                    str_type_id, length, str_basesize, str_itemsize,
                    str_ofs_length, True)
            except MemoryError:
                fatalerror("out of memory (from JITted code)")
                return lltype.nullptr(llmemory.GCREF.TO)
        def malloc_unicode(length):
            try:
                return llop1.do_malloc_varsize_clear(
                    llmemory.GCREF,
                    unicode_type_id, length, unicode_basesize,
                    unicode_itemsize,
                    unicode_ofs_length, True)
            except MemoryError:
                fatalerror("out of memory (from JITted code)")
                return lltype.nullptr(llmemory.GCREF.TO)
        self.malloc_str = malloc_str
        self.malloc_unicode = malloc_unicode
        self.GC_MALLOC_STR_UNICODE = lltype.Ptr(lltype.FuncType(
            [lltype.Signed], llmemory.GCREF))

        def malloc_fixedsize_slowpath(size):
            # Returns two values packed into one 64-bit unsigned: the new
            # nursery-free pointer in the high 32 bits and the allocated
            # address in the low 32 bits.
            # NOTE(review): the packing assumes addresses fit in 32 bits
            # (32-bit backend) -- confirm against the assembler side.
            try:
                gcref = llop1.do_malloc_fixedsize_clear(llmemory.GCREF, 0,
                                                        size, True, False,
                                                        False)
            except MemoryError:
                fatalerror("out of memory (from JITted code)")
                return r_ulonglong(0)
            res = rffi.cast(lltype.Signed, gcref)
            nurs_free = llop1.gc_adr_of_nursery_free(
                llmemory.Address).signed[0]
            return r_ulonglong(nurs_free) << 32 | r_ulonglong(r_uint(res))
        self.malloc_fixedsize_slowpath = malloc_fixedsize_slowpath
        self.MALLOC_FIXEDSIZE_SLOWPATH = lltype.FuncType(
            [lltype.Signed], lltype.UnsignedLongLong)
class RefcountingGCTransformer(GCTransformer):
    """GC transformer implementing reference counting.

    Each object is preceded by a header holding a signed refcount;
    push_alive/pop_alive operations are rewritten into calls to the
    incref/decref helper graphs created in __init__.
    """

    # Per-object GC header: a single signed refcount field, placed
    # gc_header_offset bytes before the object.
    HDR = lltype.Struct("header", ("refcount", lltype.Signed))

    def __init__(self, translator):
        super(RefcountingGCTransformer, self).__init__(translator, inline=True)
        self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
        gc_header_offset = self.gcheaderbuilder.size_gc_header
        self.deallocator_graphs_needing_transforming = []
        # create incref, etc graph
        # Prebuilt exception instance: the low-level malloc helpers raise
        # this single instance (presumably so they never allocate while
        # reporting out-of-memory -- an RPython constraint; confirm).
        memoryError = MemoryError()
        HDRPTR = lltype.Ptr(self.HDR)
        def ll_incref(adr):
            # NULL addresses are silently ignored by all refcount helpers.
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                    HDRPTR)
                gcheader.refcount = gcheader.refcount + 1
        def ll_decref(adr, dealloc):
            # Generic decref: calls the per-type 'dealloc' when the count
            # drops to zero.
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                    HDRPTR)
                refcount = gcheader.refcount - 1
                gcheader.refcount = refcount
                if refcount == 0:
                    dealloc(adr)
        def ll_decref_simple(adr):
            # Fast path for types with no GC pointers and no destructor:
            # free directly.  Note the refcount is only written back when
            # nonzero -- at zero the memory is freed instead.
            if adr:
                gcheader = llmemory.cast_adr_to_ptr(adr - gc_header_offset,
                                                    HDRPTR)
                refcount = gcheader.refcount - 1
                if refcount == 0:
                    llop.gc_free(lltype.Void, adr)
                else:
                    gcheader.refcount = refcount
        def ll_no_pointer_dealloc(adr):
            llop.gc_free(lltype.Void, adr)
        def ll_malloc_fixedsize(size):
            # Allocate header + object, zero it, and return the address of
            # the object proper (just past the header).
            size = gc_header_offset + size
            result = lladdress.raw_malloc(size)
            if not result:
                raise memoryError
            lladdress.raw_memclear(result, size)
            result += gc_header_offset
            return result
        def ll_malloc_varsize_no_length(length, size, itemsize):
            # ovfcheck guards the size computation against overflow, which
            # is reported as the same MemoryError.
            try:
                fixsize = gc_header_offset + size
                varsize = ovfcheck(itemsize * length)
                tot_size = ovfcheck(fixsize + varsize)
            except OverflowError:
                raise memoryError
            result = lladdress.raw_malloc(tot_size)
            if not result:
                raise memoryError
            lladdress.raw_memclear(result, tot_size)
            result += gc_header_offset
            return result
        def ll_malloc_varsize(length, size, itemsize, lengthoffset):
            # Same as above, but also stores 'length' into the object's
            # length field.
            result = ll_malloc_varsize_no_length(length, size, itemsize)
            (result + lengthoffset).signed[0] = length
            return result
        if self.translator:
            # Turn the closures above into low-level helper graphs callable
            # from transformed code.
            self.increfptr = self.inittime_helper(
                ll_incref, [llmemory.Address], lltype.Void)
            self.decref_ptr = self.inittime_helper(
                ll_decref, [llmemory.Address, lltype.Ptr(ADDRESS_VOID_FUNC)],
                lltype.Void)
            self.decref_simple_ptr = self.inittime_helper(
                ll_decref_simple, [llmemory.Address], lltype.Void)
            self.no_pointer_dealloc_ptr = self.inittime_helper(
                ll_no_pointer_dealloc, [llmemory.Address], lltype.Void)
            self.malloc_fixedsize_ptr = self.inittime_helper(
                ll_malloc_fixedsize, [lltype.Signed], llmemory.Address)
            self.malloc_varsize_no_length_ptr = self.inittime_helper(
                ll_malloc_varsize_no_length, [lltype.Signed]*3,
                llmemory.Address)
            self.malloc_varsize_ptr = self.inittime_helper(
                ll_malloc_varsize, [lltype.Signed]*4, llmemory.Address)
            self.mixlevelannotator.finish()   # for now
        # cache graphs:
        self.decref_funcptrs = {}
        self.static_deallocator_funcptrs = {}
        self.dynamic_deallocator_funcptrs = {}
        self.queryptr2dynamic_deallocator_funcptr = {}

    def var_needs_set_transform(self, var):
        # Only GC-managed variables need the refcount set-transform.
        return var_needsgc(var)

    def push_alive_nopyobj(self, var, llops):
        """Emit an incref call for 'var'."""
        v_adr = gen_cast(llops, llmemory.Address, var)
        llops.genop("direct_call", [self.increfptr, v_adr])

    def pop_alive_nopyobj(self, var, llops):
        """Emit a decref call for 'var', choosing the simple decref when the
        type's deallocator is the trivial no-pointer one."""
        PTRTYPE = var.concretetype
        v_adr = gen_cast(llops, llmemory.Address, var)

        dealloc_fptr = self.dynamic_deallocation_funcptr_for_type(PTRTYPE.TO)
        if dealloc_fptr is self.no_pointer_dealloc_ptr.value:
            # simple case
            llops.genop("direct_call", [self.decref_simple_ptr, v_adr])
        else:
            cdealloc_fptr = rmodel.inputconst(
                lltype.typeOf(dealloc_fptr), dealloc_fptr)
            llops.genop("direct_call", [self.decref_ptr, v_adr,
                                        cdealloc_fptr])

    def gct_gc_protect(self, hop):
        """ protect this object from gc (make it immortal) """
        self.push_alive(hop.spaceop.args[0])

    def gct_gc_unprotect(self, hop):
        """ get this object back into gc control """
        self.pop_alive(hop.spaceop.args[0])

    def gct_malloc(self, hop):
        """Transform a fixed-size malloc into a call to the
        ll_malloc_fixedsize helper with the type's constant size."""
        TYPE = hop.spaceop.result.concretetype.TO
        assert not TYPE._is_varsize()
        c_size = rmodel.inputconst(lltype.Signed, llmemory.sizeof(TYPE))
        v_raw = hop.genop("direct_call", [self.malloc_fixedsize_ptr, c_size],
                          resulttype=llmemory.Address)
        hop.cast_result(v_raw)
    # Zeroing malloc is the same operation here: the raw-malloc helpers
    # already memclear the allocated block.
    gct_zero_malloc = gct_malloc

    def gct_malloc_varsize(self, hop):
        """Transform a variable-sized malloc into a call to the appropriate
        varsize helper, computing constant part size, item size and (unless
        the array is 'nolength') the offset of the length field."""
        def intconst(c):
            return rmodel.inputconst(lltype.Signed, c)

        op = hop.spaceop
        TYPE = op.result.concretetype.TO
        assert TYPE._is_varsize()

        if isinstance(TYPE, lltype.Struct):
            ARRAY = TYPE._flds[TYPE._arrayfld]
        else:
            ARRAY = TYPE
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY._hints.get('isrpystring', False):
            # RPython strings allocate one extra item (the trailing '\0').
            c_const_size = intconst(llmemory.sizeof(TYPE, 1))
        else:
            c_const_size = intconst(llmemory.sizeof(TYPE, 0))
        c_item_size = intconst(llmemory.sizeof(ARRAY.OF))

        if ARRAY._hints.get("nolength", False):
            v_raw = hop.genop("direct_call",
                              [self.malloc_varsize_no_length_ptr,
                               op.args[-1], c_const_size, c_item_size],
                              resulttype=llmemory.Address)
        else:
            if isinstance(TYPE, lltype.Struct):
                offset_to_length = llmemory.FieldOffset(TYPE, TYPE._arrayfld) + \
                                   llmemory.ArrayLengthOffset(ARRAY)
            else:
                offset_to_length = llmemory.ArrayLengthOffset(ARRAY)
            v_raw = hop.genop("direct_call",
                              [self.malloc_varsize_ptr, op.args[-1],
                               c_const_size, c_item_size,
                               intconst(offset_to_length)],
                              resulttype=llmemory.Address)
        hop.cast_result(v_raw)
    gct_zero_malloc_varsize = gct_malloc_varsize

    def gct_gc_deallocate(self, hop):
        """Transform an explicit gc_deallocate into a direct call to the
        type's dynamic deallocator."""
        TYPE = hop.spaceop.args[0].value
        v_addr = hop.spaceop.args[1]
        dealloc_fptr = self.dynamic_deallocation_funcptr_for_type(TYPE)
        cdealloc_fptr = rmodel.inputconst(
            lltype.typeOf(dealloc_fptr), dealloc_fptr)
        hop.genop("direct_call", [cdealloc_fptr, v_addr])

    def consider_constant(self, TYPE, value):
        """Give prebuilt GC constants a header with a huge refcount so they
        are effectively immortal (never reach zero)."""
        if value is not lltype.top_container(value):
            return
        if isinstance(TYPE, (lltype.GcStruct, lltype.GcArray)):
            p = value._as_ptr()
            if not self.gcheaderbuilder.get_header(p):
                hdr = self.gcheaderbuilder.new_header(p)
                hdr.refcount = sys.maxint // 2

    def static_deallocation_funcptr_for_type(self, TYPE):
        """Return (building and caching on first use) the deallocator
        funcptr for the statically-known type TYPE.

        Three cases: no destructor and no GC pointers -> the shared
        no_pointer_dealloc helper; a destructor -> an exec-generated
        deallocator that runs it with exception save/restore; otherwise an
        exec-generated deallocator that just pops the fields' references.
        """
        if TYPE in self.static_deallocator_funcptrs:
            return self.static_deallocator_funcptrs[TYPE]
        #print_call_chain(self)

        rtti = get_rtti(TYPE)
        if rtti is not None and hasattr(rtti._obj, 'destructor_funcptr'):
            destrptr = rtti._obj.destructor_funcptr
            DESTR_ARG = lltype.typeOf(destrptr).TO.ARGS[0]
        else:
            destrptr = None
            DESTR_ARG = None

        if destrptr is None and not find_gc_ptrs_in_type(TYPE):
            #print repr(TYPE)[:80], 'is dealloc easy'
            p = self.no_pointer_dealloc_ptr.value
            self.static_deallocator_funcptrs[TYPE] = p
            return p

        if destrptr is not None:
            # The %s below is replaced by the per-field pop_alive body;
            # the surrounding try/except shields the destructor call and
            # the current exception is saved/restored around it.
            body = '\n'.join(_static_deallocator_body_for_type('v', TYPE, 3))
            src = """
def ll_deallocator(addr):
    exc_instance = llop.gc_fetch_exception(EXC_INSTANCE_TYPE)
    try:
        v = cast_adr_to_ptr(addr, PTR_TYPE)
        gcheader = cast_adr_to_ptr(addr - gc_header_offset, HDRPTR)
        # refcount is at zero, temporarily bump it to 1:
        gcheader.refcount = 1
        destr_v = cast_pointer(DESTR_ARG, v)
        ll_call_destructor(destrptr, destr_v)
        refcount = gcheader.refcount - 1
        gcheader.refcount = refcount
        if refcount == 0:
%s
            llop.gc_free(lltype.Void, addr)
    except:
        pass
    llop.gc_restore_exception(lltype.Void, exc_instance)
    pop_alive(exc_instance)
    # XXX layering of exceptiontransform versus gcpolicy
""" % (body, )
        else:
            # No destructor: pop the fields' references, then free.
            call_del = None
            body = '\n'.join(_static_deallocator_body_for_type('v', TYPE))
            src = ('def ll_deallocator(addr):\n    v = cast_adr_to_ptr(addr, PTR_TYPE)\n' +
                   body + '\n    llop.gc_free(lltype.Void, addr)\n')
        # Namespace for the exec'd source: every free name used by the
        # generated ll_deallocator must be bound here.
        d = {'pop_alive': LLTransformerOp(self.pop_alive),
             'llop': llop,
             'lltype': lltype,
             'destrptr': destrptr,
             'gc_header_offset': self.gcheaderbuilder.size_gc_header,
             'cast_adr_to_ptr': llmemory.cast_adr_to_ptr,
             'cast_pointer': lltype.cast_pointer,
             'PTR_TYPE': lltype.Ptr(TYPE),
             'DESTR_ARG': DESTR_ARG,
             'EXC_INSTANCE_TYPE': self.translator.rtyper.exceptiondata.lltype_of_exception_value,
             'll_call_destructor': ll_call_destructor,
             'HDRPTR': lltype.Ptr(self.HDR)}
        exec src in d
        this = d['ll_deallocator']
        fptr = self.annotate_helper(this, [llmemory.Address], lltype.Void)
        self.static_deallocator_funcptrs[TYPE] = fptr
        # Recursively make sure deallocators exist for every GC pointer
        # reachable from TYPE (the generated body may need them).
        for p in find_gc_ptrs_in_type(TYPE):
            self.static_deallocation_funcptr_for_type(p.TO)
        return fptr

    def dynamic_deallocation_funcptr_for_type(self, TYPE):
        """Return a deallocator funcptr for TYPE, dispatching at run time
        through TYPE's RTTI query function when one exists.

        Falls back to the static deallocator when TYPE has no RTTI.
        Results are cached both per-TYPE and per-query-function.
        """
        if TYPE in self.dynamic_deallocator_funcptrs:
            return self.dynamic_deallocator_funcptrs[TYPE]
        #print_call_chain(self)

        rtti = get_rtti(TYPE)
        if rtti is None:
            # No runtime type info: the static deallocator is exact.
            p = self.static_deallocation_funcptr_for_type(TYPE)
            self.dynamic_deallocator_funcptrs[TYPE] = p
            return p

        queryptr = rtti._obj.query_funcptr
        if queryptr._obj in self.queryptr2dynamic_deallocator_funcptr:
            return self.queryptr2dynamic_deallocator_funcptr[queryptr._obj]
        RTTI_PTR = lltype.Ptr(lltype.RuntimeTypeInfo)
        QUERY_ARG_TYPE = lltype.typeOf(queryptr).TO.ARGS[0]
        gc_header_offset = self.gcheaderbuilder.size_gc_header
        HDRPTR = lltype.Ptr(self.HDR)
        def ll_dealloc(addr):
            # bump refcount to 1 so that calling the query function (which
            # may manipulate the object) does not re-trigger deallocation
            gcheader = llmemory.cast_adr_to_ptr(addr - gc_header_offset,
                                                HDRPTR)
            gcheader.refcount = 1
            v = llmemory.cast_adr_to_ptr(addr, QUERY_ARG_TYPE)
            rtti = queryptr(v)
            gcheader.refcount = 0
            llop.gc_call_rtti_destructor(lltype.Void, rtti, addr)
        fptr = self.annotate_helper(ll_dealloc, [llmemory.Address],
                                    lltype.Void)
        self.dynamic_deallocator_funcptrs[TYPE] = fptr
        self.queryptr2dynamic_deallocator_funcptr[queryptr._obj] = fptr
        return fptr