def define_custom_trace(cls): from rpython.rtyper.annlowlevel import llhelper # S = lltype.GcStruct('S', ('x', llmemory.Address), rtti=True) offset_of_x = llmemory.offsetof(S, 'x') def customtrace(obj, prev): if not prev: return obj + offset_of_x else: return llmemory.NULL CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address], llmemory.Address) customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace) lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr) # def setup(): s = lltype.nullptr(S) for i in range(10000): t = lltype.malloc(S) t.x = llmemory.cast_ptr_to_adr(s) s = t return s def measure_length(s): res = 0 while s: res += 1 s = llmemory.cast_adr_to_ptr(s.x, lltype.Ptr(S)) return res def f(n): s1 = setup() llop.gc__collect(lltype.Void) return measure_length(s1) return f
def _setup_repr_final(self):
    """Finish setting up this instance repr: immutability checks, then
    (for the 'gc' flavor) attach the runtime-type-info query function and
    the optional __del__ destructor to the instance GcStruct.
    """
    self._setup_immutable_field_list()
    self._check_for_immutable_conflicts()
    if self.gcflavor == 'gc':
        # If the class hierarchy defines __del__, turn its graph into a
        # low-level destructor function pointer.
        if (self.classdef is not None and
                self.classdef.classdesc.lookup('__del__') is not None):
            s_func = self.classdef.classdesc.s_read_attribute('__del__')
            source_desc = self.classdef.classdesc.lookup('__del__')
            source_classdef = source_desc.getclassdef(None)
            source_repr = getinstancerepr(self.rtyper, source_classdef)
            assert len(s_func.descriptions) == 1
            funcdesc, = s_func.descriptions
            graph = funcdesc.getuniquegraph()
            # Destructors must stay simple; reject graphs that call too much.
            self.check_graph_of_del_does_not_call_too_much(
                self.rtyper, graph)
            FUNCTYPE = FuncType([Ptr(source_repr.object_type)], Void)
            destrptr = functionptr(FUNCTYPE, graph.name,
                                   graph=graph,
                                   _callable=graph.func)
        else:
            destrptr = None
        self.rtyper.call_all_setups()  # compute ForwardReferences now
        args_s = [SomePtr(Ptr(OBJECT))]
        graph = self.rtyper.annotate_helper(ll_runtime_type_info, args_s)
        s = self.rtyper.annotation(graph.getreturnvar())
        if (not isinstance(s, SomePtr) or
                s.ll_ptrtype != Ptr(RuntimeTypeInfo)):
            raise TyperError("runtime type info function returns %r, "
                             "expected Ptr(RuntimeTypeInfo)" % (s))
        funcptr = self.rtyper.getcallable(graph)
        attachRuntimeTypeInfo(self.object_type, funcptr, destrptr)
    # Register the vtable so the typeptr can be mapped back to this type.
    vtable = self.rclass.getvtable()
    self.rtyper.set_type_for_typeptr(vtable, self.lowleveltype.TO)
def _setup_repr_final(self):
    """Finish setting up this instance repr: immutability checks, then
    (for the 'gc' flavor) attach the runtime-type-info query function and
    the optional __del__ destructor to the instance GcStruct.
    """
    self._setup_immutable_field_list()
    self._check_for_immutable_conflicts()
    if self.gcflavor == 'gc':
        # If the class hierarchy defines __del__, turn its graph into a
        # low-level destructor function pointer.
        if (self.classdef is not None and
                self.classdef.classdesc.lookup('__del__') is not None):
            s_func = self.classdef.classdesc.s_read_attribute('__del__')
            source_desc = self.classdef.classdesc.lookup('__del__')
            source_classdef = source_desc.getclassdef(None)
            source_repr = getinstancerepr(self.rtyper, source_classdef)
            assert len(s_func.descriptions) == 1
            funcdesc, = s_func.descriptions
            graph = funcdesc.getuniquegraph()
            # Destructors must stay simple; reject graphs that call too much.
            self.check_graph_of_del_does_not_call_too_much(graph)
            FUNCTYPE = FuncType([Ptr(source_repr.object_type)], Void)
            destrptr = functionptr(FUNCTYPE, graph.name,
                                   graph=graph,
                                   _callable=graph.func)
        else:
            destrptr = None
        self.rtyper.call_all_setups()  # compute ForwardReferences now
        args_s = [SomePtr(Ptr(OBJECT))]
        graph = self.rtyper.annotate_helper(ll_runtime_type_info, args_s)
        s = self.rtyper.annotation(graph.getreturnvar())
        if (not isinstance(s, SomePtr) or
                s.ll_ptrtype != Ptr(RuntimeTypeInfo)):
            raise TyperError("runtime type info function returns %r, "
                             "expected Ptr(RuntimeTypeInfo)" % (s))
        funcptr = self.rtyper.getcallable(graph)
        attachRuntimeTypeInfo(self.object_type, funcptr, destrptr)
    # Register the vtable so the typeptr can be mapped back to this type.
    vtable = self.rclass.getvtable()
    self.rtyper.set_type_for_typeptr(vtable, self.lowleveltype.TO)
def define_custom_trace(cls):
    """Build a test function: struct S holds a pointer to T hidden in an
    llmemory.Address field; a custom trace function attached to S's rtti
    is the GC's only way to see it.
    """
    from rpython.rtyper.annlowlevel import llhelper
    from rpython.rtyper.lltypesystem import llmemory
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address), rtti=True)
    T = lltype.GcStruct('T', ('z', lltype.Signed))
    offset_of_x = llmemory.offsetof(S, 'x')
    def customtrace(obj, prev):
        # Old-style custom trace: called repeatedly; return the address of
        # the next GC pointer inside 'obj', or NULL when exhausted.
        if not prev:
            return obj + offset_of_x
        else:
            return llmemory.NULL
    CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address],
                                      llmemory.Address)
    customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace)
    lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr)
    #
    def setup():
        s1 = lltype.malloc(S)
        tx = lltype.malloc(T)
        tx.z = 4243
        # Hide the T instance behind an Address.
        s1.x = llmemory.cast_ptr_to_adr(tx)
        return s1
    def f():
        s1 = setup()
        # The collection must keep 'tx' alive via the custom trace.
        llop.gc__collect(lltype.Void)
        return llmemory.cast_adr_to_ptr(s1.x, lltype.Ptr(T)).z
    return f
def complete_destrptr(gctransformer):
    """Annotate stacklet_destructor and attach it as the destructor of the
    STACKLET rtti, using a mix-level helper annotator.
    """
    rtyper = gctransformer.translator.rtyper
    annotator = MixLevelHelperAnnotator(rtyper)
    destrptr = annotator.delayedfunction(
        stacklet_destructor,
        [lltype_to_annotation(STACKLET_PTR)],
        annmodel.s_None)
    annotator.finish()
    lltype.attachRuntimeTypeInfo(STACKLET, destrptr=destrptr)
def specialize_call(self, hop):
    """Rtyper hook: unwrap the lambda-wrapped destructor function, annotate
    it as a helper taking Ptr(TP), and attach it to TP's rtti.
    """
    from rpython.rtyper.llannotation import SomePtr
    STRUCT = hop.args_s[0].const
    make_destructor = hop.args_s[1].const
    destructor = make_destructor()     # unwrap the lambda
    funcptr = hop.rtyper.annotate_helper_fn(
        destructor, [SomePtr(lltype.Ptr(STRUCT))])
    hop.exception_cannot_occur()
    lltype.attachRuntimeTypeInfo(STRUCT, destrptr=funcptr)
def test_caching_dynamic_deallocator():
    """Dynamic deallocators are cached per rtti query function: S and S1
    share the query function 'qp', so they must share one deallocator,
    while T has its own query function and gets a distinct one.
    """
    S = lltype.GcStruct("S", ('x', lltype.Signed), rtti=True)
    S1 = lltype.GcStruct("S1", ('s', S), ('y', lltype.Signed), rtti=True)
    T = lltype.GcStruct("T", ('x', lltype.Signed), rtti=True)
    def f_S(s):
        s.x = 1
    def f_S1(s1):
        s1.s.x = 1
        s1.y = 2
    def f_T(s):
        s.x = 1
    def type_info_S(p):
        return lltype.getRuntimeTypeInfo(S)
    def type_info_T(p):
        return lltype.getRuntimeTypeInfo(T)
    # Shared query function for S and S1.
    qp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)], lltype.Ptr(
        lltype.RuntimeTypeInfo)),
        "type_info_S",
        _callable=type_info_S)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)], lltype.Void),
                            "destructor_funcptr",
                            _callable=f_S)
    pinf = lltype.attachRuntimeTypeInfo(S, qp, destrptr=dp)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)], lltype.Void),
                            "destructor_funcptr",
                            _callable=f_S1)
    pinf = lltype.attachRuntimeTypeInfo(S1, qp, destrptr=dp)
    # Separate query function for T.
    qp = lltype.functionptr(lltype.FuncType([lltype.Ptr(T)], lltype.Ptr(
        lltype.RuntimeTypeInfo)),
        "type_info_S",
        _callable=type_info_T)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(T)], lltype.Void),
                            "destructor_funcptr",
                            _callable=f_T)
    pinf = lltype.attachRuntimeTypeInfo(T, qp, destrptr=dp)
    def f():
        pass
    t = TranslationContext()
    t.buildannotator().build_types(f, [])
    t.buildrtyper().specialize()
    transformer = RefcountingGCTransformer(t)
    p_S = transformer.dynamic_deallocation_funcptr_for_type(S)
    p_S1 = transformer.dynamic_deallocation_funcptr_for_type(S1)
    p_T = transformer.dynamic_deallocation_funcptr_for_type(T)
    # Caching is keyed on the query function, not on the struct type.
    assert p_S is not p_T
    assert p_S is p_S1
def get_shadowstackref(root_walker, gctransformer):
    """Build (once, cached on the gctransformer) the SHADOWSTACKREF GcStruct
    that wraps a raw-malloced shadow stack.  A custom trace function lets
    the GC see the roots stored in [base, top), and a destructor frees the
    raw memory (and the stacklet handle, if stacklet support is on).
    """
    if hasattr(gctransformer, '_SHADOWSTACKREF'):
        return gctransformer._SHADOWSTACKREF

    SHADOWSTACKREFPTR = lltype.Ptr(lltype.GcForwardReference())
    SHADOWSTACKREF = lltype.GcStruct('ShadowStackRef',
                                     ('base', llmemory.Address),
                                     ('top', llmemory.Address),
                                     ('context', llmemory.Address),
                                     #('fullstack', lltype.Bool),
                                     rtti=True)
    SHADOWSTACKREFPTR.TO.become(SHADOWSTACKREF)

    gc = gctransformer.gcdata.gc
    root_iterator = get_root_iterator(gctransformer)

    def customtrace(obj, prev):
        # Old-style custom trace: called repeatedly, returning the address
        # of the next root inside the saved stack, or NULL when done.
        obj = llmemory.cast_adr_to_ptr(obj, SHADOWSTACKREFPTR)
        if not prev:
            root_iterator.setcontext(obj.context)
            prev = obj.top
        return root_iterator.nextleft(gc, obj.base, prev)

    CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address],
                                      llmemory.Address)
    customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace)

    def shadowstack_destructor(shadowstackref):
        if root_walker.stacklet_support:
            from rpython.rlib import _rffi_stacklet as _c
            # Grab the stacklet handle before clearing the field.
            h = shadowstackref.context
            h = llmemory.cast_adr_to_ptr(h, _c.handle)
            shadowstackref.context = llmemory.NULL
        #
        base = shadowstackref.base
        shadowstackref.base = llmemory.NULL
        shadowstackref.top = llmemory.NULL
        llmemory.raw_free(base)
        #
        if root_walker.stacklet_support:
            if h:
                _c.destroy(h)

    destrptr = gctransformer.annotate_helper(shadowstack_destructor,
                                             [SHADOWSTACKREFPTR],
                                             lltype.Void)

    lltype.attachRuntimeTypeInfo(SHADOWSTACKREF,
                                 customtraceptr=customtraceptr,
                                 destrptr=destrptr)

    gctransformer._SHADOWSTACKREF = SHADOWSTACKREF
    return SHADOWSTACKREF
def get_shadowstackref(root_walker, gctransformer):
    """Build (once, cached on the gctransformer) the SHADOWSTACKREF GcStruct
    that wraps a raw-malloced shadow stack.  Uses the new-style custom
    trace API (a function registered in rtyper.custom_trace_funcs) to
    report every root in [base, top), plus a destructor that frees the
    raw memory (and the stacklet handle, if stacklet support is on).
    """
    if hasattr(gctransformer, '_SHADOWSTACKREF'):
        return gctransformer._SHADOWSTACKREF

    SHADOWSTACKREFPTR = lltype.Ptr(lltype.GcForwardReference())
    SHADOWSTACKREF = lltype.GcStruct(
        'ShadowStackRef',
        ('base', llmemory.Address),
        ('top', llmemory.Address),
        ('context', llmemory.Address),
        #('fullstack', lltype.Bool),
        rtti=True)
    SHADOWSTACKREFPTR.TO.become(SHADOWSTACKREF)

    def customtrace(gc, obj, callback, arg):
        # New-style custom trace: invoke the callback once per root
        # address, walking the saved stack from top down to base.
        obj = llmemory.cast_adr_to_ptr(obj, SHADOWSTACKREFPTR)
        addr = obj.top
        start = obj.base
        while addr != start:
            addr -= sizeofaddr
            gc._trace_callback(callback, arg, addr)

    gc = gctransformer.gcdata.gc
    assert not hasattr(gc, 'custom_trace_dispatcher')
    # ^^^ create_custom_trace_funcs() must not run before this
    gctransformer.translator.rtyper.custom_trace_funcs.append(
        (SHADOWSTACKREF, customtrace))

    def shadowstack_destructor(shadowstackref):
        if root_walker.stacklet_support:
            from rpython.rlib import _rffi_stacklet as _c
            # Grab the stacklet handle before clearing the field.
            h = shadowstackref.context
            h = llmemory.cast_adr_to_ptr(h, _c.handle)
            shadowstackref.context = llmemory.NULL
        #
        base = shadowstackref.base
        shadowstackref.base = llmemory.NULL
        shadowstackref.top = llmemory.NULL
        llmemory.raw_free(base)
        #
        if root_walker.stacklet_support:
            if h:
                _c.destroy(h)

    destrptr = gctransformer.annotate_helper(shadowstack_destructor,
                                             [SHADOWSTACKREFPTR],
                                             lltype.Void)

    lltype.attachRuntimeTypeInfo(SHADOWSTACKREF, destrptr=destrptr)

    gctransformer._SHADOWSTACKREF = SHADOWSTACKREF
    return SHADOWSTACKREF
def attachRuntimeTypeInfoFunc(self, GCSTRUCT, func, ARG_GCSTRUCT=None,
                              destrptr=None):
    """Annotate and rtype 'func' — a query function that returns the
    runtime type info of a GCSTRUCT instance — and attach it, together
    with the optional destructor pointer, to GCSTRUCT's rtti.

    :param GCSTRUCT: the GcStruct whose rtti is being set up.
    :param func: helper taking Ptr(ARG_GCSTRUCT) and returning
        Ptr(RuntimeTypeInfo).
    :param ARG_GCSTRUCT: argument struct type for 'func'; defaults to
        GCSTRUCT itself.
    :param destrptr: optional low-level destructor function pointer.
    :raises TyperError: if 'func' does not return Ptr(RuntimeTypeInfo).
    """
    self.call_all_setups()  # compute ForwardReferences now
    if ARG_GCSTRUCT is None:
        ARG_GCSTRUCT = GCSTRUCT
    args_s = [annmodel.SomePtr(Ptr(ARG_GCSTRUCT))]
    graph = self.annotate_helper(func, args_s)
    s = self.annotator.binding(graph.getreturnvar())
    if (not isinstance(s, annmodel.SomePtr) or
            s.ll_ptrtype != Ptr(RuntimeTypeInfo)):
        # fixed typo in the message: "excepted" -> "expected"
        raise TyperError("runtime type info function %r returns %r, "
                         "expected Ptr(RuntimeTypeInfo)" % (func, s))
    funcptr = self.getcallable(graph)
    attachRuntimeTypeInfo(GCSTRUCT, funcptr, destrptr, None)
def get_shadowstackref(root_walker, gctransformer):
    """Build (once, cached on the gctransformer) the SHADOWSTACKREF GcStruct
    that wraps a raw-malloced shadow stack.  Uses the new-style custom
    trace API (a function registered in rtyper.custom_trace_funcs) to
    report every root in [base, top), plus a destructor that frees the
    raw memory (and the stacklet handle, if stacklet support is on).
    """
    if hasattr(gctransformer, '_SHADOWSTACKREF'):
        return gctransformer._SHADOWSTACKREF

    SHADOWSTACKREFPTR = lltype.Ptr(lltype.GcForwardReference())
    SHADOWSTACKREF = lltype.GcStruct('ShadowStackRef',
                                     ('base', llmemory.Address),
                                     ('top', llmemory.Address),
                                     ('context', llmemory.Address),
                                     #('fullstack', lltype.Bool),
                                     rtti=True)
    SHADOWSTACKREFPTR.TO.become(SHADOWSTACKREF)

    def customtrace(gc, obj, callback, arg):
        # New-style custom trace: invoke the callback once per root
        # address, walking the saved stack from top down to base.
        obj = llmemory.cast_adr_to_ptr(obj, SHADOWSTACKREFPTR)
        addr = obj.top
        start = obj.base
        while addr != start:
            addr -= sizeofaddr
            gc._trace_callback(callback, arg, addr)

    gc = gctransformer.gcdata.gc
    assert not hasattr(gc, 'custom_trace_dispatcher')
    # ^^^ create_custom_trace_funcs() must not run before this
    gctransformer.translator.rtyper.custom_trace_funcs.append(
        (SHADOWSTACKREF, customtrace))

    def shadowstack_destructor(shadowstackref):
        if root_walker.stacklet_support:
            from rpython.rlib import _rffi_stacklet as _c
            # Grab the stacklet handle before clearing the field.
            h = shadowstackref.context
            h = llmemory.cast_adr_to_ptr(h, _c.handle)
            shadowstackref.context = llmemory.NULL
        #
        base = shadowstackref.base
        shadowstackref.base = llmemory.NULL
        shadowstackref.top = llmemory.NULL
        llmemory.raw_free(base)
        #
        if root_walker.stacklet_support:
            if h:
                _c.destroy(h)

    destrptr = gctransformer.annotate_helper(shadowstack_destructor,
                                             [SHADOWSTACKREFPTR],
                                             lltype.Void)

    lltype.attachRuntimeTypeInfo(SHADOWSTACKREF, destrptr=destrptr)

    gctransformer._SHADOWSTACKREF = SHADOWSTACKREF
    return SHADOWSTACKREF
def test_caching_dynamic_deallocator():
    """Dynamic deallocators are cached per rtti query function: S and S1
    share the query function 'qp', so they must share one deallocator,
    while T has its own query function and gets a distinct one.
    """
    S = lltype.GcStruct("S", ('x', lltype.Signed), rtti=True)
    S1 = lltype.GcStruct("S1", ('s', S), ('y', lltype.Signed), rtti=True)
    T = lltype.GcStruct("T", ('x', lltype.Signed), rtti=True)
    def f_S(s):
        s.x = 1
    def f_S1(s1):
        s1.s.x = 1
        s1.y = 2
    def f_T(s):
        s.x = 1
    def type_info_S(p):
        return lltype.getRuntimeTypeInfo(S)
    def type_info_T(p):
        return lltype.getRuntimeTypeInfo(T)
    # Shared query function for S and S1.
    qp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)],
                                            lltype.Ptr(lltype.RuntimeTypeInfo)),
                            "type_info_S",
                            _callable=type_info_S)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)],
                                            lltype.Void),
                            "destructor_funcptr",
                            _callable=f_S)
    pinf = lltype.attachRuntimeTypeInfo(S, qp, destrptr=dp)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)],
                                            lltype.Void),
                            "destructor_funcptr",
                            _callable=f_S1)
    pinf = lltype.attachRuntimeTypeInfo(S1, qp, destrptr=dp)
    # Separate query function for T.
    qp = lltype.functionptr(lltype.FuncType([lltype.Ptr(T)],
                                            lltype.Ptr(lltype.RuntimeTypeInfo)),
                            "type_info_S",
                            _callable=type_info_T)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(T)],
                                            lltype.Void),
                            "destructor_funcptr",
                            _callable=f_T)
    pinf = lltype.attachRuntimeTypeInfo(T, qp, destrptr=dp)
    def f():
        pass
    t = TranslationContext()
    t.buildannotator().build_types(f, [])
    t.buildrtyper().specialize()
    transformer = RefcountingGCTransformer(t)
    p_S = transformer.dynamic_deallocation_funcptr_for_type(S)
    p_S1 = transformer.dynamic_deallocation_funcptr_for_type(S1)
    p_T = transformer.dynamic_deallocation_funcptr_for_type(T)
    # Caching is keyed on the query function, not on the struct type.
    assert p_S is not p_T
    assert p_S is p_S1
def attachRuntimeTypeInfoFunc(self, GCSTRUCT, func, ARG_GCSTRUCT=None,
                              destrptr=None):
    """Annotate and rtype 'func' — a query function that returns the
    runtime type info of a GCSTRUCT instance — and attach it, together
    with the optional destructor pointer, to GCSTRUCT's rtti.

    :param GCSTRUCT: the GcStruct whose rtti is being set up.
    :param func: helper taking Ptr(ARG_GCSTRUCT) and returning
        Ptr(RuntimeTypeInfo).
    :param ARG_GCSTRUCT: argument struct type for 'func'; defaults to
        GCSTRUCT itself.
    :param destrptr: optional low-level destructor function pointer.
    :raises TyperError: if 'func' does not return Ptr(RuntimeTypeInfo).
    """
    self.call_all_setups()  # compute ForwardReferences now
    if ARG_GCSTRUCT is None:
        ARG_GCSTRUCT = GCSTRUCT
    args_s = [SomePtr(Ptr(ARG_GCSTRUCT))]
    graph = self.annotate_helper(func, args_s)
    s = self.annotation(graph.getreturnvar())
    if (not isinstance(s, SomePtr) or
            s.ll_ptrtype != Ptr(RuntimeTypeInfo)):
        # fixed typo in the message: "excepted" -> "expected"
        raise TyperError("runtime type info function %r returns %r, "
                         "expected Ptr(RuntimeTypeInfo)" % (func, s))
    funcptr = self.getcallable(graph)
    attachRuntimeTypeInfo(GCSTRUCT, funcptr, destrptr)
def test_boehm_finalizer___del__():
    """A destructor attached through rtti must yield a Boehm finalizer."""
    S = lltype.GcStruct("S", ('x', lltype.Signed), rtti=True)
    def f(s):
        s.x = 1
    def type_info_S(p):
        return lltype.getRuntimeTypeInfo(S)
    qp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)],
                                            lltype.Ptr(lltype.RuntimeTypeInfo)),
                            "type_info_S",
                            _callable=type_info_S)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)],
                                            lltype.Void),
                            "destructor_funcptr",
                            _callable=f)
    lltype.attachRuntimeTypeInfo(S, qp, destrptr=dp)
    # With a destructor present, the finalizer must not be None.
    f, t = make_boehm_finalizer(S)
    assert f is not None
def test_malloc_new_with_destructor():
    """A 'malloc' of a GcStruct with a destructor must be rewritten into a
    residual call to the 'alloc_with_del' pseudo-function plus a '-live-'
    marker operation.
    """
    vtable = lltype.malloc(rclass.OBJECT_VTABLE, immortal=True)
    S = lltype.GcStruct("S", ("parent", rclass.OBJECT), rtti=True)
    DESTRUCTOR = lltype.FuncType([lltype.Ptr(S)], lltype.Void)
    destructor = lltype.functionptr(DESTRUCTOR, "destructor")
    lltype.attachRuntimeTypeInfo(S, destrptr=destructor)
    heaptracker.set_testing_vtable_for_gcstruct(S, vtable, "S")
    v = varoftype(lltype.Ptr(S))
    op = SpaceOperation("malloc", [Constant(S, lltype.Void),
                                   Constant({"flavor": "gc"}, lltype.Void)],
                        v)
    tr = Transformer(FakeCPU(), FakeResidualCallControl())
    oplist = tr.rewrite_operation(op)
    op0, op1 = oplist
    assert op0.opname == "residual_call_r_r"
    assert op0.args[0].value == "alloc_with_del"    # pseudo-function as a str
    assert list(op0.args[1]) == []
    assert op1.opname == "-live-"
    assert op1.args == []
def get_shadowstackref(root_walker, gctransformer):
    """Build (once, cached on the gctransformer) the SHADOWSTACKREF GcStruct
    wrapping a raw-malloced shadow stack.  The custom trace function
    delegates to walk_stack_root() over [base, top); the destructor frees
    the raw memory.
    """
    if hasattr(gctransformer, '_SHADOWSTACKREF'):
        return gctransformer._SHADOWSTACKREF

    SHADOWSTACKREFPTR = lltype.Ptr(lltype.GcForwardReference())
    SHADOWSTACKREF = lltype.GcStruct('ShadowStackRef',
                                     ('base', llmemory.Address),
                                     ('top', llmemory.Address),
                                     rtti=True)
    SHADOWSTACKREFPTR.TO.become(SHADOWSTACKREF)

    def customtrace(gc, obj, callback, arg1, arg2):
        # New-style custom trace taking two extra callback arguments;
        # reuse the generic stack-root walker over the saved range.
        obj = llmemory.cast_adr_to_ptr(obj, SHADOWSTACKREFPTR)
        walk_stack_root(gc._trace_callback, callback, arg1, arg2,
                        obj.base, obj.top, is_minor=False)   # xxx optimize?

    gc = gctransformer.gcdata.gc
    assert not hasattr(gc, 'custom_trace_dispatcher')
    # ^^^ create_custom_trace_funcs() must not run before this
    gctransformer.translator.rtyper.custom_trace_funcs.append(
        (SHADOWSTACKREF, customtrace))

    def shadowstack_destructor(shadowstackref):
        # Clear the fields before freeing, so the GC never sees a
        # dangling [base, top) range.
        base = shadowstackref.base
        shadowstackref.base = llmemory.NULL
        shadowstackref.top = llmemory.NULL
        llmemory.raw_free(base)

    destrptr = gctransformer.annotate_helper(shadowstack_destructor,
                                             [SHADOWSTACKREFPTR],
                                             lltype.Void)

    lltype.attachRuntimeTypeInfo(SHADOWSTACKREF, destrptr=destrptr)

    gctransformer._SHADOWSTACKREF = SHADOWSTACKREF
    return SHADOWSTACKREF
def test_custom_trace(self):
    """The custom trace function reports only S.x to the GC, so after a
    collection the object behind S.x survives while the one behind S.y
    must have been freed (reading it raises).
    """
    from rpython.rtyper.annlowlevel import llhelper
    from rpython.rtyper.lltypesystem import llmemory
    from rpython.rtyper.lltypesystem.llarena import ArenaError
    #
    S = lltype.GcStruct('S', ('x', llmemory.Address),
                             ('y', llmemory.Address), rtti=True)
    T = lltype.GcStruct('T', ('z', lltype.Signed))
    offset_of_x = llmemory.offsetof(S, 'x')
    def customtrace(obj, prev):
        # Old-style custom trace: only 'x' is ever reported; 'y' stays
        # invisible to the GC on purpose.
        if not prev:
            return obj + offset_of_x
        else:
            return llmemory.NULL
    CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address],
                                      llmemory.Address)
    customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace)
    lltype.attachRuntimeTypeInfo(S, customtraceptr=customtraceptr)
    #
    for attrname in ['x', 'y']:
        def setup():
            s1 = lltype.malloc(S)
            tx = lltype.malloc(T)
            tx.z = 42
            ty = lltype.malloc(T)
            s1.x = llmemory.cast_ptr_to_adr(tx)
            s1.y = llmemory.cast_ptr_to_adr(ty)
            return s1
        def f():
            s1 = setup()
            llop.gc__collect(lltype.Void)
            return llmemory.cast_adr_to_ptr(getattr(s1, attrname),
                                            lltype.Ptr(T))
        if attrname == 'x':
            # Traced field: the T object survives the collection.
            res = self.interpret(f, [])
            assert res.z == 42
        else:
            # Untraced field: the T object was collected; access fails.
            py.test.raises((RuntimeError, ArenaError),
                           self.interpret, f, [])
def test_deallocator_with_destructor():
    """Building a deallocator must work for a struct with rtti and an
    attached destructor function pointer."""
    S = lltype.GcStruct("S", ('x', lltype.Signed), rtti=True)
    def f(s):
        s.x = 1
    def type_info_S(p):
        return lltype.getRuntimeTypeInfo(S)
    qp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)],
                                            lltype.Ptr(lltype.RuntimeTypeInfo)),
                            "type_info_S",
                            _callable=type_info_S)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)],
                                            lltype.Void),
                            "destructor_funcptr",
                            _callable=f)
    pinf = lltype.attachRuntimeTypeInfo(S, qp, destrptr=dp)
    # Must not raise: just check the deallocator graph can be built.
    graph, t = make_deallocator(S)
def test_deallocator_with_destructor():
    """Building a deallocator must work for a struct with rtti and an
    attached destructor function pointer."""
    S = lltype.GcStruct("S", ('x', lltype.Signed), rtti=True)
    def f(s):
        s.x = 1
    def type_info_S(p):
        return lltype.getRuntimeTypeInfo(S)
    qp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)], lltype.Ptr(
        lltype.RuntimeTypeInfo)),
        "type_info_S",
        _callable=type_info_S)
    dp = lltype.functionptr(lltype.FuncType([lltype.Ptr(S)],
                                            lltype.Void),
                            "destructor_funcptr",
                            _callable=f)
    pinf = lltype.attachRuntimeTypeInfo(S, qp, destrptr=dp)
    # Must not raise: just check the deallocator graph can be built.
    graph, t = make_deallocator(S)
def suspstack_destructor(suspstack): h = suspstack.handle if h: _c.destroy(h) SUSPSTACK = lltype.GcStruct('SuspStack', ('handle', _c.handle), ('anchor', llmemory.Address), ('callback_pieces', llmemory.Address), rtti=True) NULL_SUSPSTACK = lltype.nullptr(SUSPSTACK) CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address], llmemory.Address) customtraceptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), customtrace) lltype.attachRuntimeTypeInfo(SUSPSTACK, customtraceptr=customtraceptr) ASM_FRAMEDATA_HEAD_PTR = lltype.Ptr(lltype.ForwardReference()) ASM_FRAMEDATA_HEAD_PTR.TO.become(lltype.Struct('ASM_FRAMEDATA_HEAD', ('prev', ASM_FRAMEDATA_HEAD_PTR), ('next', ASM_FRAMEDATA_HEAD_PTR) )) alternateanchor = lltype.malloc(ASM_FRAMEDATA_HEAD_PTR.TO, immortal=True) alternateanchor.prev = alternateanchor alternateanchor.next = alternateanchor FUNCNOARG_P = lltype.Ptr(lltype.FuncType([], _c.handle)) pypy_asm_stackwalk2 = rffi.llexternal('pypy_asm_stackwalk', [FUNCNOARG_P, ASM_FRAMEDATA_HEAD_PTR],
no += 1 state = 0 break # next iteration of the outermost loop else: # found it index = no * SIZEOFSIGNED * 8 + state # save new state state += 1 if state == MAX: no += 1 state = 0 if IS_32BIT: new_state = state | (no << 5) else: new_state = state | (no << 6) (obj_addr + getofs('jf_gc_trace_state')).signed[0] = new_state # sanity check frame_lgt = (obj_addr + getofs('jf_frame') + LENGTHOFS).signed[0] ll_assert(index < frame_lgt, "bogus frame field get") return (obj_addr + getofs('jf_frame') + BASEITEMOFS + SIGN_SIZE * (index)) return llmemory.NULL CUSTOMTRACEFUNC = lltype.FuncType([llmemory.Address, llmemory.Address], llmemory.Address) jitframe_trace_ptr = llhelper(lltype.Ptr(CUSTOMTRACEFUNC), jitframe_trace) lltype.attachRuntimeTypeInfo(JITFRAME, customtraceptr=jitframe_trace_ptr) JITFRAMEPTR = lltype.Ptr(JITFRAME)
def get_shadowstackref(root_walker, gctransformer):
    """Build (once, cached on the gctransformer) the SHADOWSTACKREF GcStruct
    wrapping a raw-malloced shadow stack, with extra machinery that limits
    the number of simultaneously-attached full shadow stacks to
    ShadowStackPool.MAX by compacting the overflow into exactly-sized
    raw-malloced copies.
    """
    if hasattr(gctransformer, '_SHADOWSTACKREF'):
        return gctransformer._SHADOWSTACKREF

    # Helpers to save virtual address space by limiting to MAX the
    # number of full shadow stacks.  If there are more, we compact
    # them into a separately-allocated zone of memory of just the right
    # size.  See the comments in the definition of fullstack_cache below.

    def ll_prepare_free_slot(_unused):
        """Free up a slot in the array of MAX entries, ready for storing
        a new shadowstackref.  Return the memory of the now-unused full
        shadowstack.
        """
        index = fullstack_cache[0]
        if index > 0:
            return llmemory.NULL     # there is already at least one free slot
        #
        # make a compact copy in one old entry and return the
        # original full-sized memory
        index = -index
        ll_assert(index > 0, "prepare_free_slot: cache[0] == 0")
        compacting = lltype.cast_int_to_ptr(SHADOWSTACKREFPTR,
                                            fullstack_cache[index])
        index += 1
        if index >= ShadowStackPool.MAX:
            index = 1
        fullstack_cache[0] = -index    # update to the next value in order
        #
        compacting.detach()
        original = compacting.base
        size = compacting.top - original
        new = llmemory.raw_malloc(size)
        if new == llmemory.NULL:
            return llmemory.NULL
        llmemory.raw_memcopy(original, new, size)
        compacting.base = new
        compacting.top = new + size
        return original

    def ll_attach(shadowstackref):
        """After prepare_free_slot(), store a shadowstackref in that slot."""
        index = fullstack_cache[0]
        ll_assert(index > 0, "fullstack attach: no free slot")
        fullstack_cache[0] = fullstack_cache[index]
        fullstack_cache[index] = lltype.cast_ptr_to_int(shadowstackref)
        ll_assert(shadowstackref.fsindex == 0, "fullstack attach: already one?")
        shadowstackref.fsindex = index    # > 0

    def ll_detach(shadowstackref):
        """Detach a shadowstackref from the array of MAX entries."""
        index = shadowstackref.fsindex
        ll_assert(index > 0, "detach: unattached shadowstackref")
        ll_assert(fullstack_cache[index] ==
                  lltype.cast_ptr_to_int(shadowstackref),
                  "detach: bad fullstack_cache")
        # Return the slot to the free list rooted at item 0.
        shadowstackref.fsindex = 0
        fullstack_cache[index] = fullstack_cache[0]
        fullstack_cache[0] = index

    def ll_rebuild(shadowstackref, fullstack_base):
        if shadowstackref.fsindex > 0:
            shadowstackref.detach()
            return fullstack_base
        else:
            # make an expanded copy of the compact shadowstack stored in
            # 'shadowstackref' and free that
            compact = shadowstackref.base
            size = shadowstackref.top - compact
            shadowstackref.base = fullstack_base
            shadowstackref.top = fullstack_base + size
            llmemory.raw_memcopy(compact, fullstack_base, size)
            llmemory.raw_free(compact)
            return llmemory.NULL

    SHADOWSTACKREFPTR = lltype.Ptr(lltype.GcForwardReference())
    SHADOWSTACKREF = lltype.GcStruct('ShadowStackRef',
                                     ('base', llmemory.Address),
                                     ('top', llmemory.Address),
                                     ('context', llmemory.Address),
                                     ('fsindex', lltype.Signed),
                                     rtti=True,
                                     adtmeths={'prepare_free_slot': ll_prepare_free_slot,
                                               'attach': ll_attach,
                                               'detach': ll_detach,
                                               'rebuild': ll_rebuild})
    SHADOWSTACKREFPTR.TO.become(SHADOWSTACKREF)

    # Items 1..MAX-1 of the following array can be SHADOWSTACKREF
    # addresses cast to integer.  Or, they are small numbers and they
    # make up a free list, rooted in item 0, which goes on until
    # terminated with a negative item.  This negative item gives (the
    # opposite of) the index of the entry we try to remove next.
    # Initially all items are in this free list and the end is '-1'.
    fullstack_cache = lltype.malloc(lltype.Array(lltype.Signed),
                                    ShadowStackPool.MAX,
                                    flavor='raw', immortal=True)
    for i in range(len(fullstack_cache) - 1):
        fullstack_cache[i] = i + 1
    fullstack_cache[len(fullstack_cache) - 1] = -1

    def customtrace(gc, obj, callback, arg):
        obj = llmemory.cast_adr_to_ptr(obj, SHADOWSTACKREFPTR)
        index = obj.fsindex
        if index > 0:
            # Haaaaaaack: fullstack_cache[] is just an integer, so it
            # doesn't follow the SHADOWSTACKREF when it moves.  But we
            # know this customtrace() will be called just after the
            # move.  So we fix the fullstack_cache[] now... :-/
            fullstack_cache[index] = lltype.cast_ptr_to_int(obj)
        # Report every root stored in the saved stack, top down to base.
        addr = obj.top
        start = obj.base
        while addr != start:
            addr -= sizeofaddr
            gc._trace_callback(callback, arg, addr)

    gc = gctransformer.gcdata.gc
    assert not hasattr(gc, 'custom_trace_dispatcher')
    # ^^^ create_custom_trace_funcs() must not run before this
    gctransformer.translator.rtyper.custom_trace_funcs.append(
        (SHADOWSTACKREF, customtrace))

    def shadowstack_destructor(shadowstackref):
        if root_walker.stacklet_support:
            from rpython.rlib import _rffi_stacklet as _c
            # Grab the stacklet handle before clearing the field.
            h = shadowstackref.context
            h = llmemory.cast_adr_to_ptr(h, _c.handle)
            shadowstackref.context = llmemory.NULL
        #
        if shadowstackref.fsindex > 0:
            shadowstackref.detach()
        base = shadowstackref.base
        shadowstackref.base = llmemory.NULL
        shadowstackref.top = llmemory.NULL
        llmemory.raw_free(base)
        #
        if root_walker.stacklet_support:
            if h:
                _c.destroy(h)

    destrptr = gctransformer.annotate_helper(shadowstack_destructor,
                                             [SHADOWSTACKREFPTR],
                                             lltype.Void)

    lltype.attachRuntimeTypeInfo(SHADOWSTACKREF, destrptr=destrptr)

    gctransformer._SHADOWSTACKREF = SHADOWSTACKREF
    return SHADOWSTACKREF