def test_cast_pointer():
    """Check cast_pointer() up and down a chain of nested GcStructs."""
    S3 = lltype.GcStruct("s3", ('a', lltype.Signed))
    # BUG FIX: S2 was declared with the duplicate name "s3" (copy-paste);
    # give it its own name.  Struct identity is by object, so only the
    # repr/debug name changes.
    S2 = lltype.GcStruct("s2", ('sub', S3))
    S1 = lltype.GcStruct("s1", ('sub', S2))
    p1 = malloc(S1)
    p2 = p1.sub
    p3 = p2.sub
    # casting up to an enclosing struct recovers the original pointer
    p12 = cast_pointer(lltype.Ptr(S1), p2)
    assert p12 == p1
    p13 = cast_pointer(lltype.Ptr(S1), p3)
    assert p13 == p1
    # casting down to the first inlined substructure
    p21 = cast_pointer(lltype.Ptr(S2), p1)
    assert p21 == p2
    p23 = cast_pointer(lltype.Ptr(S2), p3)
    assert p23 == p2
    p31 = cast_pointer(lltype.Ptr(S3), p1)
    assert p31 == p3
    p32 = cast_pointer(lltype.Ptr(S3), p2)
    assert p32 == p3
    p3 = malloc(S3)
    p2 = malloc(S2)
    # one more level of nesting on top
    S0 = lltype.GcStruct("s0", ('sub', S1))
    p0 = malloc(S0)
    assert p0 == cast_pointer(lltype.Ptr(S0), p0)
    p3 = cast_pointer(lltype.Ptr(S3), p0)
    p03 = cast_pointer(lltype.Ptr(S0), p3)
    assert p0 == p03
    # structurally identical but distinct struct types do not mix
    S1bis = lltype.GcStruct("s1b", ('sub', S2))
    assert S1bis != S1
    p1b = malloc(S1bis)
    p3 = p1b.sub.sub
    assert p1b == cast_pointer(lltype.Ptr(S1bis), p3)
def ll_getter(inst):
    # Read the virtualizable field: go through the 'get_<name>' accessor
    # when an access redirection is installed, otherwise read directly.
    top = lltype.cast_pointer(TOPPTR, inst)
    access = top.vable_access
    if not access:
        return getattr(inst, name)
    accessor = lltype.cast_pointer(ACCESSPTR, access)
    return getattr(accessor, 'get_' + name)(top)
def ll_setter(inst, value):
    # Write the virtualizable field: go through the 'set_<name>' accessor
    # when an access redirection is installed, otherwise write directly.
    top = lltype.cast_pointer(TOPPTR, inst)
    access = top.vable_access
    if not access:
        return setattr(inst, name, value)
    accessor = lltype.cast_pointer(ACCESSPTR, access)
    return getattr(accessor, 'set_' + name)(top, value)
def DONOTtest_runtime_type_info():
    """Disabled test (note the DONOT prefix) for runtime_type_info().

    Exercises attachRuntimeTypeInfo() both with a static rtti and with a
    dynamic _callable that picks the rtti from the object's contents.
    """
    S = GcStruct('s', ('x', Signed))
    attachRuntimeTypeInfo(S)
    s = malloc(S)
    assert runtime_type_info(s) == getRuntimeTypeInfo(S)
    S1 = GcStruct('s1', ('sub', S), ('x', Signed))
    attachRuntimeTypeInfo(S1)
    s1 = malloc(S1)
    assert runtime_type_info(s1) == getRuntimeTypeInfo(S1)
    # the inlined substructure reports the rtti of the whole object
    assert runtime_type_info(s1.sub) == getRuntimeTypeInfo(S1)
    assert runtime_type_info(cast_pointer(Ptr(S), s1)) == getRuntimeTypeInfo(S1)
    def dynamic_type_info_S(p):
        # dynamic rtti: decided by the value stored in p.x
        if p.x == 0:
            return getRuntimeTypeInfo(S)
        else:
            return getRuntimeTypeInfo(S1)
    fp = functionptr(FuncType([Ptr(S)], Ptr(RuntimeTypeInfo)),
                     "dynamic_type_info_S",
                     _callable=dynamic_type_info_S)
    attachRuntimeTypeInfo(S, fp)
    assert s.x == 0
    assert runtime_type_info(s) == getRuntimeTypeInfo(S)
    # a mismatch between the dynamic answer and the real type must raise
    s.x = 1
    py.test.raises(RuntimeError, "runtime_type_info(s)")
    assert s1.sub.x == 0
    py.test.raises(RuntimeError, "runtime_type_info(s1.sub)")
    s1.sub.x = 1
    assert runtime_type_info(s1.sub) == getRuntimeTypeInfo(S1)
def op_debug_fatalerror(self, ll_msg, ll_exc=None):
    """Abort interpretation with *ll_msg*, optionally carrying an
    ll-level exception (an OBJECT pointer) along."""
    msg = ''.join(ll_msg.chars)
    if ll_exc is not None:
        ll_exc_type = lltype.cast_pointer(rclass.OBJECTPTR, ll_exc).typeptr
        raise LLFatalError(msg, LLException(ll_exc_type, ll_exc))
    raise LLFatalError(msg)
def f(x):
    # Allocate an S, and for negative x additionally poke at the object
    # through a Ptr(T) view before reading back s.x.
    inst = lltype.malloc(S)
    inst.x = 123
    if x < 0:
        as_t = lltype.cast_pointer(lltype.Ptr(T), inst)
        as_t.y += 1
    return inst.x
def resume_after_raising(state, exception):
    """Switch to *state*, storing *exception* so it is delivered there.

    Two-phase stackless protocol, driven by global_state.restart_substate:
    -1 is the normal entry (save our frame, record the exception, unwind);
    0 runs after the stack is unwound and installs the target frame chain.
    """
    if global_state.restart_substate == -1:
        # normal entry point for a call to state.switch()
        # first unwind the stack
        u = UnwindException()
        s = lltype.malloc(SWITCH_STATE)
        s.header.f_restart = INDEX_RESUME_AFTER_RAISING
        # keep 'state' alive across the unwind as an opaque reference
        s.c = lltype.cast_opaque_ptr(SAVED_REFERENCE, state)
        add_frame_state(u, s.header)
        # stash the exception; presumably re-raised when the target
        # resumes -- TODO confirm against the resume machinery
        global_state.exception = exception
        raise u
    elif global_state.restart_substate == 0:
        # STATE 0: we didn't do anything so far, but the stack is unwound
        global_state.restart_substate = -1
        # grab the frame corresponding to ourself
        # the 'targetstate' local is garbage here, it must be read back from
        # 's.c' where we saved it by the normal entry point above
        mystate = global_state.top
        s = lltype.cast_pointer(lltype.Ptr(SWITCH_STATE), mystate)
        targetstate = lltype.cast_opaque_ptr(lltype.Ptr(STATE_HEADER), s.c)
        # walk to the bottom of the resumed chain and splice our caller's
        # frames underneath it
        resume_bottom = targetstate
        while resume_bottom.f_back:
            resume_bottom = resume_bottom.f_back
        resume_bottom.f_back = mystate.f_back
        global_state.top = targetstate
        raise UnwindException()
def ll_type_setup(p):
    # Install the given attributes on the type object.  The HEAPTYPE
    # flag is set temporarily (setattr on a non-heap type is refused)
    # and the original flags are restored afterwards.
    tp = lltype.cast_pointer(lltype.Ptr(PY_TYPE_OBJECT), p)
    saved_flags = tp.c_tp_flags
    tp.c_tp_flags |= Py_TPFLAGS_HEAPTYPE
    for attrname, attrvalue in objects:
        llop.setattr(PyObjPtr, tp, attrname, attrvalue)
    tp.c_tp_flags = saved_flags
def test_repr():
    # A pointer cast down to an inlined header struct still reprs as
    # the whole enclosing structure ("*T", not "*S").
    S = lltype.GcStruct('S')
    T = lltype.GcStruct('T', ('header', S))
    container = lltype.malloc(T)
    header_ptr = lltype.cast_pointer(lltype.Ptr(S), container)
    const = ConstPtr(lltype.cast_opaque_ptr(llmemory.GCREF, header_ptr))
    assert const._getrepr_() == "*T"
def ll_frame_switch(targetstate):
    """Switch execution to *targetstate*.

    Three-phase stackless protocol via global_state.restart_substate:
      -1 : normal entry -- save our state and raise UnwindException;
       0 : stack is unwound -- install the target chain, prepare the
           state for switching back, and unwind again into the target;
      else: STATE 1 -- we are being switched back into; return the
           origin state to the caller.
    """
    if global_state.restart_substate == -1:
        # normal entry point for a call to state.switch()
        # first unwind the stack
        u = UnwindException()
        s = lltype.malloc(SWITCH_STATE)
        s.header.f_restart = INDEX_SWITCH
        # keep 'targetstate' alive across the unwind as an opaque ref
        s.c = lltype.cast_opaque_ptr(SAVED_REFERENCE, targetstate)
        add_frame_state(u, s.header)
        raise u
    elif global_state.restart_substate == 0:
        # STATE 0: we didn't do anything so far, but the stack is unwound
        global_state.restart_substate = -1
        # grab the frame corresponding to ourself
        # the 'targetstate' local is garbage here, it must be read back from
        # 's.c' where we saved it by the normal entry point above
        mystate = global_state.top
        s = lltype.cast_pointer(lltype.Ptr(SWITCH_STATE), mystate)
        targetstate = lltype.cast_opaque_ptr(lltype.Ptr(STATE_HEADER), s.c)
        # prepare a new saved state for the future switch() back,
        # which will go to STATE 1 below
        sourcestate = lltype.malloc(EMPTY_STATE).header
        sourcestate.f_back = mystate.f_back
        sourcestate.f_restart = INDEX_SWITCH + 1
        global_state.top = targetstate
        global_state.retval_ref = lltype.cast_opaque_ptr(SAVED_REFERENCE,
                                                         sourcestate)
        raise UnwindException()  # this jumps to targetstate
    else:
        # STATE 1: switching back into a tasklet suspended by
        # a call to switch()
        global_state.top = frame.null_state
        global_state.restart_substate = -1
        origin_state = lltype.cast_opaque_ptr(frame.OPAQUE_STATE_HEADER_PTR,
                                              fetch_retval_ref())
        return origin_state  # a normal return into the current tasklet
def virtual_ref_during_tracing(self, real_object):
    """Build a JIT_VIRTUAL_REF around *real_object* and hand it back
    as a GCREF.  During tracing the ref is not actually virtual, so
    'forced' points at the real object from the start."""
    assert real_object
    vref = lltype.malloc(self.JIT_VIRTUAL_REF)
    as_object = lltype.cast_pointer(rclass.OBJECTPTR, vref)
    as_object.typeptr = self.jit_virtual_ref_vtable
    vref.virtual_token = self.TOKEN_NONE
    vref.forced = lltype.cast_opaque_ptr(rclass.OBJECTPTR, real_object)
    return lltype.cast_opaque_ptr(llmemory.GCREF, vref)
def test_nullptr_cast():
    # A null pointer survives cast_pointer(): the result has the new
    # pointer type and is still null.
    S = lltype.Struct('s')
    null_s = nullptr(S)
    assert not null_s
    S1 = lltype.Struct("s1", ('s', S))
    null_s1 = cast_pointer(lltype.Ptr(S1), null_s)
    assert lltype.typeOf(null_s1) == lltype.Ptr(S1)
    assert not null_s1
def fill_into(vablerti, s, base, vrti):
    """Copy every field described by 'descs' out of the virtualizable
    runtime info 'vrti' into the structure 's' (an opaque pointer that
    is first cast to PTRTYPE)."""
    s = lltype.cast_opaque_ptr(PTRTYPE, s)
    # idiom: enumerate() instead of a hand-maintained index counter
    for i, desc in enumerate(descs):
        v = vrti._read_field(vablerti, desc, base, i)
        # cast to the exact (sub)structure type declaring the field
        tgt = lltype.cast_pointer(desc.PTRTYPE, s)
        setattr(tgt, desc.fieldname, v)
def ref(self, struct):
    # Upcast to the declaring structure type when needed, then return
    # either the inlined substructure itself (for container fields) or
    # a direct pointer to the field.
    if lltype.typeOf(struct).TO != self.TYPE:
        struct = lltype.cast_pointer(lltype.Ptr(self.TYPE), struct)
    FIELD = getattr(self.TYPE, self.fldname)
    if isinstance(FIELD, lltype.ContainerType):
        return getattr(struct, self.fldname)
    return lltype.direct_fieldptr(struct, self.fldname)
def set_field_touched(struc, value):
    """Store *value* into field j of the virtualizable, marking the
    field as touched in the RTI bookkeeping first when it is a
    possibly-virtual gc pointer field."""
    # (removed: unused local 'T = fielddesc.RESTYPE')
    if fielddesc.canbevirtual and fielddesc.gcref:
        vable_rti = struc.vable_rti
        vable_rti = cast_base_ptr_to_instance(VirtualizableRTI, vable_rti)
        vable_rti.touched_ptr_field(struc.vable_base, j)
    struc = lltype.cast_pointer(fielddesc.PTRTYPE, struc)
    setattr(struc, fielddesc.fieldname, value)
def test_cast_simple_widening2():
    # Widening a pointer to the first inlined substructure recovers the
    # enclosing GcStruct.
    S2 = lltype.GcStruct("s2", ('a', lltype.Signed))
    S1 = lltype.GcStruct("s1", ('sub1', S2))
    outer = malloc(S1)
    inner = outer.sub1
    assert lltype.typeOf(inner) == lltype.Ptr(S2)
    widened = cast_pointer(lltype.Ptr(S1), inner)
    assert widened == outer
    inner = malloc(S2)
def materialize(rgenop, boxes):
    """Allocate a concrete TYPE and fill its fields from the given
    value boxes (one per entry of 'descs'), returning a genconst for
    the resulting structure."""
    s = lltype.malloc(TYPE)
    # idiom: enumerate() instead of a hand-maintained index counter
    for i, desc in enumerate(descs):
        v = rvalue.ll_getvalue(boxes[i], desc.RESTYPE)
        # cast to the exact (sub)structure type declaring the field
        tgt = lltype.cast_pointer(desc.PTRTYPE, s)
        setattr(tgt, desc.fieldname, v)
    return rgenop.genconst(s)
def get_ll_pyobjectptr(self, rtyper):
    """Return the low-level PyObject* for the RPython instance wrapped
    by self.instance, converting it through its instance repr."""
    from pypy.rpython.rclass import getinstancerepr
    wrapperobj = self.instance
    # unwrap to the underlying RPython-level object
    rpython_obj = get_rpython_data(wrapperobj)
    rpython_cls = rpython_obj.__class__
    classdef = rtyper.annotator.bookkeeper.getuniqueclassdef(rpython_cls)
    r_inst = getinstancerepr(rtyper, classdef)
    # convert_const gives the ll instance; view it as a PyObject pointer
    pyobj = r_inst.convert_const(rpython_obj)
    return lltype.cast_pointer(PyObjPtr, pyobj)
def encode_type_shape(builder, info, TYPE, index):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    # 'index' seeds the infobits word; presumably the type id -- the
    # flag bits below are OR'ed on top of it
    infobits = index
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    #
    # finalizer / custom-trace function, if any, plus the matching flags
    kind_and_fptr = builder.special_funcptr_for_type(TYPE)
    if kind_and_fptr is not None:
        kind, fptr = kind_and_fptr
        info.finalizer_or_customtrace = fptr
        if kind == "finalizer":
            infobits |= T_HAS_FINALIZER
        elif kind == 'light_finalizer':
            infobits |= T_HAS_FINALIZER | T_HAS_LIGHTWEIGHT_FINALIZER
        elif kind == "custom_trace":
            infobits |= T_HAS_CUSTOM_TRACE
        else:
            assert 0, kind
    #
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        # variable-sized type: fill in the VARSIZE part of the record
        infobits |= T_IS_VARSIZE
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # struct with an inlined array: offsets are relative to the
            # start of the array field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if (isinstance(ARRAY.OF, lltype.Ptr)
                and ARRAY.OF.TO._gckind == 'gc'):
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE
            varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if builder.is_weakref_type(TYPE):
        infobits |= T_IS_WEAKREF
    if is_subclass_of_object(TYPE):
        infobits |= T_IS_RPYTHON_INSTANCE
    info.infobits = infobits | T_KEY_VALUE
def touch_update(strucref):
    """Mark the virtualizable as touched and flush all non-virtual
    redirected fields back into the structure, then install the
    'touched' access redirection."""
    struc = lltype.cast_opaque_ptr(TOPPTR, strucref)
    vable_rti = struc.vable_rti
    vable_rti = cast_base_ptr_to_instance(VirtualizableRTI, vable_rti)
    vable_rti.touch(struc.vable_base)
    vable_base = struc.vable_base
    # walk the redirected fields by index j
    j = -1
    for fielddesc, _ in redirected_fielddescs:
        j += 1
        if fielddesc.canbevirtual and fielddesc.gcref:
            # still-virtual fields are left alone
            if vable_rti.is_field_virtual(vable_base, j):
                continue
        # flush the current value back into the real structure
        v = vable_rti.read_field(fielddesc, vable_base, j)
        tgt = lltype.cast_pointer(fielddesc.PTRTYPE, struc)
        setattr(tgt, fielddesc.fieldname, v)
    ACCESSPTR = TOPPTR.TO.vable_access
    struc.vable_access = lltype.cast_pointer(ACCESSPTR, access_touched)
def test_more_fakeaddress_equality():
    """The address of a struct equals the address of its first inlined
    substructure, even via cast_pointer'ed views."""
    S = lltype.GcStruct('S', ('x', lltype.Signed))
    T = lltype.GcStruct('T', ('s', S))
    t = lltype.malloc(T)
    t.s.x = 1
    s = lltype.cast_pointer(lltype.Ptr(S), t)
    # BUG FIX: the list was [s, t], binding a_t to the address of 's'
    # and a_s to the address of 't' -- harmless for the symmetric ==
    # check below, but the names were swapped.  Map each name to its
    # own pointer.
    a_t, a_s = map(cast_ptr_to_adr, [t, s])
    assert a_t == a_s
def genconst(llvalue):
    """Wrap *llvalue* as a flow-graph Constant (with its erased type)
    and return it as an opaque object."""
    T = lltype.typeOf(llvalue)
    T1 = lltype.erasedType(T)
    if T1 != T:
        # erase the pointer type before building the constant
        llvalue = lltype.cast_pointer(T1, llvalue)
    v = flowmodel.Constant(llvalue)
    v.concretetype = T1
    if v.concretetype == lltype.Void:
        # XXX genconst should not really be used for Void constants
        assert not isinstance(llvalue, str) and \
               not isinstance(llvalue, lltype.LowLevelType)
    return to_opaque_object(v)
def test_cast_simple_widening():
    # Widening a first-substructure pointer recovers the enclosing
    # struct; casting to an unrelated struct type must raise TypeError.
    S2 = lltype.Struct("s2", ('a', lltype.Signed))
    S1 = lltype.Struct("s1", ('sub1', S2), ('sub2', S2))
    p1 = malloc(S1, immortal=True)
    # p3 keeps its name: it is referenced by name inside the string
    # evaluated by py.test.raises below
    p3 = p1.sub1
    assert cast_pointer(lltype.Ptr(S1), p3) == p1
    SUnrelated = lltype.Struct("unrelated")
    py.test.raises(TypeError, "cast_pointer(lltype.Ptr(SUnrelated), p3)")
def encode_type_shape(builder, info, TYPE, index):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    # 'index' seeds the infobits word; presumably the type id -- the
    # flag bits below are OR'ed on top of it
    infobits = index
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    #
    # finalizer / custom-trace function, if any, plus the matching flags
    kind_and_fptr = builder.special_funcptr_for_type(TYPE)
    if kind_and_fptr is not None:
        kind, fptr = kind_and_fptr
        info.finalizer_or_customtrace = fptr
        if kind == "finalizer":
            infobits |= T_HAS_FINALIZER
        elif kind == "light_finalizer":
            infobits |= T_HAS_FINALIZER | T_HAS_LIGHTWEIGHT_FINALIZER
        elif kind == "custom_trace":
            infobits |= T_HAS_CUSTOM_TRACE
        else:
            assert 0, kind
    #
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(llmemory.sizeof(TYPE),
            builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        # variable-sized type: fill in the VARSIZE part of the record
        infobits |= T_IS_VARSIZE
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # struct with an inlined array: offsets are relative to the
            # start of the array field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if isinstance(ARRAY.OF, lltype.Ptr) and ARRAY.OF.TO._gckind == "gc":
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE
            varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if builder.is_weakref_type(TYPE):
        infobits |= T_IS_WEAKREF
    if is_subclass_of_object(TYPE):
        infobits |= T_IS_RPYTHON_INSTANCE
    info.infobits = infobits | T_KEY_VALUE
def _cast_to_ptr(self, EXPECTED_TYPE):
    """Convert the wrapped pointer to EXPECTED_TYPE; null stays null."""
    if not self:
        return lltype.nullptr(EXPECTED_TYPE.TO)
    PTRTYPE = lltype.typeOf(self.ptr)
    opaque_involved = (isinstance(EXPECTED_TYPE.TO, lltype.OpaqueType) or
                       isinstance(PTRTYPE.TO, lltype.OpaqueType))
    if opaque_involved:
        return lltype.cast_opaque_ptr(EXPECTED_TYPE, self.ptr)
    # regular (non-opaque) pointer cast
    return lltype.cast_pointer(EXPECTED_TYPE, self.ptr)
def f(n, m, j):
    """JIT test loop: accumulate into 'sa' while flip-flopping the
    static type seen through 'p' (A before i==m, B after)."""
    i = sa = 0
    pa = lltype.malloc(A)
    pa.val = 7
    p = pa.parent
    while i < n:
        myjitdriver.jit_merge_point(n=n, m=m, i=i, j=j, sa=sa, p=p)
        if i < m:
            # downcast the parent pointer back to A and read val
            pa = lltype.cast_pointer(lltype.Ptr(A), p)
            sa += pa.val
        elif i == m:
            # swap the object behind 'p' for a B instance
            pb = lltype.malloc(B)
            pb.charval = 'y'
            p = pb.parent
        else:
            pb = lltype.cast_pointer(lltype.Ptr(B), p)
            sa += ord(pb.charval)
        sa += 100
        assert n>0 and m>0
        i += j
    return sa
def get_field_touched(struc):
    """Read field j from the virtualizable: if the field is currently
    virtual, force it out of the RTI into the structure first, then
    return the value."""
    # (removed: unused local 'T = fielddesc.RESTYPE')
    tgt = lltype.cast_pointer(fielddesc.PTRTYPE, struc)
    if fielddesc.canbevirtual and fielddesc.gcref:
        vable_rti = struc.vable_rti
        vable_rti = cast_base_ptr_to_instance(VirtualizableRTI, vable_rti)
        vable_base = struc.vable_base
        if vable_rti.is_field_virtual(vable_base, j):
            # this will force
            s = vable_rti.read_field(fielddesc, vable_base, j)
            setattr(tgt, fielddesc.fieldname, s)
            return s
    return getattr(tgt, fielddesc.fieldname)
def _generalcast(T, value):
    # Dispatch on the target type: pointer cast, address cast, or a
    # primitive cast (converting addresses/pointers to int first).
    if isinstance(T, lltype.Ptr):
        return lltype.cast_pointer(T, value)
    if T == llmemory.Address:
        return llmemory.cast_ptr_to_adr(value)
    SRC = lltype.typeOf(value)
    if SRC is llmemory.Address:
        value = llmemory.cast_adr_to_int(value)
    elif isinstance(SRC, lltype.Ptr):
        value = lltype.cast_ptr_to_int(value)
    return lltype.cast_primitive(T, value)
def cast_any_ptr(EXPECTED_TYPE, ptr):
    # this is a generalization of the various cast_xxx_ptr() functions.
    PTRTYPE = lltype.typeOf(ptr)
    if PTRTYPE == EXPECTED_TYPE:
        return ptr
    if EXPECTED_TYPE == WeakRefPtr:
        return cast_ptr_to_weakrefptr(ptr)
    if PTRTYPE == WeakRefPtr:
        # unwrap the weakref, then retry with the plain pointer
        return cast_any_ptr(EXPECTED_TYPE, cast_weakrefptr_to_ptr(None, ptr))
    if (isinstance(EXPECTED_TYPE.TO, lltype.OpaqueType) or
            isinstance(PTRTYPE.TO, lltype.OpaqueType)):
        return lltype.cast_opaque_ptr(EXPECTED_TYPE, ptr)
    # regular case
    return lltype.cast_pointer(EXPECTED_TYPE, ptr)
def test_weakref():
    """weakref_create/deref through cast_pointer'ed views of an object."""
    S1 = lltype.GcStruct('S1', ('x',lltype.Signed))
    S = lltype.GcStruct('S', ('s1', S1))
    s = lltype.malloc(S)
    s1 = lltype.cast_pointer(lltype.Ptr(S1), s)
    w = weakref_create(s)
    # deref can be asked for either view of the same object
    assert weakref_deref(lltype.Ptr(S), w) == s
    assert weakref_deref(lltype.Ptr(S1), w) == s1
    # check that the weakref stays alive even if there are only
    # cast_pointer'ed references around
    del s
    import gc; gc.collect()
    assert weakref_deref(lltype.Ptr(S1), w) == s1
    # now really kill the structure
    del s1
    import gc; gc.collect()
    # a dead weakref derefs to null, whichever view is requested
    assert weakref_deref(lltype.Ptr(S), w) == lltype.nullptr(S)
    assert weakref_deref(lltype.Ptr(S1), w) == lltype.nullptr(S1)
def _generalcast(T, value):
    # Identity, pointer, address, static-method, or primitive cast,
    # checked in that order.
    if lltype.typeOf(value) == T:
        return value
    if isinstance(T, lltype.Ptr):
        return lltype.cast_pointer(T, value)
    if T == llmemory.Address:
        return llmemory.cast_ptr_to_adr(value)
    if isinstance(T, ootype.StaticMethod):
        # rebuild the static method object with the target type
        fn = value._obj
        return ootype._static_meth(T, graph=fn.graph, _callable=fn._callable)
    SRC = lltype.typeOf(value)
    if SRC is llmemory.Address:
        value = llmemory.cast_adr_to_int(value)
    elif isinstance(SRC, lltype.Ptr):
        value = lltype.cast_ptr_to_int(value)
    return lltype.cast_primitive(T, value)
def encode_type_shape(builder, info, TYPE, index):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    # 'index' seeds the infobits word; presumably the type id -- the
    # flag bits below are OR'ed on top of it
    infobits = index
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    info.finalizer = builder.make_finalizer_funcptr_for_type(TYPE)
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        # variable-sized type: fill in the VARSIZE part of the record
        infobits |= T_IS_VARSIZE
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # struct with an inlined array: offsets are relative to the
            # start of the array field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if (isinstance(ARRAY.OF, lltype.Ptr)
                and ARRAY.OF.TO._gckind == 'gc'):
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE
            varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if TYPE == WEAKREF:
        infobits |= T_IS_WEAKREF
    info.infobits = infobits
def encode_type_shape(builder, info, TYPE):
    """Encode the shape of the TYPE into the TYPE_INFO structure 'info'."""
    offsets = offsets_to_gc_pointers(TYPE)
    # flag bits accumulated below; no type-id seed in this variant
    infobits = 0
    info.ofstoptrs = builder.offsets2table(offsets, TYPE)
    info.finalizer = builder.make_finalizer_funcptr_for_type(TYPE)
    if not TYPE._is_varsize():
        info.fixedsize = llarena.round_up_for_allocation(
            llmemory.sizeof(TYPE), builder.GCClass.object_minimal_size)
        # note about round_up_for_allocation(): in the 'info' table
        # we put a rounded-up size only for fixed-size objects.  For
        # varsize ones, the GC must anyway compute the size at run-time
        # and round up that result.
    else:
        # variable-sized type: fill in the VARSIZE part of the record
        infobits |= T_IS_VARSIZE
        varinfo = lltype.cast_pointer(GCData.VARSIZE_TYPE_INFO_PTR, info)
        info.fixedsize = llmemory.sizeof(TYPE, 0)
        if isinstance(TYPE, lltype.Struct):
            # struct with an inlined array: offsets are relative to the
            # start of the array field
            ARRAY = TYPE._flds[TYPE._arrayfld]
            ofs1 = llmemory.offsetof(TYPE, TYPE._arrayfld)
            varinfo.ofstolength = ofs1 + llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = ofs1 + llmemory.itemoffsetof(ARRAY, 0)
        else:
            assert isinstance(TYPE, lltype.GcArray)
            ARRAY = TYPE
            if (isinstance(ARRAY.OF, lltype.Ptr)
                and ARRAY.OF.TO._gckind == 'gc'):
                infobits |= T_IS_GCARRAY_OF_GCPTR
            varinfo.ofstolength = llmemory.ArrayLengthOffset(ARRAY)
            varinfo.ofstovar = llmemory.itemoffsetof(TYPE, 0)
        assert isinstance(ARRAY, lltype.Array)
        if ARRAY.OF != lltype.Void:
            offsets = offsets_to_gc_pointers(ARRAY.OF)
        else:
            offsets = ()
        if len(offsets) > 0:
            infobits |= T_HAS_GCPTR_IN_VARSIZE
            varinfo.varofstoptrs = builder.offsets2table(offsets, ARRAY.OF)
        varinfo.varitemsize = llmemory.sizeof(ARRAY.OF)
    if TYPE == WEAKREF:
        infobits |= T_IS_WEAKREF
    info.infobits = infobits
def force_virtual(self, inst):
    """Force a JIT virtual ref and return the real object it stands for.

    'virtual_token' encodes the state: TOKEN_NONE means already forced
    (or never virtual); TOKEN_TRACING_RESCALL means we are inside
    tracing; any other token identifies the resume frame to force.
    """
    vref = lltype.cast_pointer(lltype.Ptr(self.JIT_VIRTUAL_REF), inst)
    token = vref.virtual_token
    if token != self.TOKEN_NONE:
        if token == self.TOKEN_TRACING_RESCALL:
            # The "virtual" is not a virtual at all during tracing.
            # We only need to reset virtual_token to TOKEN_NONE
            # as a marker for the tracing, to tell it that this
            # "virtual" escapes.
            assert vref.forced
            vref.virtual_token = self.TOKEN_NONE
        else:
            assert not vref.forced
            from pypy.jit.metainterp.compile import ResumeGuardForcedDescr
            # force the frame identified by 'token'; this fills in
            # vref.forced as a side effect
            ResumeGuardForcedDescr.force_now(self.cpu, token)
            assert vref.virtual_token == self.TOKEN_NONE
            assert vref.forced
    else:
        # already forced earlier
        assert vref.forced
    return vref.forced
def ll_frame_switch(targetstate):
    """Switch execution to *targetstate* (variant tracking f_depth).

    Three-phase stackless protocol via global_state.restart_substate:
      -1 : normal entry -- save our state and raise UnwindException;
       0 : stack is unwound -- install the target chain, prepare the
           state for switching back, and raise SwitchException;
      else: STATE 1 -- we are being switched back into; return the
           origin state to the caller.
    """
    if global_state.restart_substate == -1:
        # normal entry point for a call to state.switch()
        # first unwind the stack
        u = UnwindException()
        s = lltype.malloc(SWITCH_STATE)
        s.header.f_restart = INDEX_SWITCH
        # keep 'targetstate' alive across the unwind as an opaque ref
        s.c = lltype.cast_opaque_ptr(SAVED_REFERENCE, targetstate)
        add_frame_state(u, s.header)
        raise u
    elif global_state.restart_substate == 0:
        # STATE 0: we didn't do anything so far, but the stack is unwound
        global_state.restart_substate = -1
        # grab the frame corresponding to ourself
        # the 'targetstate' local is garbage here, it must be read back from
        # 's.c' where we saved it by the normal entry point above
        mystate = global_state.top
        s = lltype.cast_pointer(lltype.Ptr(SWITCH_STATE), mystate)
        targetstate = lltype.cast_opaque_ptr(lltype.Ptr(STATE_HEADER), s.c)
        # prepare a new saved state for the future switch() back,
        # which will go to STATE 1 below
        sourcestate = lltype.malloc(EMPTY_STATE).header
        sourcestate.f_back = mystate.f_back
        sourcestate.f_restart = INDEX_SWITCH + 1
        # this variant also preserves the recorded stack depth
        sourcestate.f_depth = mystate.f_depth
        global_state.top = targetstate
        global_state.retval_ref = lltype.cast_opaque_ptr(
            SAVED_REFERENCE, sourcestate)
        raise SwitchException()  # this jumps to targetstate
    else:
        # STATE 1: switching back into a tasklet suspended by
        # a call to switch()
        global_state.top = frame.null_state
        global_state.restart_substate = -1
        origin_state = lltype.cast_opaque_ptr(frame.OPAQUE_STATE_HEADER_PTR,
                                              fetch_retval_ref())
        return origin_state  # a normal return into the current tasklet