def optimize_GUARD_VALUE(self, op):
    """Optimize a GUARD_VALUE: reject promotes of virtuals, fold a
    preceding guard_class/guard_nonnull_class on the same box into this
    guard_value when provably safe, then fall through to the generic
    constant-guard handling.

    Raises InvalidLoop when the guard can be proven to always fail or
    when a virtual (freshly allocated) object is promoted.
    """
    value = self.getvalue(op.getarg(0))
    if value.is_virtual():
        # Promoting a recently allocated object can never stabilize:
        # every loop iteration produces a fresh object.
        arg = value.get_constant_class(self.optimizer.cpu)
        if arg:
            addr = arg.getaddr()
            name = self.optimizer.metainterp_sd.get_name_from_address(addr)
        else:
            name = "<unknown>"
        raise InvalidLoop(
            'A promote of a virtual %s (a recently allocated object) never makes sense!' % name)
    old_guard_op = value.get_last_guard(self.optimizer)
    if old_guard_op and not isinstance(old_guard_op.getdescr(),
                                       compile.ResumeAtPositionDescr):
        # there already has been a guard_nonnull or guard_class or
        # guard_nonnull_class on this value, which is rather silly.
        # replace the original guard with a guard_value
        if old_guard_op.getopnum() != rop.GUARD_NONNULL:
            # This is only safe if the class of the guard_value matches the
            # class of the guard_*_class, otherwise the intermediate ops might
            # be executed with wrong classes.
            previous_classbox = value.get_constant_class(
                self.optimizer.cpu)
            expected_classbox = self.optimizer.cpu.ts.cls_of_box(
                op.getarg(1))
            assert previous_classbox is not None
            assert expected_classbox is not None
            if not previous_classbox.same_constant(expected_classbox):
                r = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(
                    op)
                raise InvalidLoop(
                    'A GUARD_VALUE (%s) was proven to always fail' % r)
        descr = compile.ResumeGuardValueDescr()
        op = old_guard_op.copy_and_change(rop.GUARD_VALUE,
            args=[old_guard_op.getarg(0), op.getarg(1)],
            descr=descr)
        # Note: we give explicitly a new descr for 'op'; this is why the
        # old descr must not be ResumeAtPositionDescr (checked above).
        # Better-safe-than-sorry but it should never occur: we should
        # not put in short preambles guard_xxx and guard_value
        # on the same box.
        self.optimizer.replace_guard(op, value)
        descr.make_a_counter_per_value(op)
        # to be safe: the moved guard invalidates the cached position of
        # the last guard on this value
        if isinstance(value, PtrOptValue):
            value.last_guard_pos = -1
    constbox = op.getarg(1)
    assert isinstance(constbox, Const)
    self.optimize_guard(op, constbox)
def optimize_GUARD_GC_TYPE(self, op):
    """Drop a GUARD_GC_TYPE when the type id is statically known, and
    declare the loop invalid when the known id contradicts the expected
    one; otherwise emit the guard unchanged."""
    opinfo = getptrinfo(op.getarg(0))
    expected_tid = op.getarg(1).getint()
    if opinfo and opinfo.is_constant():
        const = get_box_replacement(op.getarg(0))
        actual_tid = self.optimizer.cpu.get_actual_typeid(const.getref_base())
        if actual_tid != expected_tid:
            raise InvalidLoop("wrong GC type ID found on a constant")
        return
    if opinfo is not None:
        known_descr = opinfo.get_descr()
        if known_descr is not None:
            if known_descr.get_type_id() != expected_tid:
                raise InvalidLoop("wrong GC types passed around!")
            return
    return self.emit(op)
def optimize_GUARD_OVERFLOW(self, op):
    """Validate a GUARD_OVERFLOW against the previously emitted op.

    If INT_xxx_OVF was replaced by INT_xxx, *but* we still see
    GUARD_OVERFLOW, then the loop is invalid: the arithmetic was proven
    never to overflow, so the overflow branch can never be entered.

    Bug fix: the error message was built as 'but' + 'guarded', which
    rendered as "butguarded"; a separating space has been added.
    """
    lastop = self.last_emitted_operation
    if lastop is None:
        # the _OVF op was optimized away entirely
        raise InvalidLoop('An INT_xxx_OVF was proven not to overflow but '
                          'guarded with GUARD_OVERFLOW')
    opnum = lastop.getopnum()
    if opnum not in (rop.INT_ADD_OVF, rop.INT_SUB_OVF, rop.INT_MUL_OVF):
        # the _OVF op was rewritten to a plain non-overflow-checking op
        raise InvalidLoop('An INT_xxx_OVF was proven not to overflow but '
                          'guarded with GUARD_OVERFLOW')
    self.emit_operation(op)
def optimize_GUARD_IS_OBJECT(self, op):
    """Fold GUARD_IS_OBJECT when the argument's object-ness is already
    known, raising InvalidLoop when the guard can never pass.

    Bug fix: the 'precise but not an object' branch raised a bare
    InvalidLoop() with no diagnostic message, unlike every sibling
    handler; a message is now supplied.
    """
    info = getptrinfo(op.getarg(0))
    if info and info.is_constant():
        if info.is_null():
            raise InvalidLoop("A GUARD_IS_OBJECT(NULL) found")
        c = get_box_replacement(op.getarg(0))
        if self.optimizer.cpu.check_is_object(c.getref_base()):
            return
        raise InvalidLoop("A GUARD_IS_OBJECT(not-an-object) found")
    if info is not None:
        if info.is_about_object():
            return  # already known to be an object
        if info.is_precise():
            # the layout is fully known and it is not an object
            raise InvalidLoop("A GUARD_IS_OBJECT on a precisely known "
                              "non-object found")
    return self.emit(op)
def optimize_GUARD_NONNULL_CLASS(self, op):
    """Reject the loop when the guarded box is known to be NULL;
    otherwise the class check is exactly a guard_class."""
    guarded = self.getvalue(op.getarg(0))
    if not guarded.is_null():
        self.optimize_GUARD_CLASS(op)
        return
    resop_repr = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
    raise InvalidLoop('A GUARD_NONNULL_CLASS (%s) was proven to '
                      'always fail' % resop_repr)
def optimize_GUARD_CLASS(self, op):
    """Optimize a GUARD_CLASS: drop it when the class is already known
    to match, raise InvalidLoop when it is known not to, and strengthen
    an earlier guard_nonnull on the same box into a guard_nonnull_class
    where possible."""
    expectedclassbox = op.getarg(1)
    info = self.ensure_ptr_info_arg0(op)
    assert isinstance(expectedclassbox, Const)
    realclassbox = info.get_known_class(self.optimizer.cpu)
    if realclassbox is not None:
        if realclassbox.same_constant(expectedclassbox):
            return
        r = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
        raise InvalidLoop('A GUARD_CLASS (%s) was proven to always fail'
                          % r)
    old_guard_op = info.get_last_guard(self.optimizer)
    if old_guard_op and not isinstance(old_guard_op.getdescr(),
                                       compile.ResumeAtPositionDescr):
        # there already has been a guard_nonnull or guard_class or
        # guard_nonnull_class on this value.
        if old_guard_op.getopnum() == rop.GUARD_NONNULL:
            # it was a guard_nonnull, which we replace with a
            # guard_nonnull_class.
            descr = compile.ResumeGuardDescr()
            op = old_guard_op.copy_and_change(rop.GUARD_NONNULL_CLASS,
                args=[old_guard_op.getarg(0), op.getarg(1)],
                descr=descr)
            # Note: we give explicitly a new descr for 'op'; this is why the
            # old descr must not be ResumeAtPositionDescr (checked above).
            # Better-safe-than-sorry but it should never occur: we should
            # not put in short preambles guard_nonnull and guard_class
            # on the same box.
            self.optimizer.replace_guard(op, info)
            return self.emit(op)
    return self.emit(op)
def optimize_GUARD_NONNULL_CLASS(self, op):
    """A guard_nonnull_class on a box already known to be NULL can never
    pass; otherwise it behaves exactly like a plain guard_class."""
    opinfo = getptrinfo(op.getarg(0))
    if opinfo and opinfo.is_null():
        resop_repr = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
        raise InvalidLoop('A GUARD_NONNULL_CLASS (%s) was proven to '
                          'always fail' % resop_repr)
    return self.optimize_GUARD_CLASS(op)
def optimize_QUASIIMMUT_FIELD(self, op):
    """Register a dependency on a quasi-immutable field, and check that
    the field's value has not changed since tracing.

    Raises InvalidLoop when the quasi-immutable field already changed
    between tracing and now.
    """
    # Pattern: QUASIIMMUT_FIELD(s, descr=QuasiImmutDescr)
    #          x = GETFIELD_GC(s, descr='inst_x') # pure
    # If 's' is a constant (after optimizations) we rely on the rest of the
    # optimizations to constant-fold the following pure getfield_gc.
    # in addition, we record the dependency here to make invalidation work
    # correctly.
    # NB: emitting the pure GETFIELD_GC is only safe because the
    # QUASIIMMUT_FIELD is also emitted to make sure the dependency is
    # registered.
    structvalue = self.ensure_ptr_info_arg0(op)
    if not structvalue.is_constant():
        return    # not a constant at all; ignore QUASIIMMUT_FIELD
    # from rpython.jit.metainterp.quasiimmut import QuasiImmutDescr
    qmutdescr = op.getdescr()
    assert isinstance(qmutdescr, QuasiImmutDescr)
    # check that the value is still correct; it could have changed
    # already between the tracing and now.  In this case, we mark the loop
    # as invalid
    if not qmutdescr.is_still_valid_for(get_box_replacement(op.getarg(0))):
        raise InvalidLoop('quasi immutable field changed during tracing')
    # record as an out-of-line guard: invalidation of qmutdescr.qmut
    # will invalidate this loop
    if self.optimizer.quasi_immutable_deps is None:
        self.optimizer.quasi_immutable_deps = {}
    self.optimizer.quasi_immutable_deps[qmutdescr.qmut] = None
def use_box(self, box, preamble_op, optimizer=None):
    """Record usage of *box* in the short preamble, keeping the trailing
    JUMP as the last operation.

    Raises InvalidLoop when the short preamble is frozen and may not be
    modified in place.
    """
    if not self.build_inplace:
        raise InvalidLoop(
            "Forcing boxes would modify an existing short preamble")
    # temporarily take the jump off, delegate, then restore it at the end
    trailing_jump = self.short.pop()
    AbstractShortPreambleBuilder.use_box(self, box, preamble_op, optimizer)
    self.short.append(trailing_jump)
def optimize_GUARD_NONNULL(self, op):
    """Drop the guard when non-nullness is already established, declare
    the loop invalid when the box is known NULL, and emit the guard
    otherwise."""
    tracked = getptrinfo(op.getarg(0))
    if tracked is not None:
        if tracked.is_nonnull():
            return  # trivially true
        if tracked.is_null():
            shown = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
            raise InvalidLoop('A GUARD_NONNULL (%s) was proven to always '
                              'fail' % shown)
    return self.emit(op)
def optimize_GUARD_NONNULL(self, op):
    """Value-based guard_nonnull: drop if already known non-null,
    invalidate the loop if known NULL, otherwise emit the guard and
    remember the non-nullness it establishes."""
    guarded = self.getvalue(op.getarg(0))
    if guarded.is_null():
        shown = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
        raise InvalidLoop(
            'A GUARD_NONNULL (%s) was proven to always fail' % shown)
    if guarded.is_nonnull():
        return
    self.emit_operation(op)
    # once emitted, the guard proves non-nullness for everything after it
    guarded.make_nonnull(self.optimizer)
def make_len_gt(self, mode, descr, val):
    """Record that this value's length is strictly greater than *val*,
    creating a new length bound or tightening the existing one.

    Raises InvalidLoop when the existing bound was recorded with a
    different mode/descr pair.
    """
    existing = self.lenbound
    if not existing:
        self.lenbound = LenBound(mode, descr, IntLowerBound(val + 1))
        return
    if existing.mode != mode or existing.descr != descr:
        # XXX a rare case? it seems to occur sometimes when
        # running lib-python's test_io.py in PyPy on Linux 32...
        from rpython.jit.metainterp.optimize import InvalidLoop
        raise InvalidLoop("bad mode/descr")
    existing.bound.make_gt(IntBound(val, val))
def optimize_GUARD_ISNULL(self, op):
    """Drop the guard when NULL-ness is already known, invalidate the
    loop when the box is known non-null, otherwise emit the guard and
    record the box as the NULL constant."""
    tracked = self.getptrinfo(op.getarg(0))
    if tracked is not None:
        if tracked.is_nonnull():
            shown = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
            raise InvalidLoop('A GUARD_ISNULL (%s) was proven to always '
                              'fail' % shown)
        if tracked.is_null():
            return
    self.emit_operation(op)
    # after the guard passes, the box can only ever be NULL
    self.make_constant(op.getarg(0), self.optimizer.cpu.ts.CONST_NULL)
def replace_old_guard_with_guard_value(self, op, info, old_guard_op):
    """Replace *old_guard_op* (an earlier guard_nonnull/guard_class/
    guard_nonnull_class on the same box) with the stronger GUARD_VALUE
    *op*, returning the operation to emit.

    Raises InvalidLoop when the expected constant contradicts what the
    old guard established (NULL, or a different class).
    """
    # there already has been a guard_nonnull or guard_class or
    # guard_nonnull_class on this value, which is rather silly.
    # This function replaces the original guard with a
    # guard_value.  Must be careful: doing so is unsafe if the
    # original guard checks for something inconsistent,
    # i.e. different than what it would give if the guard_value
    # passed (this is a rare case, but possible).  If we get
    # inconsistent results in this way, then we must not do the
    # replacement, otherwise we'd put guard_value up there but all
    # intermediate ops might be executed by assuming something
    # different, from the old guard that is now removed...
    c_value = op.getarg(1)
    if not c_value.nonnull():
        raise InvalidLoop('A GUARD_VALUE(..., NULL) follows some other '
                          'guard that it is not NULL')
    previous_classbox = info.get_known_class(self.optimizer.cpu)
    if previous_classbox is not None:
        expected_classbox = self.optimizer.cpu.cls_of_box(c_value)
        assert expected_classbox is not None
        if not previous_classbox.same_constant(
                expected_classbox):
            r = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
            raise InvalidLoop('A GUARD_VALUE (%s) was proven to '
                              'always fail' % r)
    if not self.optimizer.can_replace_guards:
        # replacing guards is disabled; keep the guard_value as-is
        return op
    descr = compile.ResumeGuardDescr()
    op = old_guard_op.copy_and_change(rop.GUARD_VALUE,
        args=[old_guard_op.getarg(0), op.getarg(1)],
        descr=descr)
    # Note: we give explicitly a new descr for 'op'; this is why the
    # old descr must not be ResumeAtPositionDescr (checked above).
    # Better-safe-than-sorry but it should never occur: we should
    # not put in short preambles guard_xxx and guard_value
    # on the same box.
    self.optimizer.replace_guard(op, info)
    # to be safe: the replaced guard invalidates the cached position of
    # the last guard on this info
    info.reset_last_guard_pos()
    return op
def optimize_GUARD_NONNULL(self, op):
    """Drop the guard if non-nullness is known, invalidate the loop if
    the box is known NULL; otherwise emit the guard, record the fact,
    and remember this guard as the last one on the box."""
    box = op.getarg(0)
    known = self.getptrinfo(box)
    if known is not None:
        if known.is_null():
            shown = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
            raise InvalidLoop('A GUARD_NONNULL (%s) was proven to always '
                              'fail' % shown)
        if known.is_nonnull():
            return
    self.emit_operation(op)
    self.make_nonnull(box)
    # remember this guard so a later guard_class can be folded into it
    self.getptrinfo(box).mark_last_guard(self.optimizer)
def optimize_GETARRAYITEM_GC_I(self, op):
    """Fold the read of a virtual array item at trace time when both the
    array and the index are known; otherwise emit the operation.

    Raises InvalidLoop on a read of a never-written virtual item.
    """
    arraybox = op.getarg(0)
    arrayinfo = getptrinfo(arraybox)
    if arrayinfo and arrayinfo.is_virtual():
        const_index = self.get_constant_box(op.getarg(1))
        if const_index is not None:
            stored = arrayinfo.getitem(op.getdescr(), const_index.getint())
            if stored is None:
                # the item was never initialized in the virtual array
                raise InvalidLoop("reading uninitialized virtual "
                                  "array items")
            self.make_equal_to(op, stored)
            return
    self.make_nonnull(arraybox)
    return self.emit(op)
def optimize_guard(self, op, constbox):
    """Shared handling for guards pinning a box to *constbox*: drop the
    guard when the box is already that constant, invalidate the loop when
    it is a different constant, and emit the guard otherwise."""
    box = op.getarg(0)
    if box.type == 'i':
        bound = self.getintbound(box)
        if bound.is_constant():
            if bound.getint() == constbox.getint():
                return  # trivially true
            shown = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
            raise InvalidLoop('A GUARD_{VALUE,TRUE,FALSE} (%s) '
                              'was proven to always fail' % shown)
    elif box.type == 'r':
        box = get_box_replacement(box)
        if box.is_constant():
            if box.same_constant(constbox):
                return  # trivially true
            shown = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
            raise InvalidLoop('A GUARD_VALUE (%s) '
                              'was proven to always fail' % shown)
    return self.emit(op)
def optimize_GUARD_SUBCLASS(self, op):
    """Optimize GUARD_SUBCLASS using whatever class knowledge is
    available: drop the guard when it provably passes, raise InvalidLoop
    when it provably fails, and keep it only when undecidable."""
    info = getptrinfo(op.getarg(0))
    optimizer = self.optimizer
    # must raise 'InvalidLoop' in all cases where 'info' shows the
    # class cannot possibly match (see test_issue2926)
    if info and info.is_constant():
        c = get_box_replacement(op.getarg(0))
        vtable = optimizer.cpu.cls_of_box(c).getint()
        if optimizer._check_subclass(vtable, op.getarg(1).getint()):
            return
        raise InvalidLoop("GUARD_SUBCLASS(const) proven to always fail")
    if info is not None and info.is_about_object():
        known_class = info.get_known_class(optimizer.cpu)
        if known_class:
            # Class of 'info' is exactly 'known_class'.
            # We know statically if the 'guard_subclass' will pass or fail.
            if optimizer._check_subclass(known_class.getint(),
                                         op.getarg(1).getint()):
                return
            else:
                raise InvalidLoop(
                    "GUARD_SUBCLASS(known_class) proven to always fail")
        elif info.get_descr() is not None:
            # Class of 'info' is either get_descr() or a subclass of it.
            # We're keeping the 'guard_subclass' at runtime only in the
            # case where get_descr() is some strict parent class of
            # the argument to 'guard_subclass'.
            info_base_descr = info.get_descr().get_vtable()
            if optimizer._check_subclass(info_base_descr,
                                         op.getarg(1).getint()):
                return    # guard_subclass always passing
            elif optimizer._check_subclass(op.getarg(1).getint(),
                                           info_base_descr):
                pass      # don't know, must keep the 'guard_subclass'
            else:
                raise InvalidLoop(
                    "GUARD_SUBCLASS(base_class) proven to always fail")
    return self.emit(op)
def optimize_GETINTERIORFIELD_GC_I(self, op):
    """Fold the read of an interior field of a virtual array when the
    index is a known constant; otherwise emit the operation.

    Raises InvalidLoop on a read of a never-written virtual field.
    """
    basebox = op.getarg(0)
    baseinfo = getptrinfo(basebox)
    if baseinfo and baseinfo.is_virtual():
        const_index = self.get_constant_box(op.getarg(1))
        if const_index is not None:
            fdescr = op.getdescr()
            stored = baseinfo.getinteriorfield_virtual(const_index.getint(),
                                                       fdescr)
            if stored is None:
                # the field was never initialized in the virtual
                raise InvalidLoop("reading uninitialized virtual interior "
                                  "array items")
            self.make_equal_to(op, stored)
            return
    self.make_nonnull(basebox)
    return self.emit(op)
def _inline_short_preamble(self, short_preamble, inliner, patchguardop,
                           assumed_classes):
    """Inline the short preamble operations (skipping the initial label
    at index 0) into the current trace, patching the resume data of
    every inlined guard and re-validating the assumed classes of opaque
    pointers.

    Raises InvalidLoop when a guard cannot be patched (no
    guard_future_condition available) or when an assumed class no longer
    matches at the jump.

    Bug fix: the user-visible message said "does not mach"; corrected
    to "does not match".
    """
    i = 1
    # XXX this is intentional :-(.  short_preamble can change during the
    # loop in some cases
    while i < len(short_preamble):
        shop = short_preamble[i]
        newop = inliner.inline_op(shop)
        if newop.is_guard():
            if not patchguardop:
                raise InvalidLoop("would like to have short preamble, but it has a guard and there's no guard_future_condition")
            assert isinstance(newop, GuardResOp)
            assert isinstance(patchguardop, GuardResOp)
            # inlined guards resume at the position of the patch guard
            newop.rd_snapshot = patchguardop.rd_snapshot
            newop.rd_frame_info_list = patchguardop.rd_frame_info_list
            newop.setdescr(compile.ResumeAtPositionDescr())
        self.optimizer.send_extra_operation(newop)
        if shop.result in assumed_classes:
            classbox = self.getvalue(newop.result).get_constant_class(self.optimizer.cpu)
            if not classbox or not classbox.same_constant(assumed_classes[shop.result]):
                raise InvalidLoop('The class of an opaque pointer before the jump ' +
                                  'does not match the class ' +
                                  'it has at the start of the target loop')
        i += 1
def add_preamble_op(self, preamble_op):
    """Mark *preamble_op* as actually used: append its (replaced) box to
    the label arguments and its preamble-side operation to the jump
    arguments.

    Raises InvalidLoop when the short preamble is frozen, since adding
    boxes would modify it in place.
    """
    # Could this be considered a speculative error?  This check should
    # only fail when trying to jump to an existing trace by forcing
    # portions of the virtualstate.
    if not self.build_inplace:
        raise InvalidLoop(
            "Forcing boxes would modify an existing short preamble")
    used_box = preamble_op.op.get_box_replacement()
    if preamble_op.invented_name:
        self.extra_same_as.append(used_box)
    self.label_args.append(used_box)
    self.jump_args.append(preamble_op.preamble_op)
def optimize_guard(self, op, constbox, emit_operation=True):
    """Common handling for guards that pin a value to *constbox*.

    A value already known to be the same constant makes the guard
    redundant (dropped); a different constant makes the loop invalid.
    Otherwise the guard is (optionally) emitted and the value is marked
    constant from here on, notifying the heap cache.
    """
    guarded = self.getvalue(op.getarg(0))
    if guarded.is_constant():
        known = guarded.box
        assert isinstance(known, Const)
        if known.same_constant(constbox):
            return  # trivially true; no need to emit
        shown = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
        raise InvalidLoop('A GUARD_{VALUE,TRUE,FALSE} (%s) was proven '
                          'to always fail' % shown)
    if emit_operation:
        self.emit_operation(op)
    guarded.make_constant(constbox)
    # let the heap cache know this value is now a known constant
    optheap = self.optimizer.optheap
    if optheap:
        optheap.value_updated(guarded, self.getvalue(constbox))
def make_constant(self, box, constbox):
    """Forward *box* to the constant *constbox* from now on.

    Safety: if integer bounds are already tracked for the box and the
    constant lies outside them, the loop being built is invalid.
    """
    assert isinstance(constbox, Const)
    box = get_box_replacement(box)
    # safety-check: if the constant is outside the bounds for the
    # box, then it is an invalid loop
    if (box.get_forwarded() is not None and
            isinstance(constbox, ConstInt) and
            not isinstance(box.get_forwarded(), info.AbstractRawPtrInfo)):
        if not box.get_forwarded().contains(constbox.getint()):
            raise InvalidLoop("a box is turned into constant that is "
                              "outside the range allowed for that box")
    if box.is_constant():
        # already a constant; nothing to forward
        return
    if box.type == 'r' and box.get_forwarded() is not None:
        opinfo = box.get_forwarded()
        # carry over any known field contents onto the constant's info
        opinfo.copy_fields_to_const(getptrinfo(constbox), self.optheap)
    box.set_forwarded(constbox)
def optimize_GUARD_VALUE(self, op):
    """Optimize a GUARD_VALUE: reject promotes of virtuals, fold a
    previous guard on the same pointer box into this guard_value, and
    fold guards on constant floats.

    Bug fix: for a float argument that is already a constant, the guard
    was dropped *unconditionally*, even when the constant differs from
    the expected value — silently removing an always-failing guard.  We
    now compare against the expected constant and raise InvalidLoop on a
    mismatch, mirroring the integer/pointer paths in optimize_guard.
    """
    arg0 = op.getarg(0)
    if arg0.type == 'r':
        info = getptrinfo(arg0)
        if info:
            if info.is_virtual():
                # promoting a freshly allocated object never stabilizes
                raise InvalidLoop("promote of a virtual")
            old_guard_op = info.get_last_guard(self.optimizer)
            if old_guard_op is not None:
                op = self.replace_old_guard_with_guard_value(op, info,
                                                             old_guard_op)
    elif arg0.type == 'f':
        arg0 = get_box_replacement(arg0)
        if arg0.is_constant():
            if not arg0.same_constant(op.getarg(1)):
                raise InvalidLoop('A GUARD_VALUE on a float constant was '
                                  'proven to always fail')
            return  # guard is trivially true
    constbox = op.getarg(1)
    assert isinstance(constbox, Const)
    return self.optimize_guard(op, constbox)
def optimize_GUARD_SUBCLASS(self, op):
    """Older GUARD_SUBCLASS handling: drop the guard when the available
    class information already proves it passes; otherwise keep it.

    NOTE(review): unlike the newer variant, a known class that fails the
    subclass check falls through to emitting the guard instead of
    raising InvalidLoop — presumably fixed later; confirm against the
    newer implementation.
    """
    info = self.getptrinfo(op.getarg(0))
    if info and info.is_constant():
        c = self.get_box_replacement(op.getarg(0))
        vtable = self.optimizer.cpu.ts.cls_of_box(c).getint()
        if self._check_subclass(vtable, op.getarg(1).getint()):
            return
        raise InvalidLoop("GUARD_SUBCLASS(const) proven to always fail")
    if info is not None and info.is_about_object():
        known_class = info.get_known_class(self.optimizer.cpu)
        if known_class:
            # the exact class is known: drop the guard if it is a
            # subclass of the expected one
            if self._check_subclass(known_class.getint(),
                                    op.getarg(1).getint()):
                return
        elif info.get_descr() is not None:
            # only a base descr is known: drop the guard if that base is
            # already a subclass of the expected class
            if self._check_subclass(info.get_descr().get_vtable(),
                                    op.getarg(1).getint()):
                return
    self.emit_operation(op)
def optimize_GUARD_CLASS(self, op):
    """Old value-based GUARD_CLASS: drop it when the class is already
    known to match, raise InvalidLoop when it is known not to, and
    strengthen an earlier guard_nonnull into a guard_nonnull_class when
    possible."""
    value = self.getvalue(op.getarg(0))
    expectedclassbox = op.getarg(1)
    assert isinstance(expectedclassbox, Const)
    realclassbox = value.get_constant_class(self.optimizer.cpu)
    if realclassbox is not None:
        if realclassbox.same_constant(expectedclassbox):
            return
        r = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
        raise InvalidLoop('A GUARD_CLASS (%s) was proven to always fail'
                          % r)
    assert isinstance(value, PtrOptValue)
    old_guard_op = value.get_last_guard(self.optimizer)
    if old_guard_op and not isinstance(old_guard_op.getdescr(),
                                       compile.ResumeAtPositionDescr):
        # there already has been a guard_nonnull or guard_class or
        # guard_nonnull_class on this value.
        if old_guard_op.getopnum() == rop.GUARD_NONNULL:
            # it was a guard_nonnull, which we replace with a
            # guard_nonnull_class.
            descr = compile.ResumeGuardNonnullClassDescr()
            op = old_guard_op.copy_and_change(rop.GUARD_NONNULL_CLASS,
                args=[old_guard_op.getarg(0), op.getarg(1)],
                descr=descr)
            # Note: we give explicitly a new descr for 'op'; this is why the
            # old descr must not be ResumeAtPositionDescr (checked above).
            # Better-safe-than-sorry but it should never occur: we should
            # not put in short preambles guard_nonnull and guard_class
            # on the same box.
            self.optimizer.replace_guard(op, value)
            # not emitting the guard, so we have to pass None to
            # make_constant_class, so last_guard_pos is not updated
            self.emit_operation(op)
            value.make_constant_class(None, expectedclassbox)
            return
    self.emit_operation(op)
    value.make_constant_class(self.optimizer, expectedclassbox)
def close_loop(self, start_label, jumpop, patchguardop):
    """Close the peeled loop: build the jump arguments from the virtual
    state, inline the short preamble at the end of the loop, import any
    preamble-produced boxes used in the loop, and verify that the final
    virtual state is compatible with the one at the loop start.

    Raises InvalidLoop when the states are inconsistent or incompatible,
    or when the short preamble's inputargs cannot be satisfied.
    """
    virtual_state = self.initial_virtual_state
    short_inputargs = self.short[0].getarglist()
    inputargs = self.inputargs
    short_jumpargs = inputargs[:]

    # Construct jumpargs from the virtual state
    original_jumpargs = jumpop.getarglist()[:]
    values = [self.getvalue(arg) for arg in jumpop.getarglist()]
    try:
        jumpargs = virtual_state.make_inputargs(values, self.optimizer)
    except BadVirtualState:
        raise InvalidLoop('The state of the optimizer at the end of ' +
                          'peeled loop is inconsistent with the ' +
                          'VirtualState at the beginning of the peeled ' +
                          'loop')
    jumpop.initarglist(jumpargs)

    # Inline the short preamble at the end of the loop
    jmp_to_short_args = virtual_state.make_inputargs(values,
                                                     self.optimizer,
                                                     keyboxes=True)
    assert len(short_inputargs) == len(jmp_to_short_args)
    # the same short-preamble inputarg may occur several times; all its
    # occurrences must be fed the same box by the jump
    args = {}
    for i in range(len(short_inputargs)):
        if short_inputargs[i] in args:
            if args[short_inputargs[i]] != jmp_to_short_args[i]:
                raise InvalidLoop('The short preamble wants the ' +
                                  'same box passed to multiple of its ' +
                                  'inputargs, but the jump at the ' +
                                  'end of this bridge does not do that.')
        args[short_inputargs[i]] = jmp_to_short_args[i]
    self.short_inliner = Inliner(short_inputargs, jmp_to_short_args)
    self._inline_short_preamble(self.short, self.short_inliner,
                                patchguardop,
                                self.short_boxes.assumed_classes)

    # Import boxes produced in the preamble but used in the loop
    newoperations = self.optimizer.get_newoperations()
    self.boxes_created_this_iteration = {}
    i = j = 0
    # note: both newoperations and jumpargs may grow while we iterate
    while i < len(newoperations) or j < len(jumpargs):
        if i == len(newoperations):
            while j < len(jumpargs):
                a = jumpargs[j]
                #if self.optimizer.loop.logops:
                #    debug_print('J: ' + self.optimizer.loop.logops.repr_of_arg(a))
                self.import_box(a, inputargs, short_jumpargs, jumpargs)
                j += 1
        else:
            self._import_op(newoperations[i], inputargs, short_jumpargs,
                            jumpargs)
            i += 1
        newoperations = self.optimizer.get_newoperations()

    jumpop.initarglist(jumpargs)
    self.optimizer.send_extra_operation(jumpop)
    self.short.append(ResOperation(rop.JUMP, short_jumpargs, None,
                                   descr=jumpop.getdescr()))

    # Verify that the virtual state at the end of the loop is one
    # that is compatible with the virtual state at the start of the loop
    final_virtual_state = self.get_virtual_state(original_jumpargs)
    #debug_start('jit-log-virtualstate')
    #virtual_state.debug_print('Closed loop with ')
    bad = {}
    if not virtual_state.generalization_of(final_virtual_state, bad,
                                           cpu=self.optimizer.cpu):
        # We ended up with a virtual state that is not compatible
        # and we are thus unable to jump to the start of the loop
        #final_virtual_state.debug_print("Bad virtual state at end of loop, ",
        #                                bad)
        #debug_stop('jit-log-virtualstate')
        raise InvalidLoop('The virtual state at the end of the peeled ' +
                          'loop is not compatible with the virtual ' +
                          'state at the start of the loop which makes ' +
                          'it impossible to close the loop')

    #debug_stop('jit-log-virtualstate')

    # too many guards were emitted: give up on retracing this loop
    maxguards = self.optimizer.metainterp_sd.warmrunnerdesc.memory_manager.max_retrace_guards
    if self.optimizer.emitted_guards > maxguards:
        target_token = jumpop.getdescr()
        assert isinstance(target_token, TargetToken)
        target_token.targeting_jitcell_token.retraced_count = sys.maxint
    self.finalize_short_preamble(start_label)
def optimize_peeled_loop(self, trace, celltoken, state, runtime_boxes,
                         call_pure_results, inline_short_preamble=True):
    """Optimize the peeled iteration of a loop: import the preamble's
    exported state, propagate optimizations forward, pick the virtual
    state to jump to, and either inline the short preamble or fall back
    to jumping to the preamble.

    Returns a (UnrollInfo, newoperations) pair.  Raises InvalidLoop when
    the exported state cannot be imported, a speculative heap access is
    ill-typed, or boxes cannot be forced to match the virtual state.
    """
    trace = trace.get_iter()
    try:
        label_args = self.import_state(trace.inputargs, state)
    except VirtualStatesCantMatch:
        raise InvalidLoop("Cannot import state, virtual states don't match")
    self.potential_extra_ops = {}
    self.optimizer.init_inparg_dict_from(label_args)
    try:
        info, _ = self.optimizer.propagate_all_forward(
            trace, call_pure_results, flush=False)
    except SpeculativeError:
        raise InvalidLoop("Speculative heap access would be ill-typed")
    end_jump = info.jump_op
    label_op = ResOperation(rop.LABEL, label_args, descr=celltoken)
    for a in end_jump.getarglist():
        self.optimizer.force_box_for_end_of_preamble(
            self.optimizer.get_box_replacement(a))
    current_vs = self.get_virtual_state(end_jump.getarglist())
    # pick the vs we want to jump to
    assert isinstance(celltoken, JitCellToken)
    target_virtual_state = self.pick_virtual_state(current_vs,
                                                   state.virtual_state,
                                                   celltoken.target_tokens)
    # force the boxes for virtual state to match
    try:
        args = target_virtual_state.make_inputargs(
            [self.get_box_replacement(x) for x in end_jump.getarglist()],
            self.optimizer, force_boxes=True)
        for arg in args:
            if arg is not None:
                self.optimizer.force_box(arg)
    except VirtualStatesCantMatch:
        raise InvalidLoop("Virtual states did not match "
                          "after picking the virtual state, when forcing"
                          " boxes")
    extra_same_as = self.short_preamble_producer.extra_same_as[:]
    target_token = self.finalize_short_preamble(label_op,
                                                state.virtual_state)
    label_op.setdescr(target_token)

    if not inline_short_preamble:
        # caller asked for a plain jump to the preamble
        self.jump_to_preamble(celltoken, end_jump, info)
        return (UnrollInfo(target_token, label_op, extra_same_as,
                           self.optimizer.quasi_immutable_deps),
                self.optimizer._newoperations)

    try:
        new_virtual_state = self.jump_to_existing_trace(
            end_jump, label_op, runtime_boxes, force_boxes=False)
    except InvalidLoop:
        # inlining short preamble failed, jump to preamble
        self.jump_to_preamble(celltoken, end_jump, info)
        return (UnrollInfo(target_token, label_op, extra_same_as,
                           self.optimizer.quasi_immutable_deps),
                self.optimizer._newoperations)
    if new_virtual_state is not None:
        # Attempt to force virtual boxes in order to avoid jumping
        # to the preamble.
        try:
            new_virtual_state = self.jump_to_existing_trace(
                end_jump, label_op, runtime_boxes, force_boxes=True)
        except InvalidLoop:
            pass
    if new_virtual_state is not None:
        # still no matching trace found: jump to the preamble
        self.jump_to_preamble(celltoken, end_jump, info)
        return (UnrollInfo(target_token, label_op, extra_same_as,
                           self.optimizer.quasi_immutable_deps),
                self.optimizer._newoperations)

    self.disable_retracing_if_max_retrace_guards(
        self.optimizer._newoperations, target_token)

    return (UnrollInfo(target_token, label_op, extra_same_as,
                       self.optimizer.quasi_immutable_deps),
            self.optimizer._newoperations)