def optimize_GUARD_OVERFLOW(self, op):
    """Emit GUARD_OVERFLOW only if it follows an overflow-checked int op.

    If an INT_xxx_OVF was replaced by the plain INT_xxx (i.e. it was
    proven not to overflow), *but* we still see a GUARD_OVERFLOW, then
    the loop is invalid: the guard could never be satisfied.

    Raises InvalidLoop when the previously emitted operation is missing
    or is not one of INT_ADD_OVF / INT_SUB_OVF / INT_MUL_OVF.
    """
    lastop = self.last_emitted_operation
    # The guard is only meaningful immediately after an *_OVF operation;
    # both "no previous op" and "wrong previous op" make the loop invalid.
    # (Fixed: the two concatenated literals previously joined as
    # "...butguarded..." — a space was missing in the message.)
    if lastop is None or lastop.getopnum() not in (rop.INT_ADD_OVF,
                                                   rop.INT_SUB_OVF,
                                                   rop.INT_MUL_OVF):
        raise InvalidLoop('An INT_xxx_OVF was proven not to overflow but ' +
                          'guarded with GUARD_OVERFLOW')
    self.emit_operation(op)
def optimize_GUARD_CLASS(self, op):
    """Optimize a GUARD_CLASS: drop it if the class is already known,
    fail if it can never match, and fold it into a preceding
    GUARD_NONNULL when possible.

    Raises InvalidLoop when the value's constant class is known and
    differs from the expected class box.
    """
    value = self.getvalue(op.getarg(0))
    expectedclassbox = op.getarg(1)
    assert isinstance(expectedclassbox, Const)
    realclassbox = value.get_constant_class(self.optimizer.cpu)
    if realclassbox is not None:
        # Class already proven: the guard is either redundant or
        # impossible to satisfy.
        if realclassbox.same_constant(expectedclassbox):
            return
        raise InvalidLoop('A GUARD_CLASS was proven to always fail')
    if value.last_guard:
        # there already has been a guard_nonnull or guard_class or
        # guard_nonnull_class on this value.
        old_guard_op = value.last_guard
        if old_guard_op.getopnum() == rop.GUARD_NONNULL:
            # it was a guard_nonnull, which we replace with a
            # guard_nonnull_class.
            op = old_guard_op.copy_and_change(
                rop.GUARD_NONNULL_CLASS,
                args=[old_guard_op.getarg(0), op.getarg(1)])
            self.optimizer.replaces_guard[op] = old_guard_op
            # hack hack hack. Change the guard_opnum on
            # new_guard_op.getdescr() so that when resuming,
            # the operation is not skipped by pyjitpl.py.
            descr = op.getdescr()
            assert isinstance(descr, compile.ResumeGuardDescr)
            descr.guard_opnum = rop.GUARD_NONNULL_CLASS
    self.emit_operation(op)
    # Record the now-known class so later guards on this value fold away.
    value.make_constant_class(expectedclassbox, op)
def optimize_GUARD_NONNULL(self, op):
    """Elide GUARD_NONNULL when non-nullness is already established;
    reject the loop when the value is proven null."""
    guarded = self.getvalue(op.getarg(0))
    # A value known to be null can never pass this guard.
    if guarded.is_null():
        raise InvalidLoop('A GUARD_NONNULL was proven to always fail')
    # Already known non-null: the guard is redundant.
    if guarded.is_nonnull():
        return
    self.emit_operation(op)
    guarded.make_nonnull(op)
def optimize_GUARD_ISNULL(self, op):
    """Elide GUARD_ISNULL when nullness is already established;
    reject the loop when the value is proven non-null."""
    guarded = self.getvalue(op.getarg(0))
    # A value known to be non-null can never pass this guard.
    if guarded.is_nonnull():
        raise InvalidLoop('A GUARD_ISNULL was proven to always fail')
    # Already known null: the guard is redundant.
    if guarded.is_null():
        return
    self.emit_operation(op)
    guarded.make_constant(self.optimizer.cpu.ts.CONST_NULL)
def generate_guards(self, other, box, cpu, extra_guards, renum):
    """Generate guards (into extra_guards) making ``other`` match this
    state, unless this state already generalizes ``other``.

    Raises InvalidLoop when the position numbering of the two virtual
    states disagrees.
    """
    # Nothing to do if this state already covers the other one.
    if self.generalization_of(other, renum, {}):
        return
    if renum[self.position] == other.position:
        self._generate_guards(other, box, cpu, extra_guards)
        return
    raise InvalidLoop('The numbering of the virtual states does not ' +
                      'match. This means that two virtual fields ' +
                      'have been set to the same Box in one of the ' +
                      'virtual states but not in the other.')
def optimize_guard(self, op, constbox, emit_operation=True):
    """Common helper for GUARD_VALUE / GUARD_TRUE / GUARD_FALSE.

    If the guarded value is already a constant, the guard is either
    redundant (dropped) or impossible (InvalidLoop). Otherwise the
    guard is optionally emitted and the value is made constant.

    :param op: the guard operation being optimized.
    :param constbox: the constant the guard pins the value to.
    :param emit_operation: when False, record the constant without
        emitting the guard.
    :raises InvalidLoop: if the value is a constant different from
        ``constbox``, i.e. the guard would always fail.
    """
    value = self.getvalue(op.getarg(0))
    if value.is_constant():
        box = value.box
        assert isinstance(box, Const)
        if not box.same_constant(constbox):
            # Fixed: the two concatenated literals previously joined as
            # "...toalways fail" — a space was missing in the message.
            raise InvalidLoop('A GUARD_{VALUE,TRUE,FALSE} was proven to ' +
                              'always fail')
        return
    if emit_operation:
        self.emit_operation(op)
    value.make_constant(constbox)
    self.optimizer.turned_constant(value)
def _generate_guards(self, other, box, cpu, extra_guards):
    """Base implementation: producing guards that would make the
    VirtualStates match is not supported here; subclasses override."""
    msg = ('Generating guards for making the VirtualStates ' +
           'at hand match have not been implemented')
    raise InvalidLoop(msg)
def _generate_guards(self, other, box, cpu, extra_guards):
    """Append guards to ``extra_guards`` that make the state ``other``
    match ``self``, based on the trace-time value in ``box``.

    Handles three cases: a known class (guard_nonnull + guard_class),
    plain non-nullness (guard_nonnull), and integer bounds (INT_GE /
    INT_LE plus guard_true).

    :raises InvalidLoop: when the states cannot be made to match
        (virtual vs. pointer mismatch, length-bound mismatch, or any
        remaining unimplemented case).
    """
    if not isinstance(other, NotVirtualStateInfo):
        raise InvalidLoop('The VirtualStates does not match as a ' +
                          'virtual appears where a pointer is needed ' +
                          'and it is too late to force it.')

    if self.lenbound or other.lenbound:
        raise InvalidLoop('The array length bounds does not match.')

    if self.level == LEVEL_KNOWNCLASS and \
       box.nonnull() and \
       self.known_class.same_constant(cpu.ts.cls_of_box(box)):
        # Note: This is only a hint on what the class of box was
        # during the trace. There are actually no guarantees that this
        # box really comes from a trace. The hint is used here to choose
        # between either emitting a guard_class and jumping to an
        # existing compiled loop or retracing the loop. Both
        # alternatives will always generate correct behaviour, but
        # performance will differ.
        op = ResOperation(rop.GUARD_NONNULL, [box], None)
        extra_guards.append(op)
        op = ResOperation(rop.GUARD_CLASS, [box, self.known_class], None)
        extra_guards.append(op)
        return

    if self.level == LEVEL_NONNULL and \
       other.level == LEVEL_UNKNOWN and \
       isinstance(box, BoxPtr) and \
       box.nonnull():
        op = ResOperation(rop.GUARD_NONNULL, [box], None)
        extra_guards.append(op)
        return

    if self.level == LEVEL_UNKNOWN and \
       other.level == LEVEL_UNKNOWN and \
       isinstance(box, BoxInt) and \
       self.intbound.contains(box.getint()):
        # Narrow the other state's integer bounds to ours by guarding
        # the actual value, one guard per missing/looser bound.
        if self.intbound.has_lower:
            bound = self.intbound.lower
            if not (other.intbound.has_lower and
                    other.intbound.lower >= bound):
                res = BoxInt()
                op = ResOperation(rop.INT_GE, [box, ConstInt(bound)], res)
                extra_guards.append(op)
                op = ResOperation(rop.GUARD_TRUE, [res], None)
                extra_guards.append(op)
        if self.intbound.has_upper:
            bound = self.intbound.upper
            if not (other.intbound.has_upper and
                    other.intbound.upper <= bound):
                res = BoxInt()
                op = ResOperation(rop.INT_LE, [box, ConstInt(bound)], res)
                extra_guards.append(op)
                op = ResOperation(rop.GUARD_TRUE, [res], None)
                extra_guards.append(op)
        return

    # Remaining cases are probably not interesting
    # (Fixed: an unreachable LEVEL_CONSTANT branch containing a leftover
    # pdb.set_trace() debugging stub used to follow this raise; removed
    # as dead code.)
    raise InvalidLoop('Generating guards for making the VirtualStates ' +
                      'at hand match have not been implemented')
def optimize_GUARD_NONNULL_CLASS(self, op):
    """Reject the loop when the guarded value is proven null; otherwise
    delegate to the GUARD_CLASS optimization."""
    if self.getvalue(op.getarg(0)).is_null():
        raise InvalidLoop('A GUARD_NONNULL_CLASS was proven to always ' +
                          'fail')
    self.optimize_GUARD_CLASS(op)
def close_loop(self, start_label, jumpop):
    """Close the peeled loop: rewrite the jump arguments from the
    virtual state, inline the short preamble at the end of the loop,
    import preamble-produced boxes used in the loop, and verify that
    the final virtual state is compatible with the initial one.

    :param start_label: label operation starting the loop (passed on
        to finilize_short_preamble).
    :param jumpop: the closing JUMP operation; its arglist is rewritten
        in place.
    :raises InvalidLoop: when the optimizer state at the end of the
        peeled loop is inconsistent with the initial VirtualState, when
        the short preamble's inputargs cannot be satisfied by this
        jump, or when the final virtual state is not a specialization
        of the initial one.
    """
    virtual_state = self.initial_virtual_state
    short_inputargs = self.short[0].getarglist()
    inputargs = self.inputargs
    short_jumpargs = inputargs[:]

    # Construct jumpargs from the virtual state
    original_jumpargs = jumpop.getarglist()[:]
    values = [self.getvalue(arg) for arg in jumpop.getarglist()]
    try:
        jumpargs = virtual_state.make_inputargs(values, self.optimizer)
    except BadVirtualState:
        raise InvalidLoop('The state of the optimizer at the end of ' +
                          'peeled loop is inconsistent with the ' +
                          'VirtualState at the begining of the peeled ' +
                          'loop')
    jumpop.initarglist(jumpargs)

    # Inline the short preamble at the end of the loop
    jmp_to_short_args = virtual_state.make_inputargs(values,
                                                     self.optimizer,
                                                     keyboxes=True)
    assert len(short_inputargs) == len(jmp_to_short_args)
    # The same short-preamble inputarg may appear several times; all of
    # its occurrences must be fed the same box by this jump.
    args = {}
    for i in range(len(short_inputargs)):
        if short_inputargs[i] in args:
            if args[short_inputargs[i]] != jmp_to_short_args[i]:
                raise InvalidLoop('The short preamble wants the ' +
                                  'same box passed to multiple of its ' +
                                  'inputargs, but the jump at the ' +
                                  'end of this bridge does not do that.')
        args[short_inputargs[i]] = jmp_to_short_args[i]
    self.short_inliner = Inliner(short_inputargs, jmp_to_short_args)
    i = 1
    while i < len(self.short):
        # Note that self.short might be extended during this loop
        op = self.short[i]
        newop = self.short_inliner.inline_op(op)
        self.optimizer.send_extra_operation(newop)
        i += 1

    # Import boxes produced in the preamble but used in the loop
    newoperations = self.optimizer.get_newoperations()
    self.boxes_created_this_iteration = {}
    i = j = 0
    # Walk emitted operations (i) and jump arguments (j) together;
    # importing a box can append further operations/jumpargs, so the
    # operation list is re-fetched each iteration.
    while i < len(newoperations) or j < len(jumpargs):
        if i == len(newoperations):
            while j < len(jumpargs):
                a = jumpargs[j]
                #if self.optimizer.loop.logops:
                #    debug_print('J: ' + self.optimizer.loop.logops.repr_of_arg(a))
                self.import_box(a, inputargs, short_jumpargs, jumpargs)
                j += 1
        else:
            op = newoperations[i]
            self.boxes_created_this_iteration[op.result] = True
            args = op.getarglist()
            if op.is_guard():
                args = args + op.getfailargs()
            #if self.optimizer.loop.logops:
            #    debug_print('OP: ' + self.optimizer.loop.logops.repr_of_resop(op))
            for a in args:
                #if self.optimizer.loop.logops:
                #    debug_print('A: ' + self.optimizer.loop.logops.repr_of_arg(a))
                self.import_box(a, inputargs, short_jumpargs, jumpargs)
            i += 1
        newoperations = self.optimizer.get_newoperations()

    jumpop.initarglist(jumpargs)
    self.optimizer.send_extra_operation(jumpop)
    self.short.append(ResOperation(rop.JUMP, short_jumpargs, None,
                                   descr=jumpop.getdescr()))

    # Verify that the virtual state at the end of the loop is one
    # that is compatible with the virtual state at the start of the loop
    modifier = VirtualStateAdder(self.optimizer)
    final_virtual_state = modifier.get_virtual_state(original_jumpargs)
    #debug_start('jit-log-virtualstate')
    #virtual_state.debug_print('Closed loop with ')
    bad = {}
    if not virtual_state.generalization_of(final_virtual_state, bad):
        # We ended up with a virtual state that is not compatible
        # and we are thus unable to jump to the start of the loop
        #final_virtual_state.debug_print("Bad virtual state at end of loop, ",
        #                                bad)
        #debug_stop('jit-log-virtualstate')
        raise InvalidLoop('The virtual state at the end of the peeled ' +
                          'loop is not compatible with the virtual ' +
                          'state at the start of the loop which makes ' +
                          'it impossible to close the loop')

    #debug_stop('jit-log-virtualstate')

    # If this trace produced too many guards, stop retracing this loop
    # by saturating its retraced_count.
    maxguards = self.optimizer.metainterp_sd.warmrunnerdesc.memory_manager.max_retrace_guards
    if self.optimizer.emitted_guards > maxguards:
        target_token = jumpop.getdescr()
        assert isinstance(target_token, TargetToken)
        target_token.targeting_jitcell_token.retraced_count = sys.maxint

    self.finilize_short_preamble(start_label)