def _emit_operation(self, op):
    """Emit *op* into self._newoperations after forcing its arguments.

    Constants are dropped (nothing to emit); guards get special
    treatment: pending setfields are flushed into them and a guard
    may replace a previously emitted one via self.replaces_guard.
    """
    # pure calls must go through their own dedicated emission path
    assert not rop.is_call_pure(op.getopnum())
    orig_op = op
    op = self.get_box_replacement(op)
    if op.is_constant():
        return  # can happen e.g. if we postpone the operation that becomes
                # constant
    # XXX kill, requires thinking
    #op = self.replace_op_with(op, op.opnum)
    # force every argument box so it is materialized in the emitted trace
    for i in range(op.numargs()):
        arg = self.force_box(op.getarg(i))
        op.setarg(i, arg)
    self.metainterp_sd.profiler.count(jitprof.Counters.OPT_OPS)
    if rop.is_guard(op.opnum):
        assert isinstance(op, GuardResOp)
        self.metainterp_sd.profiler.count(jitprof.Counters.OPT_GUARDS)
        # hand the accumulated pending setfields to the guard and reset them
        pendingfields = self.pendingfields
        self.pendingfields = None
        if self.replaces_guard and orig_op in self.replaces_guard:
            # this guard replaces an already-emitted guard in place:
            # patch the old one and do not append a new operation
            self.replace_guard_op(self.replaces_guard[orig_op], op)
            del self.replaces_guard[orig_op]
            return
        else:
            op = self.emit_guard_operation(op, pendingfields)
    opnum = op.opnum
    # operations without side effects (guards, jit_debug, overflow-checked
    # arithmetic) keep the last-guard cache valid; anything else clears it
    if ((rop.has_no_side_effect(opnum) or rop.is_guard(opnum) or
         rop.is_jit_debug(opnum) or
         rop.is_ovf(opnum)) and not self.is_call_pure_pure_canraise(op)):
        pass
    else:
        self._last_guard_op = None
    self._really_emitted_operation = op
    self._newoperations.append(op)
    self._emittedoperations[op] = None
def _emit_operation(self, op):
    """Emit *op* into self._newoperations after forcing its arguments.

    Constants are dropped (nothing to emit); guards get special
    treatment (pending setfields are flushed into them and a guard may
    replace a previously emitted one via self.replaces_guard); for any
    other operation that can raise, the exception flag is recorded.
    """
    # pure calls must go through their own dedicated emission path
    assert not rop.is_call_pure(op.getopnum())
    orig_op = op
    op = self.get_box_replacement(op)
    if op.is_constant():
        return  # can happen e.g. if we postpone the operation that becomes
                # constant
    # XXX kill, requires thinking
    #op = self.replace_op_with(op, op.opnum)
    # force every argument box so it is materialized in the emitted trace
    for i in range(op.numargs()):
        arg = self.force_box(op.getarg(i))
        op.setarg(i, arg)
    self.metainterp_sd.profiler.count(jitprof.Counters.OPT_OPS)
    if rop.is_guard(op.opnum):
        assert isinstance(op, GuardResOp)
        self.metainterp_sd.profiler.count(jitprof.Counters.OPT_GUARDS)
        # hand the accumulated pending setfields to the guard and reset them
        pendingfields = self.pendingfields
        self.pendingfields = None
        if self.replaces_guard and orig_op in self.replaces_guard:
            # this guard replaces an already-emitted guard in place:
            # patch the old one and do not append a new operation
            self.replace_guard_op(self.replaces_guard[orig_op], op)
            del self.replaces_guard[orig_op]
            return
        else:
            op = self.emit_guard_operation(op, pendingfields)
    elif op.can_raise():
        # non-guard operation that may raise: remember that an exception
        # could have happened at this point in the trace
        self.exception_might_have_happened = True
    opnum = op.opnum
    # operations without side effects (guards, jit_debug, overflow-checked
    # arithmetic) keep the last-guard cache valid; anything else clears it
    if ((rop.has_no_side_effect(opnum) or rop.is_guard(opnum) or
         rop.is_jit_debug(opnum) or
         rop.is_ovf(opnum)) and not self.is_call_pure_pure_canraise(op)):
        pass
    else:
        self._last_guard_op = None
    self._really_emitted_operation = op
    self._newoperations.append(op)
def user_loop_bail_fast_path(loop, warmstate):
    """In a fast path over the trace loop: try to prevent vecopt of
    spending time on a loop that will most probably fail.

    Returns True (bail out of vectorization) when the loop contains a
    call, or when it performs no primitive array access at all.
    ``warmstate`` is currently unused; ``resop_count``, ``vector_instr``
    and ``guard_count`` are tallied but do not yet影响 the decision —
    NOTE(review): they影响 nothing here, presumably kept for a later
    heuristic; confirm before removing.
    """
    resop_count = 0  # the count of operations minus debug_merge_points
    vector_instr = 0
    guard_count = 0
    # BUG FIX: this flag was initialized to True, which made the
    # "no array access" bail-out at the bottom unreachable dead code.
    # It must start False and only become True when an access is seen.
    at_least_one_array_access = False
    for op in loop.operations:
        if rop.is_jit_debug(op.opnum):
            continue
        if op.vector >= 0 and not rop.is_guard(op.opnum):
            vector_instr += 1
        resop_count += 1
        if op.is_primitive_array_access():
            at_least_one_array_access = True
        if rop.is_call(op.opnum) or rop.is_call_assembler(op.opnum):
            # calls cannot be vectorized; give up immediately
            return True
        if rop.is_guard(op.opnum):
            guard_count += 1
    if not at_least_one_array_access:
        return True
    return False
def ensure_can_hold_constants(self, asm, op):
    """Reserve an 8-byte literal-pool slot for each constant argument
    of *op* (pointers, long integers or floats all need 8 bytes).

    jit_debug operations are skipped entirely: they never make it into
    the generated code, so their constants need no pool space.
    """
    if rop.is_jit_debug(op.getopnum()):
        return
    for argument in op.getarglist():
        if not argument.is_constant():
            continue
        self.reserve_literal(8, argument, asm)
def __init__(self, loop):
    """Build the dependency graph for *loop*.

    One node is created per real operation (jit_debug operations are
    excluded); the label gets index 0 and the jump the last index.
    Invariant variables are collected before the dependency edges are
    computed by build_dependencies().
    """
    self.loop = loop
    self.label = Node(loop.label, 0)
    # one node per non-debug operation, numbered 1..len(nodes)
    self.nodes = [Node(operation, 0)
                  for operation in loop.operations
                  if not rop.is_jit_debug(operation.opnum)]
    for position, node in enumerate(self.nodes, start=1):
        node.opidx = position
    self.inodes = []  # imaginary nodes
    self.jump = Node(loop.jump, len(self.nodes) + 1)
    self.invariant_vars = {}
    self.update_invariant_vars()
    self.memory_refs = {}
    self.schedulable_nodes = []
    self.index_vars = {}
    self.comparison_vars = {}
    self.guards = []
    self.build_dependencies()