def force_box(self, op, optforce):
    # Force this virtual string into a real heap object and return the box
    # holding it.  If the content is entirely constant, no allocation is
    # emitted: the box is simply forwarded to a prebuilt constant string.
    if not self.is_virtual():
        return op
    if self.mode is mode_string:
        # try to fold the whole virtual into one constant str
        s = self.get_constant_string_spec(optforce, mode_string)
        if s is not None:
            c_s = get_const_ptr_for_string(s)
            get_box_replacement(op).set_forwarded(c_s)
            return c_s
    else:
        # same, but for the unicode variant
        s = self.get_constant_string_spec(optforce, mode_unicode)
        if s is not None:
            c_s = get_const_ptr_for_unicode(s)
            get_box_replacement(op).set_forwarded(c_s)
            return c_s
    # Not constant: emit a real NEWSTR/NEWUNICODE of the known length.
    # Mark as no-longer-virtual *before* emitting, so recursive forcing
    # of the content does not loop back here.
    self._is_virtual = False
    lengthbox = self.getstrlen(op, optforce.optimizer.optstring, self.mode)
    newop = ResOperation(self.mode.NEWSTR, [lengthbox])
    if not we_are_translated():
        newop.name = 'FORCE'
    optforce.emit_extra(newop)
    # emit_extra may have further rewritten the op; fetch the final one
    newop = optforce.optimizer.getlastop()
    newop.set_forwarded(self)
    op = get_box_replacement(op)
    op.set_forwarded(newop)
    optstring = optforce.optimizer.optstring
    # emit the copy of the virtual content into the fresh string
    self.initialize_forced_string(op, optstring, op, CONST_0, self.mode)
    return newop
def optimize_INT_MUL(self, op):
    """Simplify INT_MUL: x*1 -> x, x*0 -> 0, and x * 2**k -> x << k.

    Falls through to emitting the (possibly rewritten) op when no
    simplification applies.
    """
    arg1 = get_box_replacement(op.getarg(0))
    b1 = self.getintbound(arg1)
    arg2 = get_box_replacement(op.getarg(1))
    b2 = self.getintbound(arg2)

    # If one side of the op is 1 the result is the other side.
    if b1.equal(1):
        self.make_equal_to(op, arg2)
    elif b2.equal(1):
        self.make_equal_to(op, arg1)
    elif b1.equal(0) or b2.equal(0):
        self.make_constant_int(op, 0)
    else:
        # try both operand orders for a constant power-of-two factor
        for lhs, rhs in [(arg1, arg2), (arg2, arg1)]:
            lh_info = self.getintbound(lhs)
            if lh_info.is_constant():
                x = lh_info.getint()
                # x & (x - 1) == 0 is a quick test for power of 2;
                # x == 0 is impossible here (handled by the branch above)
                if x & (x - 1) == 0:
                    # use the already-fetched constant instead of
                    # re-reading it from the bound
                    new_rhs = ConstInt(highest_bit(x))
                    op = self.replace_op_with(op, rop.INT_LSHIFT,
                                              args=[rhs, new_rhs])
                    break
        return self.emit(op)
def opt_call_stroruni_STR_CONCAT(self, op, mode):
    """Replace a str/unicode concatenation call with a virtual concat node.

    Both operands are marked non-null first; the call itself is removed.
    """
    left = op.getarg(1)
    right = op.getarg(2)
    self.make_nonnull_str(left, mode)
    self.make_nonnull_str(right, mode)
    self.make_vstring_concat(op, mode,
                             get_box_replacement(left),
                             get_box_replacement(right))
    self.last_emitted_operation = REMOVED
    return True, None
def postprocess_INT_OR_or_XOR(self, op):
    """Narrow the known bound of an INT_OR / INT_XOR result."""
    box_a = get_box_replacement(op.getarg(0))
    box_b = get_box_replacement(op.getarg(1))
    bound_a = self.getintbound(box_a)
    bound_b = self.getintbound(box_b)
    # or_bound is used for both opcodes; presumably it is a valid
    # (conservative) bound for XOR as well -- TODO confirm in intutils
    self.getintbound(op).intersect(bound_a.or_bound(bound_b))
def postprocess_INT_SUB_OVF(self, op):
    """Narrow the result bound of an INT_SUB_OVF that did not overflow."""
    lhs = get_box_replacement(op.getarg(0))
    rhs = get_box_replacement(op.getarg(1))
    lhs_bound = self.getintbound(lhs)
    rhs_bound = self.getintbound(rhs)
    result_bound = self.getintbound(op)
    result_bound.intersect(lhs_bound.sub_bound(rhs_bound))
def _same_args(self, op1, op2, start_index1, start_index2):
    """Compare op1's args from start_index1 on against op2's args from
    start_index2 on; return True when every pair is the same box."""
    offset = start_index2 - start_index1
    for idx in range(start_index1, op1.numargs()):
        box1 = get_box_replacement(op1.getarg(idx))
        box2 = get_box_replacement(op2.getarg(idx + offset))
        if not box2.same_box(box1):
            return False
    return True
def put_field_back_to_info(self, op, opinfo, optheap):
    """Record the value stored by 'op' into the heap-cache info object."""
    stored_value = get_box_replacement(op.getarg(2))
    owner = get_box_replacement(op.getarg(0))
    opinfo.setitem(op.getdescr(), self.index, owner, stored_value,
                   optheap=optheap, cf=self)
def optimize_INT_OR_or_XOR(self, op):
    """Fold x|x -> x and x^x -> 0; otherwise emit the op unchanged."""
    box_a = get_box_replacement(op.getarg(0))
    box_b = get_box_replacement(op.getarg(1))
    if box_a is not box_b:
        return self.emit(op)
    if op.getopnum() == rop.INT_OR:
        # x | x == x
        self.make_equal_to(op, box_a)
    else:
        # x ^ x == 0
        self.make_constant_int(op, 0)
    return None
def getinfo(self, op):
    """Return the tracked info object for 'op' by box type.

    'r' boxes get pointer info; 'i' boxes get pointer info when they hold
    a raw pointer, else an int bound; constant 'f' boxes get float-const
    info.  Anything else yields None (implicitly).
    """
    box_type = op.type
    if box_type == 'r':
        return getptrinfo(op)
    if box_type == 'i':
        if self.is_raw_ptr(op):
            # raw pointers travel in 'i' boxes but carry pointer info
            return getptrinfo(op)
        return self.getintbound(op)
    if box_type == 'f':
        replacement = get_box_replacement(op)
        if replacement.is_constant():
            return info.FloatConstInfo(replacement)
    # non-constant floats and any other type: no info
def lookup1(self, opt, box0, descr):
    """Search the remembered one-arg ops for one matching (box0, descr).

    Returns the (forced) replacement box, or None when nothing matches.
    The list is filled front-to-back, so a None entry ends the scan.
    """
    for slot in range(self.REMEMBER_LIMIT):
        candidate = self.lst[slot]
        if candidate is None:
            break
        first_arg = get_box_replacement(candidate.getarg(0))
        if box0.same_box(first_arg) and candidate.getdescr() is descr:
            forced = self.force_preamble_op(opt, candidate, slot)
            return get_box_replacement(forced)
    return None
def DISABLED_optimize_UINT_GE(self, op):
    # NOTE(review): the DISABLED_ prefix keeps this out of the opcode
    # dispatch; the reason it is disabled is not visible here -- confirm
    # before re-enabling.
    # Tries to constant-fold an unsigned >= using the signed bounds:
    # when both values are provably non-negative, signed and unsigned
    # comparison agree.
    arg1 = get_box_replacement(op.getarg(0))
    arg2 = get_box_replacement(op.getarg(1))
    b1 = self.getintbound(arg1)
    b2 = self.getintbound(arg2)
    if b2.known_nonnegative() and b1.known_ge(b2):
        # 0 <= b2 <= b1, so unsigned arg1 >= arg2 holds
        self.make_constant_int(op, 1)
    elif b1.known_nonnegative() and b1.known_lt(b2):
        # 0 <= b1 < b2, so unsigned arg1 < arg2 holds
        self.make_constant_int(op, 0)
    else:
        return self.emit(op)
def optimize_INT_GE(self, op):
    """Constant-fold INT_GE when the bounds already decide the answer."""
    lhs = get_box_replacement(op.getarg(0))
    rhs = get_box_replacement(op.getarg(1))
    lhs_bound = self.getintbound(lhs)
    rhs_bound = self.getintbound(rhs)
    if lhs is rhs or lhs_bound.known_ge(rhs_bound):
        # same box or provably >=: always true
        self.make_constant_int(op, 1)
    elif lhs_bound.known_lt(rhs_bound):
        self.make_constant_int(op, 0)
    else:
        return self.emit(op)
def optimize_SETARRAYITEM_GC(self, op):
    """Absorb a SETARRAYITEM_GC into a virtual array when possible.

    Otherwise mark the array non-null and emit the op normally.
    """
    array_box = op.getarg(0)
    array_info = getptrinfo(array_box)
    if array_info and array_info.is_virtual():
        indexbox = self.get_constant_box(op.getarg(1))
        if indexbox is not None:
            # virtual array with a constant index: store in the info,
            # no real operation needed
            array_info.setitem(op.getdescr(), indexbox.getint(),
                               get_box_replacement(array_box),
                               get_box_replacement(op.getarg(2)))
            return
    self.make_nonnull(array_box)
    return self.emit(op)
def lookup(self, optimizer, op):
    """Dispatch to lookup1/lookup2 according to the op's arity."""
    descr = op.getdescr()
    arity = op.numargs()
    if arity == 1:
        box0 = get_box_replacement(op.getarg(0))
        return self.lookup1(optimizer, box0, descr)
    if arity == 2:
        box0 = get_box_replacement(op.getarg(0))
        box1 = get_box_replacement(op.getarg(1))
        return self.lookup2(optimizer, box0, box1, descr)
    assert False
def postprocess_GETARRAYITEM_GC_I(self, op):
    # then remember the result of reading the array item:
    # when the index is a known constant, cache (array, index) -> result
    # so later reads of the same item can be removed.
    arrayinfo = self.ensure_ptr_info_arg0(op)
    indexb = self.getintbound(op.getarg(1))
    if indexb.is_constant():
        index = indexb.getint()
        cf = self.arrayitem_cache(op.getdescr(), index)
        # reuse the local 'index' instead of re-reading it from the bound
        arrayinfo.setitem(op.getdescr(), index,
                          get_box_replacement(op.getarg(0)),
                          get_box_replacement(op), optheap=self, cf=cf)
def optimize_INT_SUB_OVF(self, op):
    """Simplify INT_SUB_OVF: x-x -> 0; drop the overflow check when the
    bounds prove the subtraction cannot overflow."""
    minuend = get_box_replacement(op.getarg(0))
    subtrahend = get_box_replacement(op.getarg(1))
    minuend_bound = self.getintbound(minuend)
    subtrahend_bound = self.getintbound(subtrahend)
    if minuend.same_box(subtrahend):
        # x - x == 0 and can never overflow
        self.make_constant_int(op, 0)
        return None
    diff_bound = minuend_bound.sub_bound(subtrahend_bound)
    if diff_bound.bounded():
        # this case takes care of int_sub_ovf(x, 0) as well
        op = self.replace_op_with(op, rop.INT_SUB)
    return self.emit(op)
def optimize_INT_NE(self, op):
    """Constant-fold INT_NE when bounds are disjoint or boxes identical."""
    lhs = get_box_replacement(op.getarg(0))
    rhs = get_box_replacement(op.getarg(1))
    lhs_bound = self.getintbound(lhs)
    rhs_bound = self.getintbound(rhs)
    if lhs_bound.known_gt(rhs_bound) or lhs_bound.known_lt(rhs_bound):
        # ranges do not overlap: values are certainly different
        self.make_constant_int(op, 1)
    elif lhs is rhs:
        # same box: certainly equal
        self.make_constant_int(op, 0)
    else:
        return self.emit(op)
def _optimize_CALL_DICT_LOOKUP(self, op):
    # Cache consecutive lookup() calls on the same dict and key,
    # depending on the 'flag_store' argument passed:
    # FLAG_LOOKUP: always cache and use the cached result.
    # FLAG_STORE:  don't cache (it might return -1, which would be
    #              incorrect for future lookups); but if found in
    #              the cache and the cached value was already checked
    #              non-negative, then we can reuse it.
    # FLAG_DELETE: never cache, never use the cached result (because
    #              if there is a cached result, the FLAG_DELETE call
    #              is needed for its side-effect of removing it).
    #              In theory we could cache a -1 for the case where
    #              the delete is immediately followed by a lookup,
    #              but too obscure.
    #
    # Returns True when the call was eliminated, False when it must
    # still be emitted.
    from rpython.rtyper.lltypesystem.rordereddict import FLAG_LOOKUP
    from rpython.rtyper.lltypesystem.rordereddict import FLAG_STORE
    flag_value = self.getintbound(op.getarg(4))
    if not flag_value.is_constant():
        return False
    flag = flag_value.getint()
    if flag != FLAG_LOOKUP and flag != FLAG_STORE:
        return False
    #
    descrs = op.getdescr().get_extra_info().extradescrs
    assert descrs        # translation hint
    descr1 = descrs[0]
    try:
        d = self.cached_dict_reads[descr1]
    except KeyError:
        # first lookup with this descr: create its cache and remember
        # which array descr corresponds to it (for invalidation)
        d = self.cached_dict_reads[descr1] = args_dict()
        self.corresponding_array_descrs[descrs[1]] = descr1
    #
    key = [get_box_replacement(op.getarg(1)),   # dict
           get_box_replacement(op.getarg(2))]   # key
           # other args can be ignored here (hash, store_flag)
    try:
        res_v = d[key]
    except KeyError:
        if flag == FLAG_LOOKUP:
            d[key] = op
        return False
    else:
        if flag != FLAG_LOOKUP:
            # i.e. FLAG_STORE: only reuse a cached result that was
            # already proven non-negative (a -1 would be wrong to reuse)
            if not self.getintbound(res_v).known_ge(IntBound(0, 0)):
                return False
        self.make_equal_to(op, res_v)
        self.last_emitted_operation = REMOVED
        return True
def optimize_INT_SUB(self, op):
    """Simplify INT_SUB: x-0 -> x, 0-x -> INT_NEG(x), x-x -> 0."""
    minuend = get_box_replacement(op.getarg(0))
    subtrahend = get_box_replacement(op.getarg(1))
    minuend_bound = self.getintbound(minuend)
    subtrahend_bound = self.getintbound(subtrahend)
    if subtrahend_bound.equal(0):
        self.make_equal_to(op, minuend)
    elif minuend_bound.equal(0):
        negop = self.replace_op_with(op, rop.INT_NEG, args=[subtrahend])
        return self.emit(negop)
    elif minuend == subtrahend:
        self.make_constant_int(op, 0)
    else:
        return self.emit(op)
def postprocess_INT_LSHIFT(self, op):
    """Narrow the bound of an INT_LSHIFT result and, when the shift is
    proven not to overflow, record the reverse INT_RSHIFT as pure."""
    shifted = get_box_replacement(op.getarg(0))
    amount = get_box_replacement(op.getarg(1))
    shifted_bound = self.getintbound(shifted)
    amount_bound = self.getintbound(amount)
    result_bound = shifted_bound.lshift_bound(amount_bound)
    self.getintbound(op).intersect(result_bound)
    # intbound.lshift_bound checks for an overflow and if the
    # lshift can be proven not to overflow sets b.has_upper and
    # b.has_lower
    if result_bound.bounded():
        # Synthesize the reverse op for optimize_default to reuse
        self.pure_from_args(rop.INT_RSHIFT, [op, amount], shifted)
def optimize_INT_ADD(self, op):
    """Simplify INT_ADD: x+0 -> x and 0+x -> x.

    Raw-pointer arithmetic is deliberately left untouched.
    """
    if self.is_raw_ptr(op.getarg(0)) or self.is_raw_ptr(op.getarg(1)):
        return self.emit(op)
    lhs = get_box_replacement(op.getarg(0))
    rhs = get_box_replacement(op.getarg(1))
    lhs_bound = self.getintbound(lhs)
    rhs_bound = self.getintbound(rhs)
    # If one side of the op is 0 the result is the other side.
    if lhs_bound.equal(0):
        self.make_equal_to(op, rhs)
    elif rhs_bound.equal(0):
        self.make_equal_to(op, lhs)
    else:
        return self.emit(op)
def optimize_FLOAT_ABS(self, op):
    """Collapse abs(abs(x)) to abs(x): the inner FLOAT_ABS result is
    already non-negative."""
    inner = get_box_replacement(op.getarg(0))
    producer = self.optimizer.as_operation(inner)
    if producer is None or producer.getopnum() != rop.FLOAT_ABS:
        return self.emit(op)
    self.make_equal_to(op, inner)
def optimize_QUASIIMMUT_FIELD(self, op):
    # Pattern: QUASIIMMUT_FIELD(s, descr=QuasiImmutDescr)
    #          x = GETFIELD_GC(s, descr='inst_x') # pure
    # If 's' is a constant (after optimizations) we rely on the rest of the
    # optimizations to constant-fold the following pure getfield_gc.
    # in addition, we record the dependency here to make invalidation work
    # correctly.
    # NB: emitting the pure GETFIELD_GC is only safe because the
    # QUASIIMMUT_FIELD is also emitted to make sure the dependency is
    # registered.
    structvalue = self.ensure_ptr_info_arg0(op)
    if not structvalue.is_constant():
        return    # not a constant at all; ignore QUASIIMMUT_FIELD
    #
    from rpython.jit.metainterp.quasiimmut import QuasiImmutDescr
    qmutdescr = op.getdescr()
    assert isinstance(qmutdescr, QuasiImmutDescr)
    # check that the value is still correct; it could have changed
    # already between the tracing and now.  In this case, we mark the loop
    # as invalid
    if not qmutdescr.is_still_valid_for(get_box_replacement(op.getarg(0))):
        raise InvalidLoop('quasi immutable field changed during tracing')
    # record as an out-of-line guard: if the quasi-immutable field is
    # mutated later, every loop registered here gets invalidated
    if self.optimizer.quasi_immutable_deps is None:
        self.optimizer.quasi_immutable_deps = {}
    self.optimizer.quasi_immutable_deps[qmutdescr.qmut] = None
def do_setfield(self, optheap, op):
    # Update the state with the SETFIELD_GC/SETARRAYITEM_GC operation 'op'.
    structinfo = optheap.ensure_ptr_info_arg0(op)
    arg1 = get_box_replacement(self._get_rhs_from_set_op(op))
    if self.possible_aliasing(structinfo):
        # another tracked struct might alias this one: flush any pending
        # lazy set first so stores cannot be reordered incorrectly
        self.force_lazy_set(optheap, op.getdescr())
        assert not self.possible_aliasing(structinfo)
    cached_field = self._getfield(structinfo, op.getdescr(), optheap, False)
    if cached_field is not None:
        cached_field = cached_field.get_box_replacement()

    if not cached_field or not cached_field.same_box(arg1):
        # common case: store the 'op' as lazy_set
        self._lazy_set = op
    else:
        # this is the case where the pending setfield ends up
        # storing precisely the value that is already there,
        # as proved by 'cached_fields'.  In this case, we don't
        # need any _lazy_set: the heap value is already right.
        # Note that this may reset to None a non-None lazy_set,
        # cancelling its previous effects with no side effect.

        # Now, we have to force the item in the short preamble
        self._getfield(structinfo, op.getdescr(), optheap)
        self._lazy_set = None
def produce_potential_short_preamble_ops(self, optimizer, shortboxes,
                                         descr, index=-1):
    """Ask every cached info to emit its short-preamble ops.

    Requires that no lazy set is pending (it would have to be flushed
    before the cache can be trusted).
    """
    assert self._lazy_set is None
    for pos, cached_info in enumerate(self.cached_infos):
        structbox = get_box_replacement(self.cached_structs[pos])
        cached_info.produce_short_preamble_ops(structbox, descr, index,
                                               optimizer, shortboxes)
def serialize_optrewrite(self, available_boxes):
    """Collect (key, box) pairs of loop-invariant results whose
    (replaced) box is still available."""
    serialized = []
    for key, cached_box in self.loop_invariant_results.iteritems():
        replacement = get_box_replacement(cached_box)
        if replacement in available_boxes:
            serialized.append((key, replacement))
    return serialized
def _optimize_call_arrayop(self, op, source_box, dest_box,
                           source_start_box, dest_start_box, length_box):
    # Try to replace an arraycopy/arraymove call with explicit
    # getarrayitem/setarrayitem operations (or pure virtual-info updates).
    # Returns True when the call was fully replaced, False when it must
    # still be emitted.
    length = self.get_constant_box(length_box)
    if length and length.getint() == 0:
        return True  # 0-length arraycopy or arraymove

    source_info = getptrinfo(source_box)
    dest_info = getptrinfo(dest_box)
    source_start_box = self.get_constant_box(source_start_box)
    dest_start_box = self.get_constant_box(dest_start_box)
    extrainfo = op.getdescr().get_extra_info()
    # unroll only when both starts and the length are constants, each
    # side is either virtual or short (<= 8 items), and there is exactly
    # one array descr to write through
    if (source_start_box and dest_start_box
            and length
            and ((dest_info and dest_info.is_virtual()) or
                 length.getint() <= 8)
            and ((source_info and source_info.is_virtual()) or
                 length.getint() <= 8)
            and extrainfo.single_write_descr_array is not None):  # <-sanity check
        source_start = source_start_box.getint()
        dest_start = dest_start_box.getint()
        arraydescr = extrainfo.single_write_descr_array
        if arraydescr.is_array_of_structs():
            return False       # not supported right now

        index_current = 0
        index_delta = +1
        index_stop = length.getint()
        if (source_box is dest_box and        # ARRAYMOVE only
                source_start < dest_start):   # iterate in reverse order
            # overlapping move towards higher indices: copy backwards so
            # items are not overwritten before being read
            index_current = index_stop - 1
            index_delta = -1
            index_stop = -1

        # XXX fish fish fish
        while index_current != index_stop:
            index = index_current
            index_current += index_delta
            assert index >= 0
            # read the source item: straight from the virtual info when
            # possible, else emit a real getarrayitem
            if source_info and source_info.is_virtual():
                val = source_info.getitem(arraydescr, index + source_start)
            else:
                opnum = OpHelpers.getarrayitem_for_descr(arraydescr)
                newop = ResOperation(opnum,
                                     [source_box,
                                      ConstInt(index + source_start)],
                                     descr=arraydescr)
                self.optimizer.send_extra_operation(newop)
                val = newop
            if val is None:
                continue
            # write the destination item: into the virtual info when
            # possible, else emit a real setarrayitem
            if dest_info and dest_info.is_virtual():
                dest_info.setitem(arraydescr, index + dest_start,
                                  get_box_replacement(dest_box), val)
            else:
                newop = ResOperation(rop.SETARRAYITEM_GC,
                                     [dest_box,
                                      ConstInt(index + dest_start),
                                      val],
                                     descr=arraydescr)
                self.optimizer.send_extra_operation(newop)
        return True
    return False
def get_constant_box(self, box):
    """Return a Const for 'box' if its value is known, else None.

    Handles both boxes that are already Consts (after replacement) and
    int boxes whose forwarded intbound has collapsed to a constant.
    """
    box = get_box_replacement(box)
    if isinstance(box, Const):
        return box
    if box.type == 'i':
        # hoist the repeated get_forwarded() calls into a local
        forwarded = box.get_forwarded()
        if forwarded and forwarded.is_constant():
            return ConstInt(forwarded.getint())
    return None
def postprocess_GUARD_FALSE(self, op):
    """After GUARD_FALSE passes, the guarded box is known to be 0."""
    guarded = get_box_replacement(op.getarg(0))
    producer = self.optimizer.as_operation(guarded)
    if producer is not None and producer.getopnum() == rop.INT_IS_ZERO:
        # we can't use the (current) range analysis for this because
        # "anything but 0" is not a valid range
        self.pure_from_args(rop.INT_IS_TRUE, [producer.getarg(0)], CONST_1)
    self.make_constant(guarded, CONST_0)
def optimize_FLOAT_MUL(self, op):
    """Constant fold f0 * 1.0 and turn f0 * -1.0 into a FLOAT_NEG.

    These rewrites work in all cases, including NaN and inf.
    """
    arg_a = op.getarg(0)
    arg_b = op.getarg(1)
    # try the constant on either side
    for const_side, other_side in [(arg_a, arg_b), (arg_b, arg_a)]:
        const_box = get_box_replacement(const_side)
        other_box = get_box_replacement(other_side)
        if not const_box.is_constant():
            continue
        factor = const_box.getfloat()
        if factor == 1.0:
            self.make_equal_to(op, other_box)
            return
        elif factor == -1.0:
            negop = self.replace_op_with(op, rop.FLOAT_NEG,
                                         args=[other_side])
            return self.emit(negop)
    return self.emit(op)