def INSN(self, loc1, loc2):
    code1 = loc1.location_code()
    code2 = loc2.location_code()

    # You can pass in the scratch register as a location, but you
    # must be careful not to combine it with location types that
    # might need to use the scratch register themselves.
    if loc2 is X86_64_SCRATCH_REG:
        if code1 == 'j':
            assert (name.startswith("MOV") and
                    rx86.fits_in_32bits(loc1.value_j()))
    if loc1 is X86_64_SCRATCH_REG and not name.startswith("MOV"):
        assert code2 not in ('j', 'i')

    for possible_code2 in unrolling_location_codes:
        if not has_implementation_for('?', possible_code2):
            continue
        if code2 == possible_code2:
            val2 = getattr(loc2, "value_" + possible_code2)()
            #
            # Fake out certain operations for x86_64
            if self.WORD == 8 and possible_code2 == 'i' and not rx86.fits_in_32bits(val2):
                insn_with_64_bit_immediate(self, loc1, loc2)
                return
            #
            # Regular case
            for possible_code1 in unrolling_location_codes:
                if not has_implementation_for(possible_code1, possible_code2):
                    continue
                if code1 == possible_code1:
                    val1 = getattr(loc1, "value_" + possible_code1)()
                    # More faking out of certain operations for x86_64
                    fits32 = rx86.fits_in_32bits
                    if possible_code1 == 'j' and not fits32(val1):
                        val1 = self._addr_as_reg_offset(val1)
                        invoke(self, "m" + possible_code2, val1, val2)
                        return
                    if possible_code2 == 'j' and not fits32(val2):
                        val2 = self._addr_as_reg_offset(val2)
                        invoke(self, possible_code1 + "m", val1, val2)
                        return
                    if possible_code1 == 'm' and not fits32(val1[1]):
                        val1 = self._fix_static_offset_64_m(val1)
                    if possible_code2 == 'm' and not fits32(val2[1]):
                        val2 = self._fix_static_offset_64_m(val2)
                    if possible_code1 == 'a' and not fits32(val1[3]):
                        val1 = self._fix_static_offset_64_a(val1)
                    if possible_code2 == 'a' and not fits32(val2[3]):
                        val2 = self._fix_static_offset_64_a(val2)
                    invoke(self, possible_code1 + possible_code2, val1, val2)
                    return
    _missing_binary_insn(name, code1, code2)
def insn_with_64_bit_immediate(self, loc1, loc2):
    # These are the worst cases:
    val2 = loc2.value_i()
    code1 = loc1.location_code()
    if code1 == 'j':
        checkvalue = loc1.value_j()
    elif code1 == 'm':
        checkvalue = loc1.value_m()[1]
    elif code1 == 'a':
        checkvalue = loc1.value_a()[3]
    else:
        checkvalue = 0
    if not rx86.fits_in_32bits(checkvalue):
        # INSN_ji, and both operands are 64-bit; or INSN_mi or INSN_ai
        # and the constant offset in the address is 64-bit.
        # Hopefully this doesn't happen too often
        freereg = loc1.find_unused_reg()
        self.PUSH_r(freereg.value)
        self.MOV_ri(freereg.value, val2)
        INSN(self, loc1, freereg)
        self.POP_r(freereg.value)
    else:
        # For this case, we should not need the scratch register more than here.
        self._load_scratch(val2)
        if name == 'MOV' and loc1 is X86_64_SCRATCH_REG:
            return     # don't need a dummy "MOV r11, r11"
        INSN(self, loc1, X86_64_SCRATCH_REG)
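# --- Illustrative sketch, not part of the original excerpt -------------------
# INSN and insn_with_64_bit_immediate above are closures over the free
# variables `name`, `unrolling_location_codes`, `invoke`, etc., presumably
# produced by a factory such as _binaryop(name).  Reduced to a self-contained
# toy (make_binaryop and the builder/location interface below are hypothetical,
# for illustration only), the dispatch pattern looks like this:
def make_binaryop(name):
    def INSN(builder, loc1, loc2):
        code1 = loc1.location_code()
        code2 = loc2.location_code()
        # e.g. name='MOV', code1='r', code2='i'  ->  builder.MOV_ri(reg, imm)
        val1 = getattr(loc1, "value_" + code1)()
        val2 = getattr(loc2, "value_" + code2)()
        return getattr(builder, name + "_" + code1 + code2)(val1, val2)
    return INSN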
def INSN(self, loc):
    code = loc.location_code()
    for possible_code in unrolling_location_codes:
        if code == possible_code:
            val = getattr(loc, "value_" + possible_code)()
            if self.WORD == 8 and possible_code == 'i' and not rx86.fits_in_32bits(val):
                self._load_scratch(val)
                _rx86_getattr(self, name + "_r")(X86_64_SCRATCH_REG.value)
            else:
                methname = name + "_" + possible_code
                _rx86_getattr(self, methname)(val)
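# --- Illustrative sketch, not part of the original excerpt -------------------
# rx86.fits_in_32bits is not shown here; the property it checks is whether the
# value survives sign-extension from a 32-bit immediate field, i.e. presumably
# something equivalent to the standalone helper below:
def _fits_in_32bits_sketch(value):
    return -2**31 <= value <= 2**31 - 1

assert _fits_in_32bits_sketch(2**31 - 1)       # largest encodable immediate
assert not _fits_in_32bits_sketch(2**31)       # needs the 64-bit path above
assert _fits_in_32bits_sketch(-2**31)          # most negative encodable value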
def test_bug_setfield_64bit(self):
    if WORD == 4:
        py.test.skip("only for 64 bits")
    TP = lltype.GcStruct("S", ("i", lltype.Signed))
    ofsi = self.cpu.fielddescrof(TP, "i")
    for i in range(500):
        p = lltype.malloc(TP)
        addr = rffi.cast(lltype.Signed, p)
        if fits_in_32bits(addr):
            break    # fitting in 32 bits, good
    else:
        py.test.skip("cannot get a 32-bit pointer")
    res = ConstPtr(rffi.cast(llmemory.GCREF, addr))
    self.execute_operation(rop.SETFIELD_RAW, [res, ConstInt(3 ** 33)],
                           "void", ofsi)
    assert p.i == 3 ** 33
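# --- Illustrative arithmetic, not part of the original test ------------------
# The constant 3 ** 33 is chosen because it cannot be encoded as a
# sign-extended 32-bit immediate, so storing it exercises the
# insn_with_64_bit_immediate() path above:
assert 3 ** 33 == 5559060566555523
assert 3 ** 33 >= 2 ** 31      # hence rx86.fits_in_32bits(3 ** 33) is False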
def _addr_as_reg_offset(self, addr):
    # Encodes a (64-bit) address as an offset from the scratch register.
    # If we are within a "reuse_scratch_register" block, we remember the
    # last value we loaded to the scratch register and encode the address
    # as an offset from that if we can
    if self._scratch_register_known:
        offset = addr - self._scratch_register_value
        if rx86.fits_in_32bits(offset):
            return (X86_64_SCRATCH_REG.value, offset)
        # else: fall through

    if self._reuse_scratch_register:
        self._scratch_register_known = True
        self._scratch_register_value = addr

    self.MOV_ri(X86_64_SCRATCH_REG.value, addr)
    return (X86_64_SCRATCH_REG.value, 0)
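# --- Illustrative sketch, not part of the original excerpt -------------------
# The bookkeeping above can be exercised in isolation.  The toy below uses
# hypothetical names (ScratchTracker, encode) purely to show why a second
# nearby address costs no extra 64-bit immediate load once the scratch value
# is known:
class ScratchTracker(object):
    def __init__(self):
        self.known = False
        self.value = 0
        self.movs = 0                      # 64-bit immediate loads "emitted"

    def encode(self, addr):
        if self.known:
            offset = addr - self.value
            if -2**31 <= offset <= 2**31 - 1:
                return ("r11", offset)     # reuse the scratch register
        self.known = True                  # as if inside a reuse_scratch_register block
        self.value = addr
        self.movs += 1                     # would be MOV_ri(r11, addr)
        return ("r11", 0)

t = ScratchTracker()
assert t.encode(0x7fff12345678) == ("r11", 0)
assert t.encode(0x7fff12345678 + 8) == ("r11", 8)   # no second MOV_ri needed
assert t.movs == 1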