Example #1
    def walk_stack_from(self):
        curframe = lltype.malloc(WALKFRAME, flavor='raw')
        otherframe = lltype.malloc(WALKFRAME, flavor='raw')

        # Walk over all the pieces of stack.  They are in a circular linked
        # list of structures of 7 words, the first two words being prev/next.
        # The anchor of this linked list is:
        anchor = llmemory.cast_ptr_to_adr(gcrootanchor)
        initialframedata = anchor.address[1]
        stackscount = 0
        while initialframedata != anchor:     # while we have not looped back
            self.walk_frames(curframe, otherframe, initialframedata)
            # Then proceed to the next piece of stack
            initialframedata = initialframedata.address[1]
            stackscount += 1
        #
        # for the JIT: rpy_fastgil may contain an extra framedata
        rpy_fastgil = rgil.gil_fetch_fastgil().signed[0]
        if rpy_fastgil != 1:
            ll_assert(rpy_fastgil != 0, "walk_stack_from doesn't have the GIL")
            initialframedata = rffi.cast(llmemory.Address, rpy_fastgil)
            self.walk_frames(curframe, otherframe, initialframedata)
            stackscount += 1
        #
        expected = rffi.stackcounter.stacks_counter
        if NonConstant(0):
            rffi.stackcounter.stacks_counter += 42    # hack to force it
        ll_assert(not (stackscount < expected), "non-closed stacks around")
        ll_assert(not (stackscount > expected), "stacks counter corruption?")
        lltype.free(otherframe, flavor='raw')
        lltype.free(curframe, flavor='raw')
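
The loop in this example walks a circular doubly-linked list whose sentinel node is gcrootanchor: each piece of stack is a 7-word structure whose first two words are the prev/next links, and the walk stops once the next pointer comes back around to the anchor. The pure-Python sketch below illustrates only that traversal pattern; the class and function names are invented for the illustration and are not the RPython structures used above.

class StackPiece(object):
    # Stand-in for one 7-word frame-data structure: only the two
    # header words (prev/next) matter for the traversal.
    def __init__(self):
        self.prev = self.next = self

def insert_piece(anchor, piece):
    # Link a new piece right after the anchor, keeping the list circular.
    piece.prev, piece.next = anchor, anchor.next
    anchor.next.prev = piece
    anchor.next = piece

def walk_stack_pieces(anchor):
    # Equivalent of the 'while initialframedata != anchor' loop above.
    count = 0
    piece = anchor.next
    while piece is not anchor:      # stop once we loop back to the anchor
        count += 1                  # here the real code calls walk_frames()
        piece = piece.next
    return count

anchor = StackPiece()
insert_piece(anchor, StackPiece())
insert_piece(anchor, StackPiece())
assert walk_stack_pieces(anchor) == 2
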
Example #2
 def pre_assemble(self, asm, operations, bridge=False):
     # O(len(operations)). I do not think there is a way
     # around this.
     #
     # Problem:
     # constants such as floating point values, plain pointers,
     # or integers might serve as parameters to an operation; thus
     # they must be loaded into a register. There is a space benefit
     # for 64-bit integers or Python floats when a constant is used
     # twice.
     #
     # Solution:
     # the current solution (gcc does the same) is to use a literal
     # pool addressed via register r13, which can easily be offset
     # with 20-bit signed values (that should be enough).
     self.pool_start = asm.mc.get_relative_pos()
     for op in operations:
         self.ensure_can_hold_constants(asm, op)
     self._ensure_value(asm.cpu.pos_exc_value(), asm)
     # the top of shadow stack
     gcrootmap = asm.cpu.gc_ll_descr.gcrootmap
     if gcrootmap and gcrootmap.is_shadow_stack:
         self._ensure_value(gcrootmap.get_root_stack_top_addr(), asm)
     # endaddr of insert stack check
     endaddr, lengthaddr, _ = asm.cpu.insert_stack_check()
     self._ensure_value(endaddr, asm)
     # fast gil
     fastgil = rffi.cast(lltype.Signed, rgil.gil_fetch_fastgil())
     self._ensure_value(fastgil, asm)
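
The comment above describes a literal pool: every constant the operations may need is gathered into one block of memory addressed relative to a base register (r13), so a 64-bit constant used twice occupies a single slot and every use becomes a small signed displacement. The sketch below is a minimal, hypothetical illustration of that deduplication idea; it is not the real Pool/ensure_can_hold_constants API.

WORD = 8

def build_literal_pool(constants):
    # Assign each distinct constant one WORD-sized slot; reusing a
    # constant reuses its offset instead of growing the pool.
    offsets = {}
    size = 0
    for value in constants:
        if value not in offsets:
            offsets[value] = size
            size += WORD
    return offsets, size

offsets, size = build_literal_pool([3.5, 42, 3.5, 2**40])
assert size == 3 * WORD            # 3.5 is stored only once
# each use is encoded as a 20-bit signed displacement from r13,
# i.e. the pool must stay within +/- 512 KiB of the base register
assert all(-2**19 <= off < 2**19 for off in offsets.values())
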
Example #3
    def pre_assemble(self, asm, operations, allgcrefs, bridge=False):
        # Problem:
        # constants such as floating point values, plain pointers,
        # or integers might serve as parameters to an operation; thus
        # they must be loaded into a register. Loading them as
        # immediates takes quite long and slows down the resulting
        # JIT code. There is a space benefit for 64-bit
        # integers/doubles used twice.
        #
        # create the table for gc references here
        self.gc_table_addr = asm.mc.get_relative_pos()
        self.gcref_table_size = len(allgcrefs) * WORD
        mc = asm.mc
        assert mc.get_relative_pos() == 0
        for i in range(self.gcref_table_size):
            mc.writechar('\x00')
        asm.setup_gcrefs_list(allgcrefs)

        self.pool_start = asm.mc.get_relative_pos()
        for op in operations:
            self.ensure_can_hold_constants(asm, op)
        self._ensure_value(asm.cpu.pos_exc_value(), asm)
        # the top of shadow stack
        gcrootmap = asm.cpu.gc_ll_descr.gcrootmap
        if gcrootmap and gcrootmap.is_shadow_stack:
            self._ensure_value(gcrootmap.get_root_stack_top_addr(), asm)
        # endaddr of insert stack check
        endaddr, lengthaddr, _ = asm.cpu.insert_stack_check()
        self._ensure_value(endaddr, asm)
        # fast gil
        fastgil = rffi.cast(lltype.Signed, rgil.gil_fetch_fastgil())
        self._ensure_value(fastgil, asm)
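
Compared to the previous pre_assemble, this version also reserves a table for GC references at the very start of the machine code: it writes len(allgcrefs) * WORD zero bytes as placeholders, which the assembler later patches with the real pointers. Below is a rough sketch of that reserve-then-patch idea, using an invented FakeMC buffer rather than the actual machine-code builder.

WORD = 8

class FakeMC(object):
    # Invented stand-in for the machine-code builder: just a byte
    # buffer exposing the two methods the snippet above relies on.
    def __init__(self):
        self.buf = bytearray()
    def get_relative_pos(self):
        return len(self.buf)
    def writechar(self, char):
        self.buf.append(ord(char))

def reserve_gcref_table(mc, allgcrefs):
    # Reserve one zeroed WORD per GC reference; the real backend
    # patches these slots with actual addresses later on.
    table_addr = mc.get_relative_pos()
    for _ in range(len(allgcrefs) * WORD):
        mc.writechar('\x00')
    return table_addr

mc = FakeMC()
table_addr = reserve_gcref_table(mc, ['gcref0', 'gcref1'])
assert table_addr == 0 and len(mc.buf) == 2 * WORD
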
Example #4
 def emit_call_release_gil(self):
     """Emit a CALL_RELEASE_GIL, including calls to releasegil_addr
     and reacqgil_addr."""
     fastgil = rffi.cast(lltype.Signed, rgil.gil_fetch_fastgil())
     self.select_call_release_gil_mode()
     self.prepare_arguments()
     self.push_gcmap_for_call_release_gil()
     self.call_releasegil_addr_and_move_real_arguments(fastgil)
     self.emit_raw_call()
     self.restore_stack_pointer()
     self.move_real_result_and_call_reacqgil_addr(fastgil)
     self.pop_gcmap()
     self.load_result()
Example #5
 def emit_call_release_gil(self, save_err):
     """Emit a CALL_RELEASE_GIL, including calls to releasegil_addr
     and reacqgil_addr.  'save_err' is a combination of rffi.RFFI_*ERR*."""
     fastgil = rffi.cast(lltype.Signed, rgil.gil_fetch_fastgil())
     self.select_call_release_gil_mode()
     self.prepare_arguments()
     self.push_gcmap_for_call_release_gil()
     self.call_releasegil_addr_and_move_real_arguments(fastgil)
     self.write_real_errno(save_err)
     self.emit_raw_call()
     self.restore_stack_pointer()
     self.read_real_errno(save_err)
     self.move_real_result_and_call_reacqgil_addr(fastgil)
     self.pop_gcmap()
     self.load_result()
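
The sequence in this example is fixed: release the GIL, set up errno for the callee, do the raw call, read errno back, then reacquire the GIL and handle the result. The sketch below only mirrors that ordering, with a plain threading.Lock standing in for the GIL and the ctypes errno helpers standing in for write_real_errno/read_real_errno; it is not how the JIT backend actually emits this code.

import ctypes
import threading

_gil = threading.Lock()                  # stand-in for the real GIL

def call_release_gil(func, *args):
    # Same ordering as the helper calls above, nothing more.
    _gil.release()                       # call_releasegil_addr_and_move_real_arguments
    try:
        ctypes.set_errno(0)              # write_real_errno(save_err)
        result = func(*args)             # emit_raw_call()
        err = ctypes.get_errno()         # read_real_errno(save_err)
    finally:
        _gil.acquire()                   # move_real_result_and_call_reacqgil_addr
    return result, err

_gil.acquire()
result, err = call_release_gil(abs, -5)
_gil.release()
assert result == 5
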