Example no. 1
 def allocate_prebuilt_ref(self, size):
     o = lib.allocate_prebuilt(self.header_size +
                               size * ffi.sizeof("myobject_t *"))
     self.assertNotEqual(o, ffi.NULL)
     lib._set_type_id(o, size)
     lib.qcgc_write(o)  # Register object
     return ffi.cast("myobject_t *", o)
Example no. 2
 def test_incremental(self):
     o = ffi.cast("object_t *", self.allocate_ref(lib.qcgc_arena_size // ffi.sizeof("myobject_t *")))
     self.push_root(o)
     p = ffi.cast("object_t *", self.allocate(1))
     self.push_root(p)
     q = ffi.cast("object_t *", self.allocate(1))
     self.set_ref(o, 0, p)
     #
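     # Drop p from the roots; it stays reachable only through o.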
     self.pop_root()
     lib.qcgc_incmark()
     #
     self.assertTrue(self.hbtable_has(o))
     self.assertTrue(self.hbtable_marked(o))
     self.assertFalse(self.gp_gray_stack_has(o))
     #
     self.assertEqual(self.get_blocktype(
         ffi.cast("cell_t *", p)), lib.BLOCK_BLACK)
     self.assertEqual(self.get_blocktype(
         ffi.cast("cell_t *", q)), lib.BLOCK_WHITE)
     #
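     # The write barrier in set_ref re-grays the already-marked o, so the
     # new reference to q is traced by the next incremental mark step.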
     self.set_ref(o, 1, q)
     self.assertTrue(self.gp_gray_stack_has(o))
     #
     lib.qcgc_incmark()
     #
     self.assertTrue(self.hbtable_has(o))
     self.assertTrue(self.hbtable_marked(o))
     self.assertFalse(self.gp_gray_stack_has(o))
     #
     self.assertEqual(self.get_blocktype(
         ffi.cast("cell_t *", p)), lib.BLOCK_BLACK)
     self.assertEqual(self.get_blocktype(
         ffi.cast("cell_t *", q)), lib.BLOCK_BLACK)
Example no. 3
 def allocate_weakref(self, to):
     o = lib.qcgc_allocate(self.header_size + ffi.sizeof("myobject_t *"))
     self.assertNotEqual(o, ffi.NULL)
     lib._set_type_id(o, 0)  # Prevent the stored reference from being traced
     ffi.cast("myobject_t *", o).refs[0] = ffi.cast("myobject_t *", to) # Ref has to be valid before registering
     lib.qcgc_register_weakref(o, ffi.cast("object_t **",
         ffi.cast("myobject_t *", o).refs)) # XXX: ffi.addressof .refs[0] does not work
     lib.qcgc_write(o)
     return o
Example no. 4
 def allocate_weakref(self, to):
     o = lib.qcgc_allocate(self.header_size + ffi.sizeof("myobject_t *"))
     self.assertNotEqual(o, ffi.NULL)
     lib._set_type_id(o, 0)  # Prevent the stored reference from being traced
     ffi.cast("myobject_t *", o).refs[0] = ffi.cast(
         "myobject_t *", to)  # Ref has to be valid before registering
     lib.qcgc_register_weakref(
         o, ffi.cast(
             "object_t **",
             ffi.cast("myobject_t *",
                      o).refs))  # XXX: ffi.addressof .refs[0] does not work
     lib.qcgc_write(o)
     return o
Example no. 5
 def test_incremental(self):
     o = ffi.cast(
         "object_t *",
         self.allocate_ref(lib.qcgc_arena_size //
                           ffi.sizeof("myobject_t *")))
     self.push_root(o)
     p = ffi.cast("object_t *", self.allocate(1))
     self.push_root(p)
     q = ffi.cast("object_t *", self.allocate(1))
     self.set_ref(o, 0, p)
     #
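     # Drop p from the roots; it stays reachable only through o.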
     self.pop_root()
     lib.qcgc_incmark()
     #
     self.assertTrue(self.hbtable_has(o))
     self.assertTrue(self.hbtable_marked(o))
     self.assertFalse(self.gp_gray_stack_has(o))
     #
     self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", p)),
                      lib.BLOCK_BLACK)
     self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", q)),
                      lib.BLOCK_WHITE)
     #
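     # The write barrier in set_ref re-grays the already-marked o, so the
     # new reference to q is traced by the next incremental mark step.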
     self.set_ref(o, 1, q)
     self.assertTrue(self.gp_gray_stack_has(o))
     #
     lib.qcgc_incmark()
     #
     self.assertTrue(self.hbtable_has(o))
     self.assertTrue(self.hbtable_marked(o))
     self.assertFalse(self.gp_gray_stack_has(o))
     #
     self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", p)),
                      lib.BLOCK_BLACK)
     self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", q)),
                      lib.BLOCK_BLACK)
Example no. 6
 def test_mark_large(self):
     o = ffi.cast("object_t *", self.allocate(lib.qcgc_arena_size))
     self.push_root(o)
     p = ffi.cast("object_t *", self.allocate_ref(lib.qcgc_arena_size // ffi.sizeof("myobject_t *")))
     self.push_root(p)
     q = ffi.cast("object_t *", self.allocate(lib.qcgc_arena_size))
     self.push_root(q)
     r = ffi.cast("object_t *", self.allocate(1))
     self.push_root(r)
     s = ffi.cast("object_t *", self.allocate_ref(1))
     self.push_root(s)
     t = ffi.cast("object_t *", self.allocate(lib.qcgc_arena_size))
     self.push_root(t)
     self.set_ref(p, 0, q)
     self.set_ref(p, 1, r)
     self.set_ref(p, 2, s)
     self.set_ref(s, 0, p)
     #
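     # Keep only o and s rooted; p, q and r stay reachable via s -> p,
     # while t becomes unreachable.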
     for _ in range(6):
         self.pop_root()
     self.push_root(o)
     self.push_root(s)
     #
     lib.qcgc_mark()
     #
     self.assertTrue(self.hbtable_has(o))
     self.assertTrue(self.hbtable_marked(o))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(p))
     self.assertTrue(self.hbtable_marked(p))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(q))
     self.assertTrue(self.hbtable_marked(q))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(t))
     self.assertFalse(self.hbtable_marked(t))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertEqual(self.get_blocktype(
         ffi.cast("cell_t *", s)), lib.BLOCK_BLACK)
     self.assertEqual(self.get_blocktype(
         ffi.cast("cell_t *", r)), lib.BLOCK_BLACK)
     #
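     # Reset the bump pointer and sweep: mark flags are cleared, the
     # unreachable t is dropped from the hbtable, and s and r become white.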
     lib.bump_ptr_reset()
     lib.qcgc_sweep()
     #
     self.assertTrue(self.hbtable_has(o))
     self.assertFalse(self.hbtable_marked(o))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(p))
     self.assertFalse(self.hbtable_marked(p))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(q))
     self.assertFalse(self.hbtable_marked(q))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertFalse(self.hbtable_has(t))
     self.assertFalse(self.hbtable_marked(t))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertEqual(self.get_blocktype(
         ffi.cast("cell_t *", s)), lib.BLOCK_WHITE)
     self.assertEqual(self.get_blocktype(
         ffi.cast("cell_t *", r)), lib.BLOCK_WHITE)
Example no. 7
 def test_mark_large(self):
     o = ffi.cast("object_t *", self.allocate(lib.qcgc_arena_size))
     self.push_root(o)
     p = ffi.cast(
         "object_t *",
         self.allocate_ref(lib.qcgc_arena_size //
                           ffi.sizeof("myobject_t *")))
     self.push_root(p)
     q = ffi.cast("object_t *", self.allocate(lib.qcgc_arena_size))
     self.push_root(q)
     r = ffi.cast("object_t *", self.allocate(1))
     self.push_root(r)
     s = ffi.cast("object_t *", self.allocate_ref(1))
     self.push_root(s)
     t = ffi.cast("object_t *", self.allocate(lib.qcgc_arena_size))
     self.push_root(t)
     self.set_ref(p, 0, q)
     self.set_ref(p, 1, r)
     self.set_ref(p, 2, s)
     self.set_ref(s, 0, p)
     #
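     # Keep only o and s rooted; p, q and r stay reachable via s -> p,
     # while t becomes unreachable.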
     for _ in range(6):
         self.pop_root()
     self.push_root(o)
     self.push_root(s)
     #
     lib.qcgc_mark()
     #
     self.assertTrue(self.hbtable_has(o))
     self.assertTrue(self.hbtable_marked(o))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(p))
     self.assertTrue(self.hbtable_marked(p))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(q))
     self.assertTrue(self.hbtable_marked(q))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(t))
     self.assertFalse(self.hbtable_marked(t))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", s)),
                      lib.BLOCK_BLACK)
     self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", r)),
                      lib.BLOCK_BLACK)
     #
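     # Reset the bump pointer and sweep: mark flags are cleared, the
     # unreachable t is dropped from the hbtable, and s and r become white.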
     lib.bump_ptr_reset()
     lib.qcgc_sweep()
     #
     self.assertTrue(self.hbtable_has(o))
     self.assertFalse(self.hbtable_marked(o))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(p))
     self.assertFalse(self.hbtable_marked(p))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertTrue(self.hbtable_has(q))
     self.assertFalse(self.hbtable_marked(q))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertFalse(self.hbtable_has(t))
     self.assertFalse(self.hbtable_marked(t))
     self.assertEqual(lib.qcgc_state.gray_stack_size, 0)
     #
     self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", s)),
                      lib.BLOCK_WHITE)
     self.assertEqual(self.get_blocktype(ffi.cast("cell_t *", r)),
                      lib.BLOCK_WHITE)
Example no. 8
import unittest

# `ffi` and `lib` are assumed to come from the project's cffi bindings to the
# compiled qcgc library used by the test suite.

class QCGCTest(unittest.TestCase):
    header_size = ffi.sizeof("myobject_t")

    def setUp(self):
        lib.qcgc_initialize()

    def tearDown(self):
        lib.qcgc_destroy()

    def push_root(self, o):
        lib.qcgc_push_root(ffi.cast("object_t *", o))

    def pop_root(self):
        lib.qcgc_pop_root(1)

    def allocate(self, size):
        o = lib.qcgc_allocate(self.header_size + size)
        self.assertNotEqual(o, ffi.NULL)
        lib._set_type_id(o, 0)
        return ffi.cast("myobject_t *", o)

    def allocate_ref(self, size):
        o = lib.qcgc_allocate(self.header_size +
                              size * ffi.sizeof("myobject_t *"))
        self.assertNotEqual(o, ffi.NULL)
        lib._set_type_id(o, size)
        return ffi.cast("myobject_t *", o)

    def allocate_prebuilt(self, size):
        o = lib.allocate_prebuilt(self.header_size + size)
        self.assertNotEqual(o, ffi.NULL)
        lib._set_type_id(o, 0)
        lib.qcgc_write(o)  # Register object
        return ffi.cast("myobject_t *", o)

    def allocate_prebuilt_ref(self, size):
        o = lib.allocate_prebuilt(self.header_size +
                                  size * ffi.sizeof("myobject_t *"))
        self.assertNotEqual(o, ffi.NULL)
        lib._set_type_id(o, size)
        lib.qcgc_write(o)  # Register object
        return ffi.cast("myobject_t *", o)

    def allocate_weakref(self, to):
        o = lib.qcgc_allocate(self.header_size + ffi.sizeof("myobject_t *"))
        self.assertNotEqual(o, ffi.NULL)
        lib._set_type_id(o, 0)  # Prevent the stored reference from being traced
        ffi.cast("myobject_t *", o).refs[0] = ffi.cast(
            "myobject_t *", to)  # Ref has to be valid before registering
        lib.qcgc_register_weakref(
            o, ffi.cast(
                "object_t **",
                ffi.cast("myobject_t *",
                         o).refs))  # XXX: ffi.addressof .refs[0] does not work
        lib.qcgc_write(o)
        return o

    def set_ref(self, obj, index, ref):
        lib.qcgc_write(ffi.cast("object_t *", obj))  # Trigger write barrier
        assert index >= 0
        assert ffi.cast("myobject_t *", obj).type_id > index
        ffi.cast("myobject_t *",
                 obj).refs[index] = ffi.cast("myobject_t *", ref)

    def gp_gray_stack_has(self, obj):
        for i in range(lib.qcgc_state.gp_gray_stack.count):
            if lib.qcgc_state.gp_gray_stack.items[i] == obj:
                return True
        return False

    def bump_allocate(self, size):
        if self.bump_remaining_cells() < lib.bytes_to_cells(size):
            lib.qcgc_bump_allocator_renew_block(size, True)
        return lib.qcgc_bump_allocate(size)

    def get_ref(self, obj, index):
        return ffi.cast("myobject_t *", obj).refs[index]

    def ss_size(self):
        return lib._qcgc_shadowstack.top - lib._qcgc_shadowstack.base

    def get_blocktype(self, ptr):
        return lib.qcgc_arena_get_blocktype(lib.qcgc_arena_addr(ptr),
                                            lib.qcgc_arena_cell_index(ptr))

    def set_blocktype(self, ptr, blocktype):
        lib.qcgc_arena_set_blocktype(lib.qcgc_arena_addr(ptr),
                                     lib.qcgc_arena_cell_index(ptr), blocktype)

    def bump_remaining_cells(self):
        return lib._qcgc_bump_allocator.end - lib._qcgc_bump_allocator.ptr

    # Utilities for mark/sweep testing
    def gen_structure_1(self):
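        # result has 6 reference slots: slots 0-4 point to plain objects,
        # slot 5 points to p, and p in turn points to q.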
        result = self.allocate_ref(6)
        result_list = [result]

        for i in range(5):
            p = self.allocate(1)
            result_list.append(p)
            self.set_ref(result, i, p)
        p = self.allocate_ref(1)
        result_list.append(p)
        self.set_ref(result, 5, p)

        q = self.allocate(1)
        result_list.append(q)
        self.set_ref(p, 0, q)
        return result, result_list

    def gen_circular_structure(self, size):
        assert size >= 1

        first = self.allocate_ref(1)
        objects = [first]
        p = first

        # Build chain
        for _ in range(size - 1):
            q = self.allocate_ref(1)
            objects.append(q)
            self.set_ref(p, 0, q)
            p = q

        # Close cycle
        self.set_ref(p, 0, first)
        return objects
Example no. 9
 def allocate_ref(self, size):
     o = lib.qcgc_allocate(self.header_size +
                           size * ffi.sizeof("myobject_t *"))
     self.assertNotEqual(o, ffi.NULL)
     lib._set_type_id(o, size)
     return ffi.cast("myobject_t *", o)
Example no. 10
 def allocate_prebuilt_ref(self, size):
     o = lib.allocate_prebuilt(self.header_size + size * ffi.sizeof("myobject_t *"))
     self.assertNotEqual(o, ffi.NULL)
     lib._set_type_id(o, size)
     lib.qcgc_write(o) # Register object
     return ffi.cast("myobject_t *", o)
Example no. 11
 def allocate_ref(self, size):
     o = lib.qcgc_allocate(self.header_size + size * ffi.sizeof("myobject_t *"))
     self.assertNotEqual(o, ffi.NULL)
     lib._set_type_id(o, size)
     return ffi.cast("myobject_t *", o)