Example 1: _fixup()
 def _fixup(self):
     if self.ptr is not None and self.ptr._was_freed():
         # hack to support llarena.test_replace_object_with_stub()
         from rpython.rtyper.lltypesystem import llarena
         return llarena.getfakearenaaddress(self)
     else:
         return self
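When the object behind this plain fake address has been freed, _fixup() reroutes the address through llarena.getfakearenaaddress() so it resolves against the enclosing arena instead. The sketch below is not from PyPy (the struct S, the arena size and all other names are invented) and is meant to run untranslated on the llarena emulation; it only shows the distinction _fixup() relies on: cast_ptr_to_adr() yields a plain fake address, and getfakearenaaddress() turns it back into an arena-aware address on which address arithmetic and further arena operations are allowed.

    from rpython.rtyper.lltypesystem import lltype, llmemory, llarena

    S = lltype.Struct('S', ('x', lltype.Signed))
    arena = llarena.arena_malloc(1024, False)            # raw, unreserved bytes
    llarena.arena_reserve(arena, llmemory.sizeof(S))     # carve out room for one S
    p = llmemory.cast_adr_to_ptr(arena, lltype.Ptr(S))   # typed view of that slot
    p.x = 42
    adr = llmemory.cast_ptr_to_adr(p)                    # back to a plain fake address
    fake = llarena.getfakearenaaddress(adr)              # arena-aware form of it
    next_slot = fake + llmemory.sizeof(S)                # arithmetic is legal again
    llarena.arena_reserve(next_slot, llmemory.sizeof(S)) # and so are arena operations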
Example 2: _get_object_hash()
 def _get_object_hash(self, obj, objsize, tid):
     # Returns the hash of the object, which must not be GC_HASH_NOTTAKEN.
     gc_hash = tid & GCFLAG_HASHMASK
     if gc_hash == GC_HASH_HASFIELD:
         obj = llarena.getfakearenaaddress(obj)
         return (obj + objsize).signed[0]
     elif gc_hash == GC_HASH_TAKEN_ADDR:
         return llmemory.cast_adr_to_int(obj)
     elif gc_hash == GC_HASH_TAKEN_NURS:
         return self._compute_current_nursery_hash(obj)
     else:
         assert 0, "gc_hash == GC_HASH_NOTTAKEN"
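_get_object_hash() reads two hash bits out of the object's tid and dispatches on the four possible states: hash never taken, hash equal to the object's address, hash taken while the object was still in the nursery, or hash stored in an extra field just past the object (read through a fake arena address). The snippet below is only a plain-Python illustration of that dispatch; the constant values are invented here and are not the real GCFLAG_HASHMASK / GC_HASH_* values from the PyPy GC.

    # Made-up bit layout, for illustration only.
    GC_HASH_NOTTAKEN   = 0x00     # hash never asked for
    GC_HASH_TAKEN_ADDR = 0x10     # hash is the object's address
    GC_HASH_TAKEN_NURS = 0x20     # hash taken while still in the nursery
    GC_HASH_HASFIELD   = 0x30     # hash stored in a field after the object
    GCFLAG_HASHMASK    = 0x30

    def hash_state(tid):
        return tid & GCFLAG_HASHMASK

    assert hash_state(0x35) == GC_HASH_HASFIELD
    assert hash_state(0x07) == GC_HASH_NOTTAKEN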
Example 3: _nuninitialized()
 def _nuninitialized(self, page, size_class):
     # Helper for debugging: count the number of uninitialized blocks
     freeblock = page.freeblock
     for i in range(page.nfree):
         freeblock = freeblock.address[0]
     assert freeblock != NULL
     pageaddr = llarena.getfakearenaaddress(llmemory.cast_ptr_to_adr(page))
     num_initialized_blocks, rem = divmod(
         freeblock - pageaddr - self.hdrsize, size_class * WORD)
     assert rem == 0, "page size_class misspecified?"
     nblocks = self.nblocks_for_size[size_class]
     return nblocks - num_initialized_blocks
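After following the chained free list page.nfree times, 'freeblock' points at the first uninitialized block, so its offset from the page start, minus the header, divided by the block size gives the number of blocks ever initialized. A worked instance with made-up numbers (WORD, the header size and the size class are assumptions chosen only for this example):

    WORD = 8                          # assume a 64-bit word for this example
    hdrsize = 2 * WORD                # made-up page header size
    size_class = 3                    # blocks of 3 words each
    freeblock_offset = 11 * WORD      # 'freeblock' is 11 words into the page

    num_initialized_blocks, rem = divmod(freeblock_offset - hdrsize,
                                         size_class * WORD)
    assert (num_initialized_blocks, rem) == (3, 0)
    # With, say, 20 blocks per page, 20 - 3 = 17 blocks are still uninitialized.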
Example 4: free_page()
 def free_page(self, page):
     """Free a whole page."""
     #
     # Insert the freed page in the arena's 'freepages' list.
     # If nfreepages == totalpages, then it will be freed at the
     # end of mass_free().
     arena = page.arena
     arena.nfreepages += 1
     pageaddr = llmemory.cast_ptr_to_adr(page)
     pageaddr = llarena.getfakearenaaddress(pageaddr)
     llarena.arena_reset(pageaddr, self.page_size, 0)
     llarena.arena_reserve(pageaddr, llmemory.sizeof(llmemory.Address))
     pageaddr.address[0] = arena.freepages
     arena.freepages = pageaddr
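free_page() threads the page onto an intrusive singly linked list: arena_reset() forgets the page's contents, arena_reserve() re-reserves one Address-sized word at the page start, and that word stores the previous head of arena.freepages. The sketch below repeats those steps on two dummy pages; it is not PyPy code (page_size and the two-page arena are invented) and is meant to run untranslated on the llarena emulation.

    from rpython.rtyper.lltypesystem import llmemory, llarena

    page_size = 128                                  # arbitrary, word-aligned
    arena = llarena.arena_malloc(2 * page_size, False)
    freepages = llmemory.NULL                        # head of the free list

    for pageaddr in [arena, arena + page_size]:      # two fake "freed" pages
        llarena.arena_reset(pageaddr, page_size, 0)  # forget the contents (no-op here)
        llarena.arena_reserve(pageaddr, llmemory.sizeof(llmemory.Address))
        pageaddr.address[0] = freepages              # link to the old list head
        freepages = pageaddr                         # and become the new head
    # 'freepages' now points to the second page, whose first word points to
    # the first page, whose first word is NULL.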
Example 5: malloc()
 def malloc(self, size):
     """Allocate a block from a page in an arena."""
     nsize = llmemory.raw_malloc_usage(size)
     ll_assert(nsize > 0, "malloc: size is null or negative")
     ll_assert(nsize <= self.small_request_threshold,
               "malloc: size too big")
     ll_assert((nsize & (WORD-1)) == 0, "malloc: size is not aligned")
     self.total_memory_used += r_uint(nsize)
     #
     # Get the page to use from the size
     size_class = nsize >> WORD_POWER_2
     page = self.page_for_size[size_class]
     if page == PAGE_NULL:
         page = self.allocate_new_page(size_class)
     #
     # The result is simply 'page.freeblock'
     result = page.freeblock
     if page.nfree > 0:
         #
         # The 'result' was part of the chained list; read the next.
         page.nfree -= 1
         freeblock = result.address[0]
         llarena.arena_reset(result,
                             llmemory.sizeof(llmemory.Address),
                             0)
         #
     else:
         # The 'result' is part of the uninitialized blocks.
         freeblock = result + nsize
     #
     page.freeblock = freeblock
     #
     pageaddr = llarena.getfakearenaaddress(llmemory.cast_ptr_to_adr(page))
     if freeblock - pageaddr > self.page_size - nsize:
         # This was the last free block, so unlink the page from the
         # chained list and put it in the 'full_page_for_size' list.
         self.page_for_size[size_class] = page.nextpage
         page.nextpage = self.full_page_for_size[size_class]
         self.full_page_for_size[size_class] = page
     #
     llarena.arena_reserve(result, _dummy_size(size))
     return result
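Two bits of arithmetic in malloc() are easy to check by hand: the size class is simply the rounded request size expressed in words, and the page is moved to 'full_page_for_size' when the next free block starts less than nsize bytes before the end of the page. A plain-Python check with made-up numbers (WORD and WORD_POWER_2 assume a 64-bit build; page_size is invented):

    WORD, WORD_POWER_2 = 8, 3           # assume a 64-bit build
    page_size = 4096                    # made-up page size, in bytes

    nsize = 48                          # word-aligned request, in bytes
    size_class = nsize >> WORD_POWER_2  # blocks of 6 words each
    assert size_class == nsize // WORD == 6

    # The page goes to 'full_page_for_size' when the next free block would
    # begin less than nsize bytes before the end of the page:
    freeblock_offset = page_size - 40             # only 40 bytes left
    assert freeblock_offset > page_size - nsize   # 4056 > 4048: page is now full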
Example 6: walk_page()
 def walk_page(self, page, block_size, ok_to_free_func):
     """Walk over all objects in a page, and ask ok_to_free_func()."""
     #
     # 'freeblock' is the next free block
     freeblock = page.freeblock
     #
     # 'prevfreeblockat' is the address of where 'freeblock' was read from.
     prevfreeblockat = lltype.direct_fieldptr(page, 'freeblock')
     prevfreeblockat = llmemory.cast_ptr_to_adr(prevfreeblockat)
     #
     obj = llarena.getfakearenaaddress(llmemory.cast_ptr_to_adr(page))
     obj += self.hdrsize
     surviving = 0  # initially
     skip_free_blocks = page.nfree
     #
     while True:
         #
         if obj == freeblock:
             #
             if skip_free_blocks == 0:
                 #
                 # 'obj' points to the first uninitialized block,
                 # or to the end of the page if there are none.
                 break
             #
             # 'obj' points to a free block.  It means that
             # 'prevfreeblockat.address[0]' does not need to be updated.
             # Just read the next free block from 'obj.address[0]'.
             skip_free_blocks -= 1
             prevfreeblockat = obj
             freeblock = obj.address[0]
             #
         else:
             # 'obj' points to a valid object.
             ll_assert(freeblock > obj,
                       "freeblocks are linked out of order")
             #
             if ok_to_free_func(obj):
                 #
                 # The object should die.
                 llarena.arena_reset(obj, _dummy_size(block_size), 0)
                 llarena.arena_reserve(obj,
                                       llmemory.sizeof(llmemory.Address))
                 # Insert 'obj' in the linked list of free blocks.
                 prevfreeblockat.address[0] = obj
                 prevfreeblockat = obj
                 obj.address[0] = freeblock
                 #
                 # Update the number of free objects in the page.
                 page.nfree += 1
                 #
             else:
                 # The object survives.
                 surviving += 1
         #
         obj += block_size
     #
     # Update the global total size of objects.
     self.total_memory_used += r_uint(surviving * block_size)
     #
     # Return the number of surviving objects.
     return surviving
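walk_page() interleaves two traversals: a linear walk over every block of the page and a walk over the address-ordered free list, classifying each block as already free, newly dead (relinked into the free list), or surviving. The plain-Python model below, with integers standing in for block addresses, captures only that classification logic and is not the GC code itself.

    def sweep(block_offsets, freelist, ok_to_free):
        """Model of the per-page sweep: returns (surviving, new_freelist)."""
        freelist = list(freelist)      # sorted offsets of blocks already free
        new_freelist = []
        surviving = 0
        for off in block_offsets:
            if freelist and off == freelist[0]:
                new_freelist.append(freelist.pop(0))   # was already free
            elif ok_to_free(off):
                new_freelist.append(off)               # dies: relink as free
            else:
                surviving += 1                         # survives
        return surviving, new_freelist

    # Five blocks; block 1 is already free; even-numbered blocks die.
    surv, free = sweep(range(5), [1], lambda off: off % 2 == 0)
    assert (surv, free) == (1, [0, 1, 2, 4])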