def test_set_size_limit(self):
    """
    Setting the size limits the cache's size just like passing an
    initial size would.
    """
    size = 10
    cache = GenerationalCache(size * 100)
    cache.set_size(size)
    for value in range(size * 10):
        cache.add(StubObjectInfo(value))
    self.assertEqual(len(cache.get_cached()), size * 2)
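# The size * 2 assertion above relies on the two-generation scheme: when the
# "new" generation fills up, it becomes the "old" generation rather than
# being discarded, so up to twice the configured size can be cached at once.
# Below is a minimal sketch of such a scheme, an illustration only and not
# Storm's actual GenerationalCache implementation; the class name is
# hypothetical, and obj_info is assumed to expose an integer `id` attribute
# like the StubObjectInfo helper used in these tests.
class TwoGenerationCacheSketch(object):

    def __init__(self, size):
        self._size = size
        self._new = {}  # most recently added objects
        self._old = {}  # previous generation, dropped at the next rollover

    def add(self, obj_info):
        if len(self._new) >= self._size:
            # Roll the generations: "new" becomes "old" and the previous
            # "old" generation is dropped entirely.
            self._old = self._new
            self._new = {}
        self._new[obj_info.id] = obj_info

    def get_cached(self):
        cached = dict(self._old)
        cached.update(self._new)
        return list(cached.values())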
def test_set_size_larger_than_current_size(self):
    """
    Setting the cache size to something more than the number of
    objects in the cache does not affect its current contents, and
    will merge any elements from the second generation into the
    first one.
    """
    cache = GenerationalCache(1)
    cache.add(self.obj1)  # new=[1] old=[]
    cache.add(self.obj2)  # new=[2] old=[1]
    cache.set_size(2)     # new=[1, 2] old=[]
    cache.add(self.obj3)  # new=[3] old=[1, 2]
    self.assertEqual(sorted(cache.get_cached()),
                     [self.obj1, self.obj2, self.obj3])
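# The grow case traced in the comments above can be sketched by giving the
# hypothetical class a set_size() that merges the old generation into the
# new one, so growing never drops a cached object. This is an assumed
# mechanism matching the docstring, not Storm's verified code.
class ResizableCacheSketch(TwoGenerationCacheSketch):

    def set_size(self, size):
        # Growing: fold both generations into "new" so nothing currently
        # cached is lost, then raise the limit.
        self._new.update(self._old)
        self._old = {}
        self._size = size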
def test_set_size_smaller_than_current_size(self):
    """
    Setting the size to a smaller size than the number of objects
    currently cached will drop some of the extra content.  Note that
    because of the generation system, it can actually hold two times
    the size requested in edge cases.
    """
    cache = GenerationalCache(150)
    for value in range(250):
        cache.add(StubObjectInfo(value))
    cache.set_size(100)
    cached = cache.get_cached()
    self.assertEqual(len(cached), 100)
    for obj_info in cached:
        self.assertTrue(obj_info.id >= 100)
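# Shrinking is the case this test exercises: set_size() must also drop
# enough objects to fit the new limit. Keeping the most recently added
# objects, which in these tests means the highest stub ids, is consistent
# with the obj_info.id >= 100 loop above; this is an assumed eviction
# policy for the sketch, not Storm's verified implementation.
class ShrinkableCacheSketch(ResizableCacheSketch):

    def set_size(self, size):
        merged = dict(self._old)
        merged.update(self._new)
        # Keep only the `size` most recently added objects; insertion
        # order matches ascending stub ids here. When size exceeds the
        # number of cached objects, everything survives, so this also
        # subsumes the grow case.
        survivors = sorted(merged.values(), key=lambda obj: obj.id)[-size:]
        self._new = dict((obj.id, obj) for obj in survivors)
        self._old = {}
        self._size = size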