def test_remove_nothing(self):
    cache = GenerationalCache()
    cache.add(self.obj1)
    present = cache.remove(self.obj2)
    self.assertFalse(present)
    self.assertEqual(cache.get_cached(), [self.obj1])
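# The unittest-style methods in this collection refer to self.obj1..self.obj3
# and to StubObjectInfo without showing their setup.  The definitions below
# are an assumed minimal sketch of that scaffolding, not the suite's actual
# code: a stub that is hashable and orderable by id, plus a base TestCase
# that creates the three shared objects.
import unittest


class StubObjectInfo(object):
    """Assumed stand-in for a Storm obj_info: identified and ordered by id."""

    def __init__(self, id):
        self.id = id

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.id)

    def __lt__(self, other):
        # Needed so sorted(cache.get_cached()) works on Python 3.
        return self.id < other.id


class GenerationalCacheTestBase(unittest.TestCase):
    """Assumed base class providing the objects used by the test methods."""

    def setUp(self):
        super(GenerationalCacheTestBase, self).setUp()
        self.obj1 = StubObjectInfo(1)
        self.obj2 = StubObjectInfo(2)
        self.obj3 = StubObjectInfo(3)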
def test_generational_cache_two_generations(obj1, obj2):
    """
    Inserting more objects than the cache's size causes the cache
    to contain two generations, each holding up to <size> objects.
    """
    cache = GenerationalCache(1)
    cache.add(obj1)
    cache.add(obj2)
    assert sorted(cache.get_cached()) == [obj1, obj2]
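# The function-style tests (test_generational_cache_*) take obj1/obj2/obj3 as
# pytest fixtures rather than attributes set up in setUp.  Their definitions
# are not part of this collection; the fixtures below are an assumed sketch
# that simply reuses the StubObjectInfo stand-in defined above.
import pytest


@pytest.fixture
def obj1():
    return StubObjectInfo(1)


@pytest.fixture
def obj2():
    return StubObjectInfo(2)


@pytest.fixture
def obj3():
    return StubObjectInfo(3)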
def test_two_generations(self):
    """
    Inserting more objects than the cache's size causes the cache
    to contain two generations, each holding up to <size> objects.
    """
    cache = GenerationalCache(1)
    cache.add(self.obj1)
    cache.add(self.obj2)
    self.assertEqual(sorted(cache.get_cached()), [self.obj1, self.obj2])
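# These tests exercise Storm's GenerationalCache (storm.cache.GenerationalCache).
# For readers without the library at hand, here is a behavioural sketch
# reconstructed from the tests in this collection; it is NOT Storm's actual
# implementation (which, among other things, holds a strong reference to the
# cached object itself rather than just its obj_info), but it shows why the
# cache holds at most 2 * size objects split across two generations.
import itertools


class TwoGenerationCacheSketch(object):

    def __init__(self, size=1000):
        self._size = size
        self._new = {}  # primary generation: most recently added objects
        self._old = {}  # secondary generation: dropped wholesale on overflow

    def add(self, obj_info):
        if self._size != 0 and obj_info not in self._new:
            if len(self._new) >= self._size:
                # Bump generations: discard the old generation and demote the
                # current primary generation to secondary.
                self._old = self._new
                self._new = {}
            self._new[obj_info] = obj_info

    def remove(self, obj_info):
        # An object may live in both generations; remove it from both and
        # report whether it was present in either.
        in_new = self._new.pop(obj_info, None) is not None
        in_old = self._old.pop(obj_info, None) is not None
        return in_new or in_old

    def clear(self):
        self._new.clear()
        self._old.clear()

    def set_size(self, size):
        if size == 0:
            self.clear()
        else:
            # Keep at most `size` entries, merging both generations into the
            # primary one.
            kept = itertools.islice(
                itertools.chain(self._new.items(), self._old.items()), size)
            self._new = dict(kept)
            self._old = {}
        self._size = size

    def get_cached(self):
        # Objects present in both generations are reported only once.
        cached = dict(self._new)
        cached.update(self._old)
        return list(cached)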
def test_size_limit(self):
    """
    A cache will never hold more than twice its size in objects.
    The generational system is what prevents it from holding
    exactly the requested number of objects.
    """
    size = 10
    cache = GenerationalCache(size)
    for value in xrange(5 * size):
        cache.add(StubObjectInfo(value))
    self.assertEquals(len(cache.get_cached()), size * 2)

def test_size_limit(self):
    """
    A cache will never hold more than twice its size in objects.
    The generational system is what prevents it from holding
    exactly the requested number of objects.
    """
    size = 10
    cache = GenerationalCache(size)
    for value in range(5 * size):
        cache.add(StubObjectInfo(value))
    self.assertEquals(len(cache.get_cached()), size * 2)
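# A quick worked check of the size-limit tests above, run against the
# TwoGenerationCacheSketch defined earlier (an assumption, not Storm's real
# class): after adding 5 * size objects to a size-10 cache, the primary
# generation holds ids 40-49 and the secondary generation ids 30-39, so
# exactly 2 * size objects remain.
def demo_size_limit():
    size = 10
    demo = TwoGenerationCacheSketch(size)
    for value in range(5 * size):
        demo.add(StubObjectInfo(value))
    cached_ids = sorted(obj_info.id for obj_info in demo.get_cached())
    assert len(cached_ids) == size * 2
    assert cached_ids == list(range(3 * size, 5 * size))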
def test_remove_object(self):
    cache = GenerationalCache()
    cache.add(self.obj1)
    cache.add(self.obj2)
    cache.add(self.obj3)
    present = cache.remove(self.obj2)
    self.assertTrue(present)
    self.assertEqual(sorted(cache.get_cached()), [self.obj1, self.obj3])

def test_generational_cache_remove_object(obj1, obj2, obj3):
    cache = GenerationalCache()
    cache.add(obj1)
    cache.add(obj2)
    cache.add(obj3)
    present = cache.remove(obj2)
    assert present
    assert sorted(cache.get_cached()) == [obj1, obj3]

def test_generational_cache_clear_cache_clears_the_second_generation(
        obj1, obj2):
    cache = GenerationalCache(1)
    cache.add(obj1)
    cache.add(obj2)
    cache.clear()
    assert cache.get_cached() == []

def test_set_size_larger_than_current_size(self):
    """
    Setting the cache size to something more than the number of
    objects in the cache does not affect its current contents,
    and will merge any elements from the second generation into
    the first one.
    """
    cache = GenerationalCache(1)
    cache.add(self.obj1)  # new=[1]    old=[]
    cache.add(self.obj2)  # new=[2]    old=[1]
    cache.set_size(2)     # new=[1, 2] old=[]
    cache.add(self.obj3)  # new=[3]    old=[1, 2]
    self.assertEqual(sorted(cache.get_cached()),
                     [self.obj1, self.obj2, self.obj3])

def test_generational_cache_evict_oldest(obj1, obj2, obj3):
    """The "oldest" object is the first to be evicted."""
    cache = GenerationalCache(1)
    cache.add(obj1)
    cache.add(obj2)
    cache.add(obj3)
    assert sorted(cache.get_cached()) == [obj2, obj3]

def test_evict_oldest(self):
    """The "oldest" object is the first to be evicted."""
    cache = GenerationalCache(1)
    cache.add(self.obj1)
    cache.add(self.obj2)
    cache.add(self.obj3)
    self.assertEqual(sorted(cache.get_cached()), [self.obj2, self.obj3])

def storm_cache_factory():
    """Return a Storm Cache of the type and size specified in dbconfig."""
    if dbconfig.storm_cache == 'generational':
        return GenerationalCache(int(dbconfig.storm_cache_size))
    elif dbconfig.storm_cache == 'stupid':
        return StupidCache(int(dbconfig.storm_cache_size))
    elif dbconfig.storm_cache == 'default':
        return Cache(int(dbconfig.storm_cache_size))
    else:
        assert False, "Unknown storm_cache %s." % dbconfig.storm_cache
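# The same dispatch written in a data-driven form, purely as an illustration
# of the design choice.  This rewrite is not Launchpad's code, and it raises
# KeyError rather than AssertionError for an unknown cache name.
_CACHE_CLASSES = {
    'generational': GenerationalCache,
    'stupid': StupidCache,
    'default': Cache,
}


def storm_cache_factory_alternative():
    """Return a Storm Cache of the type and size specified in dbconfig."""
    return _CACHE_CLASSES[dbconfig.storm_cache](int(dbconfig.storm_cache_size))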
def test_evict_LRU(self):
    """
    Actually, it's not the oldest but the LRU object that is first
    to be evicted.  Re-adding the oldest object makes it not be
    the LRU.
    """
    cache = GenerationalCache(1)
    cache.add(self.obj1)
    cache.add(self.obj2)
    # This "refreshes" the oldest object in the cache.
    cache.add(self.obj1)
    cache.add(self.obj3)
    self.assertEqual(sorted(cache.get_cached()), [self.obj1, self.obj3])

def test_generational_cache_evict_lru(obj1, obj2, obj3):
    """
    Actually, it's not the oldest but the LRU object that is first
    to be evicted.  Re-adding the oldest object makes it not be
    the LRU.
    """
    cache = GenerationalCache(1)
    cache.add(obj1)
    cache.add(obj2)
    # This "refreshes" the oldest object in the cache.
    cache.add(obj1)
    cache.add(obj3)
    assert sorted(cache.get_cached()) == [obj1, obj3]

def test_remove_from_overlap(self):
    """
    Removing an object from the cache removes it from both its
    primary and secondary generations.
    """
    cache = GenerationalCache(2)
    cache.add(self.obj1)  # new=[1]    old=[]
    cache.add(self.obj2)  # new=[1, 2] old=[]
    cache.add(self.obj3)  # new=[3]    old=[1, 2]
    cache.add(self.obj1)  # new=[3, 1] old=[1, 2]
    present = cache.remove(self.obj1)
    self.assertTrue(present)
    self.assertEqual(sorted(cache.get_cached()), [self.obj2, self.obj3])

def test_generational_cache_remove_nothing(obj1, obj2):
    cache = GenerationalCache()
    cache.add(obj1)
    present = cache.remove(obj2)
    assert not present
    assert cache.get_cached() == [obj1]

def test_generational_cache_remove_from_overlap(obj1, obj2, obj3):
    """
    Removing an object from the cache removes it from both its
    primary and secondary generations.
    """
    cache = GenerationalCache(2)
    cache.add(obj1)  # new=[1]    old=[]
    cache.add(obj2)  # new=[1, 2] old=[]
    cache.add(obj3)  # new=[3]    old=[1, 2]
    cache.add(obj1)  # new=[3, 1] old=[1, 2]
    present = cache.remove(obj1)
    assert present
    assert sorted(cache.get_cached()) == [obj2, obj3]
def test_generational_cache_set_size_limit():
    """
    Setting the size limits the cache's size just like passing an
    initial size would.
    """
    size = 10
    cache = GenerationalCache(size * 100)
    cache.set_size(size)
    for value in iter_range(size * 10):
        cache.add(StubObjectInfo(value))
    assert len(cache.get_cached()) == size * 2

def test_generational_cache_three_generations(obj1, obj2, obj3):
    """
    If more than 2*<size> objects come along, only 2*<size>
    objects are retained.
    """
    cache = GenerationalCache(1)
    cache.add(obj1)
    cache.add(obj2)
    cache.add(obj3)
    assert sorted(cache.get_cached()) == [obj2, obj3]

def test_set_size_limit(self):
    """
    Setting the size limits the cache's size just like passing an
    initial size would.
    """
    size = 10
    cache = GenerationalCache(size * 100)
    cache.set_size(size)
    for value in range(size * 10):
        cache.add(StubObjectInfo(value))
    self.assertEquals(len(cache.get_cached()), size * 2)

def test_three_generations(self):
    """
    If more than 2*<size> objects come along, only 2*<size>
    objects are retained.
    """
    cache = GenerationalCache(1)
    cache.add(self.obj1)
    cache.add(self.obj2)
    cache.add(self.obj3)
    self.assertEqual(sorted(cache.get_cached()), [self.obj2, self.obj3])

def test_set_size_smaller_than_current_size(self):
    """
    Setting the size to a smaller size than the number of objects
    currently cached will drop some of the extra content.  Note
    that because of the generation system, it can actually hold
    two times the size requested in edge cases.
    """
    cache = GenerationalCache(150)
    for i in range(250):
        cache.add(StubObjectInfo(i))
    cache.set_size(100)
    cached = cache.get_cached()
    self.assertEquals(len(cached), 100)
    for obj_info in cache.get_cached():
        self.assertTrue(obj_info.id >= 100)
def test_generational_cache_generational_overlap(obj1, obj2, obj3):
    """
    An object that is both in the primary and the secondary
    generation is listed only once in the cache's contents.
    """
    cache = GenerationalCache(2)
    cache.add(obj1)  # new=[1]    old=[]
    cache.add(obj2)  # new=[1, 2] old=[]
    cache.add(obj3)  # new=[3]    old=[1, 2]
    cache.add(obj1)  # new=[3, 1] old=[1, 2]
    assert sorted(cache.get_cached()) == [obj1, obj2, obj3]

def test_generational_overlap(self):
    """
    An object that is both in the primary and the secondary
    generation is listed only once in the cache's contents.
    """
    cache = GenerationalCache(2)
    cache.add(self.obj1)  # new=[1]    old=[]
    cache.add(self.obj2)  # new=[1, 2] old=[]
    cache.add(self.obj3)  # new=[3]    old=[1, 2]
    cache.add(self.obj1)  # new=[3, 1] old=[1, 2]
    self.assertEqual(sorted(cache.get_cached()),
                     [self.obj1, self.obj2, self.obj3])

def test_set_size_limit(self):
    """
    Setting the size limits the cache's size just like passing an
    initial size would.
    """
    size = 10
    cache = GenerationalCache(size * 100)
    cache.set_size(size)
    for value in xrange(size * 10):
        cache.add(StubObjectInfo(value))
    self.assertEquals(len(cache.get_cached()), size * 2)

def test_clear_cache_clears_the_second_generation(self):
    cache = GenerationalCache(1)
    cache.add(self.obj1)
    cache.add(self.obj2)
    cache.clear()
    self.assertEqual(cache.get_cached(), [])

def test_generational_cache_clear_cache(obj1):
    cache = GenerationalCache()
    cache.add(obj1)
    cache.clear()
    assert cache.get_cached() == []
def test_cache_one_object(self):
    cache = GenerationalCache()
    cache.add(self.obj1)
    self.assertEqual(cache.get_cached(), [self.obj1])

def test_initially_empty(self):
    cache = GenerationalCache()
    self.assertEqual(cache.get_cached(), [])

def test_clear_cache(self):
    cache = GenerationalCache()
    cache.add(self.obj1)
    cache.clear()
    self.assertEqual(cache.get_cached(), [])

def test_cache_multiple_objects(self):
    cache = GenerationalCache()
    cache.add(self.obj1)
    cache.add(self.obj2)
    self.assertEqual(sorted(cache.get_cached()), [self.obj1, self.obj2])

def test_cache_multiple_objects(obj1, obj2):
    cache = GenerationalCache()
    cache.add(obj1)
    cache.add(obj2)
    assert sorted(cache.get_cached()) == [obj1, obj2]