def test_add_with_size_zero(Cache, obj1):
    """Cache is disabled entirely on add() if size is 0."""
    disabled_cache = Cache(0)
    disabled_cache.add(obj1)
    # The object must not even have been hashed: a zero-sized cache should
    # take a fast path that never probes its internal structures.
    assert obj1.hashed is False
def test_fit_size(Cache):
    """A cache of size n can hold at least n objects."""
    capacity = 10
    cache = Cache(capacity)
    for index in iter_range(capacity):
        cache.add(StubObjectInfo(index))
    assert len(cache.get_cached()) == capacity
def test_remove(Cache, obj1, obj2, obj3):
    """Removing one cached object leaves the others cached."""
    cache = Cache(5)
    for info in (obj1, obj2, obj3):
        cache.add(info)
    cache.remove(obj2)
    assert sorted(cache.get_cached()) == [obj1, obj3]
def test_cache_reduce_max_size(obj_infos):
    """Shrinking a populated cache trims it to the new maximum size."""
    cache = Cache(5)
    for info in obj_infos:
        cache.add(info)
    cache.set_size(3)

    def cached_ids():
        return [info.id for info in cache.get_cached()]

    assert cached_ids() == [9, 8, 7]
    # Adding items past the new maximum size should drop older ones.
    for info in obj_infos[:2]:
        cache.add(info)
    assert cached_ids() == [1, 0, 9]
def test_cache_increase_max_size(obj_infos):
    """Growing the cache keeps the current entries and accepts more."""
    cache = Cache(5)
    for info in obj_infos:
        cache.add(info)
    cache.set_size(10)

    def cached_ids():
        return [info.id for info in cache.get_cached()]

    assert cached_ids() == [9, 8, 7, 6, 5]
    # Adding items past the new maximum size should drop older ones.
    for info in obj_infos[:6]:
        cache.add(info)
    assert cached_ids() == [5, 4, 3, 2, 1, 0, 9, 8, 7, 6]
def test_reduce_max_size(self):
    """Shrinking a populated cache trims it to the new maximum size."""
    cache = Cache(5)
    for obj_info in self.obj_infos:
        cache.add(obj_info)
    cache.set_size(3)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use the canonical assertEqual.
    self.assertEqual(
        [obj_info.id for obj_info in cache.get_cached()],
        [9, 8, 7])
    # Adding items past the new maximum size should drop older ones.
    for obj_info in self.obj_infos[:2]:
        cache.add(obj_info)
    self.assertEqual(
        [obj_info.id for obj_info in cache.get_cached()],
        [1, 0, 9])
def test_increase_max_size(self):
    """Growing the cache keeps the current entries and accepts more."""
    cache = Cache(5)
    for obj_info in self.obj_infos:
        cache.add(obj_info)
    cache.set_size(10)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use the canonical assertEqual.
    self.assertEqual(
        [obj_info.id for obj_info in cache.get_cached()],
        [9, 8, 7, 6, 5])
    # Adding items past the new maximum size should drop older ones.
    for obj_info in self.obj_infos[:6]:
        cache.add(obj_info)
    self.assertEqual(
        [obj_info.id for obj_info in cache.get_cached()],
        [5, 4, 3, 2, 1, 0, 9, 8, 7, 6])
def test_add_existing(Cache, obj1, obj2, obj3):
    """Re-adding an object already in the cache does not duplicate it."""
    cache = Cache(5)
    # obj2 is added twice on purpose.
    for info in (obj1, obj2, obj3, obj2):
        cache.add(info)
    assert sorted(cache.get_cached()) == [obj1, obj2, obj3]
def test_clear(Cache, obj_infos):
    """The clear method empties the cache."""
    cache = Cache(5)
    for info in obj_infos:
        cache.add(info)
    cache.clear()
    assert cache.get_cached() == []
    # Any additional internal structures must be cleaned as well:
    # remove() should report that none of the objects is still tracked.
    for info in obj_infos:
        assert not cache.remove(info)
def storm_cache_factory():
    """Return a Storm Cache of the type and size specified in dbconfig.

    Raises AssertionError if dbconfig.storm_cache names an unknown
    cache type.
    """
    # Parse the size once instead of in every branch.
    size = int(dbconfig.storm_cache_size)
    if dbconfig.storm_cache == 'generational':
        return GenerationalCache(size)
    elif dbconfig.storm_cache == 'stupid':
        return StupidCache(size)
    elif dbconfig.storm_cache == 'default':
        return Cache(size)
    else:
        # `assert False` is stripped under `python -O`, which would make
        # this silently return None; raise explicitly instead.  The
        # exception type stays AssertionError for backward compatibility.
        raise AssertionError(
            "Unknown storm_cache %s." % dbconfig.storm_cache)
def test_set_zero_size(Cache, obj1, obj2):
    """Setting a cache's size to zero clears the cache."""
    cache = Cache()
    for info in (obj1, obj2):
        cache.add(info)
    cache.set_size(0)
    assert cache.get_cached() == []
def test_cache_reduce_max_size_to_zero(obj1):
    """When setting the size to zero, there's an optimization."""
    cache = Cache(5)
    cache.add(obj1)
    obj1.hashed = False
    cache.set_size(0)
    assert cache.get_cached() == []
    # set_size(0) should take a fast path that discards the cached
    # objects without ever hashing them.
    assert obj1.hashed is False
def test_reduce_max_size_to_zero(self):
    """When setting the size to zero, there's an optimization."""
    cache = Cache(5)
    obj_info = self.obj_infos[0]
    cache.add(obj_info)
    obj_info.hashed = False
    cache.set_size(0)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use the canonical assertEqual.
    self.assertEqual(cache.get_cached(), [])
    # Ensure that we don't even check if obj_info is in the
    # cache, by testing if it was hashed. Hopefully, that means
    # we got a faster path.
    self.assertEqual(obj_info.hashed, False)
def test_adding_similar_obj_infos(Cache):
    """If __eq__ is broken, this fails."""
    info_a = get_obj_info(StubClass())
    info_b = get_obj_info(StubClass())
    cache = Cache(5)
    # Re-adding either object must not create duplicate entries.
    for info in (info_a, info_b, info_b, info_a):
        cache.add(info)
    cached_hashes = sorted(hash(info) for info in cache.get_cached())
    assert cached_hashes == sorted([hash(info_a), hash(info_b)])
def test_remove_with_size_zero(Cache, obj1):
    """Cache is disabled entirely on remove() if size is 0."""
    # Must be a no-op (no exception) even though obj1 was never added.
    Cache(0).remove(obj1)
def test_size_and_fifo_behaviour(self):
    """A cache of size 5 retains only the five most recently added items."""
    cache = Cache(5)
    for obj_info in self.obj_infos:
        cache.add(obj_info)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use the canonical assertEqual.
    self.assertEqual(
        [obj_info.id for obj_info in cache.get_cached()],
        [9, 8, 7, 6, 5])
def test_cache_size_and_fifo_behaviour(obj_infos):
    """A cache of size 5 retains only the five most recently added items."""
    cache = Cache(5)
    for info in obj_infos:
        cache.add(info)
    cached_ids = [info.id for info in cache.get_cached()]
    assert cached_ids == [9, 8, 7, 6, 5]
def test_initially_empty(Cache):
    """A freshly constructed cache holds nothing."""
    assert Cache().get_cached() == []