def test_set_zero_size(Cache, obj1, obj2):
    """Setting a cache's size to zero clears the cache."""
    cache = Cache()
    # Populate with two entries, then shrink to zero capacity.
    for obj in (obj1, obj2):
        cache.add(obj)
    cache.set_size(0)
    assert cache.get_cached() == []
def test_cache_reduce_max_size(obj_infos):
    """Shrinking the cache's maximum size evicts the oldest entries."""
    cache = Cache(5)
    for info in obj_infos:
        cache.add(info)

    # After shrinking, only the three most recently added remain.
    cache.set_size(3)
    assert [info.id for info in cache.get_cached()] == [9, 8, 7]

    # Adding items past the new maximum size should drop older ones.
    for info in obj_infos[:2]:
        cache.add(info)
    assert [info.id for info in cache.get_cached()] == [1, 0, 9]
def test_cache_reduce_max_size_to_zero(obj1):
    """When setting the size to zero, there's an optimization."""
    cache = Cache(5)
    cache.add(obj1)
    obj1.hashed = False

    cache.set_size(0)
    assert cache.get_cached() == []

    # Ensure that we don't even check if obj1 is in the cache, by
    # testing if it was hashed. Hopefully, that means we got a
    # faster path.
    assert obj1.hashed is False
def test_reduce_max_size_to_zero(self):
    """When setting the size to zero, there's an optimization.

    Shrinking to zero should clear the cache without hashing the
    cached objects (i.e. without membership lookups).
    """
    cache = Cache(5)
    obj_info = self.obj_infos[0]
    cache.add(obj_info)
    obj_info.hashed = False
    cache.set_size(0)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(cache.get_cached(), [])
    # Ensure that we don't even check if obj_info is in the
    # cache, by testing if it was hashed. Hopefully, that means
    # we got a faster path.
    self.assertEqual(obj_info.hashed, False)
def test_cache_increase_max_size(obj_infos):
    """Growing the maximum size keeps existing entries and allows more."""
    cache = Cache(5)
    for info in obj_infos:
        cache.add(info)

    # Growing the limit must not disturb the current contents.
    cache.set_size(10)
    assert [info.id for info in cache.get_cached()] == [9, 8, 7, 6, 5]

    # Adding items past the new maximum size should drop older ones.
    for info in obj_infos[:6]:
        cache.add(info)
    expected = [5, 4, 3, 2, 1, 0, 9, 8, 7, 6]
    assert [info.id for info in cache.get_cached()] == expected
def test_increase_max_size(self):
    """Growing the maximum size keeps existing entries and allows more."""
    cache = Cache(5)
    for obj_info in self.obj_infos:
        cache.add(obj_info)
    cache.set_size(10)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual([obj_info.id for obj_info in cache.get_cached()],
                     [9, 8, 7, 6, 5])
    # Adding items past the new maximum size should drop older ones.
    for obj_info in self.obj_infos[:6]:
        cache.add(obj_info)
    self.assertEqual([obj_info.id for obj_info in cache.get_cached()],
                     [5, 4, 3, 2, 1, 0, 9, 8, 7, 6])
def test_reduce_max_size(self):
    """Shrinking the cache's maximum size evicts the oldest entries."""
    cache = Cache(5)
    for obj_info in self.obj_infos:
        cache.add(obj_info)
    cache.set_size(3)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual([obj_info.id for obj_info in cache.get_cached()],
                     [9, 8, 7])
    # Adding items past the new maximum size should drop older ones.
    for obj_info in self.obj_infos[:2]:
        cache.add(obj_info)
    self.assertEqual([obj_info.id for obj_info in cache.get_cached()],
                     [1, 0, 9])