def test_redis_failure(self):
    """The decorated function must still run when the Redis backend errors."""
    value = 'cache hit test'

    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    # Every cache lookup fails with a backend error.
    cache.backend.get_cache.side_effect = BackendException

    @cache.cache()
    def test_function(a):
        return a

    def key_for(*args, **kwargs):
        return cache._generate_cache_key(test_function, args, kwargs)

    response = test_function(value)
    digest = key_for(value)

    # The lookup was attempted, nothing was stored, and the wrapped
    # function's own result came back.
    cache.backend.get_cache.assert_called_once_with(digest)
    self.assertEqual(cache.backend.set_cache.call_count, 0)
    self.assertEqual(response, value)

    # With invalidator=True the wrapper returns (value, invalidator);
    # the invalidator is None because nothing was cached.
    @cache.cache(invalidator=True)
    def test_function(a):
        return a

    response, invalidator = test_function(value)
    self.assertIsNone(invalidator)
def test_cache_hit(self):
    """A warm cache serves the stored value and never re-stores it."""
    value = 'cache hit test'

    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    # Pretend the value is already cached (pickled, as stored).
    cache.backend.get_cache.return_value = pickle.dumps(value)

    @cache.cache()
    def test_function(a):
        return a

    def key_for(*args, **kwargs):
        return cache._generate_cache_key(test_function, args, kwargs)

    response = test_function(value)
    digest = key_for(value)

    cache.backend.get_cache.assert_called_once_with(digest)
    self.assertEqual(cache.backend.set_cache.call_count, 0)
    self.assertEqual(response, value)
def test_cache_hit_invalidate(self):
    """A hit with invalidator=True also returns a working invalidator."""
    value = 'cache hit test'

    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    # Simulate a warm cache entry.
    cache.backend.get_cache.return_value = pickle.dumps(value)

    @cache.cache(invalidator=True)
    def test_function(a):
        return a

    def key_for(*args, **kwargs):
        return cache._generate_cache_key(test_function, args, kwargs)

    response, invalidator = test_function(value)
    digest = key_for(value)

    cache.backend.get_cache.assert_called_once_with(digest)
    self.assertEqual(cache.backend.set_cache.call_count, 0)
    self.assertEqual(response, value)

    # The invalidator must be callable...
    self.assertTrue(hasattr(invalidator, '__call__'))
    # ...and calling it must delete the cache entry.
    invalidator()
    cache.backend.invalidate_key.assert_called_once_with(digest)
def test_simple_object(self):
    """
    Key generation should rely on an object's __str__ when one is
    defined, so equivalent instances map to the same cache entry.
    """
    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    # Empty string => cache miss.
    cache.backend.get_cache.return_value = ''

    class TestClass(object):
        def __init__(self, parameter):
            self.parameter = parameter

        @cache.cache()
        def cache_this_method(self):
            return self.parameter

        def __str__(self):
            return "<TestClass object: parameter=%s>" % self.parameter

    def key_for(arg):
        method = TestClass.cache_this_method
        return cache._generate_cache_key(method, [TestClass(arg)])

    param = 'some_param'
    instance = TestClass(param)
    instance.cache_this_method()

    cache.backend.get_cache.assert_called_once_with(key_for(param))
def test_simple_object_pickle(self):
    """Complex return values are pickled into, and out of, the cache."""
    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    cache.backend.get_cache.return_value = ''  # first call: miss

    simple_obj = SimpleObject('test', 42)

    @cache.cache()
    def test_function(a):
        return simple_obj

    def key_for(*args, **kwargs):
        return cache._generate_cache_key(test_function, args, kwargs)

    value = 'input'
    response = test_function(value)
    self.assertEqual(response, simple_obj)

    digest = key_for(value)
    cache.backend.get_cache.assert_called_once_with(digest)
    cache.backend.set_cache_and_expire.assert_called_once_with(
        digest, pickle.dumps(simple_obj), DEFAULT_EXPIRATION)

    # Second call: the pickled object comes straight from the cache.
    cache.backend.get_cache.return_value = pickle.dumps(simple_obj)
    response = test_function(value)
    self.assertEqual(response, simple_obj)
def test_cache_miss_expiration(self):
    """On a miss the value is stored with the caller-supplied TTL."""
    value = 'cache hit test'
    ttl = 100

    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    cache.backend.get_cache.return_value = ''  # miss

    @cache.cache(expiration=ttl)
    def test_function(a):
        return a

    def key_for(*args, **kwargs):
        return cache._generate_cache_key(test_function, args, kwargs)

    response = test_function(value)
    digest = key_for(value)

    cache.backend.get_cache.assert_called_once_with(digest)
    # Stored via the expiring setter with our TTL, never the plain setter.
    cache.backend.set_cache_and_expire.assert_called_once_with(
        digest, pickle.dumps(value), ttl)
    self.assertEqual(cache.backend.set_cache.call_count, 0)
def test_cache_miss_kwargs(self):
    """Keyword arguments participate in the cache key."""
    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    cache.backend.get_cache.return_value = ''  # miss

    @cache.cache(invalidator=True)
    def test_function(a, b, c=None, d=None):
        return a

    def key_for(*args, **kwargs):
        return cache._generate_cache_key(test_function, args, kwargs)

    test_a = 'input'
    test_b = 'b'
    test_c = True
    test_d = False

    response, invalidator = test_function(test_a, test_b, c=test_c, d=test_d)
    digest = key_for(test_a, test_b, c=test_c, d=test_d)

    cache.backend.get_cache.assert_called_once_with(digest)
    cache.backend.set_cache_and_expire.assert_called_once_with(
        digest, pickle.dumps(test_a), DEFAULT_EXPIRATION)

    # Invalidate, then verify the backend delete was issued for our key.
    invalidator()
    backend_mock.invalidate_key.assert_called_once_with(digest)
def __init__(self, host: str = 'localhost', port: int = 6379) -> None:
    """Open a Redis connection and attach a RedisCache to it.

    The server is capped at 600mb and configured to evict the least
    recently used keys when full.
    """
    self.__client = StrictRedis(host, port=port, decode_responses=True)
    # Bound memory usage; evict least-recently-used keys first.
    self.__client.config_set('maxmemory', '600mb')
    self.__client.config_set('maxmemory-policy', 'allkeys-lru')
    self.__cache = RedisCache(redis_client=self.__client)
    self.__logger = Logger().getLogger(__file__)
    self.__logger.info("Initialize Cache.")
def test_cache_miss(self):
    """On a miss the computed value is stored with the default TTL."""
    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    cache.backend.get_cache.return_value = ''  # miss

    @cache.cache()
    def test_function(a):
        return a

    def key_for(*args, **kwargs):
        return cache._generate_cache_key(test_function, args, kwargs)

    value = 'input'
    response = test_function(value)
    digest = key_for(value)

    backend_mock.get_cache.assert_called_once_with(digest)
    backend_mock.set_cache_and_expire.assert_called_once_with(
        digest, pickle.dumps(value), DEFAULT_EXPIRATION)
def __init__(self):
    """Wire up the translation backend, its Redis cache, and the sentinel
    response returned for unusable input."""
    self.cloud_translation = GoogleTranslation()
    self.cache = RedisCache()
    # Placeholder "translation" handed back when the argument is bad.
    self.bad_translation = {
        "translatedText": "",
        "detectedSourceLanguage": "",
        "input": "BAD ARGUMENT",
    }
def __init__(self, extension='', bot=None, loop=None):
    """Bind a Redis-backed cache from the 'redis' section of the Dwarf
    cache settings; default to the bot's event loop when none is given."""
    self.config = settings.DWARF_CACHE_BACKEND['redis']
    address = '{}:{}'.format(self.config['HOST'], self.config['PORT'])
    self.backend = RedisCache(
        address,
        {'db': self.config['DB'], 'password': self.config['PASSWORD']})
    self.extension = extension
    self.bot = bot
    # Reuse the bot's loop unless the caller supplied one explicitly.
    if loop is None and self.bot is not None and hasattr(bot, 'loop'):
        self.loop = bot.loop
    else:
        self.loop = loop
def server(http_port, redis_host, redis_port, redis_db, cache_refresh_rate,
           example):
    """Boot the web server together with the Redis-backed cache, logger,
    and plot manager for the given example file.

    Populates module-level singletons used by the request handlers.
    """
    global redis_client, redis_cache, redis_logger, example_to_serve, \
        plot_manager

    example_to_serve = os.path.realpath(example)
    redis_client = redis.Redis(host=redis_host, port=redis_port, db=redis_db,
                               decode_responses=True)
    redis_cache = RedisCache(redis_client, refresh_rate=cache_refresh_rate)
    redis_logger = RedisLogger(redis_client)
    plot_manager = PlotManager(redis_cache)

    # Start the cache's refresh loop before serving requests.
    redis_cache.start()
    socketio.run(app, port=http_port, debug=True)
def test_cache_on_stateful_class_without_str(self):
    """
    A stateful object without a custom __str__ must not receive stale
    cache hits after its state changes.

    When the object's state mutates, subsequent method calls have to
    resolve to a different cache entry than the one for the old state.
    """
    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock

    class TestClass(object):
        def __init__(self, state=None):
            self.state = state

        @cache.cache()
        def some_method(self):
            return self.state

    def key_for(state):
        return cache._generate_cache_key(
            TestClass.some_method, [TestClass(state)])

    state1 = 'some simple type state'
    state2 = SimpleObject('some complex type state', 123)

    def fake_get(cache_key):
        # Serve a per-state entry; any other key is a test failure.
        if cache_key == key_for(state1):
            return pickle.dumps(state1)
        if cache_key == key_for(state2):
            return pickle.dumps(state2)
        raise ValueError('called cache for hash: {}'.format(cache_key))

    cache.backend.get_cache.side_effect = fake_get

    # Call the object in its initial state.
    stateful_instance = TestClass()
    stateful_instance.state = state1
    first_return = stateful_instance.some_method()

    # Mutate the state: the old cache entry must not be reused.
    stateful_instance.state = state2
    second_return = stateful_instance.some_method()

    self.assertNotEqual(first_return, second_return)
def get_common_settings_list_by_name(self, name):
    """Return the common-settings list for *name* as
    [{'attribute': ..., 'value': ...}, ...], serving from the Redis
    cache when warm and caching the DB result otherwise.

    Only rows whose enable flag (column 4) is truthy are included.
    """
    cache_key = 'goose_common_settings_list_' + name
    cached_rules = RedisCache.find_cached_value(cache_key)
    if cached_rules:
        return cached_rules

    # NOTE(review): the query is built by string interpolation, which is
    # injection-prone if `name` is ever attacker-controlled.  The query
    # helper only accepts a raw SQL string, so the best available
    # mitigation here is doubling single quotes; switch to a
    # parameterized API if one becomes available.
    safe_name = name.replace("'", "''")
    query_string = (
        "SELECT * FROM goose_common_settings WHERE name='%s'" % safe_name)
    records = self.get_records_list_by_query(query_string)

    common_settings_list = [
        {'attribute': item[2], 'value': item[3]}
        for item in records
        if item[4]
    ]

    RedisCache.cache_value(cache_key, common_settings_list)
    return common_settings_list
def test_limit():
    """With limit=2 the keys ZSET never exceeds two entries and evicts
    in least-recently-used order."""
    cache = RedisCache(redis_client=client)

    @cache.cache(limit=2)
    def add_limit(arg1, arg2):
        return arg1 + arg2

    assert add_limit(3, 4) == 7

    outcome = add_limit(5, 5)
    assert client.zcount('rc:redis_cache.test.add_limit:keys',
                         '-inf', '+inf') == 2
    assert outcome == 10

    outcome = add_limit(6, 5)
    assert client.zcount('rc:redis_cache.test.add_limit:keys',
                         '-inf', '+inf') == 2
    assert outcome == 11

    # Repeat hit; (3, 4) has been evicted by now.
    assert add_limit(6, 5) == 11
    assert client.zrange('rc:redis_cache.test.add_limit:keys', 0, -1) == [
        'rc:redis_cache.test.add_limit:[[5, 5], {}]',
        'rc:redis_cache.test.add_limit:[[6, 5], {}]'
    ]

    # Recomputing (3, 4) evicts (5, 5), the current LRU entry.
    assert add_limit(3, 4) == 7
    assert client.zrange('rc:redis_cache.test.add_limit:keys', 0, -1) == [
        'rc:redis_cache.test.add_limit:[[6, 5], {}]',
        'rc:redis_cache.test.add_limit:[[3, 4], {}]'
    ]
def test_custom_serializer_with_compress():
    """A zlib-over-pickle codec round-trips results, and the second call
    issues exactly one GET for the compressed key."""

    def dumps(value):
        return zlib.compress(pickle.dumps(value))

    def loads(value):
        return pickle.loads(zlib.decompress(value))

    cache = RedisCache(redis_client=client_no_decode,
                       serializer=dumps,
                       deserializer=loads)

    @cache.cache()
    def add_custom_serializer(arg1, arg2):
        return Result(arg1.value, arg2.value)

    assert add_custom_serializer(Arg(2), Arg(3)).sum == 5

    with patch.object(client_no_decode, 'get',
                      wraps=client_no_decode.get) as mock_get:
        outcome = add_custom_serializer(Arg(2), Arg(3))
        assert outcome.sum == 5
        mock_get.assert_called_once_with(
            'rc:redis_cache.test.add_custom_serializer:eJxrYI4tZNBILkpNySyOT05MzkjVK0ktLuFyLErnKmTUbCxkqi1kjmBlYGAoS8wpTS1k8WYqTsoASbDWFrJlsHgzFye1FbLXFnKk6gEAgloWhw=='
        )

    assert add_custom_serializer(Arg(5), Arg(5)).sum == 10
class GeoDistributedLRURedis:
    """Redis-cached accessors for Localization documents.

    Both entry points are cached with a 30-second TTL and at most 10
    distinct argument sets (LRU-evicted).
    """

    redis_client = RedisClient()
    cache = RedisCache(redis_client=redis_client.redis())
    ttl = 30
    maxsize = 10

    @staticmethod
    @cache.cache(ttl=ttl, limit=maxsize)
    def set_localization(user, latitude: float, longitude: float,
                         information: dict):
        """Persist one localization record and return it as a dict."""
        record = Localization()
        record.user = user
        record.latitude = latitude
        record.longitude = longitude
        record.information = information
        record.save()
        return json.loads(record.to_json())

    @staticmethod
    @cache.cache(ttl=ttl, limit=maxsize)
    def get_localization(number_items: int = None):
        """Return all localizations, or just the newest `number_items`."""
        items = json.loads(Localization().get().to_json())
        if number_items and len(items) > number_items:
            return items[-number_items:]
        return items
def test_invalidate_all():
    """invalidate_all() clears every entry of one function and leaves
    other functions' cache entries untouched."""
    cache = RedisCache(redis_client=client)

    @cache.cache()
    def f1_invalidate_all(arg1, arg2):
        return add_func(arg1, arg2)

    @cache.cache()
    def f2222_invalidate_all(arg1, arg2):
        return add_func(arg1, arg2)

    r_3_4, v_3_4 = f1_invalidate_all(3, 4)
    r_4_4, v_4_4 = f1_invalidate_all(4, 4)
    r_5_5, v_5_5 = f2222_invalidate_all(5, 5)

    # Drop every cache entry belonging to f1_invalidate_all.
    f1_invalidate_all.invalidate_all()

    r2_3_4, v2_3_4 = f1_invalidate_all(3, 4)
    r2_4_4, v2_4_4 = f1_invalidate_all(4, 4)
    r2_5_5, v2_5_5 = f2222_invalidate_all(5, 5)

    # Same results but fresh verifiers: f1's entries were recomputed.
    assert r_3_4 == r2_3_4 and v_3_4 != v2_3_4
    assert r_4_4 == r2_4_4 and v_4_4 != v2_4_4
    # f2222's cache entry survived the invalidation.
    assert r_5_5 == r2_5_5 and v_5_5 == v2_5_5
def test_cache_on_class_without_str(self):
    """
    Methods on objects with no custom __str__ still populate and hit
    the cache, for primitive and complex arguments alike; equivalent
    fresh instances share the same cache entries.
    """
    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock

    class TestClass(object):
        def __init__(self):
            self.call_count = collections.defaultdict(int)

        @cache.cache()
        def echo(self, parameter):
            self.call_count[parameter] += 1
            return parameter

    primitive_argument = 'cache hit test'
    complex_argument = SimpleObject('test', 42)

    for argument in primitive_argument, complex_argument:
        instance = TestClass()

        # Cold cache: the method body runs once.
        cache.backend.get_cache.return_value = ''
        first = instance.echo(argument)

        # Warm cache: later calls never re-enter the method body.
        cache.backend.get_cache.return_value = pickle.dumps(argument)
        second = instance.echo(argument)
        third = instance.echo(argument)

        self.assertEqual(first, argument)
        self.assertEqual(second, argument)
        self.assertEqual(third, argument)
        self.assertEqual(instance.call_count[argument], 1)

        # A fresh but equivalent instance hits the same cache entries,
        # so its own method body is never executed.
        twin = TestClass()
        twin_first = twin.echo(argument)
        twin_second = twin.echo(argument)
        self.assertEqual(twin_first, argument)
        self.assertEqual(twin_second, argument)
        self.assertEqual(twin.call_count[argument], 0)
def fget(self):
    """Property getter mapping known host URLs to their content-tag
    settings, cached in Redis under 'goose_known_host_content_tags'.

    A row's column 5 value wins unless column 4 names a reference row,
    in which case the resolved reference replaces it.
    """
    cached_rules = RedisCache.find_cached_value('goose_known_host_content_tags')
    if cached_rules:
        return cached_rules

    query_string = (
        'SELECT domains.url, goose_domain_settings.*, domains.url '
        'FROM goose_domain_settings INNER JOIN domains '
        'ON goose_domain_settings.domain_id=domains.id')
    records = self.get_records_list_by_query(query_string)

    data = {}
    for item in records:
        if item[7]:  # row enabled flag — only active settings apply
            if item[5] is not None:
                data[item[0]] = item[5]
            # A reference (column 4) overrides the direct value above.
            if item[4] is not None:
                data[item[0]] = {
                    'reference': self.get_domain_reference(records, item[4])}

    RedisCache.cache_value('goose_known_host_content_tags', data)
    return data
def test_basic_mget():
    """cache.mget batches several cached calls into a single Redis MGET."""
    cache = RedisCache(redis_client=client)

    @cache.cache()
    def add_basic_get(arg1, arg2):
        return arg1 + arg2

    # Warm the cache for (3, 4).
    results = cache.mget(dict(fn=add_basic_get, args=(3, 4)))
    assert results[0] == 7

    with patch.object(client, 'mget', wraps=client.mget) as mock_get:
        results = cache.mget(
            dict(fn=add_basic_get, args=(10, 2)),
            dict(fn=add_basic_get, args=(3, 4)),
        )
        # One MGET covering both keys, in request order.
        mock_get.assert_called_once_with(
            'rc:redis_cache.test.add_basic_get:[[10, 2], {}]',
            'rc:redis_cache.test.add_basic_get:[[3, 4], {}]')

    assert results[0] == 12
    assert results[1] == 7
def test_set_connect(self):
    """set_connect builds a StrictRedis client with the expected
    connection parameters and returns that client."""
    with patch('redis.StrictRedis') as mock_StrictRedis:
        redis_instance = mock_StrictRedis.return_value

        connection = RedisCache.set_connect()

        self.assertEqual(connection, redis_instance)
        mock_StrictRedis.assert_called_once_with(
            host='0.0.0.0',
            port=6379,
            password='******',
            charset='utf-8',
            decode_responses=True,
            socket_timeout=30)
def test_ttl():
    """An entry stored with a TTL still serves hits within its lifetime."""
    cache = RedisCache(redis_client=client)

    @cache.cache(ttl=100)
    def add_ttl(arg1, arg2):
        return arg1 + arg2

    # First call computes; the second is served from the cache.
    assert add_ttl(3, 4) == 7
    assert add_ttl(3, 4) == 7
def test_cache_custom_signature(self):
    """A caller-supplied signature_generator drives key generation, and
    a non-callable generator raises TypeError on use."""
    value = 'cache hit test'

    backend_mock = Mock()
    cache = RedisCache(Backend())
    cache.backend = backend_mock
    # Simulate a warm cache entry.
    cache.backend.get_cache.return_value = pickle.dumps(value)

    def test_signature_builder(*args, **kwargs):
        return "test"

    @cache.cache(signature_generator=test_signature_builder)
    def test_function(a):
        return a

    def key_for(*args, **kwargs):
        return cache._generate_cache_key(
            test_function, args, kwargs,
            signature_generator=test_signature_builder)

    response = test_function(value)
    digest = key_for(value)

    cache.backend.get_cache.assert_called_once_with(digest)
    self.assertEqual(cache.backend.set_cache.call_count, 0)
    self.assertEqual(response, value)

    # The signature generator must be callable.
    @cache.cache(signature_generator='not callable')
    def test_function_not_callable(a):
        return a

    with self.assertRaises(TypeError):
        test_function_not_callable(value)
def test_custom_serializer():
    """A pickle serializer/deserializer pair round-trips custom objects;
    matching verifier fields prove the second call was a cache hit."""
    cache = RedisCache(
        redis_client=client_no_decode,
        serializer=pickle.dumps,
        deserializer=pickle.loads,
    )

    @cache.cache()
    def add_custom_serializer(arg1, arg2):
        return Result(arg1.value, arg2.value)

    first = add_custom_serializer(Arg(2), Arg(3))
    second = add_custom_serializer(Arg(2), Arg(3))

    assert first.sum == second.sum and first.verifier == second.verifier
def test_basic_check():
    """The second identical call is served through a single Redis GET."""
    cache = RedisCache(redis_client=client)

    @cache.cache()
    def add_basic(arg1, arg2):
        return arg1 + arg2

    # First call computes and populates the cache.
    assert add_basic(3, 4) == 7

    with patch.object(client, 'get', wraps=client.get) as mock_get:
        outcome = add_basic(3, 4)
        mock_get.assert_called_once_with(
            'rc:redis_cache.test.add_basic:[[3, 4], {}]')
        assert outcome == 7

    # Different arguments compute a fresh result.
    assert add_basic(5, 5) == 10
def test_custom_serializer_with_compress():
    """zlib-compressed pickle round-trips results; matching verifier
    fields prove the second call was served from the cache."""

    def dumps(value):
        return zlib.compress(pickle.dumps(value))

    def loads(value):
        return pickle.loads(zlib.decompress(value))

    cache = RedisCache(
        redis_client=client_no_decode,
        serializer=dumps,
        deserializer=loads,
    )

    @cache.cache()
    def add_compress_serializer(arg1, arg2):
        return Result(arg1.value, arg2.value)

    first = add_compress_serializer(Arg(2), Arg(3))
    second = add_compress_serializer(Arg(2), Arg(3))

    assert first.sum == second.sum and first.verifier == second.verifier
def test_custom_key_serializer():
    """A custom key_serializer controls the exact Redis key layout."""

    def key_serializer(args, kwargs):
        return f'{args}.{kwargs}'

    cache = RedisCache(
        redis_client=client_no_decode,
        serializer=pickle.dumps,
        deserializer=pickle.loads,
        key_serializer=key_serializer,
    )

    @cache.cache()
    def add_custom_key_serializer(arg1, arg2):
        return arg1 + arg2

    first = add_custom_key_serializer(2, 3)
    second = add_custom_key_serializer(2, 3)

    assert first == second
    # The stored key follows the custom "args.kwargs" layout.
    assert client.exists(
        'rc:test_redis_cache.add_custom_key_serializer:(2, 3).{}')
def test_invalidate_not_in_cache():
    """Invalidating an argument pair that was never cached is a no-op
    and leaves the existing entries intact."""
    cache = RedisCache(redis_client=client)

    @cache.cache(limit=2)
    def add_invalidate_not_in_cache(arg1, arg2):
        return arg1 + arg2

    add_invalidate_not_in_cache(3, 4)
    add_invalidate_not_in_cache(4, 4)

    # (5, 5) was never cached; this must be harmless.
    add_invalidate_not_in_cache.invalidate(5, 5)

    assert client.zrange(
        'rc:redis_cache.test.add_invalidate_not_in_cache:keys', 0, -1) == [
        'rc:redis_cache.test.add_invalidate_not_in_cache:[[3, 4], {}]',
        'rc:redis_cache.test.add_invalidate_not_in_cache:[[4, 4], {}]'
    ]
    assert client.get(
        'rc:redis_cache.test.add_invalidate_not_in_cache:[[3, 4], {}]') == '7'
    assert client.get(
        'rc:redis_cache.test.add_invalidate_not_in_cache:[[4, 4], {}]') == '8'
def test_invalidate_all():
    """invalidate_all removes both the keys ZSET entries and the cached
    value keys themselves."""
    cache = RedisCache(redis_client=client)

    @cache.cache(limit=2)
    def add_invalidate_all(arg1, arg2):
        return arg1 + arg2

    add_invalidate_all(3, 4)
    add_invalidate_all(4, 4)
    assert client.zrange(
        'rc:redis_cache.test.add_invalidate_all:keys', 0, -1) == [
        'rc:redis_cache.test.add_invalidate_all:[[3, 4], {}]',
        'rc:redis_cache.test.add_invalidate_all:[[4, 4], {}]'
    ]

    add_invalidate_all.invalidate_all()

    # Both the key index and the value entries must be gone.
    assert client.zrange('rc:redis_cache.test.add_invalidate_all:keys',
                         0, -1) == []
    assert client.exists(
        'rc:redis_cache.test.add_invalidate_all:[[3, 4], {}]',
        'rc:redis_cache.test.add_invalidate_all:[[4, 4], {}]') == 0
class Cache():
    """Thin wrapper around RedisCache over an LRU-bounded Redis server.

    Instances are called directly to obtain a caching decorator.
    """

    def __init__(self, host: str = 'localhost', port: int = 6379) -> None:
        """Connect to Redis, cap it at 600mb with allkeys-lru eviction,
        and attach a RedisCache plus a logger."""
        self.__client = StrictRedis(host, port=port, decode_responses=True)
        self.__client.config_set('maxmemory', '600mb')
        self.__client.config_set('maxmemory-policy', 'allkeys-lru')
        self.__cache = RedisCache(redis_client=self.__client)
        self.__logger = Logger().getLogger(__file__)
        self.__logger.info("Initialize Cache.")

    def key_exists(self, *args):
        """True if an entry exists for function name args[0] called with
        positional arguments args[1:]."""
        serialized_data = dumps([args[1:], {}])
        key = f'rc:{args[0]}:{serialized_data}'
        return self.__client.exists(key) >= 1

    def get_all_keys(self):
        """Every key currently stored in the Redis instance."""
        return self.__client.keys()

    def get_key_count(self):
        """Number of keys currently stored."""
        return len(self.get_all_keys())

    def __call__(self, ttl=60 * 60 * 24 * 7, limit=5000, namespace=None):
        """Return a caching decorator (defaults: one-week TTL, 5000 keys)."""
        return self.__cache.cache(ttl, limit, namespace)
def test_custom_serializer():
    """Pickled results are cached, and the second call issues exactly one
    GET for the pickle-derived key."""
    cache = RedisCache(redis_client=client_no_decode,
                       serializer=pickle.dumps,
                       deserializer=pickle.loads)

    @cache.cache()
    def add_custom_serializer(arg1, arg2):
        return Result(arg1.value, arg2.value)

    assert add_custom_serializer(Arg(2), Arg(3)).sum == 5

    with patch.object(client_no_decode, 'get',
                      wraps=client_no_decode.get) as mock_get:
        outcome = add_custom_serializer(Arg(2), Arg(3))
        assert outcome.sum == 5
        mock_get.assert_called_once_with(
            'rc:redis_cache.test.add_custom_serializer:gANdcQAoY3JlZGlzX2NhY2hlLnRlc3QKQXJnCnEBKYFxAn1xA1gFAAAAdmFsdWVxBEsCc2JoASmBcQV9cQZoBEsDc2KGcQd9cQhlLg=='
        )

    assert add_custom_serializer(Arg(5), Arg(5)).sum == 10