def test_new(self, cache_type):
    """Cache(cache_type, **kwargs) builds the scheme class and forwards kwargs to its __init__."""
    ctor_kwargs = {"a": 1, "b": 2}
    expected_class = Cache.get_scheme_class(cache_type)
    patch_target = "pycached.{}.__init__".format(expected_class.__name__)
    with patch(patch_target) as mocked_init:
        instance = Cache(cache_type, **ctor_kwargs)
        assert isinstance(instance, expected_class)
        mocked_init.assert_called_once_with(**ctor_kwargs)
def test_calls_parse_uri_path_from_cache(self):
    """from_url delegates path parsing to the scheme class and feeds the result into Cache()."""
    with patch("pycached.factory.Cache") as cache_mock:
        parse_mock = Mock(return_value={"arg1": "arg1"})
        cache_mock.get_scheme_class.return_value.parse_uri_path = parse_mock
        Cache.from_url("redis:///")
        parse_mock.assert_called_once_with("/")
        cache_mock.assert_called_once_with("redis", arg1="arg1")
def test_from_url_returns_cache_from_scheme(self, scheme):
    """from_url yields an instance of the class registered for the URL's scheme."""
    url = "{}://".format(scheme)
    built = Cache.from_url(url)
    assert isinstance(built, Cache.get_scheme_class(scheme))
def test_get_scheme_class_invalid(self):
    """Unknown schemes are rejected with InvalidCacheType."""
    with pytest.raises(InvalidCacheType):
        Cache.get_scheme_class("http")
def test_get_scheme_class(self, scheme):
    """Each registered scheme resolves to its entry in CACHE_CACHES."""
    resolved = Cache.get_scheme_class(scheme)
    assert resolved == CACHE_CACHES[scheme]
def test_new_invalid_cache_raises(self):
    """Building a Cache with an unregistered type raises, listing the valid ones."""
    with pytest.raises(InvalidCacheType) as excinfo:
        Cache("file")
    expected = "Invalid cache type, you can only use {}".format(list(CACHE_CACHES.keys()))
    assert str(excinfo.value) == expected
def test_new_defaults_to_memory(self):
    """Cache() with no arguments falls back to the in-memory backend."""
    default_cache = Cache()
    assert isinstance(default_cache, Cache.get_scheme_class(Cache.MEMORY))
def test_from_url_invalid_protocol(self):
    """from_url rejects URLs whose scheme has no registered backend."""
    with pytest.raises(InvalidCacheType):
        Cache.from_url("http://")
from pycached import Cache

# Shared client against a local Redis instance, namespaced under "main".
cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")


def redis():
    """Store a plain value and an expiring one, then read both back."""
    cache.set("key", "value")
    cache.set("expire_me", "value", ttl=10)
    assert cache.get("key") == "value"
    assert cache.get("expire_me") == "value"
    # The raw TTL of the expiring key must still be counting down.
    assert cache.raw("ttl", "main:expire_me") > 0


def test_redis():
    """Run the demo, then clean up the keys and the connection."""
    redis()
    for key in ("key", "expire_me"):
        cache.delete(key)
    cache.close()


if __name__ == "__main__":
    test_redis()
def test_from_url_calls_cache_with_args(self, url, expected_args):
    """from_url forwards the parsed URL parts as Cache() keyword arguments."""
    with patch("pycached.factory.Cache") as cache_mock:
        Cache.from_url(url)
    cache_mock.assert_called_once_with("redis", **expected_args)
    y = fields.Number()

    @post_load
    def build_object(self, data):
        # Rebuild the domain object from the deserialized field dict.
        return MyType(data['x'], data['y'])


def dumps(value):
    # Serializer hook: the schema's result object exposes the payload on .data.
    return MyTypeSchema().dumps(value).data


def loads(value):
    # Deserializer hook: unwrap the schema result into the loaded object.
    return MyTypeSchema().loads(value).data


cache = Cache(Cache.REDIS, namespace="main")


def serializer_function():
    # Per-call dumps_fn/loads_fn override the cache's default serializer.
    cache.set("key", MyType(1, 2), dumps_fn=dumps)
    obj = cache.get("key", loads_fn=loads)
    assert obj.x == 1
    assert obj.y == 2
    # Without loads_fn the default serializer yields the raw JSON-decoded dict.
    assert cache.get("key") == json.loads(('{"y": 2.0, "x": 1.0}'))
    assert json.loads(cache.raw("get", "main:key")) == {"y": 2.0, "x": 1.0}


def test_serializer_function():
    serializer_function()
from collections import namedtuple

from pycached import Cache
from pycached.serializers import PickleSerializer

MyObject = namedtuple("MyObject", ["x", "y"])

# Pickle serialization lets arbitrary picklable Python objects round-trip through Redis.
cache = Cache(Cache.REDIS, serializer=PickleSerializer(), namespace="main")


def complex_object():
    """Store a namedtuple and verify it comes back with its fields intact."""
    stored = MyObject(x=1, y=2)
    cache.set("key", stored)
    loaded = cache.get("key")
    assert loaded.x == 1
    assert loaded.y == 2


def test_python_object():
    """Exercise the demo, then release the key and the connection."""
    complex_object()
    cache.delete("key")
    cache.close()


if __name__ == "__main__":
    test_python_object()
# dumps returns (data, errors), we just want to save data return super().dumps(*args, **kwargs).data def loads(self, *args, **kwargs): # dumps returns (data, errors), we just want to return data return super().loads(*args, **kwargs).data @post_load def build_my_type(self, data): return RandomModel(**data) class Meta: strict = True cache = Cache(serializer=MarshmallowSerializer(), namespace="main") def serializer(): model = RandomModel() cache.set("key", model) result = cache.get("key") assert result.int_type == model.int_type assert result.str_type == model.str_type assert result.dict_type == model.dict_type assert result.list_type == model.list_type def test_serializer():
import logging
import random
import time

from pycached import Cache
from pycached.lock import OptimisticLock, OptimisticLockError

logger = logging.getLogger(__name__)

# Shared client against a local Redis instance, namespaced under 'main'.
cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')


def expensive_function():
    # Simulated slow computation: random delay of up to two seconds.
    logger.warning('Expensive is being executed...')
    time.sleep(random.uniform(0, 2))
    return 'result'


def my_view():
    # Optimistic locking: compute the value, then compare-and-swap it in.
    with OptimisticLock(cache, 'key') as lock:
        result = expensive_function()
        try:
            lock.cas(result)
        except OptimisticLockError:
            # 'key' changed since the lock was taken, so this write is dropped.
            logger.warning(
                'I failed setting the value because it is different since the lock started!')
        return result


def concurrent():
    cache.set('key', 'initial_value')
    # All three calls will read 'initial_value' as the value to check and only
    def dumps(self, value):
        # Show what arrives, then store the zlib-compressed bytes instead.
        print("I've received:\n{}".format(value))
        compressed = zlib.compress(value.encode())
        print("But I'm storing:\n{}".format(compressed))
        return compressed

    def loads(self, value):
        # Mirror of dumps: decompress and decode back to the original string.
        print("I've retrieved:\n{}".format(value))
        decompressed = zlib.decompress(value).decode()
        print("But I'm returning:\n{}".format(decompressed))
        return decompressed


cache = Cache(Cache.REDIS, serializer=CompressionSerializer(), namespace="main")


def serializer():
    # A long text compresses well, making the stored-size difference visible.
    text = (
        "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt"
        "ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
        "ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in"
        "reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur"
        "sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit"
        "anim id est laborum.")
    cache.set("key", text)
    print("-----------------------------------")
    real_value = cache.get("key")
    compressed_value = cache.raw("get", "main:key")