async def test_pool_reusage(self):
    cache = RedisCache()
    await cache._clear(None)

    other_cache = RedisCache()
    await other_cache._clear(None)

    assert len(RedisCache.pools) == 1

    cache.db = 1
    await cache._clear(None)

    assert len(RedisCache.pools) == 2
    assert other_cache.db == 0
def test_cached():
    cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")
    loop = asyncio.get_event_loop()

    loop.run_until_complete(cached_call())
    assert loop.run_until_complete(cache.exists("key")) is True
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.close())
@pytest.fixture
def redis_cache(event_loop):
    cache = RedisCache(namespace="test", pool_max_size=1)
    yield cache

    for _, pool in RedisBackend.pools.items():
        pool.close()
        event_loop.run_until_complete(pool.wait_closed())
@pytest.fixture
def redis_cache(event_loop):
    cache = RedisCache(namespace="test")
    yield cache

    event_loop.run_until_complete(cache.delete(pytest.KEY))
    event_loop.run_until_complete(cache.delete(pytest.KEY_1))
    event_loop.run_until_complete(cache.delete(pytest.KEY + '-lock'))
    event_loop.run_until_complete(cache.close())
def test_cache_settings(self):
    aiocache.settings.set_defaults(
        class_=aiocache.RedisCache,
        endpoint="127.0.0.1",
        port=6379,
        timeout=10,
        db=1)

    cache = RedisCache(db=0)

    assert cache.endpoint == "127.0.0.1"
    assert cache.port == 6379
    assert cache.timeout == 10
    assert cache.db == 0
def test_alias():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(default_cache())
    loop.run_until_complete(alt_cache())

    cache = RedisCache()
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.close())

    loop.run_until_complete(caches.get('default').close())
@pytest.fixture
def redis_cache(event_loop):
    cache = RedisCache(namespace="test", loop=event_loop)
    cache.set_policy(DefaultPolicy)
    yield cache

    event_loop.run_until_complete(cache.delete(pytest.KEY))
    event_loop.run_until_complete(cache.delete(pytest.KEY_1))
    cache._backend._pool.close()
    event_loop.run_until_complete(cache._backend._pool.wait_closed())
@pytest.fixture
def redis_cache(event_loop):
    cache = RedisCache(namespace="test", loop=event_loop)
    yield cache

    event_loop.run_until_complete(cache.delete(pytest.KEY))
    event_loop.run_until_complete(cache.delete(pytest.KEY_1))

    for _, pool in RedisBackend.pools.items():
        pool.close()
        event_loop.run_until_complete(pool.wait_closed())
async def test_setup_override(self):
    redis_cache = RedisCache(serializer=serializers.JsonSerializer())

    assert redis_cache._backend.endpoint == "127.0.0.1"
    assert redis_cache._backend.port == 6379
    assert isinstance(redis_cache.serializer, serializers.JsonSerializer)
async def test_accept_explicit_args(self):
    with pytest.raises(TypeError):
        RedisCache(random_attr="wtf")
import asyncio
from collections import deque

from aiocache import RedisCache
from aiocache.plugins import BasePlugin


# The source snippet starts mid-class; this header is a reconstruction based on
# the attributes the hook below relies on (self.deque with a maxlen) and on the
# LRUPlugin(max_keys=2) call further down.
class LRUPlugin(BasePlugin):

    def __init__(self, max_keys=None):
        super().__init__()
        self.deque = deque(maxlen=max_keys)

    async def post_set(self, client, key, value, *args, took=0, **kwargs):
        """
        Set the given key at the beginning of the queue. If the queue is full,
        remove the last item first.

        :param client: :class:`aiocache.base.BaseCache` or child instance to use
            to interact with the storage if needed
        :param key: string key used in the set operation
        :param value: obj used in the set operation
        """
        if len(self.deque) == self.deque.maxlen:
            await client.delete(self.deque.pop())
        self.deque.appendleft(key)


cache = RedisCache(
    endpoint="127.0.0.1",
    port=6379,
    plugins=[LRUPlugin(max_keys=2)],
    namespace="main")


async def redis():
    await cache.set("key", "value")
    await cache.set("key_1", "value")
    await cache.set("key_2", "value")

    # "key" was the least recently used entry, so the plugin evicted it.
    assert await cache.get("key") is None
    assert await cache.get("key_1") == "value"
    assert await cache.get("key_2") == "value"
    assert len(await cache.raw("keys", "*")) == 2


def test_redis():
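    # Hedged completion: the body of test_redis() is truncated in the source.
    # This follows the run_until_complete pattern used by the sibling
    # examples in this collection.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(redis())
    loop.run_until_complete(cache.delete("key_1"))
    loop.run_until_complete(cache.delete("key_2"))


if __name__ == "__main__":
    test_redis()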
import asyncio
import logging
import random

from aiocache import RedisCache
from aiocache.lock import OptimisticLock, OptimisticLockError

logger = logging.getLogger(__name__)
cache = RedisCache(endpoint='127.0.0.1', port=6379, namespace='main')


async def expensive_function():
    logger.warning('Expensive is being executed...')
    await asyncio.sleep(random.uniform(0, 2))
    return 'result'


async def my_view():
    async with OptimisticLock(cache, 'key') as lock:
        result = await expensive_function()
        try:
            await lock.cas(result)
        except OptimisticLockError:
            logger.warning(
                'I failed setting the value because it is different since the lock started!')
    return result


async def concurrent():
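    # Hedged completion: the body of concurrent() is truncated in the source.
    # A plausible sketch fires several views at once so the optimistic lock
    # has competing writers to race against.
    await asyncio.gather(*(my_view() for _ in range(4)))


def test_optimistic_lock():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(concurrent())
    loop.run_until_complete(cache.delete('key'))


if __name__ == '__main__':
    test_optimistic_lock()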
import asyncio
import json

from marshmallow import Schema, fields, post_load

from aiocache import RedisCache


class MyType:
    def __init__(self, x, y):
        self.x = x
        self.y = y


# The source snippet starts mid-class; the schema header and the x field are
# reconstructed by symmetry with the y field and with build_object below.
class MyTypeSchema(Schema):
    x = fields.Number()
    y = fields.Number()

    @post_load
    def build_object(self, data):
        return MyType(data['x'], data['y'])


def dumps(value):
    return MyTypeSchema().dumps(value).data


def loads(value):
    return MyTypeSchema().loads(value).data


cache = RedisCache(namespace="main")


async def serializer_function():
    await cache.set("key", MyType(1, 2), dumps_fn=dumps)

    obj = await cache.get("key", loads_fn=loads)

    assert obj.x == 1
    assert obj.y == 2
    assert await cache.get("key") == json.loads('{"y": 2.0, "x": 1.0}')
    assert json.loads(await cache.raw("get", "main:key")) == {"y": 2.0, "x": 1.0}
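

def test_serializer_function():
    # Sketch of a runner following the conventions of the sibling examples;
    # the source snippet ends at the asserts above.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(serializer_function())
    loop.run_until_complete(cache.delete("key"))


if __name__ == "__main__":
    test_serializer_function()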
def test_default_serializer(self):
    assert isinstance(RedisCache().serializer, JsonSerializer)
def __init__(self):
    self.redis = RedisCache(
        endpoint='localhost',
        serializer=PickleSerializer(),
        port=6379,
        timeout=0)
app.config["JWT_SECRET_KEY"] = JWT_SECRET_KEY app.config["JWT_ACCESS_TOKEN_EXPIRES"] = timedelta(minutes=30) app.config["JWT_TOKEN_LOCATION"] = "cookies" app.config["JWT_ERROR_MESSAGE_KEY"] = "error" app.config["CORS_AUTOMATIC_OPTIONS"] = True app.config["CORS_SUPPORTS_CREDENTIALS"] = True # Construct an in-memory storage redis_port = 6379 if MODE == "testing": redis_port = 63791 elif MODE == "development": redis_port = 63790 cache = RedisCache( serializer=JsonSerializer(), password=CELERY_BROKER_PASSWORD, pool_min_size=3, port=redis_port, ) # Initialize the DB before doing anything else # to avoid circular importing db.init_app(app) JWTManager(app) CORS(app, origins=CORS_ORIGINS, supports_credentials=True) # logging.getLogger("sanic_cors").level = logging.DEBUG # Register the limiter # from ora_backend.utils.limiter import get_user_id_or_ip_addr limiter = Limiter(app, global_limits=["200/minute"],
import logging
import os
from typing import Final

from fastapi import FastAPI

from aiocache import RedisCache

Value = str

# Used as a default value for redis.get to properly handle retrieving of None values
not_found: Final = object()

logging.basicConfig(level=logging.INFO)
logger: Final = logging.getLogger("rest_storage")

app: Final = FastAPI(
    title="TGA Digital Rest Storage",
    version="1.0.0",
    openapi_tags=[{"name": "CRUD"}, {"name": "Bulk CRUD"}])

redis: Final = RedisCache(
    endpoint=os.environ["REDIS_HOST"],
    port=int(os.environ["REDIS_PORT"]))  # cast to int: environment values are strings


@app.put("/items/{key}",
         tags=["CRUD"],
         responses={
             201: {
                 "content": {
                     "application/json": {
                         "example": {
                             "stored": "/items/example_key"
                         }
                     }
                 }
             }
         })
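# Hypothetical handler sketch: the decorated function is truncated in the
# source. The name put_item, the value parameter, and the response shape
# (mirroring the documented 201 example above) are assumptions.
async def put_item(key: str, value: Value):
    await redis.set(key, value)
    return {"stored": f"/items/{key}"}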
def create_redis_instance():
    return RedisCache(
        endpoint='127.0.0.1',
        port=6379
    )
import asyncio

from aiocache import RedisCache


# A deliberately trivial serializer: dumps always stores 1 and loads always
# returns 2, which shows that the serializer fully controls what is stored
# and what is handed back to the caller.
class MySerializer:
    def dumps(self, value):
        return 1

    def loads(self, value):
        return 2


cache = RedisCache(serializer=MySerializer(), namespace="main")


async def serializer():
    await cache.set("key", "value")

    assert await cache.raw("get", "main:key") == b'1'
    assert await cache.get("key") == 2


def test_serializer():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(serializer())
    loop.run_until_complete(cache.delete("key"))


if __name__ == "__main__":
    test_serializer()
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s: %(message)s')
LOGGER = logging.getLogger('novels_search')

# aiocache
REDIS_DICT = dict(
    IS_CACHE=True,
    REDIS_ENDPOINT="",
    REDIS_PORT=6379,
    PASSWORD="",
    CACHE_DB=0,
    SESSION_DB=1,
    POOLSIZE=4,
)
AIO_CACHE = RedisCache(
    endpoint=REDIS_DICT['REDIS_ENDPOINT'],
    port=REDIS_DICT['REDIS_PORT'],
    namespace="main")

# mongodb
MONGODB = dict(
    HOST="",
    PORT="",
    USERNAME='',
    PASSWORD='',
    DATABASE='owllook',
)

# website
WEBSITE = dict(IS_RUNNING=True, TOKEN='')

AUTH = {"Owllook-Api-Key": ""}
@pytest.fixture
def redis_cache():
    cache = RedisCache()
    return cache
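

# Sketch of how such a fixture is typically consumed; the test name and the
# pytest-asyncio marker are assumptions, not part of the source.
@pytest.mark.asyncio
async def test_set_get(redis_cache):
    await redis_cache.set("key", "value")
    assert await redis_cache.get("key") == "value"
    await redis_cache.delete("key")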
def test_inheritance(self):
    assert isinstance(RedisCache(), BaseCache)
import asyncio
import zlib

from aiocache import RedisCache


# The source snippet starts mid-class; the class header is reconstructed from
# the RedisCache(serializer=CompressionSerializer(), ...) call below.
# encoding = None tells aiocache to hand loads() raw bytes rather than str.
class CompressionSerializer:

    encoding = None

    def dumps(self, value):
        print("I've received:\n{}".format(value))
        compressed = zlib.compress(value.encode())
        print("But I'm storing:\n{}".format(compressed))
        return compressed

    def loads(self, value):
        print("I've retrieved:\n{}".format(value))
        decompressed = zlib.decompress(value).decode()
        print("But I'm returning:\n{}".format(decompressed))
        return decompressed


cache = RedisCache(serializer=CompressionSerializer(), namespace="main")


async def serializer():
    text = (
        "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt"
        "ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
        "ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in"
        "reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur"
        "sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit"
        "anim id est laborum.")
    await cache.set("key", text)
    print("-----------------------------------")
    real_value = await cache.get("key")
    compressed_value = await cache.raw("get", "main:key")
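    # Hedged completion: the example ends here in the source; these checks
    # confirm the round trip and that the stored payload is the compressed form.
    assert real_value == text
    assert len(compressed_value) < len(text)


def test_serializer():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(serializer())
    loop.run_until_complete(cache.delete("key"))


if __name__ == "__main__":
    test_serializer()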
def test_parse_uri_path(self, path, expected):
    assert RedisCache().parse_uri_path(path) == expected
import asyncio

from aiocache import RedisCache

cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")


async def redis():
    await cache.set("key", "value")
    await cache.set("expire_me", "value", ttl=10)

    assert await cache.get("key") == "value"
    assert await cache.get("expire_me") == "value"
    assert await cache.raw("ttl", "main:expire_me") > 0


def test_redis():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(redis())
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.delete("expire_me"))


if __name__ == "__main__":
    test_redis()
import asyncio
from collections import namedtuple

from aiocache import RedisCache
from aiocache.serializers import PickleSerializer

MyObject = namedtuple("MyObject", ["x", "y"])
cache = RedisCache(serializer=PickleSerializer(), namespace="main")


async def complex_object():
    obj = MyObject(x=1, y=2)
    await cache.set("key", obj)
    my_object = await cache.get("key")

    assert my_object.x == 1
    assert my_object.y == 2


def test_python_object():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(complex_object())
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.close())


if __name__ == "__main__":
    test_python_object()
def test_default_cache():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(decorator())
    loop.run_until_complete(global_cache())
    loop.run_until_complete(RedisCache(namespace="main").delete("key"))