def test_multiple_caches(self):
    caches.set_config(
        {
            "default": {
                "cache": "aiocache.RedisCache",
                "endpoint": "127.0.0.10",
                "port": 6378,
                "serializer": {"class": "aiocache.serializers.PickleSerializer"},
                "plugins": [
                    {"class": "aiocache.plugins.HitMissRatioPlugin"},
                    {"class": "aiocache.plugins.TimingPlugin"},
                ],
            },
            "alt": {"cache": "aiocache.SimpleMemoryCache"},
        }
    )

    default = caches.get("default")
    alt = caches.get("alt")

    assert isinstance(default, RedisCache)
    assert default.endpoint == "127.0.0.10"
    assert default.port == 6378
    assert isinstance(default.serializer, PickleSerializer)
    assert len(default.plugins) == 2

    assert isinstance(alt, SimpleMemoryCache)
def init_cache(mode: str = 'default', redis_endpoint: str = '127.0.0.1',
               redis_port: int = 6379, redis_timeout: int = 1) -> bool:
    """Initialize the aiocache cache.

    Args:
        mode (str, optional): Defaults to 'default'. Set 'redis' for Redis.
        redis_endpoint (str, optional): Defaults to '127.0.0.1'. IP of the Redis server.
        redis_port (int, optional): Defaults to 6379. Port of the Redis server.
        redis_timeout (int, optional): Defaults to 1. Redis timeout.

    Returns:
        bool: True if successful.
    """
    aiocache_config = {
        'default': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {
                'class': "aiocache.serializers.PickleSerializer"
            }
        },
        'redis': {
            'cache': "aiocache.RedisCache",
            'endpoint': redis_endpoint,
            'port': redis_port,
            'timeout': redis_timeout,
            'serializer': {
                'class': "aiocache.serializers.PickleSerializer"
            },
            'plugins': [
                {'class': "aiocache.plugins.HitMissRatioPlugin"},
                {'class': "aiocache.plugins.TimingPlugin"}
            ]
        }
    }
    caches.set_config(aiocache_config)

    if mode == 'redis':
        try:
            this.cache = caches.get('redis')
            logger.info(f'Using Redis for cache - [{redis_endpoint}]')
        except Exception:
            logger.exception('Error initiating aiocache with Redis.')
            raise
    else:
        try:
            this.cache = caches.get('default')
            logger.info('Using RAM for cache')
        except Exception:
            logger.exception('Error initiating aiocache with SimpleMemoryCache.')
            raise
    return True
def test_set_config_updates_existing_values(self):
    assert not isinstance(caches.get("default").serializer, JsonSerializer)
    caches.set_config(
        {
            "default": {
                "cache": "aiocache.SimpleMemoryCache",
                "serializer": {"class": "aiocache.serializers.JsonSerializer"},
            }
        }
    )
    assert isinstance(caches.get("default").serializer, JsonSerializer)
def test_retrieve_cache(self):
    caches.set_config(
        {
            "default": {
                "cache": "aiocache.RedisCache",
                "endpoint": "127.0.0.10",
                "port": 6378,
                "ttl": 10,
                "serializer": {
                    "class": "aiocache.serializers.PickleSerializer",
                    "encoding": "encoding",
                },
                "plugins": [
                    {"class": "aiocache.plugins.HitMissRatioPlugin"},
                    {"class": "aiocache.plugins.TimingPlugin"},
                ],
            }
        }
    )

    cache = caches.get("default")

    assert isinstance(cache, RedisCache)
    assert cache.endpoint == "127.0.0.10"
    assert cache.port == 6378
    assert cache.ttl == 10
    assert isinstance(cache.serializer, PickleSerializer)
    assert cache.serializer.encoding == "encoding"
    assert len(cache.plugins) == 2
async def default_cache():
    cache = caches.get('default')  # This always returns the same instance
    await cache.set("key", "value")

    assert await cache.get("key") == "value"
    assert isinstance(cache, Cache.MEMORY)
    assert isinstance(cache.serializer, StringSerializer)
async def test_redis():
    """
    Check Redis.
    :return:
    """
    from aiocache import caches
    from helpers.config import Config

    serv_config = Config.get_config()
    cache_conf = serv_config.get('redis')

    caches.set_config({
        'default': {
            'cache': "aiocache.RedisCache",
            'endpoint': cache_conf['host'],
            'port': cache_conf['port'],
            'timeout': int(cache_conf['timeout']),
            'namespace': str(cache_conf['namespace']),
            'serializer': {
                'class': "aiocache.serializers.PickleSerializer"
            }
        }
    })
    cache = caches.get('default')

    await cache.set('test_a_list', [1, 2, 3, 4, 5])
    await cache.set('test_b_list', ['1', '2', '3', '4', '5'])

    a = await cache.get('test_a_list')
    b = await cache.get('test_b_list')

    pprint(a[:2])
    pprint(b[3:])
    print('ok')
def __init__(self, storage):
    super().__init__(storage)
    options = app_settings['aiocache']
    options['serializer'] = {
        'class': "guillotina_aiocache.serializers.JsonSerializer"
    }
    caches.set_config({'default': options})
    self._cache = caches.get('default')
def test_set_config_removes_existing_caches(self):
    caches.set_config(
        {
            "default": {"cache": "aiocache.SimpleMemoryCache"},
            "alt": {"cache": "aiocache.SimpleMemoryCache"},
        }
    )
    caches.get("default")
    caches.get("alt")
    assert len(caches._caches) == 2

    caches.set_config(
        {
            "default": {"cache": "aiocache.SimpleMemoryCache"},
            "alt": {"cache": "aiocache.SimpleMemoryCache"},
        }
    )
    assert caches._caches == {}
async def delete_cache(*args, **kwargs):
    cache = caches.get(cache_alias)  # aiocache
    built_cache_key = cache_key or cache_key_builder(co_name, *args, **kwargs)
    logger.debug(f'delete_cache::cache_key = "{built_cache_key}"')
    await cache.increment(f'delete_cache_count::{built_cache_key}')  # aiocache
    await cache.delete(built_cache_key)  # aiocache
def test_alias():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(default_cache())
    loop.run_until_complete(alt_cache())

    cache = Cache(Cache.REDIS)
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.close())

    loop.run_until_complete(caches.get('default').close())
def caches(loop):
    aiocaches.set_config({
        'default': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {
                'class': 'aiocache.serializers.NullSerializer'
            }
        },
        'redis': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {
                'class': 'aiocache.serializers.NullSerializer'
            }
        }
    })
    active_caches = [
        aiocaches.create(**aiocaches.get_alias_config('default')),
        aiocaches.create(**aiocaches.get_alias_config('redis'))
    ]
    yield active_caches
    loop.run_until_complete(aiocaches.get('default').clear())
    loop.run_until_complete(aiocaches.get('redis').clear())
def __call__(self, f):
    if self.alias:
        self.cache = caches.get(self.alias)
    else:
        self.cache = _get_cache(
            cache=self._cache, serializer=self._serializer,
            plugins=self._plugins, **self._kwargs)

    @functools.wraps(f)
    async def wrapper(*args, **kwargs):
        return await self.decorator(f, *args, **kwargs)

    wrapper.cache = self.cache
    return wrapper
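# The decorator above resolves the wrapped function's cache either from a
# configured alias or from explicitly passed parameters. A minimal usage
# sketch, assuming aiocache's `cached` decorator and an already configured
# "default" alias; the function name and return value are illustrative only.

from aiocache import cached


@cached(alias="default")
async def expensive_computation():
    # Executed on a cache miss; later calls within the TTL are served from
    # the cache registered under the "default" alias.
    return 42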
def __init__(self, config: Config = None, *, loop=None):
    """Initialize the bot.

    :param config: configuration
    :param loop: event loop
    """
    #: Configuration
    self.config = config or Config.load()
    #: Logger
    self.logger = structlog.get_logger('tg_odesli_bot')
    self.logger_var = contextvars.ContextVar('logger', default=self.logger)
    #: Event loop
    self._loop = loop or asyncio.get_event_loop()
    #: Cache
    self.cache = caches.get('default')
    #: Telegram connect retries count
    self._tg_retries = 0
def test_ensure_plugins_order(self):
    caches.set_config(
        {
            "default": {
                "cache": "aiocache.RedisCache",
                "plugins": [
                    {"class": "aiocache.plugins.HitMissRatioPlugin"},
                    {"class": "aiocache.plugins.TimingPlugin"},
                ],
            }
        }
    )

    cache = caches.get("default")
    assert isinstance(cache.plugins[0], HitMissRatioPlugin)

    cache = caches.create("default")
    assert isinstance(cache.plugins[0], HitMissRatioPlugin)
async def wrapper(*args, **kwargs) -> t.Any:
    cache = caches.get(cache_alias)  # aiocache
    built_cache_key = cache_key or cache_key_builder(co_name, *args, **kwargs)
    logger.debug(f'cache_key = "{built_cache_key}"')

    result = await cache.get(built_cache_key)  # aiocache
    if predicate(result):
        logger.debug('HIT CACHE')
        await cache.increment(f'hits::{built_cache_key}')  # aiocache
        return result

    result = await func(*args, **kwargs)
    logger.debug('MISS CACHE')
    await cache.increment(f'misses::{built_cache_key}')  # aiocache
    await cache.set(built_cache_key, result, ttl=ttl)  # aiocache
    return result
async def new(
    request: Request,
    response: Response,
    story_id: str = Path(
        ..., description="""The id of the story to generate the figment for."""
    ),
    entry: SceneEntry = Body(
        ..., description="""The current entry representing the move in progress"""
    ),
    suggestion_type: SuggestionType = Query(
        ..., description="""The suggestion type to generate"""
    ),
):
    """
    Create a new figment. Returns a 404 if the story data cannot be found.
    """
    cache = caches.get("default")
    story_data = await cache.get(f"{suggestion_type}:{story_id}")
    if story_data is None:
        raise HTTPException(HTTP_404_NOT_FOUND, detail="Unknown story")

    context_dict: Dict[str, Any] = {"data": story_data, "entry": entry.copy()}
    try:
        figment_range = request.headers.get("Range")
        if figment_range:
            context_dict["range"] = Range.validate(figment_range)
    except ValidationError:
        raise HTTPException(
            HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE, "Invalid range specified!"
        )

    context = FigmentContext(**context_dict)
    context = await Figmentators.figmentate(suggestion_type, context)
    if context.status == FigmentStatus.failed:
        raise HTTPException(HTTP_406_NOT_ACCEPTABLE, "Unable to generate suggestion!")

    if context.status == FigmentStatus.partial:
        response.status_code = HTTP_206_PARTIAL_CONTENT

    return context.entry
async def cache_info(*args, **kwargs) -> dict:
    cache = caches.get(cache_alias)  # aiocache
    built_cache_key = cache_key or cache_key_builder(co_name, *args, **kwargs)
    logger.debug(f'cache_info::cache_key = "{built_cache_key}"')

    hits = await cache.increment(f'hits::{built_cache_key}', delta=0)  # aiocache
    misses = await cache.increment(f'misses::{built_cache_key}', delta=0)  # aiocache
    delete_cache_count = await cache.increment(
        f'delete_cache_count::{built_cache_key}', delta=0)  # aiocache

    return {
        'hits': hits,
        'misses': misses,
        'total': hits + misses,
        'delete_cache_count': delete_cache_count,
        'ttl': ttl,
        'cache_alias': cache_alias,
        'cache_key': built_cache_key,
    }
async def start(self):
    self.config = config

    self.server_config = copy.deepcopy(self.config.servers[self.server_name])
    self.server_config.update(self.server_config_override)

    self.config.database.update(self.database_config_override)
    self.config.redis.update(self.redis_config_override)
    self.config.commands.update(self.commands_config_override)
    self.config.client.update(self.client_config_override)

    general_log_directory = os.path.dirname(self.server_config["Logging"]["General"])
    errors_log_directory = os.path.dirname(self.server_config["Logging"]["Errors"])

    if not os.path.exists(general_log_directory):
        os.mkdir(general_log_directory)

    if not os.path.exists(errors_log_directory):
        os.mkdir(errors_log_directory)

    self.logger = logging.getLogger('houdini')
    universal_handler = RotatingFileHandler(
        self.server_config['Logging']['General'],
        maxBytes=2097152, backupCount=3, encoding='utf-8')
    error_handler = logging.FileHandler(self.server_config['Logging']['Errors'])
    console_handler = logging.StreamHandler(stream=sys.stdout)

    log_formatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s')
    error_handler.setLevel(logging.ERROR)

    universal_handler.setFormatter(log_formatter)
    console_handler.setFormatter(log_formatter)

    self.logger.addHandler(universal_handler)
    self.logger.addHandler(console_handler)
    self.logger.addHandler(error_handler)

    level = logging.getLevelName(self.server_config['Logging']['Level'])
    self.logger.setLevel(level)

    self.server = await asyncio.start_server(
        self.client_connected, self.server_config['Address'],
        self.server_config['Port'])

    await self.db.set_bind('postgresql://{}:{}@{}/{}'.format(
        self.config.database['Username'], self.config.database['Password'],
        self.config.database['Address'], self.config.database['Name']))

    self.logger.info('Booting Houdini')

    self.redis = await aioredis.create_redis_pool(
        'redis://{}:{}'.format(self.config.redis['Address'], self.config.redis['Port']),
        minsize=5, maxsize=10)

    if self.server_config['World']:
        await self.redis.delete(f'houdini.players.{self.server_config["Id"]}')
        await self.redis.hdel('houdini.population', self.server_config["Id"])

        caches.set_config({
            'default': {
                'cache': SimpleMemoryCache,
                'namespace': 'houdini',
                'ttl': self.server_config['CacheExpiry']
            }
        })

        self.cache = caches.get('default')

        self.client_class = Penguin
        self.penguin_string_compiler = PenguinStringCompiler()
        self.anonymous_penguin_string_compiler = PenguinStringCompiler()

        PenguinStringCompiler.setup_default_builder(self.penguin_string_compiler)
        PenguinStringCompiler.setup_anonymous_default_builder(
            self.anonymous_penguin_string_compiler)

        await self.xml_listeners.setup(
            houdini.handlers, exclude_load='houdini.handlers.login.login')
        await self.xt_listeners.setup(houdini.handlers)
        await self.dummy_event_listeners.setup(houdini.handlers)

        self.logger.info('World server started')
    else:
        await self.xml_listeners.setup(houdini.handlers, 'houdini.handlers.login.login')
        self.logger.info('Login server started')

    self.items = await ItemCollection.get_collection()
    self.logger.info(f'Loaded {len(self.items)} clothing items')

    self.igloos = await IglooCollection.get_collection()
    self.logger.info(f'Loaded {len(self.igloos)} igloos')

    self.furniture = await FurnitureCollection.get_collection()
    self.logger.info(f'Loaded {len(self.furniture)} furniture items')

    self.locations = await LocationCollection.get_collection()
    self.logger.info(f'Loaded {len(self.locations)} igloo locations')

    self.flooring = await FlooringCollection.get_collection()
    self.logger.info(f'Loaded {len(self.flooring)} igloo flooring')

    self.rooms = await RoomCollection.get_collection()
    self.spawn_rooms = self.rooms.spawn_rooms
    await self.rooms.setup_tables()
    await self.rooms.setup_waddles()
    self.logger.info(f'Loaded {len(self.rooms)} rooms ({len(self.spawn_rooms)} spawn)')

    self.postcards = await PostcardCollection.get_collection()
    self.logger.info(f'Loaded {len(self.postcards)} postcards')

    self.stamps = await StampCollection.get_collection()
    self.logger.info(f'Loaded {len(self.stamps)} stamps')

    self.cards = await CardCollection.get_collection()
    self.logger.info(f'Loaded {len(self.cards)} ninja cards')

    self.puffles = await PuffleCollection.get_collection()
    self.logger.info(f'Loaded {len(self.puffles)} puffles')

    self.puffle_items = await PuffleItemCollection.get_collection()
    self.logger.info(f'Loaded {len(self.puffle_items)} puffle care items')

    self.characters = await CharacterCollection.get_collection()
    self.logger.info(f'Loaded {len(self.characters)} characters')

    self.permissions = await PermissionCollection.get_collection()

    self.logger.info(
        f'Multi-client support is '
        f'{"enabled" if self.config.client["MultiClientSupport"] else "disabled"}')
    self.logger.info(
        f'Listening on {self.server_config["Address"]}:{self.server_config["Port"]}')

    if self.config.client['AuthStaticKey'] != 'houdini':
        self.logger.warning(
            'The static key has been changed from the default, '
            'this may cause authentication issues!')

    await self.plugins.setup(houdini.plugins)

    self.heartbeat = asyncio.create_task(server_heartbeat(self))
    self.egg_timer = asyncio.create_task(server_egg_timer(self))
    self.music = SoundStudio(self)

    async with self.server:
        await self.server.serve_forever()
def test_reuse_instance(self):
    assert caches.get('default') is caches.get('default')
CACHE_NAME = 'default'
CACHE_TTL_SECONDS = 5

# You can use either classes or strings for referencing classes
caches.set_config({
    'default': {
        'cache': "aiocache.SimpleMemoryCache",
        'serializer': {
            'class': "aiocache.serializers.StringSerializer"
        },
        'ttl': CACHE_TTL_SECONDS  # we keep this super short for demonstration purposes
    },
})

# We globally scope this for demo purposes
cache = caches.get(CACHE_NAME)


async def add(key, value):
    await cache.set(key, value)
    # This is just for testing
    # assert await cache.get(key) == value


async def increment(key):
    await cache.increment(key)


async def main():
    # Presumably in practice our main routine would accept outside input
    # or do something more interesting than this.
    print("Populating cache with {} elements...".format(ELEMENTS_IN_CACHE))
def test_get_wrong_alias(self):
    with pytest.raises(KeyError):
        caches.get("wrong_cache")

    with pytest.raises(KeyError):
        caches.create("wrong_cache")
def invalidate_cache(connection, pid, channel, payload):
    debug('Received notification: {} {} {}'.format(pid, channel, payload))
    cache = caches.get('default')
    app.loop.create_task(cache.delete(payload))
async def get_cache_value():
    # This lazy loads a singleton so it will return the same instance every
    # time. If you want to create a new instance, you can use
    # `caches.create("default")`
    cache = caches.get("default")
    return await cache.get("my_custom_key")
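# `caches.get()` memoizes one instance per alias, while `caches.create()`
# builds a fresh cache from the same alias configuration on every call. A
# small sketch of the difference, assuming a configured "default" alias:

from aiocache import caches

singleton_a = caches.get("default")
singleton_b = caches.get("default")
assert singleton_a is singleton_b  # same lazily created instance

fresh = caches.create("default")
assert fresh is not singleton_a  # new instance built from the alias config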
import asyncio

from aiocache import caches

from config import MEMCACHE_CONFIG

caches.set_config({
    'default': {
        'cache': "aiocache.MemcachedCache",
        'endpoint': MEMCACHE_CONFIG['host'],
        'port': MEMCACHE_CONFIG['port'],
        'serializer': {
            'class': "aiocache.serializers.StringSerializer"
        }
    }
})

memcache = caches.get('default')


async def cache_get(key):
    value = await memcache.get(key)
    return value


async def cache_set(key, value, exp=86400):
    result = await memcache.set(key, value, ttl=exp)
    return result


async def cache_del(key):
    result = await memcache.delete(key)
    return result
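# A hedged usage sketch for the helpers above; the coroutine name, key, value,
# and expiry are illustrative assumptions, and it relies on the module defined
# above (and a reachable Memcached endpoint) being importable.

async def demo():
    await cache_set("greeting", "hello", exp=60)
    print(await cache_get("greeting"))  # -> "hello" while the TTL is live
    await cache_del("greeting")


if __name__ == "__main__":
    asyncio.run(demo())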
def __init__(self):
    self._cache: BaseCache = caches.get("default")