def test_multiple_caches(self):
    """Two configured aliases resolve to two correctly built cache objects."""
    config = {
        'default': {
            'cache': "aiocache.RedisCache",
            'endpoint': "127.0.0.10",
            'port': 6378,
            'serializer': {'class': "aiocache.serializers.PickleSerializer"},
            'plugins': [
                {'class': "aiocache.plugins.HitMissRatioPlugin"},
                {'class': "aiocache.plugins.TimingPlugin"},
            ],
        },
        'alt': {'cache': "aiocache.SimpleMemoryCache"},
    }
    caches.set_config(config)

    default = caches.get('default')
    alt = caches.get('alt')

    # 'default' alias: Redis cache with endpoint, port, serializer, plugins.
    assert isinstance(default, RedisCache)
    assert default.endpoint == "127.0.0.10"
    assert default.port == 6378
    assert isinstance(default.serializer, PickleSerializer)
    assert len(default.plugins) == 2
    # 'alt' alias: plain in-memory cache.
    assert isinstance(alt, SimpleMemoryCache)
def test_multiple_caches(self):
    """Aliases 'default' and 'alt' build distinct, correctly configured caches."""
    redis_alias = {
        "cache": "aiocache.RedisCache",
        "endpoint": "127.0.0.10",
        "port": 6378,
        "serializer": {"class": "aiocache.serializers.PickleSerializer"},
        "plugins": [
            {"class": "aiocache.plugins.HitMissRatioPlugin"},
            {"class": "aiocache.plugins.TimingPlugin"},
        ],
    }
    memory_alias = {"cache": "aiocache.SimpleMemoryCache"}
    caches.set_config({"default": redis_alias, "alt": memory_alias})

    default = caches.get("default")
    alt = caches.get("alt")

    assert isinstance(default, RedisCache)
    assert default.endpoint == "127.0.0.10"
    assert default.port == 6378
    assert isinstance(default.serializer, PickleSerializer)
    assert len(default.plugins) == 2
    assert isinstance(alt, SimpleMemoryCache)
async def get_day_schedule(self, ans: Message):
    """Reply with the schedule text for the weekday chosen in the payload.

    The fetched schedule is cached per user for 15 minutes, and the
    chosen weekday is persisted back into the user's context.

    Args:
        ans: incoming message whose payload carries 'day_num'.
    """
    payload = ujson.loads(ans.payload)
    weekday = int(payload['day_num'])
    u = await UserState.get(uid=ans.from_id)
    # Context may come back as a JSON string depending on the storage layer.
    if isinstance(u.context, str):
        u.context = ujson.loads(u.context)
    caches.set_config(CACHE_CONFIG)
    # Fix: the original wrapped ans.from_id in a redundant str() inside
    # .format(); the key is also built once instead of twice.
    cache_key = f'schedule_{ans.from_id}'
    schedule = await cache.get(cache_key)
    if schedule is None:
        data = await fetch_schedule_json(q=u.context['query'])
        schedule = ScheduleResponseBuilder(data)
        await cache.set(cache_key, schedule, ttl=900)
    schedule.build_text(weekday=weekday)
    msg = schedule.get_text()
    await ans(message=msg,
              keyboard=keyboard_gen(SCHEDULE_KEYBOARD, one_time=False))
    u.context['weekday'] = weekday
    await u.save()
def test_retrieve_cache(self):
    """caches.get builds the cache exactly as described by the 'default' alias."""
    serializer_conf = {
        'class': "aiocache.serializers.PickleSerializer",
        'encoding': 'encoding',
    }
    plugin_confs = [
        {'class': "aiocache.plugins.HitMissRatioPlugin"},
        {'class': "aiocache.plugins.TimingPlugin"},
    ]
    caches.set_config({
        'default': {
            'cache': "aiocache.RedisCache",
            'endpoint': "127.0.0.10",
            'port': 6378,
            'ttl': 10,
            'serializer': serializer_conf,
            'plugins': plugin_confs,
        }
    })

    cache = caches.get('default')

    assert isinstance(cache, RedisCache)
    assert cache.endpoint == "127.0.0.10"
    assert cache.port == 6378
    assert cache.ttl == 10
    assert isinstance(cache.serializer, PickleSerializer)
    assert cache.serializer.encoding == 'encoding'
    assert len(cache.plugins) == 2
def init_cache(app, loop):
    """Sanic listener: point aiocache's 'default' alias at Redis.

    Connection details come from CONFIG.REDIS_DICT, falling back to
    localhost:6379 with no password.
    """
    LOGGER.info("Starting aiocache")
    app.config.from_object(CONFIG)
    redis_settings = CONFIG.REDIS_DICT
    # You can use either classes or strings for referencing classes
    caches.set_config({
        'default': {
            'cache': "aiocache.RedisCache",
            'endpoint': redis_settings.get('REDIS_ENDPOINT', 'localhost'),
            'port': redis_settings.get('REDIS_PORT', 6379),
            'password': redis_settings.get('REDIS_PASSWORD', None),
            'timeout': 10,
            'serializer': {'class': "aiocache.serializers.PickleSerializer"},
            'plugins': [
                {'class': "aiocache.plugins.HitMissRatioPlugin"},
                {'class': "aiocache.plugins.TimingPlugin"},
            ],
        }
    })
def test_retrieve_cache_new_instance(self):
    """caches.create returns a fresh instance built from the alias config."""
    serializer_conf = {
        "class": "aiocache.serializers.PickleSerializer",
        "encoding": "encoding",
    }
    caches.set_config({
        "default": {
            "cache": "aiocache.RedisCache",
            "endpoint": "127.0.0.10",
            "port": 6378,
            "serializer": serializer_conf,
            "plugins": [
                {"class": "aiocache.plugins.HitMissRatioPlugin"},
                {"class": "aiocache.plugins.TimingPlugin"},
            ],
        }
    })

    cache = caches.create("default")

    assert isinstance(cache, RedisCache)
    assert cache.endpoint == "127.0.0.10"
    assert cache.port == 6378
    assert isinstance(cache.serializer, PickleSerializer)
    assert cache.serializer.encoding == "encoding"
    assert len(cache.plugins) == 2
async def test_redis():
    """Smoke-test Redis: store two lists via the cache and read them back."""
    from aiocache import caches
    from helpers.config import Config

    cache_conf = Config.get_config().get('redis')
    caches.set_config({
        'default': {
            'cache': "aiocache.RedisCache",
            'endpoint': cache_conf['host'],
            'port': cache_conf['port'],
            'timeout': int(cache_conf['timeout']),
            'namespace': str(cache_conf['namespace']),
            'serializer': {'class': "aiocache.serializers.PickleSerializer"},
        }
    })
    cache = caches.get('default')

    await cache.set('test_a_list', [1, 2, 3, 4, 5])
    await cache.set('test_b_list', ['1', '2', '3', '4', '5'])
    a = await cache.get('test_a_list')
    b = await cache.get('test_b_list')

    pprint(a[:2])
    pprint(b[3:])
    print('ok')
def init_cache(mode: str, redis_endpoint: str, redis_port: int,
               redis_timeout: int) -> bool:
    """Initiate aiocache cache.

    Args:
        mode (str): Set 'redis' for Redis; anything else uses RAM.
        redis_endpoint (str): IP/host of the Redis server.
        redis_port (int): Port of the Redis server.
        redis_timeout (int): Redis timeout.

    Returns:
        bool: True if successful.

    Raises:
        Exception: re-raised after logging if the cache cannot be created.
    """
    aiocache_config = {
        'default': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {'class': "aiocache.serializers.PickleSerializer"},
        },
        'redis': {
            'cache': "aiocache.RedisCache",
            'endpoint': redis_endpoint,
            'port': redis_port,
            'timeout': redis_timeout,
            'serializer': {'class': "aiocache.serializers.PickleSerializer"},
            'plugins': [
                {'class': "aiocache.plugins.HitMissRatioPlugin"},
                {'class': "aiocache.plugins.TimingPlugin"},
            ],
        },
    }
    caches.set_config(aiocache_config)
    if mode == 'redis':
        try:
            this.cache = caches.get('redis')
            logger.info(f'Using redis for cache - [{redis_endpoint}]')
        except Exception:
            logger.exception('Error initiating aiocache with Redis.')
            raise
    else:
        try:
            this.cache = caches.get('default')
            # Fix: dropped the pointless f-prefix from a placeholder-free string.
            logger.info('Using RAM for cache')
        except Exception:
            logger.exception(
                'Error initiating aiocache with SimpleMemoryCache. ')
            raise
    return True
def __init__(self, storage):
    """Build the cache adapter, forcing the JSON serializer.

    Fix: the original assigned into ``app_settings['aiocache']`` directly,
    mutating the shared settings dict as a side effect. The options are now
    copied before the serializer override.

    Args:
        storage: backing storage handed to the parent adapter.
    """
    super().__init__(storage)
    # Shallow copy so app_settings itself is left untouched.
    options = dict(app_settings['aiocache'])
    options['serializer'] = {
        'class': "guillotina_aiocache.serializers.JsonSerializer"
    }
    caches.set_config({'default': options})
    self._cache = caches.get('default')
def reset_caches():
    """Restore aiocache to a single in-memory 'default' alias (no serialization)."""
    default_alias = {
        "cache": "aiocache.SimpleMemoryCache",
        "serializer": {"class": "aiocache.serializers.NullSerializer"},
    }
    caches.set_config({"default": default_alias})
def reset_caches():
    """Reset aiocache to a plain in-memory cache with a null serializer."""
    caches.set_config({
        'default': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {'class': "aiocache.serializers.NullSerializer"},
        },
    })
def init() -> None:
    """
    Initialize the cache module.

    This must be called before any plugins using cache, and after
    initializing "none" module.
    """
    caches.set_config({'default': get_bot().config.AIOCACHE_DEFAULT_CONFIG})
def test_set_config_updates_existing_values(self):
    """set_config replaces the serializer of an already-registered alias."""
    # Precondition: the current default cache is NOT using JSON.
    assert not isinstance(caches.get("default").serializer, JsonSerializer)

    new_config = {
        "default": {
            "cache": "aiocache.SimpleMemoryCache",
            "serializer": {"class": "aiocache.serializers.JsonSerializer"},
        }
    }
    caches.set_config(new_config)

    assert isinstance(caches.get("default").serializer, JsonSerializer)
def test_set_config_updates_existing_values(self):
    """Reconfiguring the 'default' alias swaps in the new serializer class."""
    before = caches.get('default').serializer
    assert not isinstance(before, JsonSerializer)
    caches.set_config({
        'default': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {'class': "aiocache.serializers.JsonSerializer"},
        }
    })
    after = caches.get('default').serializer
    assert isinstance(after, JsonSerializer)
def build_app():
    """Create the aiohttp application: middlewares, caches, routes, logging.

    Fix: ``logging.config.dictConfig`` returns ``None``, so the original
    ``make_handler(logger=dictConfig(...))`` call passed ``logger=None`` —
    logging was configured only as a side effect. Configure logging first,
    then build the handler with just the debug flag.

    Returns:
        web.Application: the configured application.
    """
    app = web.Application(
        middlewares=[version_middleware, exception_handler_middleware])
    caches.set_config(settings.AIO_CACHES)
    setup_routes(app)
    logging.config.dictConfig(settings.LOGGING)
    app.make_handler(debug=settings.DEBUG)
    return app
def test_create_extra_args(self):
    """Keyword args passed to caches.create override the alias config."""
    caches.set_config({
        'default': {
            'cache': "aiocache.RedisCache",
            'endpoint': "127.0.0.9",
            'db': 10,
            'port': 6378,
        }
    })
    cache = caches.create(
        'default', namespace="whatever", endpoint="127.0.0.10", db=10)

    assert cache.namespace == "whatever"
    assert cache.endpoint == "127.0.0.10"
    assert cache.db == 10
def init() -> None:
    """Initialize the cache module with an in-memory string-serialized cache.

    Fix: the original bound ``get_bot()`` to an unused local ``bot``; the
    call is kept for any side effects, the dead binding is dropped.
    """
    get_bot()
    caches.set_config({
        'default': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {'class': "aiocache.serializers.StringSerializer"},
        }
    })
    print("cache module initialize OK")
def test_create_extra_args(self):
    """Extra kwargs to caches.create take precedence over the alias values."""
    base_alias = {
        "cache": "aiocache.RedisCache",
        "endpoint": "127.0.0.9",
        "db": 10,
        "port": 6378,
    }
    caches.set_config({"default": base_alias})

    cache = caches.create("default", namespace="whatever",
                          endpoint="127.0.0.10", db=10)

    assert cache.namespace == "whatever"
    assert cache.endpoint == "127.0.0.10"
    assert cache.db == 10
async def before_server_start(_app, _):
    """Initialize database connection and Redis cache."""
    _app.ctx.engine = create_async_engine(SANIC_CONFIG["DB_URL"])
    caches.set_config(redis_cache_config)
    _app.ctx.redis = await aioredis.Redis.from_url(_app.config["redis"])

    # __Host- cookies require HTTPS, so the prefixed name is used only
    # outside DEBUG mode.
    cookie_name = "session" if _app.config["DEBUG"] else "__Host-session"
    interface = AIORedisSessionInterface(
        _app.ctx.redis,
        samesite="Strict",
        cookie_name=cookie_name,
    )
    session.init_app(_app, interface=interface)
def test_set_config_no_default(self):
    """A configuration without a 'default' alias is rejected with ValueError."""
    config = {
        'no_default': {
            'cache': "aiocache.RedisCache",
            'endpoint': "127.0.0.10",
            'port': 6378,
            'serializer': {'class': "aiocache.serializers.PickleSerializer"},
            'plugins': [
                {'class': "aiocache.plugins.HitMissRatioPlugin"},
                {'class': "aiocache.plugins.TimingPlugin"},
            ],
        }
    }
    with pytest.raises(ValueError):
        caches.set_config(config)
def test_ensure_plugins_order(self):
    """Plugins are instantiated in declaration order for both get and create."""
    caches.set_config({
        'default': {
            'cache': "aiocache.RedisCache",
            'plugins': [
                {'class': "aiocache.plugins.HitMissRatioPlugin"},
                {'class': "aiocache.plugins.TimingPlugin"},
            ],
        }
    })
    # Both the cached instance and a fresh one must keep the declared order.
    for factory in (caches.get, caches.create):
        cache = factory('default')
        assert isinstance(cache.plugins[0], HitMissRatioPlugin)
def test_set_config_no_default(self):
    """set_config raises ValueError when no 'default' alias is present."""
    bad_config = {
        "no_default": {
            "cache": "aiocache.RedisCache",
            "endpoint": "127.0.0.10",
            "port": 6378,
            "serializer": {"class": "aiocache.serializers.PickleSerializer"},
            "plugins": [
                {"class": "aiocache.plugins.HitMissRatioPlugin"},
                {"class": "aiocache.plugins.TimingPlugin"},
            ],
        }
    }
    with pytest.raises(ValueError):
        caches.set_config(bad_config)
async def show_weekdays(self, ans: Message):
    """Send the weekday-selection keyboard, caching the schedule for 15 min."""
    caches.set_config(CACHE_CONFIG)
    cache_key = f'schedule_{ans.from_id}'
    schedule = await cache.get(cache_key)
    if schedule is None:
        # Cache miss: fetch, build and remember for 900 seconds.
        schedule = ScheduleResponseBuilder(await fetch_schedule_json())
        await cache.set(cache_key, schedule, ttl=900)
    schedule.build_weekday_keyboard()
    await ans(message='Выберите день недели:',
              keyboard=keyboard_gen(schedule.get_keyboard()))
def test_set_config_removes_existing_caches(self):
    """Calling set_config again wipes every previously instantiated cache."""
    def configure():
        caches.set_config({
            "default": {"cache": "aiocache.SimpleMemoryCache"},
            "alt": {"cache": "aiocache.SimpleMemoryCache"},
        })

    configure()
    caches.get("default")
    caches.get("alt")
    assert len(caches._caches) == 2

    # Re-applying the same config must discard the existing instances.
    configure()
    assert caches._caches == {}
def test_ensure_plugins_order(self):
    """Plugin instantiation order matches declaration order (get and create)."""
    plugin_list = [
        {"class": "aiocache.plugins.HitMissRatioPlugin"},
        {"class": "aiocache.plugins.TimingPlugin"},
    ]
    caches.set_config({
        "default": {"cache": "aiocache.RedisCache", "plugins": plugin_list}
    })

    cached_instance = caches.get("default")
    assert isinstance(cached_instance.plugins[0], HitMissRatioPlugin)

    fresh_instance = caches.create("default")
    assert isinstance(fresh_instance.plugins[0], HitMissRatioPlugin)
def init_cache() -> None:
    """Register two aiocache aliases: in-memory 'default' and 'redis'."""
    caches.set_config({
        "default": {
            "cache": "aiocache.SimpleMemoryCache",
            "serializer": {"class": "aiocache.serializers.PickleSerializer"},
        },
        "redis": {
            "cache": "aiocache.RedisCache",
            "endpoint": settings.redis_host,
            "port": settings.redis_port,
            # A falsy password (e.g. empty string) becomes None → no AUTH.
            "password": settings.redis_password or None,
            "db": settings.redis_db_index,
            "timeout": 1,
            "serializer": {"class": "aiocache.serializers.PickleSerializer"},
        },
    })
def caches():
    """Fixture: two in-memory aliases with JSON serializers, returned as
    freshly created instances."""
    aiocaches.set_config({
        'default': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {'class': 'aiocache.serializers.JsonSerializer'},
        },
        'redis': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {'class': 'aiocache.serializers.JsonSerializer'},
        },
    })
    return [
        aiocaches.create(**aiocaches.get_alias_config(alias))
        for alias in ('default', 'redis')
    ]
def caches(loop):
    """Fixture: two in-memory aliases (null serializer); clears both caches
    on teardown so no state leaks between tests."""
    aiocaches.set_config({
        'default': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {'class': 'aiocache.serializers.NullSerializer'},
        },
        'redis': {
            'cache': "aiocache.SimpleMemoryCache",
            'serializer': {'class': 'aiocache.serializers.NullSerializer'},
        },
    })
    yield [
        aiocaches.create(**aiocaches.get_alias_config(alias))
        for alias in ('default', 'redis')
    ]
    # Teardown: empty both caches.
    for alias in ('default', 'redis'):
        loop.run_until_complete(aiocaches.get(alias).clear())
def test_set_config_removes_existing_caches(self):
    """A second set_config call clears the instantiated cache registry."""
    caches.set_config({
        'default': {'cache': "aiocache.SimpleMemoryCache"},
        'alt': {'cache': "aiocache.SimpleMemoryCache"},
    })
    caches.get('default')
    caches.get('alt')
    assert len(caches._caches) == 2

    # Same configuration again — existing instances must be discarded.
    caches.set_config({
        'default': {'cache': "aiocache.SimpleMemoryCache"},
        'alt': {'cache': "aiocache.SimpleMemoryCache"},
    })
    assert caches._caches == {}
async def start(self):
    """Boot the Houdini server: config, logging, DB, Redis, caches,
    protocol listeners and game data collections, then serve forever.

    Branches on ``server_config['World']``: a world server clears stale
    Redis presence data and loads the full handler set; a login server
    only wires the login XML handlers.
    """
    # Merge per-server and per-section overrides into the global config.
    self.config = config
    self.server_config = copy.deepcopy(
        self.config.servers[self.server_name])
    self.server_config.update(self.server_config_override)
    self.config.database.update(self.database_config_override)
    self.config.redis.update(self.redis_config_override)
    self.config.commands.update(self.commands_config_override)
    self.config.client.update(self.client_config_override)

    # Ensure log directories exist before attaching file handlers.
    general_log_directory = os.path.dirname(
        self.server_config["Logging"]["General"])
    errors_log_directory = os.path.dirname(
        self.server_config["Logging"]["Errors"])
    if not os.path.exists(general_log_directory):
        os.mkdir(general_log_directory)
    if not os.path.exists(errors_log_directory):
        os.mkdir(errors_log_directory)

    # Rotating general log (2 MiB x 3 backups), error-only file log and a
    # stdout stream, all sharing one format.
    self.logger = logging.getLogger('houdini')
    universal_handler = RotatingFileHandler(
        self.server_config['Logging']['General'],
        maxBytes=2097152, backupCount=3,
        encoding='utf-8')
    error_handler = logging.FileHandler(
        self.server_config['Logging']['Errors'])
    console_handler = logging.StreamHandler(stream=sys.stdout)
    log_formatter = logging.Formatter(
        '%(asctime)s [%(levelname)-5.5s] %(message)s')
    error_handler.setLevel(logging.ERROR)
    universal_handler.setFormatter(log_formatter)
    console_handler.setFormatter(log_formatter)
    self.logger.addHandler(universal_handler)
    self.logger.addHandler(console_handler)
    self.logger.addHandler(error_handler)
    level = logging.getLevelName(self.server_config['Logging']['Level'])
    self.logger.setLevel(level)

    # TCP listener for game clients.
    self.server = await asyncio.start_server(self.client_connected,
                                             self.server_config['Address'],
                                             self.server_config['Port'])

    await self.db.set_bind('postgresql://{}:{}@{}/{}'.format(
        self.config.database['Username'],
        self.config.database['Password'],
        self.config.database['Address'],
        self.config.database['Name']))

    self.logger.info('Booting Houdini')

    self.redis = await aioredis.create_redis_pool('redis://{}:{}'.format(
        self.config.redis['Address'], self.config.redis['Port']),
        minsize=5, maxsize=10)

    if self.server_config['World']:
        # Drop stale presence/population entries left by a previous run
        # of this world server id.
        await self.redis.delete(
            f'houdini.players.{self.server_config["Id"]}')
        await self.redis.hdel(f'houdini.population',
                              self.server_config["Id"])

        # In-process cache with a per-server configured expiry.
        caches.set_config({
            'default': {
                'cache': SimpleMemoryCache,
                'namespace': 'houdini',
                'ttl': self.server_config['CacheExpiry']
            }
        })
        self.cache = caches.get('default')

        self.client_class = Penguin

        self.penguin_string_compiler = PenguinStringCompiler()
        self.anonymous_penguin_string_compiler = PenguinStringCompiler()

        PenguinStringCompiler.setup_default_builder(
            self.penguin_string_compiler)
        PenguinStringCompiler.setup_anonymous_default_builder(
            self.anonymous_penguin_string_compiler)

        # World servers register all handlers except the login XML ones.
        await self.xml_listeners.setup(
            houdini.handlers,
            exclude_load='houdini.handlers.login.login')
        await self.xt_listeners.setup(houdini.handlers)
        await self.dummy_event_listeners.setup(houdini.handlers)
        self.logger.info('World server started')
    else:
        # Login servers only load the login XML handlers.
        await self.xml_listeners.setup(houdini.handlers,
                                       'houdini.handlers.login.login')
        self.logger.info('Login server started')

    # Load static game data collections from the database.
    self.items = await ItemCollection.get_collection()
    self.logger.info(f'Loaded {len(self.items)} clothing items')
    self.igloos = await IglooCollection.get_collection()
    self.logger.info(f'Loaded {len(self.igloos)} igloos')
    self.furniture = await FurnitureCollection.get_collection()
    self.logger.info(f'Loaded {len(self.furniture)} furniture items')
    self.locations = await LocationCollection.get_collection()
    self.logger.info(f'Loaded {len(self.locations)} igloo locations')
    self.flooring = await FlooringCollection.get_collection()
    self.logger.info(f'Loaded {len(self.flooring)} igloo flooring')

    # Rooms additionally need their tables and waddle games set up.
    self.rooms = await RoomCollection.get_collection()
    self.spawn_rooms = self.rooms.spawn_rooms
    await self.rooms.setup_tables()
    await self.rooms.setup_waddles()
    self.logger.info(
        f'Loaded {len(self.rooms)} rooms ({len(self.spawn_rooms)} spawn)')

    self.postcards = await PostcardCollection.get_collection()
    self.logger.info(f'Loaded {len(self.postcards)} postcards')
    self.stamps = await StampCollection.get_collection()
    self.logger.info(f'Loaded {len(self.stamps)} stamps')
    self.cards = await CardCollection.get_collection()
    self.logger.info(f'Loaded {len(self.cards)} ninja cards')
    self.puffles = await PuffleCollection.get_collection()
    self.logger.info(f'Loaded {len(self.puffles)} puffles')
    self.puffle_items = await PuffleItemCollection.get_collection()
    self.logger.info(f'Loaded {len(self.puffle_items)} puffle care items')
    self.characters = await CharacterCollection.get_collection()
    self.logger.info(f'Loaded {len(self.characters)} characters')
    self.permissions = await PermissionCollection.get_collection()

    self.logger.info(
        f'Multi-client support is '
        f'{"enabled" if self.config.client["MultiClientSupport"] else "disabled"}'
    )
    self.logger.info(
        f'Listening on {self.server_config["Address"]}:{self.server_config["Port"]}'
    )
    if self.config.client['AuthStaticKey'] != 'houdini':
        # A non-default static key may break clients' auth handshake.
        self.logger.warning(
            'The static key has been changed from the default, '
            'this may cause authentication issues!')

    await self.plugins.setup(houdini.plugins)

    # Background tasks and ambient music, then block serving clients.
    self.heartbeat = asyncio.create_task(server_heartbeat(self))
    self.egg_timer = asyncio.create_task(server_egg_timer(self))
    self.music = SoundStudio(self)

    async with self.server:
        await self.server.serve_forever()