Example #1
    async def _startup(self) -> None:
        """Creates needed sessions.
        """

        await Sessions.smtp.connect()
        await Sessions.database.connect()
        Sessions.aiohttp = ClientSession()

        try:
            Sessions.cache = Cache(Cache.REDIS)
            await Sessions.cache.exists("connection")
        except ConnectionRefusedError:
            Sessions.cache = Cache(Cache.MEMORY)
            logger.warning(
                "Falling back to in-memory cache; use Redis in production."
            )

        if self.clear_cache:
            await Sessions.cache.clear()

        if Config.upload_type == B2UploadSettings:
            await self.b2.authorize()

        self.background_tasks = await create_scheduler()
        for to_spawn in TASKS_TO_SPAWN:
            await self.background_tasks.spawn(to_spawn())

        await cache_community_types(self.community_types)
        await create_product_and_set(self.product_name)
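The Redis probe with an in-memory fallback is the reusable idea in this startup hook. Distilled into a minimal sketch (make_cache is a hypothetical name; a module-level logger is assumed):

from aiocache import Cache

async def make_cache() -> Cache:
    cache = Cache(Cache.REDIS)
    try:
        # Any cheap round-trip works as a connectivity probe.
        await cache.exists("connection")
    except ConnectionRefusedError:
        cache = Cache(Cache.MEMORY)
        logger.warning("Falling back to in-memory cache; use Redis in production.")
    return cache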
Example #2
    def test_calls_parse_uri_path_from_cache(self):
        with patch("aiocache.factory.Cache") as mock:
            mock.get_scheme_class.return_value.parse_uri_path = Mock(return_value={"arg1": "arg1"})
            Cache.from_url("redis:///")

        mock.get_scheme_class.return_value.parse_uri_path.assert_called_once_with("/")
        mock.assert_called_once_with(mock.get_scheme_class.return_value, arg1="arg1")
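For context, Cache.from_url is the call under test: the URL scheme selects the backend class and the parsed path/query components are forwarded as constructor kwargs, which is exactly what the assertions above verify. A sketch of the unpatched call (the URL values are illustrative):

from aiocache import Cache

# "redis" selects the Redis backend; host, port, db and query parameters
# are passed through to its constructor.
cache = Cache.from_url("redis://127.0.0.1:6379/0?namespace=main")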
Example #3
    def test_new(self, cache_type):
        kwargs = {"a": 1, "b": 2}
        cache_class = Cache.get_scheme_class(cache_type)

        with patch("aiocache.{}.__init__".format(cache_class.__name__)) as init:
            cache = Cache(cache_class, **kwargs)
            assert isinstance(cache, cache_class)
            init.assert_called_once_with(**kwargs)
Example #4
def test_cached():
    cache = Cache(Cache.REDIS,
                  endpoint="127.0.0.1",
                  port=6379,
                  namespace="main")
    loop = asyncio.get_event_loop()
    loop.run_until_complete(cached_call())
    assert loop.run_until_complete(cache.exists("key")) is True
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.close())
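The cached_call coroutine is not shown in this excerpt; in aiocache's documented usage it is a coroutine decorated with @cached pointing at the same Redis instance, roughly:

from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer

@cached(ttl=10, cache=Cache.REDIS, key="key", serializer=PickleSerializer(),
        endpoint="127.0.0.1", port=6379, namespace="main")
async def cached_call():
    # The return value is what gets stored under "key".
    return "content"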
Example #5
def test_alias():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(default_cache())
    loop.run_until_complete(alt_cache())

    cache = Cache(Cache.REDIS)
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.close())

    loop.run_until_complete(caches.get('default').close())
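caches.get('default') only resolves if aliases were configured beforehand. A minimal configuration sketch in the shape aiocache expects (the redis_alt alias name and its values are assumptions):

from aiocache import caches

caches.set_config({
    'default': {
        'cache': "aiocache.SimpleMemoryCache",
        'serializer': {'class': "aiocache.serializers.StringSerializer"}
    },
    'redis_alt': {
        'cache': "aiocache.RedisCache",
        'endpoint': "127.0.0.1",
        'port': 6379,
        'serializer': {'class': "aiocache.serializers.PickleSerializer"}
    }
})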
Example #6
def redis_cache(event_loop):
    cache = Cache(Cache.REDIS, namespace="test", pool_max_size=1)
    yield cache

    for _, pool in RedisBackend.pools.items():
        pool.close()
        event_loop.run_until_complete(pool.wait_closed())
Example #7
async def test_exception_in_cache_backend(
        aiohttp_client: TestClientFixture) -> None:
    """check if application still works if cache backend is misconfigured
    or cache backend doesn't work"""
    handler_hits = 0

    @cached
    async def handler(_: web.Request) -> web.Response:
        nonlocal handler_hits
        handler_hits += 1
        return web.Response(body=b"Hello world")

    app = web.Application()
    app.router.add_route('GET', '/', handler)
    cache = Cache(
        Cache.REDIS,
        endpoint="such.hostname.must.not.exist",
        serializer=PickleSerializer(),
        namespace="main",
        ttl=60,
    )
    register_cache(app, cache)
    client = await aiohttp_client(app)

    hits = 10
    for i in range(hits):
        resp = await client.get("/")
        assert await resp.read() == b"Hello world"
        assert resp.status == 200
    assert handler_hits == hits
Example #8
async def clear_cache(name: str, background_tasks: BackgroundTasks) -> None:
    cache = Cache(Cache.REDIS,
                  endpoint=app_settings.CACHE_HOST,
                  port=app_settings.CACHE_PORT)
    if name == "all":
        background_tasks.add_task(cache.delete, "/*")
    else:
        background_tasks.add_task(cache.delete, f"/{name}*")
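Note that Cache.delete removes a single literal key, so the "/*" and f"/{name}*" arguments only have an effect if something downstream expands the wildcard; Example #15 below shows a later revision of this handler that uses cache.clear with a namespace instead.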
Example #9
def initAppObjects(app):
    app["cache"] = Cache(Cache.MEMORY)
    app["resolver"] = aiodns.DNSResolver(app.loop)
    app["websockets"] = set()
    app["subscriptions"] = WEBSOCKET_SUBSCRIPTIONS
    app["publishTypes"] = WEBSOCKET_PUBLISH_TYPES
    app["threads"] = {}
    app["clientThreads"] = {}
    app["threadStops"] = {}
    app["clientThreadStops"] = {}
Example #10
async def redis_prepare():
    if Config.IN_MEMORY_CACHE:
        cache = Cache()
    else:
        # noinspection PyBroadException
        try:
            sentinel = await aioredis.create_sentinel(
                sentinels=[f'redis://{Config.REDIS_HOST}:{Config.REDIS_PORT}'],
                password=Config.REDIS_PASSWORD)
            cache = RedisCache(sentinel=sentinel, master=Config.REDIS_USER)
        except Exception:
            cache = Cache()

    caches._caches['redis'] = cache
    # noinspection PyGlobalUndefined
    global _get_user
    _get_user = cached(alias='redis',
                       ttl=60,
                       key_builder=lambda f, request, email: f'user_{email}',
                       timeout=0.5)(_get_user)
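The same wiring in decorator form, assuming the 'redis' alias registered above (a sketch; the body is elided):

from aiocache import cached

@cached(alias='redis', ttl=60,
        key_builder=lambda f, request, email: f'user_{email}',
        timeout=0.5)
async def _get_user(request, email):
    ...  # fetch the user; results are cached per email for 60 seconds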
Example #11
async def init_cache(app: web.Application) -> None:
    if app['config'].redis_host:
        app['cache'] = Cache(
            Cache.REDIS,
            endpoint=app['config'].redis_host,
            port=app['config'].redis_port,
            namespace='jaundice',
            serializer=JsonSerializer(),
        )
    else:
        app['cache'] = None
Example #12
async def test_cache_middleware(aiohttp_client: TestClientFixture) -> None:
    """
    test if cache middleware works
    and doesn't prevent other middlewares from execution
    """

    handler_hits = 0
    before_cache_middleware_hits = 0
    after_cache_middleware_hits = 0

    @web.middleware
    async def before_cache_middleware(request: web.Request,
                                      handler: Any) -> web.Response:
        nonlocal before_cache_middleware_hits
        before_cache_middleware_hits += 1
        return await handler(request)

    @web.middleware
    async def after_cache_middleware(request: web.Request,
                                     handler: Any) -> web.Response:
        nonlocal after_cache_middleware_hits
        after_cache_middleware_hits += 1
        return await handler(request)

    @cached
    async def handler(_: web.Request) -> web.Response:
        nonlocal handler_hits
        handler_hits += 1
        return web.Response(body=b"Hello world")

    app = web.Application(middlewares=[before_cache_middleware])
    app.router.add_route('GET', '/', handler)
    cache = Cache(
        Cache.MEMORY,
        serializer=PickleSerializer(),
        namespace="0",
        ttl=60,
    )
    register_cache(app, cache)

    # it's an artificial case
    app.middlewares.append(after_cache_middleware)
    client = await aiohttp_client(app)

    hits = 10
    for i in range(hits):
        resp: aiohttp.ClientResponse = await client.get("/")
        assert await resp.read() == b"Hello world"
        assert resp.status == 200

    assert handler_hits == 1
    assert after_cache_middleware_hits == 1
    assert before_cache_middleware_hits == hits
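The assertions encode the middleware ordering: register_cache installs its caching middleware after before_cache_middleware, so after_cache_middleware (appended later still) and the handler run only on the single cache miss, while before_cache_middleware sees all ten requests.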
Example #13
    @classmethod
    async def start(cls):
        cls.fernet = Fernet(cls.salt.encode())
        await db_models.db.set_bind(bind=cls.db_url, min_size=1)
        cls.redis = Cache.from_url(cls.redis_url)
        cls.application = await Application.get_main() if not cls.test else await Application.get_test()
        cls.intra = IntraAPI(config=cls)
        await cls.intra.load()
        courses = await Courses.get_courses()
        cls.courses = {cursus.id: cursus.name for cursus in courses}
        cls.cursus_id = [cursus.id for cursus in courses if cursus.is_primary][0]
        cls.local = Localization()
        cls.local.load(data=read_json(cls.localization))
        cls.sub_apps = SubApps(intra=cls.intra, local=cls.local)
Example #14
async def init(loop):
    conf = load_config(PROJ_ROOT / 'config' / 'config.yml')

    app = web.Application(loop=loop)

    app.router.add_route('GET', "/api/v2/{item}", method)
    app.router.add_route('GET', "/api/{item}", method)
    cache = Cache(plugins=[HitMissRatioPlugin(), TimingPlugin()])

    print(conf['allowed_items'])

    if 'host' in conf:
        host = conf['host']
    else:
        host = '127.0.0.1'

    if 'port' in conf:
        port = conf['port']
    else:
        port = '443'

    if 'access_log_format' in conf:
        access_log_format = conf['access_log_format']
    else:
        access_log_format = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'

    if 'scheme' in conf:
        if conf['scheme'] == 'https':
            if 'sslcertchain' in conf and 'sslkey' in conf:
                ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
                ssl_context.load_verify_locations(conf['sslcertchain'],
                                                  conf['sslkey'])
                ssl_context.load_cert_chain(conf['sslcertchain'],
                                            conf['sslkey'])
            else:
                raise NameError(
                    'sslcertchain / sslkey missing in the configuration')
        else:
            ssl_context = None
    else:
        ssl_context = None

    app['config'] = conf

    user, password, realm = conf['authentication']['user'], conf[
        'authentication']['password'], conf['authentication']['realm']
    await setup(app, AllowedHosts(conf['allowed_hosts']),
                BasicAuth(user, password, realm))
    app['cache'] = cache
    return app, cache, host, port, access_log_format, ssl_context
Example #15
async def clear_cache(name: str, background_tasks: BackgroundTasks) -> None:
    cache = Cache(Cache.REDIS,
                  endpoint=app_settings.CACHE_HOST,
                  port=app_settings.CACHE_PORT)
    # TODO: this try/except wrap temporarily fixes an issue in the aiocache library that raises a TypeError
    # when there are no keys in Redis with the given namespace. This is fixed on the main branch of aiocache
    # but is not yet released (23 June 2022).
    try:
        if name == "all":
            await cache.clear(namespace="orchestrator")
        else:
            await cache.clear(namespace=f"orchestrator:{name}")
    except TypeError:
        pass
Example #16
def endpoint_cache(function: _AsyncCallable) -> _AsyncCallable:
    from .routing import request_headers, response_headers  # noqa:F401

    vf = CachedValidatedFunction(function, config={})
    cache: BaseCache = AioCache.from_url(CACHE_URI)  # type:ignore
    config: CacheConfig = getattr(function, "cache_config",
                                  CacheConfig.new(function))

    cache.namespace, cache.ttl = config.namespace, config.ttl.total_seconds()

    if not CACHE_ENABLED:
        config.enabled = False

    @wraps(function)
    async def wrapper(*args, **kwargs):
        cache_policy: str = request_headers.get().get("cache-control",
                                                      "public")

        if (not config.enabled) or (cache_policy.casefold() == "no-store"):
            return await vf.call(*args, **kwargs)

        model = vf.serialize(args=args, kwargs=kwargs)
        key = hashlib.md5(model.json(
            exclude={"self"}, sort_keys=True,
            ensure_ascii=False).encode()).hexdigest()

        if cache_policy.casefold() == "no-cache":
            await cache.delete(key)

        if await cache.exists(key):
            logger.debug(f"Request to endpoint <g>{function.__qualname__}</g> "
                         f"restoring from <e>{key=}</e> in cache data.")
            response_headers.get().setdefault("X-Cache-Hit", key)
            result, cache_date = await cache.get(key)
        else:
            result, cache_date = await vf.execute(model), datetime.now()
            await cache.set(key, (result, cache_date))

        response_headers.get().update({
            "Cache-Control": "public",
            "Expires": format_date_time(cache_date.timestamp() + cache.ttl),
        })

        return result

    return wrapper  # type:ignore
Example #17
async def get_with_client(url, access_token=None, cache=None, client=None):
    """Fetches the prismic api JSON.
    Returns :class:`~Api` object.

    :param url: URL to the api of the repository (mandatory).
    :param access_token: The access token (optional).
    :param cache: The cache object. Optional, will default to a in-memory cache if None is passed.
    :param client: The httpx client. If not passed, one client will be created for each subsequent http request.
    """
    if cache is None:
        cache = Cache(Cache.MEMORY)

    return Api(
        await get_json(url, access_token=access_token, cache=cache, ttl=5, client=client),
        access_token,
        cache,
        client
    )
Example #18
    def __init__(
        self,
        app: "Application",
        twitch_bearer_token: str,
        logger: "Logger" = None,
        *args,
        **kwargs,
    ):
        self.app = app
        self.config = config = app["config"]
        intents = discord.Intents.default()
        intents.members = True
        super().__init__(
            owner_id=self.config["bot"].owner_id,
            command_prefix=determine_prefix,
            status=discord.Status.idle,
            intents=intents,
            *args,
            **kwargs,
        )
        self.db = app["db"]
        self.logger = logger
        self.description = config["bot"].description
        self.default_prefix = config["bot"].default_prefix
        self.session = aiohttp.ClientSession()
        self.pool = concurrent.futures.ThreadPoolExecutor()
        self.launch_time = None
        self.twitch_client = twitch.Client(config["twitch"].client_id,
                                           twitch_bearer_token, self.session)
        self.dbl_client = (dbl.DBLClient(
            self, config["bot"].dbl_token, session=self.session, autopost=True)
                           if config["bot"].dbl_token else None)
        self.service_guild: Optional[discord.Guild] = None
        self.errors_channel: Optional[discord.TextChannel] = None
        self.reports_channel: Optional[discord.TextChannel] = None
        self.log_channel: Optional[discord.TextChannel] = None
        cors = self.app["cors"]
        resource = cors.add(
            self.app.router.add_resource(r"/wh/twitch/{topic}/{id}"))
        cors.add(resource.add_route("GET", self.handler_get))
        cors.add(resource.add_route("POST", self.handler_post))
        self.cache = Cache()
Example #19
async def main():
    PORT = config["server"]["port"]
    DEBUG_MODE = config["debug_mode"]

    # Create the global connection pool.
    async with aiomysql.create_pool(
        host=config["db"]["host"],
        port=config["db"]["port"],
        user=config["db"]["user"],
        password=str(config["db"]["pwd"]),
        db=config["db"]["database"],
    ) as db:
        cache = Cache(Cache.MEMCACHED,
                      endpoint="127.0.0.1",
                      port=11211,
                      namespace="main")
        # await maybe_create_tables(db)
        app = Application(db, cache)
        if DEBUG_MODE:
            # single-process startup
            app.listen(PORT)
        else:
            # multi-process startup
            server = HTTPServer(app)
            server.ssl_options = {
                "certfile": config["server"]["ssl_cert"],
                "keyfile": config["server"]["ssl_key"]
            }
            # On Linux, bind does not take effect and listen must be used;
            # on macOS, listen does not take effect and bind must be used.
            if sys.platform == 'linux':
                server.listen(PORT)
            else:
                server.bind(PORT)
            server.start(num_processes=config["server"]["num_processes"])
        # the server will simply run until interrupted
        # with Ctrl-C, but if you want to shut down more gracefully,
        # call shutdown_event.set().
        print(f"tornado running on port {PORT}")
        shutdown_event = tornado.locks.Event()
        await shutdown_event.wait()
Example #20
async def cache():
    conf = load_config(PROJ_ROOT / 'config' / 'config-gunicorn.yml')

    logging.basicConfig(level=logging.DEBUG)
    app = web.Application()

    app.router.add_route('GET', "/api/v2/{item}", method)
    app.router.add_route('GET', "/api/v2/{item}/{domain}", method)
    app.router.add_route('GET', "/api/{item}", method)
    app.router.add_route('GET', "/api/{item}/{domain}", method)
    memcached_host = conf['cache']['memcached_host']
    memcached_port = conf['cache']['memcached_port']
    #cache = Cache(plugins=[HitMissRatioPlugin(), TimingPlugin()])
    lookup_type = {}
    cache = Cache(Cache.MEMCACHED,
                  endpoint=memcached_host,
                  port=memcached_port,
                  serializer=JsonSerializer(),
                  plugins=[HitMissRatioPlugin(),
                           TimingPlugin()])

    if 'statsd' in conf:
        if conf['statsd']['enable']:
            hostname = socket.gethostname().split('.', 1)[0]
            c = statsd.StatsClient(conf['statsd']['host'],
                                   conf['statsd']['port'],
                                   prefix=conf['statsd']['prefix'])
            t = MetricsTimer(conf['statsd']['interval'], cache_metrics, cache,
                             lookup_type, c, hostname)

    app['config'] = conf

    user, password, realm = conf['authentication']['user'], conf[
        'authentication']['password'], conf['authentication']['realm']
    await setup(app, AllowedHosts(conf['allowed_hosts']),
                BasicAuth(user, password, realm))
    app['cache'] = cache
    app['lookup_type'] = lookup_type
    return app
Example #21
    async def test_cache(self, mocked_fetch, morpher):
        mocked_fetch.side_effect = fetch_return_inosmi_html
        results = []
        cache = Cache(Cache.MEMORY)
        await process_article(
            session=None,
            morph=morpher,
            charged_words=['бодрость'],
            url='http://localhost',
            results=results,
            cache=cache,
        )
        assert mocked_fetch.call_count == 1
        await process_article(
            session=None,
            morph=morpher,
            charged_words=['бодрость'],
            url='http://localhost',
            results=results,
            cache=cache,
        )
        assert mocked_fetch.call_count == 1  # second call returned from cache
Example #22
async def get(url, access_token=None, cache=None, **client_kwargs):
    """Fetches the prismic api JSON. Generates only one httpx client for the async context.
    Yields :class:`~Api` object.

    Usage:
    >>> import prismic
    >>> async with prismic.get("http://your-repo.prismic.io/api", "access_token") as api:
    ...     doc = await api.get_by_uid("speculoos-macaron")

    :param url: URL to the api of the repository (mandatory).
    :param access_token: The access token (optional).
    :param cache: The cache object. Optional; defaults to an in-memory cache if None is passed.
    """
    if cache is None:
        cache = Cache(Cache.MEMORY)

    async with httpx.AsyncClient(**client_kwargs) as client:
        yield Api(
            await get_json(url, access_token=access_token, cache=cache, ttl=5, client=client),
            access_token,
            cache,
            client
        )
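Since get() yields and the docstring shows it used with async with, it is presumably wrapped with contextlib.asynccontextmanager in the original module; the decorator appears to have been stripped by the extractor.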
Example #23
import asyncio

from aiocache import caches, Cache
from aiocache.serializers import PickleSerializer

cache = Cache(Cache.REDIS,
              endpoint="127.0.0.1",
              port=6379,
              namespace="cache_hh",
              serializer=PickleSerializer())
cache_time = 86400
Example #24
import asyncio
import logging
import random

from aiocache import Cache
from aiocache.plugins import BasePlugin, HitMissRatioPlugin, TimingPlugin

logger = logging.getLogger(__name__)


class MyCustomPlugin(BasePlugin):
    async def pre_set(self, *args, **kwargs):
        logger.info("I'm the pre_set hook being called with %s %s", args, kwargs)

    async def post_set(self, *args, **kwargs):
        logger.info("I'm the post_set hook being called with %s %s", args, kwargs)


cache = Cache(plugins=[HitMissRatioPlugin(),
                       TimingPlugin(),
                       MyCustomPlugin()],
              namespace="main")


async def run():
    await cache.set("a", "1")
    await cache.set("b", "2")
    await cache.set("c", "3")
    await cache.set("d", "4")

    possible_keys = ["a", "b", "c", "d", "e", "f"]

    for t in range(1000):
        await cache.get(random.choice(possible_keys))

    assert cache.hit_miss_ratio["hit_ratio"] > 0.5
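The snippet defines run() without invoking it; a minimal entry point consistent with the imports above:

if __name__ == "__main__":
    asyncio.run(run())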
Example #25
import asyncio
import logging

from aiocache import Cache
from aiocache.lock import RedLock

logger = logging.getLogger(__name__)
cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')


async def expensive_function():
    logger.warning('Expensive is being executed...')
    await asyncio.sleep(1)
    return 'result'


async def my_view():

    async with RedLock(cache, 'key', lease=2):  # Wait at most 2 seconds
        result = await cache.get('key')
        if result is not None:
            logger.info('Found the value in the cache hurray!')
            return result

        result = await expensive_function()
        await cache.set('key', result)
        return result


async def concurrent():
    await asyncio.gather(my_view(), my_view(), my_view())
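A minimal entry point for the lock demo, assuming a Redis instance on 127.0.0.1:6379:

if __name__ == "__main__":
    # Only one of the three concurrent views should log
    # 'Expensive is being executed...'; the others find the value cached.
    asyncio.run(concurrent())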
Example #26
from aiocache import Cache

from backend import config
from backend.db_clients import get_db_client

# Initialize cache
cache = Cache()
# Initialize db client
db_client = get_db_client(config.DATABASE_TYPE)
Example #27
async def get_cached_tags():
    cache = Cache()
    result = await cache.get("tags")
    logger.info("from cache, tags in size = {}".format(
        0 if result is None else len(result)))
    return result
Example #28
async def get_cached_categories():
    cache = Cache()
    result = await cache.get("categories")
    logger.info("from cached, categories in size = {}".format(
        0 if result is None else len(result)))
    return result
Example #29
async def get_cached_products(page):
    cache = Cache()
    result = await cache.get("products{}".format(page))
    logger.info("from cache, products in page = {0} in size = {1}".format(
        page, 0 if result is None else len(result)))
    return result
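Examples #27 to #29 differ only in the cache key and the log label; a generic helper would collapse them (a sketch, reusing the same module-level logger):

async def get_cached(key: str):
    cache = Cache()
    result = await cache.get(key)
    logger.info("from cache, %s in size = %s",
                key, 0 if result is None else len(result))
    return result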
Example #30
     "project-jupyter/jupyter-meta-documentation/translate/#{language}/{resource}/1"
     "?q={query_string}"),
}

FILTER_RESOURCES_TO_BE_TRANSLATED = {
    "python": lambda r: r.split("--")[0] in ["bugs", "howto", "library"],
    "jupyter": None,
}

WEEK_IN_SECONDS = 604_800

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()

logger.info(config.CACHE_URL)
cache = Cache.from_url(config.CACHE_URL)

STRINGS_CACHE = defaultdict(dict)


async def transifex_api(url, project, data=None, retrying=False, ttl=3600):
    url = urljoin(TRANSIFEX_API[project], url)
    if not data and (in_cache := await cache.get(url)):
        return in_cache

    if retrying:
        logger.debug("retrying url=%s", url)

    auth = aiohttp.BasicAuth(login="******", password=config.TRANSIFEX_TOKEN)
    async with aiohttp.ClientSession(auth=auth) as session:
        http_method = session.put if data else session.get