Example #1
    async def _startup(self) -> None:
        """Creates needed sessions.
        """

        await Sessions.smtp.connect()
        await Sessions.database.connect()
        Sessions.aiohttp = ClientSession()

        try:
            Sessions.cache = Cache(Cache.REDIS)
            await Sessions.cache.exists("connection")
        except ConnectionRefusedError:
            Sessions.cache = Cache(Cache.MEMORY)
            logger.warning(
                "Memory cache being used, use redis for production."
            )

        if self.clear_cache:
            await Sessions.cache.clear()

        if Config.upload_type == B2UploadSettings:
            await self.b2.authorize()

        self.background_tasks = await create_scheduler()
        for to_spawn in TASKS_TO_SPAWN:
            await self.background_tasks.spawn(to_spawn())

        await cache_community_types(self.community_types)
        await create_product_and_set(self.product_name)
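# Aside: the probe-and-fall-back pattern above, isolated into a helper (a
# minimal sketch assuming only aiocache; any cache operation raises
# ConnectionRefusedError while Redis is unreachable, so a cheap exists()
# call serves as the probe):
from aiocache import Cache

async def connect_cache(clear: bool = False) -> Cache:
    try:
        cache = Cache(Cache.REDIS)
        await cache.exists("connection")  # connectivity probe
    except ConnectionRefusedError:
        cache = Cache(Cache.MEMORY)  # degraded, process-local fallback
    if clear:
        await cache.clear()
    return cache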
Example #2
@pytest.fixture  # yield-style fixture; the teardown below runs after each test
def redis_cache(event_loop):
    cache = Cache(Cache.REDIS, namespace="test", pool_max_size=1)
    yield cache

    for _, pool in RedisBackend.pools.items():
        pool.close()
        event_loop.run_until_complete(pool.wait_closed())
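# A test might consume the fixture like this (an illustrative sketch; assumes
# pytest-asyncio, which also provides the event_loop fixture used above):
#
#     @pytest.mark.asyncio
#     async def test_set_get(redis_cache):
#         await redis_cache.set("key", "value")
#         assert await redis_cache.get("key") == "value"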
Example #3
async def test_exception_in_cache_backend(
        aiohttp_client: TestClientFixture) -> None:
    """check if application still works if cache backend is misconfigured
    or cache backend doesn't work"""
    handler_hits = 0

    @cached
    async def handler(_: web.Request) -> web.Response:
        nonlocal handler_hits
        handler_hits += 1
        return web.Response(body=b"Hello world")

    app = web.Application()
    app.router.add_route('GET', '/', handler)
    cache = Cache(
        Cache.REDIS,
        endpoint="such.hostname.must.not.exist",
        serializer=PickleSerializer(),
        namespace="main",
        ttl=60,
    )
    register_cache(app, cache)
    client = await aiohttp_client(app)

    hits = 10
    for _ in range(hits):
        resp = await client.get("/")
        assert await resp.read() == b"Hello world"
        assert resp.status == 200
    assert handler_hits == hits
Example #4
async def clear_cache(name: str, background_tasks: BackgroundTasks) -> None:
    cache = Cache(Cache.REDIS,
                  endpoint=app_settings.CACHE_HOST,
                  port=app_settings.CACHE_PORT)
    if name == "all":
        background_tasks.add_task(cache.delete, "/*")
    else:
        background_tasks.add_task(cache.delete, f"/{name}*")
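# Note: aiocache's Cache.delete removes a single literal key, so "/*" and
# f"/{name}*" above only match entries stored under those exact strings.
# True wildcard invalidation needs the raw Redis client, e.g. (an
# illustrative sketch, not part of the original code):
#
#     for key in await cache.raw("keys", f"/{name}*"):
#         await cache.raw("delete", key)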
Example #5
    def test_new(self, cache_type):
        kwargs = {"a": 1, "b": 2}
        cache_class = Cache.get_scheme_class(cache_type)

        with patch("aiocache.{}.__init__".format(cache_class.__name__)) as init:
            cache = Cache(cache_class, **kwargs)
            assert isinstance(cache, cache_class)
            init.assert_called_once_with(**kwargs)
Example #6
def initAppObjects(app):
    app["cache"] = Cache(Cache.MEMORY)
    app["resolver"] = aiodns.DNSResolver(app.loop)
    app["websockets"] = set()
    app["subscriptions"] = WEBSOCKET_SUBSCRIPTIONS
    app["publishTypes"] = WEBSOCKET_PUBLISH_TYPES
    app["threads"] = {}
    app["clientThreads"] = {}
    app["threadStops"] = {}
    app["clientThreadStops"] = {}
Example #7
def test_alias():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(default_cache())
    loop.run_until_complete(alt_cache())

    cache = Cache(Cache.REDIS)
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.close())

    loop.run_until_complete(caches.get('default').close())
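# For context: alias-based caches such as 'default' (and whatever alias
# alt_cache() uses) must be configured up front. A minimal sketch of such a
# configuration (values are illustrative, not from the original source):
from aiocache import caches

caches.set_config({
    'default': {
        'cache': "aiocache.SimpleMemoryCache",
        'serializer': {'class': "aiocache.serializers.StringSerializer"},
    },
    'redis_alt': {
        'cache': "aiocache.RedisCache",
        'endpoint': "127.0.0.1",
        'port': 6379,
    },
})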
Example #8
def test_cached():
    cache = Cache(Cache.REDIS,
                  endpoint="127.0.0.1",
                  port=6379,
                  namespace="main")
    loop = asyncio.get_event_loop()
    loop.run_until_complete(cached_call())
    assert loop.run_until_complete(cache.exists("key")) is True
    loop.run_until_complete(cache.delete("key"))
    loop.run_until_complete(cache.close())
Example #9
async def redis_prepare():
    if Config.IN_MEMORY_CACHE:
        cache = Cache()
    else:
        # noinspection PyBroadException
        try:
            sentinel = await aioredis.create_sentinel(
                sentinels=[f'redis://{Config.REDIS_HOST}:{Config.REDIS_PORT}'],
                password=Config.REDIS_PASSWORD)
            cache = RedisCache(sentinel=sentinel, master=Config.REDIS_USER)
        except Exception:  # broad on purpose: any Redis failure falls back to memory
            cache = Cache()

    caches._caches['redis'] = cache
    # noinspection PyGlobalUndefined
    global _get_user
    _get_user = cached(alias='redis',
                       ttl=60,
                       key_builder=lambda f, request, email: f'user_{email}',
                       timeout=0.5)(_get_user)
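# Note: cached(...)(f) is the decorator form applied imperatively; it is
# equivalent to decorating _get_user with @cached(alias='redis', ...).
# Doing it inside redis_prepare() ensures the 'redis' alias has been
# registered in caches first.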
Example #10
async def init_cache(app: web.Application) -> None:
    if app['config'].redis_host:
        app['cache'] = Cache(
            Cache.REDIS,
            endpoint=app['config'].redis_host,
            port=app['config'].redis_port,
            namespace='jaundice',
            serializer=JsonSerializer(),
        )
    else:
        app['cache'] = None
Example #11
async def test_cache_middleware(aiohttp_client: TestClientFixture) -> None:
    """
    test if cache middleware works
    and doesn't prevent other middlewares from execution
    """

    handler_hits = 0
    before_cache_middleware_hits = 0
    after_cache_middleware_hits = 0

    @web.middleware
    async def before_cache_middleware(request: web.Request,
                                      handler: Any) -> web.Response:
        nonlocal before_cache_middleware_hits
        before_cache_middleware_hits += 1
        return await handler(request)

    @web.middleware
    async def after_cache_middleware(request: web.Request,
                                     handler: Any) -> web.Response:
        nonlocal after_cache_middleware_hits
        after_cache_middleware_hits += 1
        return await handler(request)

    @cached
    async def handler(_: web.Request) -> web.Response:
        nonlocal handler_hits
        handler_hits += 1
        return web.Response(body=b"Hello world")

    app = web.Application(middlewares=[before_cache_middleware])
    app.router.add_route('GET', '/', handler)
    cache = Cache(
        Cache.MEMORY,
        serializer=PickleSerializer(),
        namespace="0",
        ttl=60,
    )
    register_cache(app, cache)

    # an artificial case: append a middleware after the cache middleware is set up
    app.middlewares.append(after_cache_middleware)
    client = await aiohttp_client(app)

    hits = 10
    for _ in range(hits):
        resp: aiohttp.ClientResponse = await client.get("/")
        assert await resp.read() == b"Hello world"
        assert resp.status == 200

    assert handler_hits == 1
    assert after_cache_middleware_hits == 1
    assert before_cache_middleware_hits == hits
Example #12
async def init(loop):
    conf = load_config(PROJ_ROOT / 'config' / 'config.yml')

    app = web.Application(loop=loop)

    app.router.add_route('GET', "/api/v2/{item}", method)
    app.router.add_route('GET', "/api/{item}", method)
    cache = Cache(plugins=[HitMissRatioPlugin(), TimingPlugin()])

    print(conf['allowed_items'])

    if 'host' in conf:
        host = conf['host']
    else:
        host = '127.0.0.1'

    if 'port' in conf:
        port = conf['port']
    else:
        port = '443'

    if 'access_log_format' in conf:
        access_log_format = conf['access_log_format']
    else:
        access_log_format = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'

    if 'scheme' in conf:
        if conf['scheme'] == 'https':
            if 'sslcertchain' in conf and 'sslkey' in conf:
                ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
                ssl_context.load_cert_chain(conf['sslcertchain'],
                                            conf['sslkey'])
            else:
                raise NameError(
                    'sslcertchain / sslkey missing in the configuration')
        else:
            ssl_context = None
    else:
        ssl_context = None

    app['config'] = conf

    auth = conf['authentication']
    user, password, realm = auth['user'], auth['password'], auth['realm']
    await setup(app, AllowedHosts(conf['allowed_hosts']),
                BasicAuth(user, password, realm))
    app['cache'] = cache
    return app, cache, host, port, access_log_format, ssl_context
Example #13
async def clear_cache(name: str, background_tasks: BackgroundTasks) -> None:
    cache = Cache(Cache.REDIS,
                  endpoint=app_settings.CACHE_HOST,
                  port=app_settings.CACHE_PORT)
    # TODO: this try/except temporarily works around a bug in the aiocache
    # library that raises a TypeError when there are no keys in Redis under
    # the given namespace. This is fixed on the aiocache main branch but not
    # yet released (as of 23 June 2022).
    try:
        if name == "all":
            await cache.clear(namespace="orchestrator")
        else:
            await cache.clear(namespace=f"orchestrator:{name}")
    except TypeError:
        pass
Example #14
async def get_with_client(url, access_token=None, cache=None, client=None):
    """Fetches the prismic api JSON.
    Returns :class:`~Api` object.

    :param url: URL to the api of the repository (mandatory).
    :param access_token: The access token (optional).
    :param cache: The cache object. Optional; defaults to an in-memory cache if None is passed.
    :param client: The httpx client. If not passed, a new client is created for each HTTP request.
    """
    if cache is None:
        cache = Cache(Cache.MEMORY)

    return Api(
        await get_json(url, access_token=access_token, cache=cache, ttl=5, client=client),
        access_token,
        cache,
        client
    )
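# Possible usage (illustrative; the repository URL is a placeholder):
#
#     api = await get_with_client("https://your-repo.prismic.io/api")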
Example #15
    def __init__(
        self,
        app: "Application",
        twitch_bearer_token: str,
        logger: Optional["Logger"] = None,
        *args,
        **kwargs,
    ):
        self.app = app
        self.config = config = app["config"]
        intents = discord.Intents.default()
        intents.members = True
        super().__init__(
            owner_id=self.config["bot"].owner_id,
            command_prefix=determine_prefix,
            status=discord.Status.idle,
            intents=intents,
            *args,
            **kwargs,
        )
        self.db = app["db"]
        self.logger = logger
        self.description = config["bot"].description
        self.default_prefix = config["bot"].default_prefix
        self.session = aiohttp.ClientSession()
        self.pool = concurrent.futures.ThreadPoolExecutor()
        self.launch_time = None
        self.twitch_client = twitch.Client(config["twitch"].client_id,
                                           twitch_bearer_token, self.session)
        self.dbl_client = (dbl.DBLClient(
            self, config["bot"].dbl_token, session=self.session, autopost=True)
                           if config["bot"].dbl_token else None)
        self.service_guild: Optional[discord.Guild] = None
        self.errors_channel: Optional[discord.TextChannel] = None
        self.reports_channel: Optional[discord.TextChannel] = None
        self.log_channel: Optional[discord.TextChannel] = None
        cors = self.app["cors"]
        resource = cors.add(
            self.app.router.add_resource(r"/wh/twitch/{topic}/{id}"))
        cors.add(resource.add_route("GET", self.handler_get))
        cors.add(resource.add_route("POST", self.handler_post))
        self.cache = Cache()
Example #16
async def main():
    PORT = config["server"]["port"]
    DEBUG_MODE = config["debug_mode"]

    # Create the global connection pool.
    async with aiomysql.create_pool(
        host=config["db"]["host"],
        port=config["db"]["port"],
        user=config["db"]["user"],
        password=str(config["db"]["pwd"]),
        db=config["db"]["database"],
    ) as db:
        cache = Cache(Cache.MEMCACHED,
                      endpoint="127.0.0.1",
                      port=11211,
                      namespace="main")
        # await maybe_create_tables(db)
        app = Application(db, cache)
        if DEBUG_MODE:
            # Single-process startup
            app.listen(PORT)
        else:
            # Multi-process startup
            server = HTTPServer(app)
            server.ssl_options = {
                "certfile": config["server"]["ssl_cert"],
                "keyfile": config["server"]["ssl_key"]
            }
            # On Linux, bind() does not take effect here, so use listen();
            # on macOS, listen() does not take effect, so use bind()
            if sys.platform == 'linux':
                server.listen(PORT)
            else:
                server.bind(PORT)
            server.start(num_processes=config["server"]["num_processes"])
        # the server will simply run until interrupted
        # with Ctrl-C, but if you want to shut down more gracefully,
        # call shutdown_event.set().
        print(f"tornado running on port {PORT}")
        shutdown_event = tornado.locks.Event()
        await shutdown_event.wait()
Example #17
    async def test_cache(self, mocked_fetch, morpher):
        mocked_fetch.side_effect = fetch_return_inosmi_html
        results = []
        cache = Cache(Cache.MEMORY)
        await process_article(
            session=None,
            morph=morpher,
            charged_words=['бодрость'],
            url='http://localhost',
            results=results,
            cache=cache,
        )
        assert mocked_fetch.call_count == 1
        await process_article(
            session=None,
            morph=morpher,
            charged_words=['бодрость'],
            url='http://localhost',
            results=results,
            cache=cache,
        )
        assert mocked_fetch.call_count == 1  # second call is served from the cache
Example #18
async def cache():
    conf = load_config(PROJ_ROOT / 'config' / 'config-gunicorn.yml')

    logging.basicConfig(level=logging.DEBUG)
    app = web.Application()

    app.router.add_route('GET', "/api/v2/{item}", method)
    app.router.add_route('GET', "/api/v2/{item}/{domain}", method)
    app.router.add_route('GET', "/api/{item}", method)
    app.router.add_route('GET', "/api/{item}/{domain}", method)
    memcached_host = conf['cache']['memcached_host']
    memcached_port = conf['cache']['memcached_port']
    #cache = Cache(plugins=[HitMissRatioPlugin(), TimingPlugin()])
    lookup_type = {}
    cache = Cache(Cache.MEMCACHED,
                  endpoint=memcached_host,
                  port=memcached_port,
                  serializer=JsonSerializer(),
                  plugins=[HitMissRatioPlugin(),
                           TimingPlugin()])

    if 'statsd' in conf:
        if conf['statsd']['enable']:
            hostname = socket.gethostname().split('.', 1)[0]
            c = statsd.StatsClient(conf['statsd']['host'],
                                   conf['statsd']['port'],
                                   prefix=conf['statsd']['prefix'])
            t = MetricsTimer(conf['statsd']['interval'], cache_metrics, cache,
                             lookup_type, c, hostname)

    app['config'] = conf

    auth = conf['authentication']
    user, password, realm = auth['user'], auth['password'], auth['realm']
    await setup(app, AllowedHosts(conf['allowed_hosts']),
                BasicAuth(user, password, realm))
    app['cache'] = cache
    app['lookup_type'] = lookup_type
    return app
Example #19
from contextlib import asynccontextmanager


@asynccontextmanager  # needed for the `async with` usage shown in the docstring
async def get(url, access_token=None, cache=None, **client_kwargs):
    """Fetches the prismic api JSON. Generates only one httpx client for the async context.
    Yields :class:`~Api` object.

    Usage:
    >>> import prismic
    >>> async with prismic.get("http://your-repo.prismic.io/api", "access_token") as api:
    ...     doc = await api.get_by_uid("speculoos-macaron")

    :param url: URL to the api of the repository (mandatory).
    :param access_token: The access token (optional).
    :param cache: The cache object. Optional; defaults to an in-memory cache if None is passed.
    """
    if cache is None:
        cache = Cache(Cache.MEMORY)

    async with httpx.AsyncClient(**client_kwargs) as client:
        yield Api(
            await get_json(url, access_token=access_token, cache=cache, ttl=5, client=client),
            access_token,
            cache,
            client
        )
Example #20
import asyncio
import zlib

from aiocache import Cache
from aiocache.serializers import BaseSerializer


class CompressionSerializer(BaseSerializer):

    # zlib works with bytes; a None encoding makes the backend store and
    # return raw bytes instead of decoding to str
    DEFAULT_ENCODING = None

    def dumps(self, value):
        print("I've received:\n{}".format(value))
        compressed = zlib.compress(value.encode())
        print("But I'm storing:\n{}".format(compressed))
        return compressed

    def loads(self, value):
        print("I've retrieved:\n{}".format(value))
        decompressed = zlib.decompress(value).decode()
        print("But I'm returning:\n{}".format(decompressed))
        return decompressed


cache = Cache(Cache.REDIS, serializer=CompressionSerializer(), namespace="main")


async def serializer():
    text = (
        "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt"
        "ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
        "ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in"
        "reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur"
        "sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit"
        "anim id est laborum.")
    await cache.set("key", text)
    print("-----------------------------------")
    real_value = await cache.get("key")
    compressed_value = await cache.raw("get", "main:key")
    assert len(compressed_value) < len(real_value.encode())
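# A minimal runner for the demo above (assumes Redis at 127.0.0.1:6379):
async def main():
    await serializer()
    await cache.delete("key")
    await cache.close()


if __name__ == "__main__":
    asyncio.run(main())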
Example #21
import logging
import random

from aiocache import Cache
from aiocache.plugins import BasePlugin, HitMissRatioPlugin, TimingPlugin

logger = logging.getLogger(__name__)


class MyCustomPlugin(BasePlugin):
    async def pre_set(self, *args, **kwargs):
        logger.info("I'm the pre_set hook being called with %s %s" %
                    (args, kwargs))

    async def post_set(self, *args, **kwargs):
        logger.info("I'm the post_set hook being called with %s %s" %
                    (args, kwargs))


cache = Cache(plugins=[HitMissRatioPlugin(),
                       TimingPlugin(),
                       MyCustomPlugin()],
              namespace="main")


async def run():
    await cache.set("a", "1")
    await cache.set("b", "2")
    await cache.set("c", "3")
    await cache.set("d", "4")

    possible_keys = ["a", "b", "c", "d", "e", "f"]

    for _ in range(1000):
        await cache.get(random.choice(possible_keys))

    assert cache.hit_miss_ratio["hit_ratio"] > 0.5
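# A minimal runner for the demo above (in-memory cache, so no external
# services are needed). TimingPlugin records per-command timings on
# cache.profiling; HitMissRatioPlugin maintains the counters asserted above:
if __name__ == "__main__":
    import asyncio
    asyncio.run(run())
    print(cache.hit_miss_ratio)
    print(cache.profiling)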
Example #22
from aiocache import Cache

from backend import config
from backend.db_clients import get_db_client

# Initialize cache
cache = Cache()
# Initialize db client
db_client = get_db_client(config.DATABASE_TYPE)
Example #23
import asyncio
import logging

from aiocache import Cache
from aiocache.lock import RedLock

logger = logging.getLogger(__name__)
cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')


async def expensive_function():
    logger.warning('Expensive is being executed...')
    await asyncio.sleep(1)
    return 'result'


async def my_view():

    async with RedLock(cache, 'key', lease=2):  # Wait at most 2 seconds
        result = await cache.get('key')
        if result is not None:
            logger.info('Found the value in the cache hurray!')
            return result

        result = await expensive_function()
        await cache.set('key', result)
        return result


async def concurrent():
    await asyncio.gather(my_view(), my_view(), my_view())
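# Running the three views concurrently shows the point of the lock: only one
# coroutine pays for expensive_function(); the others block on the RedLock
# and then read the freshly cached value (assumes a local Redis, as above).
if __name__ == "__main__":
    asyncio.run(concurrent())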
Example #24
async def get_cached_categories():
    cache = Cache()
    result = await cache.get("categories")
    logger.info("from cached, categories in size = {}".format(
        0 if result is None else len(result)))
    return result
Example #25
async def get_cached_tags():
    cache = Cache()
    result = await cache.get("tags")
    logger.info("from cache, tags in size = {}".format(
        0 if result is None else len(result)))
    return result
Example #26
async def get_cached_products(page):
    cache = Cache()
    result = await cache.get("products{}".format(page))
    logger.info("from cache, products in page = {0} in size = {1}".format(
        page, 0 if result is None else len(result)))
    return result
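# The matching write side for these readers is not shown; it presumably looks
# something like this (an illustrative sketch, not from the original source):
async def set_cached_products(page, products):
    cache = Cache()
    await cache.set("products{}".format(page), products)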
Example #27
import asyncio

from aiocache import caches, Cache
from aiocache.serializers import PickleSerializer

cache = Cache(Cache.REDIS,
              endpoint="127.0.0.1",
              port=6379,
              namespace="cache_hh",
              serializer=PickleSerializer())
cache_time = 86400
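# The module-level cache and cache_time above are presumably consumed along
# these lines (an illustrative sketch; fetch_vacancies is a hypothetical
# helper, not from the original source):
async def get_vacancies_cached(query: str):
    value = await cache.get(query)
    if value is None:
        value = await fetch_vacancies(query)  # hypothetical fetcher
        await cache.set(query, value, ttl=cache_time)
    return value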
Example #28
import asyncio
import os
from ast import literal_eval
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import timedelta
from random import choice

from dotenv import load_dotenv
from google.cloud import bigquery

from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer

from pypi_tools.logic import parse_xml_file_with_version

load_dotenv()
client = bigquery.Client()

cache = Cache(Cache.REDIS, endpoint=os.environ.get('REDIS_HOST', '127.0.0.1'), port=6379, namespace="main")

async def get_download_stats_for_period_from_today(package_name, days, current_date):
    """ method returns a sorted by date form yesterday to past dict with downloads numbers """
    threads = []
    results = []
    dates = [int((current_date - timedelta(days=i+1)).isoformat().replace("-", "")) for i in range(days)]
    cached_data = await asyncio.gather(*[cache.get(f'{package_name}:{date}') for date in dates])
    cached_result = {}
    dates_with_no_cache = []
    for num, result in enumerate(cached_data):
        date_ = dates[num]
        if result:
            cached_result[date_] = result
        else:
            dates_with_no_cache.append(date_)
Example #29
from asyncio import Queue  # assumed source of Queue; not shown in this snippet
from typing import Literal, Optional, Union

from aiocache import Cache, caches
from bs4 import BeautifulSoup, Tag
from fastapi import FastAPI, WebSocket
from fastapi.encoders import jsonable_encoder
from fastapi.responses import Response
from pydantic import BaseModel

app = FastAPI()
queue: Optional[Queue] = None
caches.set_config({
    'default': {
        'cache': "aiocache.SimpleMemoryCache",
        'serializer': {
            'class': "aiocache.serializers.PickleSerializer"
        }
    }
})
cache = Cache(Cache.MEMORY)

Mutations = Union[Literal["PREVIOUS_NODE"], Literal["NEXT_NODE"],
                  Literal["CLICK_NODE"], Literal["PUSH_ROUTE"]]


class Navigation(BaseModel):
    mutation: Mutations
    data: dict


@app.post("/navigate")
async def post(navigation: Navigation):
    resp = {
Example #30
from aiocache import Cache
from aiocache.serializers import JsonSerializer


async def reuse_data():
    cache = Cache(serializer=JsonSerializer())  # Not ideal to define here
    # The key below is the one defined in the `cached` decorator
    data = await cache.get("my_custom_key")
    return data
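# For context, the producing side might look like this (an illustrative
# sketch, not from the original source; the explicit key is what lets
# reuse_data() find the entry):
from aiocache import cached


@cached(ttl=10, key="my_custom_key", serializer=JsonSerializer())
async def produce_data():
    return {"a": 1}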