# Exemplo n.º 1
def get_app(config: Optional[Config] = None) -> web.Application:
    """Build and configure the news-filtering aiohttp application."""
    if config is None:
        # Fall back to default settings when the caller supplies none.
        config = Config()

    # Dictionary locations are fixed; make them configurable if ever needed.
    dictionary_paths = [
        os.path.join(BASE_DIR, "charged_dict/negative_words.txt"),
        os.path.join(BASE_DIR, "charged_dict/positive_words.txt"),
    ]
    charged_words = read_charged_words(dictionary_paths)

    app = web.Application(middlewares=[error_middleware])
    app["filter_config"] = config
    app.add_routes([web.get('/', handle_news_list)])
    app.cleanup_ctx.append(aiohttp_client)

    scorer = ArticlesScorer(
        charged_words=charged_words,
        morph=pymorphy2.MorphAnalyzer(),
    )
    app["scorer"] = scorer

    if config.redis_host:
        # Swap the scoring strategy for a Redis-backed cached variant.
        redis_cached = cached(
            cache=Cache.REDIS,
            serializer=PickleSerializer(),
            endpoint=config.redis_host,
            port=config.redis_port,
            namespace="main",
            ttl=60,
        )
        scorer.score_article = redis_cached(score_article)

    return app
# Exemplo n.º 2
def filecache(basedir):
    """Return a file-backed caching decorator rooted at *basedir*."""
    # Imported lazily: these are optional dependencies, so the module
    # stays importable when they are absent.
    from aiocache import cached
    from aiocache.serializers import PickleSerializer
    from aiofilecache import FileCache

    return cached(
        cache=FileCache,
        serializer=PickleSerializer(),
        basedir=basedir,
        timeout=None,
        key_builder=_key_builder,
    )
# Exemplo n.º 3
    async def test_cached_arg_key_from_attr(self, mocker, memory_mock_cache):
        """The keyword argument named by key_from_attr becomes the cache key."""
        decorator = cached(key_from_attr="key")

        first = await decorator(stub)(key="2")
        second = await decorator(stub)(key="2")

        assert first == second
        memory_mock_cache.set.assert_called_with("2", mock.ANY, ttl=0)
# Exemplo n.º 4
    async def test_cached_with_cache_exception_set(self, mocker, memory_mock_cache):
        """A failing cache write must not prevent the wrapped call from running."""
        this_module = sys.modules[globals()['__name__']]
        mocker.spy(this_module, 'stub')

        # Simulate a backend that refuses connections on writes.
        memory_mock_cache.set = asynctest.CoroutineMock(side_effect=ConnectionRefusedError())

        await cached(key="key")(stub)()
        assert stub.call_count == 1
# Exemplo n.º 5
    def test_alias_takes_precedence(self, mock_cache):
        """When both alias and cache are supplied, the alias wins."""
        patched_get = MagicMock(return_value=mock_cache)
        with patch("aiocache.decorators.caches.get", patched_get) as mock_get:
            c = cached(alias='default', cache=SimpleMemoryCache, namespace='test')
            c(stub)

            mock_get.assert_called_with('default')
            assert c.cache is mock_cache
# Exemplo n.º 6
    async def test_cached_arg_key_from_attr(self, mocker, memory_mock_cache):
        """A positional argument mapped via key_from_attr is used as the key."""
        decorator = cached(key_from_attr="keys")

        first = await decorator(arg_return_dict)("asd")
        second = await decorator(arg_return_dict)("asd")

        assert first is second
        memory_mock_cache.set.assert_called_with("asd", mock.ANY, ttl=0)
# Exemplo n.º 7
    def test_init(self):
        """Constructor stores each option on the expected attribute."""
        decorator = cached(
            ttl=1, key="key", key_builder="fn", cache=SimpleMemoryCache,
            plugins=None, alias=None, noself=False, namespace="test")

        assert decorator.ttl == 1
        assert decorator.key == "key"
        assert decorator.key_builder == "fn"
        # The concrete cache instance is created lazily; only the class and
        # its construction kwargs are kept at init time.
        assert decorator.cache is None
        assert decorator._cache == SimpleMemoryCache
        assert decorator._serializer is None
        assert decorator._kwargs == {'namespace': 'test'}
# Exemplo n.º 8
    async def test_cached_key(self, mocker, memory_mock_cache):
        """With an explicit key, the first result is cached and reused."""
        this_module = sys.modules[globals()['__name__']]
        mocker.spy(this_module, 'stub')
        decorator = cached(key="key")

        first = await decorator(stub)()
        second = await decorator(stub)()

        # Underlying function ran once; second call hit the cache.
        assert stub.call_count == 1
        assert first is second

        assert await memory_mock_cache.get("key") is not None
# Exemplo n.º 9
    async def test_cached_key_from_attr(self, mocker, memory_mock_cache):
        """key_from_attr resolves the cache key from the call's kwargs."""
        this_module = sys.modules[globals()['__name__']]
        mocker.spy(this_module, 'stub')
        decorator = cached(key_from_attr="key")

        first = await decorator(stub)(key='key')
        second = await decorator(stub)(key='key')

        # One real invocation; the repeat was served from cache.
        assert stub.call_count == 1
        assert first is second

        memory_mock_cache.get.assert_called_with('key')
        memory_mock_cache.set.assert_called_with('key', mock.ANY, ttl=0)
# Exemplo n.º 10
    async def test_cached_ttl(self, mocker, memory_mock_cache):
        """ttl is forwarded to the backend and the result is stored once."""
        this_module = sys.modules[globals()['__name__']]
        mocker.spy(this_module, 'stub')
        decorator = cached(ttl=10)

        first = await decorator(stub)(1)
        second = await decorator(stub)(1)

        assert stub.call_count == 1
        assert first is second

        # Key is auto-built from module/function name plus call signature.
        memory_mock_cache.get.assert_called_with('stubstub(1,){}')
        assert memory_mock_cache.get.call_count == 1
        assert memory_mock_cache.exists.call_count == 2
        memory_mock_cache.set.assert_called_with('stubstub(1,){}', mock.ANY, ttl=10)
        assert memory_mock_cache.set.call_count == 1
# Exemplo n.º 11
    def __init__(self,
                 websession: ClientSession,
                 *,
                 cache_seconds: int = DEFAULT_CACHE_SECONDS) -> None:
        """Initialize."""
        # This library sits on an unofficial data source, so refuse to poll
        # faster than the minimum allowed interval; warn when clamping.
        if cache_seconds < DEFAULT_CACHE_SECONDS:
            _LOGGER.warning(
                "Setting cache timeout to lowest allowed: %s seconds",
                DEFAULT_CACHE_SECONDS,
            )
        cache_seconds = max(cache_seconds, DEFAULT_CACHE_SECONDS)

        self._websession = websession
        # Wrap the dump coroutine so repeated calls within the window reuse
        # the cached payload.
        self.dump = cached(key=DEFAULT_CACHE_KEY, ttl=cache_seconds)(self._dump)
# Exemplo n.º 12
async def redis_prepare():
    """Register the 'redis' cache alias and wrap ``_get_user`` with caching.

    Falls back to an in-memory cache when in-memory caching is configured
    or the Redis sentinel connection cannot be established.
    """
    if Config.IN_MEMORY_CACHE:
        cache = Cache()
    else:
        try:
            sentinel = await aioredis.create_sentinel(
                sentinels=[f'redis://{Config.REDIS_HOST}:{Config.REDIS_PORT}'],
                password=Config.REDIS_PASSWORD)
            cache = RedisCache(sentinel=sentinel, master=Config.REDIS_USER)
        except Exception:
            # Best-effort: degrade to in-memory caching instead of failing
            # startup.  Narrowed from a bare ``except`` so KeyboardInterrupt,
            # SystemExit and task cancellation still propagate.
            cache = Cache()

    caches._caches['redis'] = cache
    # Rebind the module-level helper with a cached wrapper: 60 s TTL,
    # one cache entry per user email, 0.5 s cache-operation timeout.
    # noinspection PyGlobalUndefined
    global _get_user
    _get_user = cached(alias='redis',
                       ttl=60,
                       key_builder=lambda f, request, email: f'user_{email}',
                       timeout=0.5)(_get_user)
# Exemplo n.º 13
    def __init__(
        self,
        *,
        session: Optional[ClientSession] = None,
        cache_seconds: int = DEFAULT_CACHE_SECONDS,
    ) -> None:
        """Initialize."""
        # This library sits on an unofficial data source, so refuse to poll
        # faster than the minimum allowed interval; warn when clamping.
        if cache_seconds < DEFAULT_CACHE_SECONDS:
            _LOGGER.warning(
                "Setting cache timeout to lowest allowed: %s seconds",
                DEFAULT_CACHE_SECONDS,
            )
        cache_seconds = max(cache_seconds, DEFAULT_CACHE_SECONDS)

        self._currently_retrying: bool = False
        self._session: ClientSession = session
        # Wrap the dump coroutine so repeated calls within the window reuse
        # the cached payload.
        self.dump: Callable = cached(key=DEFAULT_CACHE_KEY,
                                     ttl=cache_seconds)(self._dump)
# Exemplo n.º 14
def cached(expire_after=None, key=None, include_self=True):
    """Create a decorator that caches the decorated callable's return value.

    Works on plain functions, methods, coroutine functions and coroutine
    methods.  The cached value is returned only when the arguments of a
    call are equal to a set of arguments the callable has been invoked
    with before.

    :param expire_after:
        Number of seconds after which the cached return value is
        discarded.  The default is ``None``, meaning the cached value
        never expires and is not discarded.
    :type expire_after: Optional[int]
    :param key:
        Custom key to save the cached return value under.  If not
        provided, a key is assigned automatically.
    :type key: Optional[str]
    :param include_self:
        Whether ``self`` is taken into account when comparing the call
        arguments against ones the function was called with before.
    :type include_self: Optional[bool]

    Example: ::

        @cached(expire_after=1800)
        async def expensive_coroutine():
            # highly complicated and expensive calculation
            await asyncio.sleep(10)
            return 1 + 1
    """
    # TODO check parameter for custom validity/integrity checks
    return aiocache.cached(
        key=key,
        ttl=expire_after,
        alias='default',
        noself=not include_self,
    )
# Exemplo n.º 15
 def decorator(self, mocker, mock_cache):
     # Fixture: yield a cached() decorator whose backend lookup is patched
     # to return the mocked cache for the duration of the test.
     with patch("aiocache.decorators._get_cache", return_value=mock_cache):
         yield cached()
# Exemplo n.º 16
#!/usr/bin/env python3.6
# encoding: utf-8

import logging

import aiocache
import asyncpg
import discord
from discord.ext import commands

from reactor_bot import emoji_utils

logger = logging.getLogger('cogs.db')
# Module-wide decorator: cache results for 20 s with no serializer — matches
# the "up to twenty seconds" propagation delay mentioned in the settings
# confirmation message below.
cached = aiocache.cached(ttl=20, serializer=None)


class Database:
    SETTINGS_UPDATED_MESSAGE = (
        '\N{white heavy check mark} Done. '
        'Note that it may take up to twenty seconds for your changes to take effect.'
    )

    def __init__(self, bot):
        self.bot = bot
        # Kick off asynchronous setup in the background; the task is
        # cancelled when the cog is unloaded.
        self._init_task = self.bot.loop.create_task(self._init())

    def __unload(self):
        self._init_task.cancel()

        try:
            self.bot.loop.create_task(self.pool.close())
# Exemplo n.º 17
    async def test_cached_func_exception(self, mocker, memory_mock_cache):
        """Exceptions raised by the wrapped callable propagate unchanged."""
        decorator = cached(key="key")

        with pytest.raises(ValueError):
            await decorator(raise_exception)()