def test_calls_parse_uri_path_from_cache(self):
    """from_url must delegate URL-path parsing to the scheme class and
    forward the parsed kwargs into the Cache constructor."""
    with patch("aiocache.factory.Cache") as mock:
        scheme_cls = mock.get_scheme_class.return_value
        scheme_cls.parse_uri_path = Mock(return_value={"arg1": "arg1"})

        Cache.from_url("redis:///")

        scheme_cls.parse_uri_path.assert_called_once_with("/")
        mock.assert_called_once_with(scheme_cls, arg1="arg1")
async def start(cls):
    """Bootstrap shared application state: crypto, DB, cache, Intra API,
    course metadata, localization and sub-apps.

    NOTE(review): statements below run in dependency order (DB bind before
    queries, intra.load() before Courses lookups) — keep that order.
    """
    cls.fernet = Fernet(cls.salt.encode())
    await db_models.db.set_bind(bind=cls.db_url, min_size=1)
    cls.redis = Cache.from_url(cls.redis_url)

    # Pick the test application when running under test mode.
    if cls.test:
        cls.application = await Application.get_test()
    else:
        cls.application = await Application.get_main()

    cls.intra = IntraAPI(config=cls)
    await cls.intra.load()

    course_rows = await Courses.get_courses()
    cls.courses = {row.id: row.name for row in course_rows}
    # First primary cursus; raises IndexError if none exists (same as original).
    cls.cursus_id = [row.id for row in course_rows if row.is_primary][0]

    cls.local = Localization()
    cls.local.load(data=read_json(cls.localization))
    cls.sub_apps = SubApps(intra=cls.intra, local=cls.local)
def endpoint_cache(function: _AsyncCallable) -> _AsyncCallable: from .routing import request_headers, response_headers # noqa:F401 vf = CachedValidatedFunction(function, config={}) cache: BaseCache = AioCache.from_url(CACHE_URI) # type:ignore config: CacheConfig = getattr(function, "cache_config", CacheConfig.new(function)) cache.namespace, cache.ttl = config.namespace, config.ttl.total_seconds() if not CACHE_ENABLED: config.enabled = False @wraps(function) async def wrapper(*args, **kwargs): cache_policy: str = request_headers.get().get("cache-control", "public") if (not config.enabled) or (cache_policy.casefold() == "no-store"): return await vf.call(*args, **kwargs) key = hashlib.md5((model := vf.serialize(args=args, kwargs=kwargs)).json( exclude={"self"}, sort_keys=True, ensure_ascii=False).encode()).hexdigest() if cache_policy.casefold() == "no-cache": await cache.delete(key) if await cache.exists(key): logger.debug(f"Request to endpoint <g>{function.__qualname__}</g> " f"restoring from <e>{key=}</e> in cache data.") response_headers.get().setdefault("X-Cache-Hit", key) result, cache_date = await cache.get(key) else: result, cache_date = await vf.execute(model), datetime.now() await cache.set(key, (result, cache_date)) response_headers.get().update({ "Cache-Control": "public", "Expires": format_date_time(cache_date.timestamp() + cache.ttl), }) return result return wrapper # type:ignore
"project-jupyter/jupyter-meta-documentation/translate/#{language}/{resource}/1" "?q={query_string}"), } FILTER_RESOURCES_TO_BE_TRANSLATED = { "python": lambda r: r.split("--")[0] in ["bugs", "howto", "library"], "jupyter": None, } WEEK_IN_SECONDS = 604_800 logging.basicConfig(level=logging.INFO) logger = logging.getLogger() logger.info(config.CACHE_URL) cache = Cache.from_url(config.CACHE_URL) STRINGS_CACHE = defaultdict(dict) async def transifex_api(url, project, data=None, retrying=False, ttl=3600): url = urljoin(TRANSIFEX_API[project], url) if not data and (in_cache := await cache.get(url)): return in_cache if retrying: logger.debug("retrying url=%s", url) auth = aiohttp.BasicAuth(login="******", password=config.TRANSIFEX_TOKEN) async with aiohttp.ClientSession(auth=auth) as session: http_method = session.put if data else session.get
def setup_cache(app: web.Application) -> None:
    """Create the cache backend from the configured URL, switch it to JSON
    serialization and expose it on the application as ``app['cache']``."""
    backend = Cache.from_url(app['config']['cache_url'])
    backend.serializer = JsonSerializer()
    app['cache'] = backend
def test_from_url_returns_cache_from_scheme(self, scheme):
    """The cache built from a '<scheme>://' URL is an instance of the
    class registered for that scheme."""
    built = Cache.from_url("{}://".format(scheme))
    expected_cls = Cache.get_scheme_class(scheme)
    assert isinstance(built, expected_cls)
def test_from_url_invalid_protocol(self):
    """An unregistered URL scheme must raise InvalidCacheType."""
    bad_url = "http://"
    with pytest.raises(InvalidCacheType):
        Cache.from_url(bad_url)
def test_from_url_calls_cache_with_args(self, url, expected_args):
    """from_url must construct the cache with the resolved scheme class
    and the kwargs parsed from the URL."""
    with patch("aiocache.factory.Cache") as mock:
        Cache.from_url(url)
        scheme_cls = mock.get_scheme_class.return_value
        mock.assert_called_once_with(scheme_cls, **expected_args)
def test_from_url_calls_cache_with_args(self, url, expected_args):
    """from_url must construct the cache with the scheme name and the
    kwargs parsed from the URL.

    NOTE(review): the scheme is hard-coded as "redis" even though ``url``
    is parameterized — presumably all parametrized URLs use the redis
    scheme; verify against the test's parametrize decorator.
    """
    with patch("aiocache.factory.Cache") as mock:
        Cache.from_url(url)
        mock.assert_called_once_with("redis", **expected_args)