def includeme(config):
    # Register whatever file storage backend has been configured for storing
    # our package files.
    storage_class = config.maybe_dotted(
        config.registry.settings["files.backend"],
    )
    config.register_service_factory(storage_class.create_service, IFileStorage)

    # Register our service which will handle getting the download statistics
    # for a project.
    config.register_service(
        RedisDownloadStatService(
            config.registry.settings["download_stats.url"],
        ),
        IDownloadStatService,
    )

    # Register our origin cache keys
    config.register_origin_cache_keys(
        Project,
        cache_keys=["project/{obj.normalized_name}"],
        purge_keys=["project/{obj.normalized_name}", "all-projects"],
    )
    config.register_origin_cache_keys(
        Release,
        cache_keys=["project/{obj.project.normalized_name}"],
        purge_keys=["project/{obj.project.normalized_name}", "all-projects"],
    )

    # Add a periodic task to compute trending once a day, assuming we have
    # been configured to be able to access BigQuery.
    if config.get_settings().get("warehouse.trending_table"):
        config.add_periodic_task(crontab(minute=0, hour=3), compute_trending)
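# A hedged sketch of how a view might consume the service registered above.
# register_service()/find_service() are the pyramid_services pair; the route
# name, renderer, and attribute names below are illustrative assumptions, not
# taken from the source.
from pyramid.view import view_config


@view_config(route_name="packaging.project", renderer="packaging/detail.html")
def project_detail(project, request):
    # Look up whichever IDownloadStatService implementation includeme()
    # registered (here, the Redis-backed one).
    stats = request.find_service(IDownloadStatService)
    return {
        "project": project,
        "daily_download_stats": stats.get_daily_stats(project.name),
        "weekly_download_stats": stats.get_weekly_stats(project.name),
    }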
def includeme(config):
    # Register whatever file storage backend has been configured for storing
    # our package files.
    storage_class = config.maybe_dotted(
        config.registry.settings["files.backend"],
    )
    config.register_service_factory(storage_class.create_service, IFileStorage)

    # Register our service which will handle getting the download statistics
    # for a project.
    config.register_service(
        RedisDownloadStatService(
            config.registry.settings["download_stats.url"],
        ),
        IDownloadStatService,
    )

    # Register our origin cache keys
    config.register_origin_cache_keys(
        Project,
        cache_keys=["project/{obj.normalized_name}"],
        purge_keys=["project/{obj.normalized_name}", "all-projects"],
    )
    config.register_origin_cache_keys(
        Release,
        cache_keys=["project/{obj.project.normalized_name}"],
        purge_keys=["project/{obj.project.normalized_name}", "all-projects"],
    )
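# A hedged sketch of the contract a "files.backend" class has to satisfy for
# the register_service_factory() call above: pyramid_services invokes the
# factory as factory(context, request), so the backend needs a create_service
# classmethod with that signature. The class name, the "files.path" setting,
# and the get()/store() methods below are illustrative assumptions.
import os
import shutil


class LocalFileStorage:

    def __init__(self, base):
        self.base = base

    @classmethod
    def create_service(cls, context, request):
        # Build the backend from configuration, mirroring how includeme()
        # reads settings off the registry.
        return cls(request.registry.settings["files.path"])

    def get(self, path):
        return open(os.path.join(self.base, path), "rb")

    def store(self, path, file_path):
        destination = os.path.join(self.base, path)
        os.makedirs(os.path.dirname(destination), exist_ok=True)
        shutil.copyfile(file_path, destination)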
def test_get_weekly_stats(self, keys, result):
    svc = RedisDownloadStatService("")
    svc.redis = pretend.stub(mget=pretend.call_recorder(lambda *a: keys))

    # One daily key per day for the trailing eight days, most recent first.
    call_keys = [
        "downloads:daily:12-01-{:02d}:foo".format(i + 7)
        for i in reversed(range(8))
    ]

    assert svc.get_weekly_stats("foo") == result
    assert svc.redis.mget.calls == [pretend.call(*call_keys)]
def test_get_daily_stats(self, keys, result):
    svc = RedisDownloadStatService("")
    svc.redis = pretend.stub(mget=pretend.call_recorder(lambda *a: keys))

    # One hourly key per hour for the trailing 26 hours, most recent first.
    call_keys = (
        ["downloads:hour:12-01-14-00:foo"]
        + [
            "downloads:hour:12-01-13-{:02d}:foo".format(i)
            for i in reversed(range(24))
        ]
        + ["downloads:hour:12-01-12-23:foo"]
    )

    assert svc.get_daily_stats("foo") == result
    assert svc.redis.mget.calls == [pretend.call(*call_keys)]
def test_creates_redis(self, monkeypatch):
    redis_obj = pretend.stub()
    redis_cls = pretend.stub(
        from_url=pretend.call_recorder(lambda u: redis_obj),
    )
    monkeypatch.setattr(redis, "StrictRedis", redis_cls)

    url = pretend.stub()
    svc = RedisDownloadStatService(url)

    assert svc.redis is redis_obj
    assert redis_cls.from_url.calls == [pretend.call(url)]
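# A minimal sketch of a RedisDownloadStatService consistent with the tests
# above: the constructor hands the URL to StrictRedis.from_url, and the stats
# methods mget a window of keys (26 hourly keys for daily stats, 8 daily keys
# for weekly stats, most recent first). Summing the values and the exact key
# windows are assumptions inferred from the assertions, not confirmed by the
# source.
import datetime

import redis


class RedisDownloadStatService:

    def __init__(self, url):
        self.redis = redis.StrictRedis.from_url(url)

    def _sum(self, keys):
        # mget returns None for missing keys; treat those as zero downloads.
        return sum(int(v) for v in self.redis.mget(*keys) if v is not None)

    def get_daily_stats(self, project):
        now = datetime.datetime.utcnow()
        keys = [
            "downloads:hour:{:%y-%m-%d-%H}:{}".format(
                now - datetime.timedelta(hours=i), project
            )
            for i in range(26)
        ]
        return self._sum(keys)

    def get_weekly_stats(self, project):
        today = datetime.date.today()
        keys = [
            "downloads:daily:{:%y-%m-%d}:{}".format(
                today - datetime.timedelta(days=i), project
            )
            for i in range(8)
        ]
        return self._sum(keys)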
def includeme(config):
    # Register our service which will handle getting the download statistics
    # for a project.
    config.register_service(
        RedisDownloadStatService(
            config.registry.settings["download_stats.url"],
        ),
        IDownloadStatService,
    )

    # Register our origin cache keys
    config.register_origin_cache_keys(
        Project,
        "project",
        "project/{obj.normalized_name}",
    )
    config.register_origin_cache_keys(
        Release,
        "project",
        "project/{obj.project.normalized_name}",
    )