def _redis_delete_old_and_set_new( self, labels: Mapping[SiteId, _Labels], label_type: str, pipeline: Pipeline, ) -> None: sites_list: List[SiteId] = [] for site_id, label in labels.items(): if site_id not in self._sites_to_update: continue if not label: continue label_key = "%s:%s:%s" % (self._namespace, site_id, label_type) pipeline.delete(label_key) # NOTE: Mapping is invariant in its key because of __getitem__, so for mypy's sake we # make a copy below. This doesn't matter from a performance view, hset is iterating over # the dict anyway, and after that there is some serious I/O going on. # NOTE: pylint is too dumb to see the need for the comprehension. # pylint: disable=unnecessary-comprehension pipeline.hset(label_key, mapping={k: v for k, v in label.items()}) if site_id not in sites_list: sites_list.append(site_id) for site_id in sites_list: self._redis_set_last_program_start(site_id, pipeline)
def test_ratelimit_redis(get: mock.MagicMock, getboolean: mock.MagicMock, getint: mock.MagicMock, pipeline: Pipeline):
    """
    This test will only cover aurweb.ratelimit's Redis path
    if a real Redis server is configured. Otherwise, it'll
    use the database.
    """
    # Every check_ratelimit call needs a Request.
    request = Request()

    # The first requests stay within the configured limit and pass.
    for _ in range(4):
        assert not check_ratelimit(request)

    # One more request exceeds the limit and must be rejected.
    assert check_ratelimit(request)

    # Clear both rate-limit keys for this client out of Redis.
    host = request.client.host
    pipeline.delete(f"ratelimit-ws:{host}")
    pipeline.delete(f"ratelimit:{host}")
    deleted_ws, deleted_count = pipeline.execute()
    assert deleted_ws and deleted_count

    # With the keys gone, requests are accepted again.
    assert not check_ratelimit(request)
def test_rpc_ratelimit(getint: mock.MagicMock, client: TestClient, pipeline: Pipeline, packages: List[Package]):
    params = {"v": 5, "type": "suggest-pkgbase", "arg": "big"}

    # Requests 1 through 4 are within the limit and succeed.
    for _ in range(4):
        with client as request:
            response = request.get("/rpc", params=params)
        assert response.status_code == int(HTTPStatus.OK)

    # Request 5 exceeds the limit and is rejected.
    with client as request:
        response = request.get("/rpc", params=params)
    assert response.status_code == int(HTTPStatus.TOO_MANY_REQUESTS)

    # Drop the cached rate-limit records for the test client.
    pipeline.delete("ratelimit-ws:testclient")
    pipeline.delete("ratelimit:testclient")
    deleted_ws, deleted_count = pipeline.execute()
    assert deleted_ws and deleted_count

    # With a fresh window, the next request succeeds again.
    with client as request:
        response = request.get("/rpc", params=params)
    assert response.status_code == int(HTTPStatus.OK)
def cache_last_stolen(team_id: int, current_round: int, pipe: Pipeline) -> None:
    """
    Caches stolen flags from "flag_lifetime" rounds.

    Just adds commands to pipeline stack, don't forget to execute
    afterwards.

    :param team_id: attacker team id
    :param current_round: current round
    :param pipe: redis connection to add command to
    """
    game_config = game.get_current_game_config()
    query_params = {
        'round': current_round - game_config.flag_lifetime,
        'attacker_id': team_id,
    }
    with utils.db_cursor() as (_, curs):
        curs.execute(_SELECT_LAST_STOLEN_TEAM_FLAGS_QUERY, query_params)
        stolen_rows = curs.fetchall()

    # Rebuild the cached set from scratch for this attacker.
    key = CacheKeys.team_stolen_flags(team_id)
    pipe.delete(key)
    if stolen_rows:
        pipe.sadd(key, *(row[0] for row in stolen_rows))
def cache_tasks(pipe: Pipeline) -> None:
    """
    Put active tasks table data from database to cache.

    Just adds commands to pipeline stack, don't forget to execute
    afterwards.

    :param pipe: redis pipeline the commands are queued on
    """
    with utils.db_cursor(dict_cursor=True) as (_, curs):
        curs.execute(models.Task.get_select_active_query())
        rows = curs.fetchall()

    # List comprehension instead of list(<genexpr>) (flake8-comprehensions C400).
    tasks = [models.Task.from_dict(row) for row in rows]

    # Rebuild the cached set from scratch.
    key = CacheKeys.tasks()
    pipe.delete(key)
    if tasks:
        pipe.sadd(key, *(task.to_json() for task in tasks))
def cache_teams(pipe: Pipeline) -> None:
    """
    Put "teams" table data from database to cache.

    Just adds commands to pipeline stack, don't forget to execute
    afterwards.

    :param pipe: redis pipeline the commands are queued on
    """
    with utils.db_cursor(dict_cursor=True) as (_, curs):
        curs.execute(models.Team.get_select_active_query())
        rows = curs.fetchall()

    # List comprehension instead of list(<genexpr>) (flake8-comprehensions C400).
    teams = [models.Team.from_dict(row) for row in rows]

    # Rebuild the cached set from scratch.
    key = CacheKeys.teams()
    pipe.delete(key)
    if teams:
        # Generator avoids materializing an intermediate list, matching the
        # sadd style of the other cache_* helpers.
        pipe.sadd(key, *(team.to_json() for team in teams))
    for team in teams:
        pipe.set(CacheKeys.team_by_token(team.token), team.id)