async def simple(con: Connection, steam_ids: typing.List[int]):
    """
    Outputs player ratings compatible with balance.py plugin from minqlx-plugins

    Args:
        steam_ids (list): array of steam ids

    Returns:
        {
            "ok": True
            "players": [...],
            "deactivated": []
        }
    """
    ratings = {}

    query = """
    SELECT steam_id, gametype_short, mean, n
    FROM gametype_ratings gr
    LEFT JOIN gametypes gt ON gr.gametype_id = gt.gametype_id
    WHERE steam_id = ANY($1)"""

    async for row in con.cursor(query, steam_ids):
        steam_id = str(row[0])
        gametype = row[1]
        # Ensure the player entry exists, then attach this gametype's rating.
        entry = ratings.setdefault(steam_id, {"steamid": steam_id})
        entry[gametype] = {"games": row[3], "elo": round(row[2], 2)}

    return prepare_result(ratings)
async def list(
    connection: asyncpg.Connection,
    table_schema: str = constants.MIGRATIONS_SCHEMA,
    table_name: str = constants.MIGRATIONS_TABLE,
) -> model.MigrationHistory:
    """Load the recorded migration history, ordered oldest-first."""
    logger.debug('Getting a history of migrations')
    history = model.MigrationHistory()
    await connection.reload_schema_state()

    # NOTE: identifiers cannot be bound as query parameters, hence format();
    # both default to trusted constants.
    query = """
        select revision, label, timestamp, direction
        from {table_schema}.{table_name}
        order by timestamp asc;
    """.format(table_schema=table_schema, table_name=table_name)

    async with connection.transaction():
        async for record in connection.cursor(query):
            entry = model.MigrationHistoryEntry(
                revision=model.Revision(record['revision']),
                label=record['label'],
                timestamp=record['timestamp'],
                direction=model.MigrationDir(record['direction']),
            )
            history.append(entry)

    return history
async def export(con: Connection, gametype_id: int):
    """Dump id/name/rating/games-played for every player of a gametype, best first."""
    query = """
    SELECT p.steam_id, p.name, gr.mean, gr.n
    FROM players p
    LEFT JOIN gametype_ratings gr ON gr.steam_id = p.steam_id
    WHERE gr.gametype_id = $1
    ORDER BY gr.mean DESC
    """

    result = [
        {
            "_id": str(steam_id),
            "name": clean_name(name),
            "rating": mean,
            "n": n,
        }
        async for steam_id, name, mean, n in con.cursor(query, gametype_id)
    ]

    return {"ok": True, "response": result}
async def get_events(connection: Connection,
                     aggregate_id: ID,
                     schema: Optional[EventSchema] = None) -> List[Event]:
    """Load and deserialize all stored events of an aggregate in version order."""
    # Fall back to a default schema when none was supplied.
    event_schema = schema or EventSchema()
    query = (
        stored_events.select()
        .where(stored_events.c.aggregate_id == aggregate_id)
        .order_by(stored_events.c.aggregate_version)
    )
    events = []
    async for row in connection.cursor(query):
        events.append(event_schema.loads(row["state"]))
    return events
async def tags(self, db: Connection) -> AsyncGenerator[LogTag, None]:
    """Yield every LogTag attached to this log entry."""
    sql = """
    SELECT t.*
    FROM logger_log_tag lt
    JOIN logger_tag t ON t.id = lt."tagID"
    WHERE "logID" = $1;
    """
    async for row in db.cursor(sql, self.pk):
        yield LogTag(rowdata=row)
async def test_unique_agent_instances(migrate_v6_to_v7: None, postgresql_client: Connection) -> None:
    """Check the v6->v7 migration merged duplicate agentinstance rows and added the unique constraint."""
    # Existing documents must have been merged: at most one row per (tid, process, name).
    async with postgresql_client.transaction():
        counts: Cursor = postgresql_client.cursor("""
                SELECT COUNT(*)
                FROM public.agentinstance
                GROUP BY tid, process, name
                ;
            """)
        assert all([record["count"] == 1 async for record in counts])

    # The unique constraint itself must now exist with the expected definition.
    constraints = await postgresql_client.fetch("""
            SELECT pg_catalog.pg_get_constraintdef(r.oid, true) as condef
            FROM pg_catalog.pg_constraint r
            WHERE conname='agentinstance_unique'
        """)
    assert len(constraints) == 1
    assert constraints[0]["condef"] == "UNIQUE (tid, process, name)"
async def export(con: Connection, gametype_id: int):
    """Dump id/name/rating/games-played for every player of a gametype, best first."""
    # The rating column name depends on the gametype's rating model.
    column = rating_column(gametype_id)
    query = """
    SELECT p.steam_id, p.name, {COLUMN}, gr.n
    FROM players p
    LEFT JOIN gametype_ratings gr ON gr.steam_id = p.steam_id
    WHERE gr.gametype_id = $1
    ORDER BY {COLUMN} DESC
    """.format(COLUMN=column)

    rows = []
    async for steam_id, name, rating, n in con.cursor(query, gametype_id):
        rows.append({
            "_id": str(steam_id),
            "name": clean_name(name),
            "rating": rating,
            "n": n,
        })

    return {"ok": True, "response": rows}
async def fetch(
    con: Connection,
    steam_ids: typing.List[int],
    mapname: typing.Optional[str] = None,
    bigger_numbers: bool = False,
    with_qlstats_policy: bool = False,
):
    """
    Outputs player ratings compatible with balance.py plugin from minqlx-plugins
    """
    # Seed every requested player with default ratings for all supported
    # gametypes; real ratings fetched below overwrite these defaults.
    players = {}
    for steam_id in map(str, steam_ids):
        entry = {"steamid": steam_id}
        for gametype in SUPPORTED_GAMETYPES:
            if gametype in AVG_PERF_GAMETYPES:
                default_elo = INITIAL_R2_VALUE[gametype]
            else:
                default_elo = INITIAL_R1_MEAN[gametype]
            if bigger_numbers:
                default_elo = int(default_elo * 60)
            entry[gametype] = {"games": 0, "elo": default_elo}
        players[steam_id] = entry

    # Map-specific ratings when a map name was given, global ones otherwise.
    if mapname:
        map_id = await get_map_id(con, mapname, False)
        query = MAP_BASED_RATINGS_SQL
        query_args = (steam_ids, cache.AVG_PERF_GAMETYPE_IDS, map_id)
    else:
        query = COMMON_RATINGS_SQL
        query_args = (steam_ids, cache.AVG_PERF_GAMETYPE_IDS)

    async for row in con.cursor(query, *query_args):
        steam_id = str(row[0])
        gametype = row[1]
        if bigger_numbers is False:
            rating = round(row[2], 2)
        else:
            rating = int(row[2] * 60)
        players.setdefault(steam_id, {"steamid": steam_id})[gametype] = {
            "games": row[3],
            "elo": rating,
        }

    result = prepare_result(players)
    if with_qlstats_policy is False:
        return result

    # Best effort: merge our ratings into qlstats.net's response.
    # On any failure fall back to our own result.
    try:
        r = await request(
            "http://qlstats.net/elo/" + "+".join(map(str, steam_ids))
        )
    except requests.exceptions.RequestException:
        return result

    if not r.ok:
        return result

    try:
        qlstats_data = r.json()
    except Exception as e:
        log_exception(e)
        return result

    qlstats_data["players"] = result["players"]
    for steam_id, info in result["playerinfo"].items():
        qlstats_data["playerinfo"][steam_id]["ratings"] = info["ratings"]
    return qlstats_data
async def get_list(con: Connection, gametype_id: int, page: int, show_inactive=False):
    """
    Return one page of the rating leaderboard for a gametype.

    Args:
        con: database connection
        gametype_id: gametype to rank players for
        page: zero-based page number
        show_inactive: when True, also list players whose last game is
            older than KEEPING_TIME

    Returns:
        {"ok": True, "response": [...], "page_count": N}
    """
    await con.set_type_codec(
        "json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
    )

    # LIMIT/OFFSET are coerced to int before interpolation, so this
    # format() cannot inject SQL.
    query = get_sql_top_players_query_by_gametype_id(
        gametype_id
    ) + "LIMIT {LIMIT} OFFSET {OFFSET}".format(
        LIMIT=int(PLAYER_COUNT_PER_PAGE), OFFSET=int(PLAYER_COUNT_PER_PAGE * page)
    )

    start_timestamp = 0
    if show_inactive is False:
        start_timestamp = cache.LAST_GAME_TIMESTAMPS[gametype_id] - KEEPING_TIME

    result = []
    player_count = 0
    async for row in con.cursor(query, start_timestamp, gametype_id):
        # Fixed: identity comparison with None (was `row[0] != None`).
        if row[0] is not None:
            result.append(
                {
                    "_id": str(row[0]),
                    "name": row[1],
                    # Models without a skin suffix get "/default" appended.
                    "model": (
                        row[2] + ("/default" if row[2].find("/") == -1 else "")
                    ).lower(),
                    "rating": round(row[3], 2),
                    "rd": round(row[4], 2),
                    "n": row[5],
                    "rank": row[7],
                }
            )
        player_count = row[6]

    steam_ids = list(map(lambda player: int(player["_id"]), result))

    # Attach win ratio (percentage of matches won) for the listed players.
    query = """
    SELECT s.steam_id,
           CEIL(AVG(CASE
               WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 1
               WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 1
               ELSE 0
           END)*100)
    FROM matches m
    LEFT JOIN scoreboards s ON s.match_id = m.match_id
    WHERE m.gametype_id = $1 AND s.steam_id = ANY($2)
    GROUP BY s.steam_id;
    """
    for row in await con.fetch(query, gametype_id, steam_ids):
        try:
            result_index = steam_ids.index(row[0])
            result[result_index]["win_ratio"] = int(row[1])
        except ValueError:
            pass  # must not happen

    return {
        "ok": True,
        "response": result,
        "page_count": ceil(player_count / PLAYER_COUNT_PER_PAGE),
    }
async def get_player_info_old(con: Connection, steam_id: int):
    """
    Build a legacy per-gametype profile for one player.

    For every known gametype this collects the player's current rating,
    games played, rank, and a rating history over (up to) the last 50
    matches of that gametype.

    Args:
        con: database connection
        steam_id: player's steam id

    Returns:
        {"ok": True, "player": {...}} where the player dict carries
        "_id"/"name"/"model" plus one sub-dict per gametype.
    """
    result = {}
    for gametype, gametype_id in cache.GAMETYPE_IDS.items():
        # RATING_COLUMN depends on the gametype's rating model; the rank
        # subquery is the same one used for the leaderboard.
        query = """
        SELECT p.steam_id, p.name, p.model, g.gametype_short, gr.{RATING_COLUMN}, gr.n, m.match_id::text, m.timestamp, m.old_{RATING_COLUMN}, rt.rank, rt.count
        FROM players p
        LEFT JOIN gametype_ratings gr ON gr.steam_id = p.steam_id
        LEFT JOIN gametypes g on gr.gametype_id = g.gametype_id
        LEFT JOIN (
            SELECT m.match_id, m.timestamp, m.gametype_id, s.old_{RATING_COLUMN}
            FROM matches m
            LEFT JOIN scoreboards s ON s.match_id = m.match_id
            WHERE s.steam_id = $3 AND m.gametype_id = $2
            ORDER BY m.timestamp DESC
            LIMIT 50
        ) m ON m.gametype_id = g.gametype_id
        LEFT JOIN ({SQL_TOP_PLAYERS_BY_GAMETYPE}) rt ON rt.steam_id = p.steam_id
        WHERE p.steam_id = $3 AND g.gametype_id = $2
        ORDER BY m.timestamp ASC
        """.format(
            RATING_COLUMN=rating_column(gametype_id),
            SQL_TOP_PLAYERS_BY_GAMETYPE=
            get_sql_top_players_query_by_gametype_id(gametype_id),
        )

        # Carries the most recent known rating forward over matches that
        # have no recorded old rating (row[8] is NULL).
        last_ratings = {}
        async for row in con.cursor(
            query,
            cache.LAST_GAME_TIMESTAMPS[gametype_id] - KEEPING_TIME,
            gametype_id,
            steam_id,
        ):
            result["_id"] = str(row[0])
            result["name"] = row[1]
            result["model"] = row[2]

            rating = round(row[8], 2) if row[8] is not None else None
            # Seed the carry-forward value on the first row; 1 is the
            # fallback when even the first match has no rating.
            if gametype not in last_ratings:
                last_ratings[gametype] = rating if rating is not None else 1

            if rating is None:
                rating = last_ratings[gametype]
            else:
                last_ratings[gametype] = rating

            # Per-gametype summary is filled once, from the first row.
            if gametype not in result:
                result[gametype] = {
                    "rating": round(row[4], 2) if row[4] is not None else 0,
                    "n": row[5],
                    "history": [],
                    "rank": row[9],
                    "max_rank": row[10],
                }

            result[gametype]["history"].append({
                "match_id": row[6],
                "timestamp": row[7],
                "rating": rating
            })

    return {"ok": True, "player": result}
def _run_on_conn_iter(self, conn: Connection, stmt: str, params: List[Expr]) -> CursorFactory:
    """Open a server-side cursor for *stmt* on *conn*, binding *params*."""
    cursor_factory = conn.cursor(stmt, *params)
    return cursor_factory
async def _calc_ratings_avg_perf(con: Connection,
                                 match_id: str,
                                 gametype_id: int,
                                 map_id: Optional[int] = None):
    """
    Compute new average-performance ratings for every scored player of a match.

    Args:
        con: database connection
        match_id: match being post-processed
        gametype_id: gametype of the match
        map_id: when given, use map-specific ratings instead of global ones

    Returns:
        dict: steam_id -> {"old": previous rating or None,
                           "new": recalculated rating,
                           "team": player's team}
    """
    def extra_factor(gametype, matches, wins, losses):
        # Per-gametype correction factor; only tdm currently rewards a
        # positive win/loss balance. Everything else gets a neutral 1.
        try:
            return {
                "tdm": (1 + (0.15 * (wins / matches - losses / matches)))
            }[gametype]
        except KeyError:
            return 1

    if map_id is None:
        ratings_subquery = """
        SELECT steam_id, r2_value AS rating
        FROM gametype_ratings
        WHERE gametype_id = $1
        """
        query_params = [gametype_id, match_id]
    else:
        ratings_subquery = """
        SELECT steam_id, r2_value as rating
        FROM map_gametype_ratings
        WHERE gametype_id = $1 AND map_id = $3
        """
        query_params = [gametype_id, match_id, map_id]

    query = """
    SELECT s.steam_id, team, s.match_perf, gr.rating
    FROM scoreboards s
    LEFT JOIN ({SUBQUERY}) gr ON gr.steam_id = s.steam_id
    WHERE match_perf IS NOT NULL AND match_id = $2
    """.format(SUBQUERY=ratings_subquery)

    # Hoisted out of the loop (was re-format()ted for every player row).
    moving_avg_query = """
    SELECT COUNT(1), SUM(win) as wins, SUM(loss) as losses, AVG(rating)
    FROM (
        SELECT
            CASE
                WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 1
                WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 1
                ELSE 0
            END as win,
            CASE
                WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 0
                WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 0
                ELSE 1
            END as loss,
            s.match_perf as rating
        FROM matches m
        LEFT JOIN scoreboards s on s.match_id = m.match_id
        WHERE s.steam_id = $1 AND m.gametype_id = $2 AND
            (m.post_processed = TRUE OR m.match_id = $3) AND
            s.match_perf IS NOT NULL
        ORDER BY m.timestamp DESC
        LIMIT {MOVING_AVG_COUNT}
    ) t""".format(MOVING_AVG_COUNT=MOVING_AVG_COUNT)

    # Reverse lookup gametype_id -> short name; computed lazily once
    # (was previously rebuilt for every player with an existing rating).
    gametype = None

    result = {}
    async for row in con.cursor(query, *query_params):
        steam_id = row[0]
        team = row[1]
        match_perf = row[2]
        old_rating = row[3]

        if old_rating is None:
            # First rated match: seed the rating with this match's perf.
            new_rating = match_perf
        else:
            agg = await con.fetchrow(moving_avg_query, steam_id, gametype_id,
                                     match_id)
            if gametype is None:
                gametype = [
                    k for k, v in cache.GAMETYPE_IDS.items()
                    if v == gametype_id
                ][0]
            new_rating = agg[3] * extra_factor(gametype, agg[0], agg[1],
                                               agg[2])

        result[steam_id] = {"old": old_rating, "new": new_rating, "team": team}

    return result
async def get_list(con: Connection, gametype_id: int, page: int, show_inactive=False):
    """
    Return one page of the rating leaderboard for a gametype.

    Args:
        con: database connection
        gametype_id: gametype to rank players for
        page: zero-based page number
        show_inactive: when True, also list players whose last game is
            older than KEEPING_TIME

    Returns:
        {"ok": True, "response": [...], "page_count": N}
    """
    await con.set_type_codec(
        "json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
    )

    # LIMIT/OFFSET are coerced to int before interpolation, so this
    # format() cannot inject SQL.
    query = SQL_TOP_PLAYERS_BY_GAMETYPE + "LIMIT {LIMIT} OFFSET {OFFSET}".format(
        LIMIT=int(PLAYER_COUNT_PER_PAGE), OFFSET=int(PLAYER_COUNT_PER_PAGE * page)
    )

    start_timestamp = 0
    if show_inactive is False:
        start_timestamp = LAST_GAME_TIMESTAMPS[gametype_id] - KEEPING_TIME

    result = []
    player_count = 0
    async for row in con.cursor(query, start_timestamp, gametype_id):
        # Fixed: identity comparison with None (was `row[0] != None`).
        if row[0] is not None:
            result.append(
                {
                    "_id": str(row[0]),
                    "name": row[1],
                    # Models without a skin suffix get "/default" appended.
                    "model": (
                        row[2] + ("/default" if row[2].find("/") == -1 else "")
                    ).lower(),
                    "rating": round(row[3], 2),
                    "rd": round(row[4], 2),
                    "n": row[5],
                    "rank": row[7],
                }
            )
        player_count = row[6]

    steam_ids = list(map(lambda player: int(player["_id"]), result))

    # Attach win ratio (percentage of matches won) for the listed players.
    query = """
    SELECT s.steam_id,
           CEIL(AVG(CASE
               WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 1
               WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 1
               ELSE 0
           END)*100)
    FROM matches m
    LEFT JOIN scoreboards s ON s.match_id = m.match_id
    WHERE m.gametype_id = $1 AND s.steam_id = ANY($2)
    GROUP BY s.steam_id;
    """
    for row in await con.fetch(query, gametype_id, steam_ids):
        try:
            result_index = steam_ids.index(row[0])
            result[result_index]["win_ratio"] = int(row[1])
        except ValueError:
            pass  # must not happen

    return {
        "ok": True,
        "response": result,
        "page_count": ceil(player_count / PLAYER_COUNT_PER_PAGE),
    }