async def get_units_trained(keys, context):
    """Get counts of units trained bucketed by interval.

    :param keys: iterable of (match_id, player_number) pairs to filter on.
    :param context: request context exposing ``database``.
    :returns: rows grouped by (match_id, player_number) via ``by_key``.
    """
    # Build a compound WHERE clause over the requested (match, player) pairs.
    where, values = compound_where(keys, ('match_id', 'player_number'))
    # Inner query buckets unit-creation states into :interval-second bins and
    # collapses villager/monk variants onto a single normalized object id; the
    # outer query resolves object names.  class_id=70 is presumably the "unit"
    # class (TODO confirm against dataset schema); herdables are excluded, as
    # are starting objects created in the first 10 seconds of the match.
    query = """
        select player_number, x.match_id, objects.id as object_id, name, count,
               inter as timestamp,
               extract(epoch from inter)::integer as timestamp_secs
        from (
            select oi.match_id, player_number, ois.dataset_id,
                   case
                       when ois.object_id=any(:villager_ids) then :normalized_villager_id
                       when ois.object_id=any(:monk_ids) then :normalized_monk_id
                       else ois.object_id
                   end as obj_id,
                   to_timestamp(floor((extract('epoch' from created) / :interval )) * :interval) as inter,
                   count(ois.instance_id) as count
            from object_instance_states as ois
            join (
                select min(id) as id,min(timestamp)
                from object_instance_states
                where ({}) and player_number > 0 and class_id=70
                      and not (object_id=any(:herdable_ids))
                group by instance_id
            ) as s on ois.id=s.id
            join object_instances as oi
                on ois.instance_id=oi.instance_id and oi.match_id=ois.match_id
            where oi.created > '00:00:10'
            group by player_number, obj_id, ois.dataset_id, oi.match_id, inter
            order by count desc
        ) as x
        join objects on x.obj_id=objects.id and x.dataset_id=objects.dataset_id
        order by timestamp, name
    """.format(where)
    # 300-second (5 minute) buckets; NORMALIZE_VALUES supplies the id lists and
    # normalized ids consumed by the CASE expression above.
    results = await context.database.fetch_all(
        query, values=dict(values, interval=300, **NORMALIZE_VALUES))
    return by_key(results, ('match_id', 'player_number'))
async def object_count_query(database, match_ids, object_ids):
    """Count live instances of the given objects per 30-second interval.

    For every (match, player) the inner generate_series produces one row per
    sample interval across the match duration; the left join then counts the
    object instances alive during each interval.
    """
    query = """
        select ts.match_id, ts.number as player_number, ts.ts as timestamp,
               extract(epoch from ts.ts)::integer as timestamp_secs,
               sum(case when oi.created is null then 0 else 1 end) as count
        from (
            SELECT match_id, number,
                   make_interval(secs => extract('epoch' from generate_series(min(to_timestamp(0))::timestamp, max(to_timestamp(extract('epoch' from duration)))::timestamp, :sample_rate * interval '1 second'))) AS ts
            FROM players join matches on players.match_id=matches.id
            where match_id = any(:match_ids)
            GROUP BY match_id, number
        ) as ts
        left join (
            select match_id, initial_player_number, created, destroyed
            from object_instances
            where initial_object_id = any(:object_ids) and match_id = any(:match_ids)
        ) as oi
            on oi.created <= (ts + (:sample_rate * interval '1 second'))
            and (oi.destroyed is null or oi.destroyed >= ts)
            and ts.match_id=oi.match_id
            and ts.number=oi.initial_player_number
        group by ts.match_id, ts.number, ts.ts
        order by ts.match_id, ts.number, ts.ts
    """
    # Sample every 30 seconds.
    params = {'match_ids': match_ids, 'object_ids': object_ids, 'sample_rate': 30}
    rows = await database.fetch_all(query, values=params)
    return by_key(rows, ('match_id', 'player_number'))
async def get_apm(keys, context):
    """Compute actions per minute as a rolling sum over 30-second samples.

    ``keys`` are (match_id, ...) tuples; only the match ids are used to scope
    the query.  Results are grouped by (match_id, player_number).
    """
    # The query needs only the match ids out of the composite keys.
    match_ids = [key[0] for key in keys]
    query = """
        select x.match_id, x.player_number, x.ts as timestamp,
               extract(epoch from x.ts)::integer as timestamp_secs,
               sum(x.actions) over (partition by x.match_id, x.player_number ORDER BY x.ts rows between (60/:sample_rate)-1 preceding and current row) as actions
        from (
            select al.match_id, al.player_number, al.ts,
                   count(action_log.action_id) as actions
            from action_log
            right join (
                select match_id, number as player_number,
                       make_interval(secs => extract('epoch' from generate_series(min(to_timestamp(0))::timestamp, max(to_timestamp(extract('epoch' from duration)))::timestamp, :sample_rate * interval '1 seconds'))) AS ts
                from players join matches on players.match_id=matches.id
                where match_id = any(:match_ids)
                group by match_id, number
            ) as al
                on action_log.match_id=al.match_id
                and make_interval(secs => floor((extract('epoch' from action_log.timestamp) / :sample_rate )) * :sample_rate)=al.ts
                and action_log.player_number=al.player_number
            where action_log.match_id = any(:match_ids)
            group by al.match_id, al.player_number, al.ts
        ) as x
        order by x.match_id, x.player_number, x.ts
    """
    rows = await context.database.fetch_all(
        query, values={'match_ids': match_ids, 'sample_rate': 30})
    return by_key(rows, ('match_id', 'player_number'))
async def odds_query(database, teams, type_id, match_filters=None, civ_filter=False, user_filter=False):  # pylint: disable=too-many-arguments, too-many-locals
    """Run a query with odds constraints.

    :param database: async database connection.
    :param teams: mapping of team index -> list of player dicts (each with
        ``user_id`` and/or ``civilization_id`` depending on the filters).
    :param type_id: match type to restrict to.
    :param match_filters: optional (sql_fragment, values) extra match filter.
    :param civ_filter: constrain each team by its civilization ids.
    :param user_filter: constrain each team by its user ids.
    :returns: odds computed by ``compute_odds`` over the matching matches.
    :raises ValueError: if no filter flag is set or ``teams`` is empty (the
        original code would otherwise fail with a NameError because ``key``
        and ``player_filters`` are only bound inside the filter branches).
    """
    if not (civ_filter or user_filter):
        raise ValueError('odds_query requires civ_filter and/or user_filter')
    if not teams:
        raise ValueError('odds_query requires at least one team')
    start_time = time.time()
    # e.g. "2v2" — matches the matches.team_size column format.
    team_size = 'v'.join(str(len(t)) for t in teams.values())
    values = {'team_size': team_size, 'type_id': type_id}
    # The trailing "players.{}" placeholder is filled with the filter key once
    # all team subqueries have been appended.
    match_query = """
        select matches.id, players.winner, players.{}
        from matches join players on matches.id=players.match_id
    """
    for i, team in teams.items():
        if user_filter and civ_filter:
            # Exact (user, civilization) pairings.
            key = 'civilization_id'
            keys = [(p['user_id'], p['civilization_id']) for p in team]
            player_filters, player_values = compound_where(
                keys, ('players.user_id', 'players.civilization_id'))
            values.update(player_values)
        elif civ_filter:
            key = 'civilization_id'
            player_filters = " players.civilization_id=any(:civilization_ids_{})".format(i)
            values.update({
                'civilization_ids_{}'.format(i): [p['civilization_id'] for p in team]
            })
        elif user_filter:
            key = 'user_id'
            player_filters = " players.user_id=any(:user_ids_{})".format(i)
            values.update(
                {'user_ids_{}'.format(i): [p['user_id'] for p in team]})
        # One subquery per team: matches where that team fields exactly the
        # requested distinct set of users/civilizations.
        team_query = """
            select match_id from players
            where {}
            group by match_id, team_id
            having count(distinct players.{}) = {}
        """.format(player_filters, key, len({p[key] for p in team}))
        match_query += " join ({0}) as t{1} on matches.id=t{1}.match_id".format(
            team_query, i)
    match_query += " where matches.team_size=:team_size and matches.type_id=:type_id"
    match_query = match_query.format(key)
    if match_filters is not None:
        match_query += ' and ' + match_filters[0]
        values.update(match_filters[1])
    result = await database.fetch_all(match_query, values=values)
    result = compute_odds(by_key(result, 'id').values(), key, teams)
    LOGGER.debug("computed odds in %f", time.time() - start_time)
    return result
async def get_odds(database, params):
    """Get odds based on parameters.

    Fans out one ``odds_query`` per requested perspective (teams, map,
    civilizations and their combinations) and gathers them concurrently.
    """
    LOGGER.info("generating odds")
    start_time = time.time()
    players = [
        dict(civilization_id=data['civilization_id'],
             user_id=data['user_id'],
             winner=data['winner'],
             team_id=data['team_id'])
        for data in params['players']
    ]
    teams = by_key(players, 'team_id')
    num_unique_civs = len(
        {p['civilization_id'] for p in players if 'civilization_id' in p})
    map_filter = ("matches.map_name=:map_name", {
        'map_name': params['map_name']
    })

    labels = []
    pending = []

    def enqueue(label, **kwargs):
        # Record the label and the matching odds_query coroutine in lockstep.
        labels.append(label)
        pending.append(odds_query(database, teams, params['type_id'], **kwargs))

    if 'teams' in params:
        enqueue('teams', user_filter=True)
    if 'map_name' in params:
        enqueue('teams_and_map', match_filters=map_filter, user_filter=True)
    if num_unique_civs > 1:
        enqueue('teams_and_civilizations', civ_filter=True, user_filter=True)
        enqueue('civilizations', civ_filter=True)
    if 'map_name' in params and num_unique_civs > 1:
        enqueue('civilizations_and_map', match_filters=map_filter, civ_filter=True)

    results = await asyncio.gather(*pending)
    LOGGER.debug("computed all odds in %f", time.time() - start_time)
    return dict(zip(labels, results))
async def get_map_events(keys, context):
    """Get events on a map.

    ``keys`` are map names; returns rows grouped by map_name.
    """
    query = """
        select events.id, events.name, event_maps.name as map_name
        from event_maps join events on event_maps.event_id=events.id
        where event_maps.name=any(:names)
    """
    rows = await context.database.fetch_all(query, values={'names': keys})
    return by_key(rows, 'map_name')
def make_files(player_data, file_data, match_id):
    """Make files structures.

    Builds one dict per file of the match, adding a download link and the
    owning player record (first player with the file's owner_number).
    """
    # NOTE(review): this 3-argument definition is shadowed by a later
    # make_files(..., url_func) definition in this module — confirm whether
    # this hard-coded "/api/download/" variant is still needed.
    by_number = by_key(player_data, 'number')
    return [
        dict(file_,
             download_link=f"/api/download/{file_['id']}",
             owner=by_number[file_['owner_number']][0])
        for file_ in file_data[match_id]
    ]
def make_files(player_data, file_data, match_id, url_func):
    """Build file description dicts for a match.

    Each file row is extended with a download link (built via ``url_func``)
    and the owning player record.
    """
    owners = by_key(player_data, 'number')
    output = []
    for entry in file_data[match_id]:
        output.append(dict(
            entry,
            download_link=url_func('download', file_id=entry['id']),
            owner=owners[entry['owner_number']][0]))
    return output
def make_teams(player_data, match_id):
    """Group players into team structures and find the winning team.

    Returns a (teams, winning_team) tuple; ``winning_team`` is None when no
    team has a winning player.
    """
    teams = []
    for team_id, members in by_key(player_data, 'team_id').items():
        teams.append(dict(
            team_id=team_id,
            winner=any(p['winner'] for p in members),
            players=members,
            match_id=match_id))
    winning_team = None
    for team in teams:
        if team['winner']:
            winning_team = team
            break
    return teams, winning_team
async def get_research_by_player(keys, context):
    """Fetch researched technologies for the given (match, player) pairs.

    Rows are ordered by start time and grouped by (match_id, player_number).
    """
    clause, params = compound_where(keys, ('match_id', 'player_number'))
    query = """
        select name, started::interval(0), finished::interval(0),
               player_number, match_id,
               extract(epoch from started)::integer as started_secs,
               extract(epoch from finished)::integer as finished_secs
        from research
        join technologies on research.technology_id=technologies.id
             and research.dataset_id=technologies.dataset_id
        where {}
        order by started
    """.format(clause)
    rows = await context.database.fetch_all(query, values=params)
    return by_key(rows, ('match_id', 'player_number'))
async def get_villager_allocation(keys, context):
    """Get villager allocation per player bucketed by interval.

    :param keys: iterable of (match_id, player_number) pairs to filter on.
    :param context: request context exposing ``database``.
    :returns: per-bucket villager counts by resource, grouped by
        (match_id, player_number).
    """
    where, values = compound_where(keys, ('match_id', 'player_number'))
    # The CTE takes each villager's last state per :interval bucket, maps its
    # object id onto a resource label ('Food'/'Wood'/'Gold'/'Stone'/'idle'),
    # then self-joins on row_number to find when the villager next changes
    # state ("next").  The outer join spreads each state across every bucket
    # it spans (state start <= bucket < next change, and the villager exists
    # during the bucket), then counts distinct villagers per resource.
    query = """
        select vils.match_id, vils.player_number, vils.res as name,
               buckets.inter as timestamp,
               extract(epoch from buckets.inter)::integer as timestamp_secs,
               count(distinct vils.instance_id)
        from (
            with subquery as (
                select row_number() over (order by x.instance_id, inter),
                       x.match_id, ois.player_number, x.instance_id, x.inter,
                       objects.name, oi.created, oi.destroyed,
                       case
                           when object_id = any(:food_vils) then 'Food'
                           when object_id = any(:wood_vils) then 'Wood'
                           when object_id = any(:gold_vils) then 'Gold'
                           when object_id = any(:stone_vils) then 'Stone'
                           else 'idle'
                       end as res
                from (
                    select max(id) as id, match_id, instance_id,
                           make_interval(secs => floor((extract('epoch' from timestamp) / :interval )) * :interval) as inter
                    from object_instance_states
                    where object_id = any(:resource_vils) and ({where})
                    group by instance_id, inter, match_id
                ) as x
                join object_instance_states as ois
                    on x.match_id=ois.match_id and x.id=ois.id
                join object_instances oi
                    on oi.match_id=ois.match_id and oi.instance_id=ois.instance_id
                join objects
                    on objects.id=ois.object_id and objects.dataset_id=ois.dataset_id
            )
            select f.*, l.inter as next
            from subquery as f
            left join subquery as l
                on f.row_number=l.row_number-1 and f.instance_id=l.instance_id
        ) as vils
        join (
            select make_interval(secs => floor((extract('epoch' from timestamp) / :interval )) * :interval) as inter
            from object_instance_states as ois
            where ({where})
            group by inter
            order by inter
        ) as buckets
            on vils.inter <= buckets.inter
            and (vils.next is null or vils.next > buckets.inter)
            and vils.created <= (buckets.inter + (:interval * interval '1 second'))
            and (vils.destroyed is null or vils.destroyed >= buckets.inter)
        group by vils.match_id, vils.player_number, vils.res, buckets.inter
        order by buckets.inter, vils.player_number, vils.res
    """.format(where=where)
    # 300-second (5 minute) buckets; the *_VILLAGER_IDS constants classify
    # villager object ids by the resource they gather.
    results = await context.database.fetch_all(query, values=dict(
        values,
        resource_vils=RESOURCE_VILLAGER_IDS,
        food_vils=FOOD_VILLAGER_IDS,
        wood_vils=WOOD_VILLAGER_IDS,
        gold_vils=GOLD_VILLAGER_IDS,
        stone_vils=STONE_VILLAGER_IDS,
        interval=300
    ))
    return by_key(results, ('match_id', 'player_number'))
async def get_transactions(keys, context):
    """Fetch market transactions for the given (match, player) pairs.

    ``defaults=keys`` ensures every requested pair appears in the result even
    when a player made no transactions.
    """
    clause, params = compound_where(keys, ('players.match_id', 'player_number'))
    query = """
        select players.match_id, timestamp::interval(0),
               extract(epoch from timestamp)::integer as timestamp_secs,
               player_number,
               case when action_id=123 then 'Gold' else resources.name end as sold_resource,
               (amount * 100) as sold_amount,
               case when action_id=123 then resources.name else 'Gold' end as bought_resource
        from players
        left join transactions on transactions.match_id=players.match_id
             and transactions.player_number = players.number
        join resources on transactions.resource_id=resources.id
        where {}
        order by timestamp
    """.format(clause)
    rows = await context.database.fetch_all(query, values=params)
    return by_key(rows, ('match_id', 'player_number'), defaults=keys)
async def get_timeseries(keys, context):
    """Fetch the per-tick timeseries rows for the given (match, player) pairs.

    Adds derived columns (kd_delta, damage ratio, research ROI) on top of the
    stored metrics, ordered by timestamp.
    """
    clause, params = compound_where(keys, ('match_id', 'player_number'))
    query = """
        select player_number, match_id, timestamp, population, military,
               percent_explored, relic_gold, total_food, total_gold,
               total_stone, total_wood, trade_profit, value_current_units,
               value_lost_buildings, value_lost_units, value_objects_destroyed,
               value_spent_objects, value_spent_research,
               extract(epoch from timestamp)::integer as timestamp_secs,
               tribute_sent, tribute_received, kills, deaths, razes,
               kills - deaths as kd_delta,
               case when value_lost_units+value_lost_buildings > 0 then value_objects_destroyed/(value_lost_units+value_lost_buildings)::float else 0.0 end as damage,
               case when value_spent_research > 100 then value_objects_destroyed/(value_spent_research)::float * 100 else 0.0 end as roi
        from timeseries
        where {}
        order by timestamp
    """.format(clause)
    rows = await context.database.fetch_all(query, values=params)
    return by_key(rows, ('match_id', 'player_number'))
async def get_maps(database):
    """Get all maps with match counts, play percentage, and their events."""
    map_query = """
        select (case when maps.id > 0 then true else false end) as builtin,
               map_name as name, count(matches.id) as count
        from matches
        left join maps on matches.map_name=maps.name
             and matches.dataset_id=maps.dataset_id
        group by map_name, maps.id
        order by count(matches.id) desc
    """
    event_query = """
        select events.id, events.name as name, event_maps.name as map_name
        from event_maps join events on event_maps.event_id=events.id
    """
    # Run the total count, per-map stats, and event lookup concurrently.
    total, rows, events = await asyncio.gather(
        database.fetch_one('select count(*) as count from matches'),
        database.fetch_all(map_query),
        database.fetch_all(event_query))
    events_by_map = by_key(events, 'map_name')
    output = []
    for entry in rows:
        output.append(dict(
            entry,
            percent=entry['count'] / total['count'],
            events=events_by_map[entry['name']]))
    return output
async def get_map_control(keys, context):
    """Get estimated map control from actions.

    :param keys: iterable of (match_id, ...) tuples; only match ids are used.
    :param context: request context exposing ``database``.
    :returns: control percentage per sample interval, grouped by
        (match_id, player_number).
    """
    # Innermost query: for each logged action with coordinates, compute the
    # distances from the action point to the acting player's start, to each
    # opponent's start, and between the two starts.  The middle query turns
    # that into a 0-100 "control" score per second and smooths it with a
    # moving average over :sample_rate seconds (right join against a
    # generate_series of 1-second ticks so gaps count as no data).  The outer
    # query re-buckets to :sample_rate intervals, and the final select
    # contrasts each player's control against an opposing player's to yield
    # control_percent.  NOTE(review): with more than two (team, player)
    # combinations the final self-join produces one row per opponent —
    # presumably intended for 1v1; confirm for team games.
    query = """
        with subquery as (
            select x.match_id, x.player_number, x.team_id,
                   make_interval(secs => floor((extract('epoch' from x.ts) / :sample_rate )) * :sample_rate) as timestamp,
                   case when avg(x.ma) is not null then avg(x.ma) else 0 end as control
            from (
                select al.match_id, al.player_number, al.team_id, al.ts,
                       max(round((((to_me - to_opp) + between) / (between * 2)) * 100)) as control,
                       avg(max(round((((to_me - to_opp) + between) / (between * 2)) * 100))) over (partition by al.match_id, al.team_id, al.player_number ORDER BY al.ts rows between :sample_rate-1 preceding and current row) as ma
                from (
                    select al.match_id, al.player_number, al.timestamp,
                           opps.number as other_number,
                           sqrt(power(al.action_x-players.start_x, 2) + power(al.action_y-players.start_y, 2)) as to_me,
                           sqrt(power(al.action_x-opps.start_x, 2) + power(al.action_y-opps.start_y, 2)) as to_opp,
                           sqrt(power(opps.start_x-players.start_x, 2) + power(opps.start_y-players.start_y, 2)) as between
                    from action_log as al
                    join players on al.match_id=players.match_id
                         and al.player_number=players.number
                    join players as opps on opps.match_id=players.match_id
                         and opps.number!=players.number
                         and opps.team_id!=players.team_id
                    where al.action_x is not null and al.action_y is not null
                          and al.match_id = any(:match_ids)
                ) as x
                right join (
                    select match_id, team_id, number as player_number,
                           make_interval(secs => extract('epoch' from generate_series(min(to_timestamp(0))::timestamp, max(to_timestamp(extract('epoch' from duration)))::timestamp, interval '1 seconds'))) AS ts
                    from players join matches on players.match_id=matches.id
                    where match_id = any(:match_ids)
                    group by match_id, team_id, number
                ) as al on x.match_id=al.match_id
                     and x.player_number=al.player_number
                     and make_interval(secs => floor((extract('epoch' from x.timestamp) / 1 )) * 1)=al.ts
                group by al.match_id, al.player_number, al.team_id, al.ts
            ) as x
            group by x.match_id, x.player_number, x.team_id,
                     make_interval(secs => floor((extract('epoch' from x.ts) / :sample_rate)) * :sample_rate)
        )
        select a.match_id, a.player_number, a.timestamp,
               extract(epoch from a.timestamp)::integer as timestamp_secs,
               round((((a.control-b.control)+100)/200)*100) as control_percent
        from subquery as a
        join subquery as b on a.match_id=b.match_id and a.timestamp=b.timestamp
             and a.player_number != b.player_number and a.team_id != b.team_id
        order by a.timestamp
    """
    # Sample at 30-second resolution.
    results = await context.database.fetch_all(
        query, values=dict(match_ids=[k[0] for k in keys], sample_rate=30))
    return by_key(results, ('match_id', 'player_number'))
async def get_match(keys, context):
    """Get a match.

    :param keys: list of match ids.
    :param context: request context exposing ``database`` and ``request``.
    :returns: dict of match_id -> fully-assembled match dict (players, teams,
        files, links, and related event/tournament/series/platform records).
    """
    # Players with their team, color, civilization, ratings, postgame stats,
    # and (if linked) platform user and person records.
    player_query = """
        select players.match_id, players.team_id, players.number, players.name,
               players.winner, teams.winner as t_winner,
               player_colors.name as color, players.color_id,
               civilizations.id as civilization_id,
               civilizations.name as civilization_name, players.dataset_id,
               players.platform_id, players.user_id, players.user_name,
               rate_snapshot, rate_before, rate_after, mvp, human, score,
               military_score, economy_score, technology_score, society_score,
               units_killed, buildings_razed, buildings_lost, units_converted,
               food_collected, wood_collected, stone_collected, gold_collected,
               tribute_sent, tribute_received, trade_gold, relic_gold,
               units_lost, feudal_time, castle_time, imperial_time,
               extract(epoch from feudal_time)::integer as feudal_time_secs,
               extract(epoch from castle_time)::integer as castle_time_secs,
               extract(epoch from imperial_time)::integer as imperial_time_secs,
               explored_percent, research_count, total_wonders, total_castles,
               total_relics, villager_high, people.id as person_id,
               people.country, people.name as person_name
        from players
        join teams on players.team_id=teams.team_id
             and players.match_id=teams.match_id
        join player_colors on players.color_id=player_colors.id
        join civilizations on players.dataset_id=civilizations.dataset_id
             and players.civilization_id=civilizations.id
        join datasets on players.dataset_id=datasets.id
        left join platforms on players.platform_id=platforms.id
        left join users on players.platform_id=users.platform_id
             and players.user_id=users.id
        left join people on users.person_id=people.id
        where players.match_id=any(:match_id)
    """
    # Recorded-game files attached to the match.
    file_query = """
        select id, match_id, size, original_filename, language, encoding,
               owner_number
        from files where match_id=any(:match_id)
    """
    # Match header plus all its lookup-table joins (dataset, platform, event,
    # tournament, series, ladder, settings).
    match_query = """
        select matches.id, map_name, rms_seed, matches.dataset_id,
               datasets.name as dataset_name, matches.platform_id,
               platforms.name as platform_name, platforms.url as platform_url,
               platforms.match_url as platform_match_url, matches.event_id,
               events.name as event_name, matches.tournament_id,
               tournaments.name as tournament_name, matches.series_id,
               series_metadata.name as series_name, matches.ladder_id,
               ladders.name as ladder_name, difficulties.name as difficulty,
               game_types.name as type, matches.type_id,
               map_reveal_choices.name as map_reveal_choice,
               map_sizes.name as map_size, speeds.name as speed,
               starting_ages.name as starting_age,
               starting_resources.name as starting_resources,
               victory_conditions.name as victory_condition, played, rated,
               diplomacy_type, team_size, platform_match_id, cheats,
               population_limit, lock_teams, mirror, dataset_version, postgame,
               has_playback, duration::interval(0), versions.name as version,
               extract(epoch from duration)::integer as duration_secs,
               winning_team_id, game_version, save_version, build, rms_seed,
               rms_custom, direct_placement, fixed_positions, guard_state,
               effect_quantity
        from matches
        join versions on matches.version_id=versions.id
        join datasets on matches.dataset_id=datasets.id
        join difficulties on matches.difficulty_id=difficulties.id
        join game_types on matches.type_id=game_types.id
        join map_reveal_choices on matches.map_reveal_choice_id=map_reveal_choices.id
        join map_sizes on matches.map_size_id=map_sizes.id
        join speeds on matches.speed_id=speeds.id
        left join platforms on matches.platform_id=platforms.id
        left join starting_ages on matches.starting_age_id=starting_ages.id
        left join starting_resources on matches.starting_resources_id=starting_resources.id
        left join victory_conditions on matches.victory_condition_id=victory_conditions.id
        left join ladders on matches.ladder_id=ladders.id
             and matches.platform_id=ladders.platform_id
        left join events on matches.event_id=events.id
        left join tournaments on matches.tournament_id=tournaments.id
        left join series_metadata on matches.series_id=series_metadata.series_id
        where matches.id=any(:id)
    """
    # All three queries run concurrently.
    matches, players, files = await asyncio.gather(
        context.database.fetch_all(match_query, values={'id': keys}),
        context.database.fetch_all(player_query, values={'match_id': keys}),
        context.database.fetch_all(file_query, values={'match_id': keys}))
    output = {}
    for match in matches:
        match_id = match['id']
        player_data = make_players(by_key(players, 'match_id'), match_id)
        team_data, winning_team = make_teams(player_data, match_id)
        # Nested sub-records are None when the corresponding id is absent.
        output[match_id] = dict(
            match,
            players=player_data,
            teams=team_data,
            winning_team=winning_team,
            minimap_link=context.request.url_for('minimap', match_id=match_id),
            event=dict(id=match['event_id'], name=match['event_name'])
            if match['event_id'] else None,
            tournament=dict(id=match['tournament_id'],
                            name=match['tournament_name'])
            if match['tournament_id'] else None,
            series=dict(id=match['series_id'], name=match['series_name'])
            if match['series_id'] else None,
            files=make_files(player_data, by_key(files, 'match_id'), match_id,
                             context.request.url_for),
            dataset=dict(id=match['dataset_id'], name=match['dataset_name']),
            platform=dict(id=match['platform_id'],
                          name=match['platform_name'],
                          url=match['platform_url'],
                          match_url=match['platform_match_url'])
            if match['platform_id'] else None,
            ladder=dict(id=match['ladder_id'],
                        name=match['ladder_name'],
                        platform_id=match['platform_id'])
            if match['ladder_id'] else None)
    return output
async def get_event(database, event_id):
    """Get an event.

    :param database: async database connection.
    :param event_id: id of the event to assemble.
    :returns: event dict with maps, players, and tournaments (each tournament
        carrying its series and participants).
    """
    events_query = 'select id, name, year from events where id=:event_id'
    tournaments_query = 'select id, event_id, name from tournaments where event_id=:event_id'
    # Series under the event, via rounds -> tournaments.
    series_query = """
        select series.id, series.played, series_metadata.name,
               rounds.tournament_id
        from series
        join rounds on series.round_id=rounds.id
        join tournaments on rounds.tournament_id=tournaments.id
        join series_metadata on series.id=series_metadata.series_id
        where tournaments.event_id=:event_id
        order by series.id
    """
    participants_query = """
        select series_id, participants.name, score, winner
        from participants
        join series on participants.series_id=series.id
        join rounds on series.round_id=rounds.id
        join tournaments on rounds.tournament_id=tournaments.id
        where tournaments.event_id=:event_id
    """
    # Per-map aggregates: play share and modal (most played) civilization.
    maps_query = """
        select map_name, avg(matches.duration)::interval(0) as avg_duration,
               count(distinct match_id) as matches,
               max(players.dataset_id) as dataset_id,
               round(count(distinct match_id)/(select count(*) from matches where event_id=:event_id)::numeric, 2) as played_percent,
               mode() within group (order by civilizations.id) as most_played_civ_id,
               mode() within group (order by civilizations.name) as most_played_civ_name
        from players
        join civilizations on civilizations.dataset_id=players.dataset_id
             and civilizations.id = players.civilization_id
        join matches on players.match_id=matches.id
        where event_id=:event_id
        group by map_name
        order by count(*) desc
    """
    # Per-player aggregates, grouped by person where known, otherwise by
    # in-game name (the CASE in the GROUP BY).
    players_query = """
        select max(players.name) as name,
               max(players.platform_id) as platform_id,
               max(user_id) as user_id, max(people.id) as person_id,
               max(people.name) as person_name,
               max(people.country) as country, count(*) as matches,
               round(sum(players.winner::int)/count(*)::numeric, 2) as win_percent,
               max(matches.dataset_id) as dataset_id,
               avg(matches.duration)::interval(0) as avg_duration,
               mode() within group (order by civilizations.id) as most_played_civ_id,
               mode() within group (order by civilizations.name) as most_played_civ_name,
               mode() within group (order by matches.map_name) as most_played_map
        from players
        join civilizations on civilizations.dataset_id=players.dataset_id
             and civilizations.id = players.civilization_id
        join matches on players.match_id=matches.id
        left join users on players.platform_id=users.platform_id
             and players.user_id=users.id
        left join people on users.person_id=people.id
        where event_id=:event_id
        group by case when people.id is not null then people.id::varchar else players.name end
        order by count(*) desc, sum(players.winner::int)/count(*)::numeric desc
    """
    # Fetch everything concurrently.
    event, tournaments, series, maps, players, participants = await asyncio.gather(
        database.fetch_one(events_query, values={'event_id': event_id}),
        database.fetch_all(tournaments_query, values={'event_id': event_id}),
        database.fetch_all(series_query, values={'event_id': event_id}),
        database.fetch_all(maps_query, values={'event_id': event_id}),
        database.fetch_all(players_query, values={'event_id': event_id}),
        database.fetch_all(participants_query, values={'event_id': event_id})
    )
    series_data = by_key(series, 'tournament_id')
    participant_data = by_key(participants, 'series_id')
    # Assemble the nested response structure.
    return dict(
        event,
        maps=[
            dict(
                map=dict(
                    name=m['map_name']
                ),
                average_duration=m['avg_duration'],
                match_count=m['matches'],
                played_percent=m['played_percent'],
                most_played_civilization=dict(
                    id=m['most_played_civ_id'],
                    name=m['most_played_civ_name'],
                    dataset_id=m['dataset_id']
                )
            ) for m in maps
        ],
        players=[
            dict(
                player=dict(
                    name=player['name'],
                    user=dict(
                        id=player['user_id'],
                        name=player['name'],
                        platform_id=player['platform_id'],
                        person=dict(
                            id=player['person_id'],
                            country=player['country'],
                            name=player['person_name']
                        ) if player['person_id'] else None
                    ) if player['user_id'] else None
                ),
                match_count=player['matches'],
                win_percent=player['win_percent'],
                average_duration=player['avg_duration'],
                most_played_map=player['most_played_map'],
                most_played_civilization=dict(
                    id=player['most_played_civ_id'],
                    name=player['most_played_civ_name'],
                    dataset_id=player['dataset_id']
                )
            ) for player in players
        ],
        tournaments=[dict(
            tournament,
            series=[dict(
                series_,
                participants=participant_data[series_['id']],
            ) for series_ in series_data[tournament['id']]]
        ) for tournament in tournaments]
    )