def setup_session(url: str) -> None:
    """Complete the Discord OAuth2 flow and populate the Flask session.

    Exchanges the authorization response at `url` for a token, stores the
    token in the session, then fetches the user's profile and guild list to
    set id/locale and the in_guild/admin/demimod flags.
    """
    oauth = make_session(state=session.get('oauth2_state'))
    token = oauth.fetch_token(TOKEN_URL, client_secret=OAUTH2_CLIENT_SECRET, authorization_response=url)
    session.permanent = True
    session['oauth2_token'] = token
    oauth = make_session(token=session.get('oauth2_token'))
    profile = oauth.get(API_BASE_URL + '/users/@me').json()
    session['id'] = profile['id']
    session['discord_id'] = profile['id']
    session['discord_locale'] = profile['locale']
    guilds = oauth.get(API_BASE_URL + '/users/@me/guilds').json()
    session['in_guild'] = False
    session['admin'] = False
    session['demimod'] = False
    saw_unexpected_entry = False  # protect against an unexpected response from discord
    for entry in guilds:
        if not (isinstance(entry, dict) and 'id' in entry):
            saw_unexpected_entry = True
            continue
        if entry['id'] != configuration.get('guild_id'):
            continue
        # Check for the MANAGE_ROLES permission on Discord as a proxy for "is admin".
        session['admin'] = (entry['permissions'] & 0x10000000) != 0
        # Check for the "Mention @everyone" permission on Discord as a proxy for "is demimod".
        session['demimod'] = (entry['permissions'] & 0x20000) != 0
        session['in_guild'] = True
    if saw_unexpected_entry:
        logger.warning('auth.py: unexpected discord response. Guilds: {g}'.format(g=guilds))
def find_matches(d: deck.Deck, rows: ResultSet) -> MatchListType:
    """Parse a Gatherling matchups table into a list of match dicts for deck `d`.

    Each row yields a dict with the round number, elimination bracket size
    (0 for Swiss), game counts and the opposing deck's Gatherling identifier
    (None for byes or missing decks).

    Raises InvalidDataException if a round label is neither Swiss (R) nor
    Top 4/8 (T).
    """
    matches = []
    # BUG FIX: round_num was previously only assigned inside the loop, so a
    # table whose first row is a Top 4/8 ('T') round raised UnboundLocalError
    # on `round_num += 1`. Start from 0 so elimination rounds count up safely.
    round_num = 0
    for row in rows:
        tds = row.find_all('td')
        if 'No matches were found for this deck' in tds[0].renderContents().decode('utf-8'):
            logger.warning('Skipping {identifier} because it played no matches.'.format(identifier=d.identifier))
            break
        round_type, num = re.findall(r'([TR])(\d+)', tds[0].string)[0]
        num = int(num)
        if round_type == 'R':
            # Swiss round: the round number is given explicitly.
            elimination = 0
            round_num = num
        elif round_type == 'T':
            # Top 4/8: num is the bracket size; rounds keep counting upwards.
            elimination = num
            round_num += 1
        else:
            raise InvalidDataException('Round was neither Swiss (R) nor Top 4/8 (T) in {round_type} for {id}'.format(round_type=round_type, id=d.id))
        if 'Bye' in tds[1].renderContents().decode('utf-8') or 'No Deck Found' in tds[5].renderContents().decode('utf-8'):
            left_games, right_games, right_identifier = 2, 0, None
        else:
            left_games, right_games = tds[2].string.split(' - ')
            href = tds[5].find('a')['href']
            right_identifier = re.findall(r'id=(\d+)', href)[0]
        matches.append({
            'round': round_num,
            'elimination': elimination,
            'left_games': left_games,
            'left_identifier': d.identifier,
            'right_games': right_games,
            'right_identifier': right_identifier,
        })
    return matches
def run() -> None:
    """Replay every recorded match in chronological order to rebuild Elo
    ratings, then write back any ratings that changed."""
    sql = """
        SELECT
            GROUP_CONCAT(d.person_id) AS people,
            GROUP_CONCAT(dm.games) AS games
        FROM
            `match` AS m
        INNER JOIN
            deck_match AS dm ON dm.match_id = m.id
        INNER JOIN
            deck AS d ON dm.deck_id = d.id
        GROUP BY
            m.id
        ORDER BY
            m.date, `round`
    """
    for row in db().select(sql):
        match(row)
    people_by_id = {p.id: p for p in person.load_people()}
    update_sql = 'UPDATE person SET elo = %s WHERE id = %s'
    # Iterate descending by new Elo value.
    for person_id, new_elo in sorted(PEOPLE.items(), key=lambda kv: kv[1], reverse=True):
        p = people_by_id[int(person_id)]
        if p.elo != new_elo:
            logger.warning('{id} currently has Elo of {current_elo} and we are setting it to {new_elo}'.format(id=p.id, current_elo=p.elo, new_elo=new_elo))
            db().execute(update_sql, [new_elo, p.id])
def not_found(self, e: Exception) -> Union[Response, Tuple[str, int]]:
    """404 handler: JSON for API paths, an HTML page otherwise.

    The /error/HTTP_BAD_GATEWAY probe path is special-cased to a 502 JSON
    response.
    """
    if request.path.startswith('/error/HTTP_BAD_GATEWAY'):
        return return_json(generate_error('BADGATEWAY', 'Bad Gateway'), status=502)
    referrer_note = ''
    if request.referrer:
        referrer_note = ', referrer: ' + request.referrer
    logger.warning('404 Not Found ' + request.path + referrer_note)
    if request.path.startswith('/api/'):
        return return_json(generate_error('NOTFOUND', 'Endpoint not found'), status=404)
    return NotFound(e).page(), 404
def acceptable_file(path: str) -> bool:
    """Return True if the chart at `path` exists and is plausibly non-empty.

    Anything under 6860 bytes — a few bytes smaller than a completely empty
    graph on prod — is treated as suspect and rejected with a warning.
    """
    if not os.path.exists(path):
        return False
    size = os.path.getsize(path)
    if size < 6860:
        logger.warning('Chart at {path} is suspiciously small.'.format(path=path))
        return False
    return True
def message_to_queue(message):
    """Parse a raw bus message as JSON, enqueue it, and kick off the validator.

    Messages that are not valid JSON are logged and dropped.
    """
    try:
        parsed = json.loads(message)
    except json.decoder.JSONDecodeError as e:
        logger.warning("message from bus is not a valid json: " + str(e))
        logger.debug(message)
        return
    message_queue.MessageQueue.put_message(parsed)
    logger.info("Message arrived from bus and inserted in queue")
    logger.debug(json.dumps(parsed))
    ValidatorThreadHandler.init_validator()
def add_cards(deck_id: int, cards: CardsDescription) -> None:
    """Insert maindeck and sideboard cards for `deck_id` in one transaction.

    Rolls back and re-raises on InvalidDataException.
    """
    try:
        db().begin('add_cards')
        for name, n in cards.get('maindeck', {}).items():
            insert_deck_card(deck_id, name, n, False)
        for name, n in cards.get('sideboard', {}).items():
            insert_deck_card(deck_id, name, n, True)
        db().commit('add_cards')
    except InvalidDataException as e:
        # BUG FIX: the message was previously logged unformatted — the {…}
        # placeholders were never filled and `e` was passed as a stray
        # positional argument to logger.warning.
        logger.warning('Unable to add_cards to {deck_id} with {cards} because of {e}'.format(deck_id=deck_id, cards=cards, e=e))
        db().rollback('add_cards')
        raise
def tournament_matches(d: deck.Deck) -> List[bs4.element.Tag]:
    """Fetch the Gatherling page for deck `d` and return its parsed matches.

    Returns an empty list (with a warning) when the page has no MATCHUPS
    section.
    """
    url = 'https://gatherling.com/deck.php?mode=view&id={identifier}'.format(identifier=d.identifier)
    html = fetch_tools.fetch(url, character_encoding='utf-8', retry=True)
    soup = BeautifulSoup(html, 'html.parser')
    marker = soup.find(string='MATCHUPS')
    if marker is None:
        logger.warning('Skipping {id} because it has no MATCHUPS.'.format(id=d.id))
        return []
    all_rows = marker.findParents('table')[0].find_all('tr')
    # Drop the header row and the empty trailing row.
    return find_matches(d, all_rows[1:-1])
def squash(p1id: int, p2id: int, col1: str, col2: str) -> None:
    """Merge person `p2id` into `p1id`: move their decks across, delete the
    duplicate row and copy the `col2` username onto the survivor.

    NOTE(review): col1/col2 are interpolated into SQL directly — callers must
    only pass trusted column names.
    """
    logger.warning('Squashing {p1id} and {p2id} on {col1} and {col2}'.format(p1id=p1id, p2id=p2id, col1=col1, col2=col2))
    db().begin('squash')
    moved_value = db().value(f'SELECT {col2} FROM person WHERE id = %s', [p2id])
    db().execute('UPDATE deck SET person_id = %s WHERE person_id = %s', [p1id, p2id])
    db().execute('DELETE FROM person WHERE id = %s', [p2id])
    db().execute(f'UPDATE person SET {col2} = %s WHERE id = %s', [moved_value, p1id])
    db().commit('squash')
def validate_803(msg):
    """Continue validation of a stored incident using a TOP803 weather report.

    Looks up the reportID carried by `msg` in shared.processed_mgs; unknown
    reportIDs are logged and ignored. Otherwise the spam verdict from
    __rule_1_after(msg) is forwarded to KBS via __incident_spam.
    """
    logger.info("Message TOP803 is processed.")
    logger.debug("TOP803 message: " + str(msg))
    # Only reports previously registered by a TOP030 can be validated here.
    if msg['body']['reportID'] not in shared.processed_mgs:
        logger.warning("Message TOP803 does not correspond to a stored report. ID: " + str(msg['body']['reportID']))
        return
    Validator.__incident_spam(msg['body']['reportID'], Validator.__rule_1_after(msg))
def __init_subclass__(cls) -> None:
    """Register each concrete achievement subclass, normalising its key first."""
    if cls.key is None:
        return
    # Strip anything outside [A-Za-z0-9_], in case anyone ever makes a poor
    # sportsmanship achievement called DROP TABLE.
    cls.key = re.sub('[^A-Za-z0-9_]+', '', cls.key)
    known_keys = [achievement.key for achievement in cls.all_achievements]
    if cls.key in known_keys:
        logger.warning(f"Two achievements have the same normalised key {cls.key}. This won't do any permanent damage to the database but the results are almost certainly not as intended.")
    if cls.key.endswith('_detail'):
        logger.warning(f"Achievement key {cls.key} should not end with the string '_detail'.")
    cls.all_achievements.append(cls())
def preaggregate(table: str, sql: str) -> None:
    """Atomically rebuild the preaggregated `table` from `sql` under a lock.

    Builds _new{table}, then swaps it in via RENAME TABLE so readers never
    see a half-built table. Skips the run entirely if the lock cannot be
    acquired.
    """
    lock_key = f'preaggregation:{table}'
    try:
        db().get_lock(lock_key, 60 * 5)
    except DatabaseException as e:
        logger.warning(f'Not preaggregating {table} because of {e}')
        # BUG FIX: previously execution fell through here and rebuilt the
        # table WITHOUT holding the lock, defeating the whole point.
        return
    try:
        db().execute(f'DROP TABLE IF EXISTS _new{table}')
        db().execute(sql)
        db().execute(f'DROP TABLE IF EXISTS _old{table}')
        db().execute(f'CREATE TABLE IF NOT EXISTS {table} (_ INT)')  # Prevent error in RENAME TABLE below if bootstrapping.
        db().execute(f'RENAME TABLE {table} TO _old{table}, _new{table} TO {table}')
        db().execute(f'DROP TABLE IF EXISTS _old{table}')
    finally:
        # Release the lock even if one of the statements above raises.
        db().release_lock(lock_key)
def __incident_spam(reportID, spam):
    """Report the spam verdict for `reportID` to KBS via a TOP801 message.

    A TOP801 is sent whether or not the report was judged spam, so that KBS
    always knows the validation step ran. If the TOP801 cannot be generated,
    a warning is logged and nothing is sent.
    """
    logger.info("Message IS " + ('' if spam else 'NOT ') + "SPAM! " + " Passed validation successfully. ID: " + str(reportID))
    msg = Validator.generate_TOP801(reportID, spam)
    if msg is None:
        logger.warning("TOP801 was not generated correctly")
        return
    Validator.bus_prod.send(topic=msg['header']['topicName'], message=json.dumps(msg))
def cache_all_rules() -> None:
    """Rebuild the _applied_rules cache table that maps decks to the
    archetype rules that matched them."""
    table = '_applied_rules'
    logger.warning(f'Creating {table}')
    # CREATE TABLE ... AS <select>: the table is populated directly from the
    # apply_rules query over all classified decks.
    sql = """
        CREATE TABLE IF NOT EXISTS _new{table} (
            deck_id INT NOT NULL,
            rule_id INT NOT NULL,
            archetype_id INT NOT NULL,
            archetype_name TEXT,
            PRIMARY KEY (deck_id, rule_id),
            FOREIGN KEY (deck_id) REFERENCES deck (id) ON UPDATE CASCADE ON DELETE CASCADE,
            FOREIGN KEY (rule_id) REFERENCES rule (id) ON UPDATE CASCADE ON DELETE CASCADE,
            FOREIGN KEY (archetype_id) REFERENCES archetype (id) ON UPDATE CASCADE ON DELETE CASCADE
        ) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci AS
        {apply_rules_query}
    """.format(table=table, apply_rules_query=apply_rules_query(deck_query=classified_decks_query()))
    # preaggregate handles locking and the _new/_old table swap.
    preaggregation.preaggregate(table, sql)
    logger.warning(f'Done creating {table}')
def setup() -> None:
    """Apply any unapplied SQL patches from decksite/sql, in numeric order.

    Patch files are named '<n>.<anything>'; each one newer than the current
    db_version is executed statement-by-statement and then recorded.
    """
    db().execute('CREATE TABLE IF NOT EXISTS db_version (version INTEGER UNIQUE NOT NULL)')
    version = db_version()
    patches = os.listdir('decksite/sql')
    patches.sort(key=lambda n: int(n.split('.')[0]))
    for fn in patches:
        path = os.path.join('decksite/sql', fn)
        n = int(fn.split('.')[0])
        if version < n:
            logger.warning('Patching database to v{0}'.format(n))
            # Use a context manager so the file is closed even if a
            # statement below raises (previously fh leaked on error).
            with open(path, 'r') as fh:
                sql = fh.read()
            for stmt in sql.split(';'):
                if stmt.strip() != '':
                    db().execute(stmt)
            db().execute('INSERT INTO db_version (version) VALUES ({n})'.format(n=n))
def run() -> None:
    """Find people who share a username across any two username columns and
    squash each such pair into one person, then rebuild Elo if anything
    changed."""
    run_elo = False
    for i, col1 in enumerate(USERNAME_COLUMNS):
        for col2 in USERNAME_COLUMNS[i + 1:]:
            sql = """
                SELECT
                    p1.id AS p1_id,
                    p2.id AS p2_id,
                    '{col1}' AS col1,
                    '{col2}' AS col2
                FROM
                    person AS p1
                LEFT JOIN
                    person AS p2 ON p1.{col1} = p2.{col2} AND p1.id <> p2.id
                WHERE
                    p1.id IS NOT NULL AND p2.id IS NOT NULL
            """.format(col1=col1, col2=col2)
            pairs = [Container(row) for row in db().select(sql)]
            if pairs:
                run_elo = True
            for pair in pairs:
                person.squash(pair.p1_id, pair.p2_id, pair.col1, pair.col2)
    if run_elo:
        logger.warning('Running maintenance task to correct all Elo ratings.')
        elo.run()
def validate(message):
    """Dispatch an incoming bus message to the TOP030 or TOP803 handler,
    chosen by its header topicName; anything else is logged and dropped."""
    print("Thread id in validator: " + str(threading.get_ident()))
    try:
        inc_topic = message['header']['topicName']
    except (KeyError, TypeError, ValueError, IndexError) as e:
        logger.warning("could not read topicName from message. Do nothing")
        logger.debug(e)
        logger.debug(message)
        return
    logger.info("Message is now processed. TOPIC: " + str(inc_topic))
    if inc_topic == 'TOP030_REPORT_REQUESTED':
        Validator.validate_TOP030(message)
        return
    if inc_topic == 'TOP803_WEATHER_REPORT':
        Validator.validate_803(message)
        return
    logger.warning("Message read in validator is not TOP030 nor TOP803")
def login(user: Optional[str] = None, password: Optional[str] = None) -> None:
    """Log the shared session in to TappedOut.

    Falls back to configured credentials when none are given; a page with no
    CSRF token is taken to mean we are already logged in.
    """
    if user is None:
        user = configuration.get_str('to_username')
    if password is None:
        password = configuration.get_str('to_password')
    if user == '' or password == '':
        logger.warning('No TappedOut credentials provided')
        return
    url = 'https://tappedout.net/accounts/login/'
    session = SESSION
    response = session.get(url)
    token_match = re.search(r"<input type='hidden' name='csrfmiddlewaretoken' value='(\w+)' />", response.text)
    if token_match is None:
        # No CSRF token on the page — presumably already logged in.
        return
    payload = {
        'csrfmiddlewaretoken': token_match.group(1),
        'next': '/',
        'username': user,
        'password': password,
    }
    logger.warning('Logging in to TappedOut as {0}'.format(user))
    response = session.post(url, data=payload, headers={'referer': url})
    if response.status_code == 403:
        logger.warning('Failed to log in')
def scrape(limit: int = 1) -> None:
    """Scrape up to `limit` pages of Penny Dreadful decks from MTG Goldfish
    and add each one to the database.

    Stops early when a page yields no deck tiles. Raises
    InvalidDataException when a deck's created date cannot be determined.
    """
    page = 1
    while page <= limit:
        time.sleep(0.1)  # small delay between page fetches
        url = 'https://www.mtggoldfish.com/deck/custom/penny_dreadful?page={n}#online'.format(n=page)
        soup = BeautifulSoup(fetch_tools.fetch(url, character_encoding='utf-8'), 'html.parser')
        raw_decks = soup.find_all('div', {'class': 'deck-tile'})
        if len(raw_decks) == 0:
            # An empty page means we have run out of decks.
            logger.warning('No decks found in {url} so stopping.'.format(url=url))
            break
        for raw_deck in raw_decks:
            d = Container({'source': 'MTG Goldfish'})
            a = raw_deck.select_one('.title > span.deck-price-online > a')
            d.identifier = re.findall(r'/deck/(\d+)#online', a.get('href'))[0]
            d.url = 'https://www.mtggoldfish.com/deck/{identifier}#online'.format(identifier=d.identifier)
            d.name = a.contents[0].strip()
            d.mtggoldfish_username = without_by(raw_deck.select_one('div.deck-tile-author').contents[0].strip())
            try:
                d.created_date = scrape_created_date(d)
            except InvalidDataException as e:
                # A missing created_date aborts the whole scrape, re-raised
                # with context about which deck failed.
                msg = f'Got {e} trying to find a created_date in {d}, {raw_deck}'
                logger.error(msg)
                raise InvalidDataException(msg) from e
            time.sleep(5)  # pause before fetching the decklist itself
            d.cards = scrape_decklist(d)
            err = vivify_or_error(d)
            if err:
                # Invalid decks are skipped, not fatal.
                logger.warning(err)
                continue
            deck.add_deck(d)
        page += 1
def tournament_deck(cells: ResultSet, competition_id: int, date: datetime.datetime, final: Dict[str, int]) -> Optional[deck.Deck]:
    """Build (or load) a deck from one row of a Gatherling tournament table.

    `cells` are the row's <td> tags; `final` maps mtgo usernames to finishing
    position. Returns the already-stored deck if this Gatherling id was
    imported before, None if the fetched decklist is empty, otherwise the
    newly added deck.
    """
    d: deck.RawDeckDescription = {'source': 'Gatherling', 'competition_id': competition_id, 'created_date': dtutil.dt2ts(date)}
    # NOTE(review): assumes cells[2] holds the player link, cells[4] the deck
    # link and cells[5] the archetype — confirm against Gatherling's markup.
    player = cells[2]
    username = aliased(player.a.contents[0].string)
    d['mtgo_username'] = username
    d['finish'] = final.get(username)
    link = cells[4].a
    d['url'] = gatherling_url(link['href'])
    d['name'] = link.string
    if cells[5].find('a'):
        d['archetype'] = cells[5].a.string
    else:
        d['archetype'] = cells[5].string
    # The Gatherling deck id is carried in the url's ?id= query parameter.
    gatherling_id = urllib.parse.parse_qs(urllib.parse.urlparse(str(d['url'])).query)['id'][0]
    d['identifier'] = gatherling_id
    existing = deck.get_deck_id(d['source'], d['identifier'])
    if existing is not None:
        return deck.load_deck(existing)
    dlist = decklist.parse(fetch_tools.post(gatherling_url('deckdl.php'), {'id': gatherling_id}))
    d['cards'] = dlist
    if len(dlist['maindeck']) + len(dlist['sideboard']) == 0:
        logger.warning('Rejecting deck with id {id} because it has no cards.'.format(id=gatherling_id))
        return None
    return deck.add_deck(d)
def ad_hoc() -> None:
    """Scrape TappedOut decks and add them, enriching each deck with extra
    details when we are logged in."""
    login()
    logger.warning('Logged in to TappedOut: {is_authorised}'.format(is_authorised=is_authorised()))
    for raw_deck in fetch_decks():
        try:
            if is_authorised():
                details = fetch_deck_details(raw_deck)
                if details is None:
                    logger.warning(f'Failed to get details for {raw_deck}')
                else:
                    raw_deck.update(details)  # type: ignore
            deck.add_deck(set_values(raw_deck))
        except InvalidDataException as e:
            logger.warning('Skipping {slug} because of {e}'.format(slug=raw_deck.get('slug', '-no slug-'), e=e))
def run() -> None:
    """Pull translations from poeditor.com, save them as .po files and
    compile them into .mo files."""
    api_key = configuration.get('poeditor_api_key')
    if api_key is None:
        logger.warning('Missing poeditor.com API key')
        return
    client = POEditorAPI(api_token=api_key)
    languages = client.list_project_languages('162959')
    # Pull down translations for every language with any progress.
    for locale in languages:
        logger.warning('Found translation for {code}: {percent}%'.format(code=locale['code'], percent=locale['percentage']))
        if locale['percentage'] > 0:
            path = os.path.join('shared_web', 'translations', locale['code'].replace('-', '_'), 'LC_MESSAGES')
            if not os.path.exists(path):
                os.makedirs(path)
            pofile = os.path.join(path, 'messages.po')
            logger.warning('Saving to {0}'.format(pofile))
            # Remove any stale file so the export writes fresh content.
            if os.path.exists(pofile):
                os.remove(pofile)
            client.export(project_id='162959', language_code=locale['code'], local_file=pofile, filters=['translated', 'not_fuzzy'])
    # Compile .po files into .mo files.
    validate_translations.ad_hoc()
    compiler = compile_catalog()
    compiler.directory = os.path.join('shared_web', 'translations')
    compiler.domain = ['messages']
    compiler.run()
    # Hack for English - we need an empty folder so that English shows up in the 'Known Languages' list.
    path = os.path.join('shared_web', 'translations', 'en', 'LC_MESSAGES')
    if not os.path.exists(path):
        os.makedirs(path)
def validate_TOP030(message):
    """Validate the incidents carried by a TOP030 report-request message.

    (Translated from the original Greek docstring.) For each incident:
    skip the whole message if it lacks a location or an incidents list; skip
    an individual incident if its reportId was already processed; otherwise
    record it as processed and, if its incident type passes the pre-rule
    (__rule_1_pre, e.g. a weather-checkable type such as Heatwave), ask CRCL
    for the weather at the incident's location via a TOP802 message; if the
    pre-rule fails, immediately report it as not-spam via TOP801.
    """
    report_id = None
    report_type = None
    inc_long = None
    inc_lat = None
    report_time = None
    report_spam = None
    # print(message['body']['incidents'])
    logger.debug("Processed TOP030 message: " + str(message))
    header = message['header']
    try:
        inc_long = float(message['body']['position']['long'])
        inc_lat = float(message['body']['position']['lat'])
    except (KeyError, TypeError, ValueError, IndexError) as e:
        # No usable location means we cannot query the weather — stop here.
        logger.info("Incoming message does not have location, validation will stop.")
        logger.debug(str(type(e)) + str(e))
        logger.debug(message)
        return
    try:
        incidents = message['body']['incidents']
    except (KeyError, TypeError) as e:
        logger.info("No reports in TOP030, validation will stop.")
        logger.debug(str(type(e)) + str(e))
        logger.debug(str(message))
        return
    if len(incidents) == 0:
        logger.info("No incidents in TOP030.")
    for inc in incidents:
        try:
            report_id = inc['reportId']
            report_type = inc['incidentType']
            report_time = inc['timestamp']
        except (KeyError, TypeError, ValueError, IndexError) as e:
            # NOTE: a single malformed incident aborts the remaining ones too
            # (return, not continue) — this mirrors the original behavior.
            logger.warning("Incident does not have report ID / incident Type / timestamp")
            logger.debug(str(type(e)) + str(e))
            return
        if report_id in shared.processed_mgs:
            logger.debug("Report already processed. ReportId: " + str(report_id))
            continue
        shared.processed_mgs[report_id] = {'inc': inc, 'header': header}
        # TODO: check if spam field is already there, and if is spam=True/False stop validation (not None)
        logger.info("Report is checked to determine if it is spam. ID:" + str(report_id))
        if Validator.__rule_1_pre(report_type) is True:
            logger.info("Asking CRCL for report with ID:" + str(report_id))
            t_802 = Validator.generate_TOP802(message, report_id, inc_long, inc_lat, report_time)
            Validator.bus_prod.send(topic=t_802['header']['topicName'], message=json.dumps(t_802))
        else:
            Validator.__incident_spam(report_id, False)
def report(form: ReportForm) -> bool:
    """Validate and record a league match result submitted via `form`.

    Returns True on success; on any failure a message is placed in
    form.errors and False is returned. Per-deck advisory locks are taken to
    prevent concurrent double-reporting; pdbot-submitted results skip the
    retirement checks and may carry an MTGO match id.
    """
    try:
        db().get_lock('deck_id:{id}'.format(id=form.entry))
        db().get_lock('deck_id:{id}'.format(id=form.opponent))
        pdbot = form.get('api_token', None) == configuration.get('pdbot_api_token')
        entry_deck_id = int(form.entry)
        opponent_deck_id = int(form.opponent)
        ds = {d.id: d for d in deck.load_decks(f'd.id IN ({entry_deck_id}, {opponent_deck_id})')}
        entry_deck = ds.get(entry_deck_id)
        opponent_deck = ds.get(opponent_deck_id)
        if not entry_deck:
            form.errors['entry'] = 'This deck does not appear to exist. Please try again.'
            return False
        if not opponent_deck:
            form.errors['opponent'] = 'This deck does not appear to exist. Please try again.'
            return False
        # Humans cannot report for retired decks; the bot is exempt.
        if not pdbot:
            if entry_deck.retired:
                form.errors['entry'] = 'Your deck is retired, you cannot report results for it. If you need to do this, contact a mod on the Discord.'
                return False
            if opponent_deck.retired:
                form.errors['opponent'] = "Your opponent's deck is retired, you cannot report results against it. If you need to do this, please contact a mod on the Discord."
                return False
        # Reject a duplicate report of a pairing that is already recorded.
        for m in match.load_matches_by_deck(form):
            if int(form.opponent) == m.opponent_deck_id:
                form.errors['result'] = 'This match was reported as You {game_wins}–{game_losses} {opponent} {date}'.format(game_wins=m.game_wins, game_losses=m.game_losses, opponent=m.opponent, date=dtutil.display_date(m.date))
                return False
        # League decks max out at 5 reported matches each.
        counts = deck.count_matches(form.entry, form.opponent)
        if counts[int(form.entry)] >= 5:
            form.errors['entry'] = 'You already have 5 matches reported'
            return False
        if counts[int(form.opponent)] >= 5:
            form.errors['opponent'] = 'Your opponent already has 5 matches reported'
            return False
        if pdbot:
            mtgo_match_id = form.get('matchID', None)
        else:
            mtgo_match_id = None
        match.insert_match(dtutil.now(), form.entry, form.entry_games, form.opponent, form.opponent_games, None, None, mtgo_match_id)
        # Manual (non-bot) reports get announced on Discord when configured.
        if not pdbot:
            if configuration.get('league_webhook_id') and configuration.get('league_webhook_token'):
                fetch_tools.post_discord_webhook(
                    configuration.get_str('league_webhook_id'),
                    configuration.get_str('league_webhook_token'),
                    '{entry} reported {f.entry_games}-{f.opponent_games} vs {opponent}'.format(f=form, entry=entry_deck.person, opponent=opponent_deck.person),
                )
            else:
                logger.warning('Not posting manual report to discord because not configured.')
        return True
    except LockNotAcquiredException:
        form.errors['entry'] = 'Cannot report right now, somebody else is reporting a match for you or your opponent. Try again a bit later'
        return False
    finally:
        # Locks are released in all paths, including the failure ones above.
        db().release_lock('deck_id:{id}'.format(id=form.opponent))
        db().release_lock('deck_id:{id}'.format(id=form.entry))
def stats() -> Response:
    """Return a JSON summary of activity: the time of the last switcheroo
    and, per format, match counts plus recent player names for the last
    week, the week before that, and the last month."""
    val: Dict[str, Any] = {}
    try:
        last_switcheroo = calc_last_switcheroo()
        if last_switcheroo:
            start = last_switcheroo.start_time_aware()
            if start:
                val['last_switcheroo'] = dtutil.dt2ts(start)
    except AttributeError as e:
        logger.warning(f'Unable to calculate last_switcheroo: {e}')
    val['formats'] = {}
    # Matches per format, all-time; this also seeds the per-format dicts
    # that the windowed sections below fill in.
    base_query = db.DB.session.query(match.Match.format_id, Format.name, func.count(match.Match.format_id)).join(
        match.Match.format).group_by(
            match.Match.format_id)
    for m in base_query.order_by(func.count(match.Match.format_id).desc()).all():
        (format_id, format_name, num_matches) = m
        val['formats'][format_name] = {}
        val['formats'][format_name]['name'] = format_name
        val['formats'][format_name]['num_matches'] = num_matches
    # NOTE(review): now() - ts2dt(seconds) reads as datetime - datetime,
    # which would yield a timedelta — confirm dtutil's semantics before
    # trusting the Match.start_time comparisons below.
    last_week = dtutil.now() - dtutil.ts2dt(7 * 24 * 60 * 60)
    for m in base_query.filter(match.Match.start_time > last_week).order_by(
            func.count(match.Match.format_id).desc()).all():
        (format_id, format_name, num_matches) = m
        val['formats'][format_name]['last_week'] = {}
        val['formats'][format_name]['last_week']['num_matches'] = num_matches
        # Raw SQL: distinct users who played this format in the last 7 days.
        stmt = text("""
            SELECT b.* FROM user AS b
            INNER JOIN (
                SELECT user.id
                FROM user
                LEFT JOIN match_players ON match_players.user_id = user.id
                LEFT JOIN `match` ON `match`.id = match_players.match_id
                WHERE `match`.format_id = :fid AND `match`.start_time IS NOT NULL AND `match`.start_time > DATE_SUB(NOW(), INTERVAL 7 DAY)
                GROUP BY user.id
            ) AS a ON a.id = b.id
        """)
        players = db.DB.session.query(db.User).from_statement(stmt).params(fid=format_id).all()
        val['formats'][format_name]['last_week']['recent_players'] = [p.name for p in players]
    last_last_week = dtutil.now() - dtutil.ts2dt(2 * 7 * 24 * 60 * 60)
    for m in base_query.filter(match.Match.start_time < last_week).filter(
            match.Match.start_time > last_last_week).order_by(
                func.count(match.Match.format_id).desc()).all():
        (format_id, format_name, num_matches) = m
        val['formats'][format_name]['last_last_week'] = {}
        val['formats'][format_name]['last_last_week']['num_matches'] = num_matches
        # Raw SQL: distinct users who played this format 7-14 days ago.
        stmt = text("""
            SELECT b.* FROM user AS b
            INNER JOIN (
                SELECT user.id
                FROM user
                LEFT JOIN match_players ON match_players.user_id = user.id
                LEFT JOIN `match` ON `match`.id = match_players.match_id
                WHERE `match`.format_id = :fid AND `match`.start_time IS NOT NULL AND `match`.start_time > DATE_SUB(NOW(), INTERVAL 14 DAY) AND `match`.start_time < DATE_SUB(NOW(), INTERVAL 7 DAY)
                GROUP BY user.id
            ) AS a ON a.id = b.id
        """)
        players = db.DB.session.query(db.User).from_statement(stmt).params(fid=format_id).all()
        val['formats'][format_name]['last_last_week']['recent_players'] = [p.name for p in players]
    last_month = dtutil.now() - dtutil.ts2dt(30 * 24 * 60 * 60)
    for m in base_query.filter(match.Match.start_time > last_month).order_by(
            func.count(match.Match.format_id).desc()).all():
        (format_id, format_name, num_matches) = m
        val['formats'][format_name]['last_month'] = {}
        val['formats'][format_name]['last_month']['num_matches'] = num_matches
        # Raw SQL: distinct users who played this format in the last 30 days.
        stmt = text("""
            SELECT b.* FROM user AS b
            INNER JOIN (
                SELECT user.id
                FROM user
                LEFT JOIN match_players ON match_players.user_id = user.id
                LEFT JOIN `match` ON `match`.id = match_players.match_id
                WHERE `match`.format_id = :fid AND `match`.start_time IS NOT NULL AND `match`.start_time > DATE_SUB(NOW(), INTERVAL 30 DAY)
                GROUP BY user.id
            ) AS a ON a.id = b.id
        """)
        players = db.DB.session.query(db.User).from_statement(stmt).params(fid=format_id).all()
        val['formats'][format_name]['last_month']['recent_players'] = [p.name for p in players]
    return return_json(val)