def batch_query_match_history(session, pids):
    """Query match history for every player id in *pids*, 50 at a time.

    Each batch's matches are saved as they arrive; the per-player match map
    is accumulated and fed into a MatchCache inside one transaction at the
    end.

    Returns True if any batch reported a (non-fatal) error.
    """
    pids_list = list(pids)
    total = len(pids_list)
    batch = 50
    player_matches = {}
    any_error = False
    log("Begin querying %s players" % total)
    for idx in range(0, total, batch):
        # min() clamps the final partial batch to the end of the list
        # (clearer than the conditional-expression form of the same clamp).
        bound = min(idx + batch, total)
        sub_pids = pids_list[idx:bound]
        log('Attempting to query players %s-%s of %s' % (idx, bound, total))
        is_error, matches, player_matches_dict = _bulk_query_match_history(
            session, sub_pids
        )
        any_error = any_error or is_error
        save_match_list(matches)
        player_matches.update(player_matches_dict)
    set_player_updated_at(player_matches.keys())
    # Rebuild the cache for all touched players in a single transaction.
    with DATABASE.atomic():
        cache = MatchCache()
        for pid, matches in player_matches.items():
            cache.process_matches(pid, matches)
        cache.save()
    return any_error
def bulk_insert(model, data_source):
    """Insert all rows of *data_source* into *model* in batches of 1000.

    The whole insert runs inside a single transaction.
    """
    total = len(data_source)
    batch = 1000
    log("Trying to insert %s rows" % total)
    with DATABASE.atomic():
        for idx in range(0, total, batch):
            model.insert_many(data_source[idx:idx + batch]).execute()
            # Clamp to *total* so the last partial batch doesn't
            # over-report (e.g. "Inserted 2000" when only 1500 rows exist).
            log("Inserted %s" % min(idx + batch, total))
def bulk_insert(model, data_source):
    """Bulk-insert *data_source* rows into *model*, 1000 per statement.

    Everything is committed in one transaction.
    """
    chunk_size = 1000
    total = len(data_source)
    log("Trying to insert %s rows" % total)
    with DATABASE.atomic():
        for start in range(0, total, chunk_size):
            chunk = data_source[start:start + chunk_size]
            model.insert_many(chunk).execute()
            # min() keeps the running count accurate on the final,
            # possibly-short chunk instead of over-reporting.
            log("Inserted %s" % min(start + chunk_size, total))
def batch_query_match_history(session, pids):
    """Fetch match history for each id in *pids* in chunks of 50 and persist it.

    Returns True when at least one chunk reported an error.
    """
    all_pids = list(pids)
    count = len(all_pids)
    step = 50
    collected = {}
    had_error = False
    log("Begin querying %s players" % count)
    start = 0
    while start < count:
        stop = start + step
        if stop > count:
            stop = count
        chunk = all_pids[start:stop]
        log('Attempting to query players %s-%s of %s' % (start, stop, count))
        is_error, matches, by_player = _bulk_query_match_history(session, chunk)
        had_error = had_error or is_error
        save_match_list(matches)
        collected.update(by_player)
        start += step
    set_player_updated_at(collected.keys())
    # One transaction for the whole cache rebuild.
    with DATABASE.atomic():
        cache = MatchCache()
        for pid, pid_matches in collected.items():
            cache.process_matches(pid, pid_matches)
        cache.save()
    return had_error
def ensure_test_tables():
    """Create the test tables when the Player table is absent (test env only)."""
    ensure_testing()
    if Player.table_exists():
        return
    log('creating test tables')
    DATABASE.create_tables([Player, Match, Rank])
def create_demo_db():
    """Build the demo database schema and seed it from the temp CSV dumps.

    Refuses to run unless the process is configured as the demo web server,
    so a real database can't be clobbered by accident.
    """
    if not ENVARS.is_demo_web_server():
        raise Exception('wrong database! call this again with FGC_DEMO_DB=1')
    DATABASE.create_tables([Player, Match, Rank])
    # NOTE(review): Match gets a table but no CSV seed here — confirm intended.
    load_csv(Rank, 'temp/rank.csv')
    load_csv(Player, 'temp/player.csv')
def _close_db_conn():
    """Close the shared DATABASE connection if it is currently open."""
    if DATABASE.is_closed():
        return
    DATABASE.close()
def _open_db_conn():
    """Open the shared DATABASE connection."""
    DATABASE.connect()
batch_query_match_history, fix_player_names, ) from py.src.store import ( load_subscribed_player_ids, ) PLAYER_BATCH_SIZE = 300 if __name__ == "__main__": set_log_file("task_matches") log("task_matches begin") with create_session() as session: try: test_cookie_status(session) DATABASE.connect() pids = load_subscribed_player_ids(batch_size=PLAYER_BATCH_SIZE) any_error = batch_query_match_history(session, pids) fix_player_names(session) except Exception as e: log_exception(e) send_error_message("FATAL ERROR when pulling match data") else: if any_error: # suppress non-fatal errors for now # send_error_message('non-fatal errors when pulling matches') pass if not DATABASE.is_closed(): DATABASE.close() log("task_matches complete")