def batch_query_match_history(session, pids):
    """Query match history for every player in *pids*, 50 at a time.

    For each batch: fetch match histories via the bulk query helper,
    persist the raw match list, and accumulate per-player matches.
    Afterwards, mark all queried players as updated and rebuild the
    match cache inside a single database transaction.

    Args:
        session: session object handed through to _bulk_query_match_history.
        pids: iterable of player ids to query.

    Returns:
        bool: True if any batch reported an error, False otherwise.
    """
    pids_list = list(pids)
    total = len(pids_list)
    batch = 50
    player_matches = {}
    any_error = False
    log("Begin querying %s players" % total)
    for idx in range(0, total, batch):
        # min() keeps the final (possibly short) batch within bounds.
        bound = min(idx + batch, total)
        sub_pids = pids_list[idx:bound]
        log('Attempting to query players %s-%s of %s' % (idx, bound, total))
        is_error, matches, player_matches_dict = _bulk_query_match_history(
            session, sub_pids
        )
        # Remember (not short-circuit out on) errors so remaining
        # batches are still queried and saved.
        any_error = any_error or is_error
        save_match_list(matches)
        player_matches.update(player_matches_dict)
    set_player_updated_at(player_matches.keys())
    # Rebuild the cache atomically so readers never observe a
    # half-processed update.
    with DATABASE.atomic():
        cache = MatchCache()
        for pid, pid_matches in player_matches.items():
            cache.process_matches(pid, pid_matches)
        cache.save()
    return any_error
def backload_all_players():
    """Backfill one batch of players selected by match_last_updated.

    Builds a fresh MatchCache, backloads up to 500 players into it,
    and persists the cache once at the end.
    """
    log('backloading match_last_updated players')
    log('building cache')
    match_cache = MatchCache()
    limit = 500
    ids_to_backfill = load_player_ids_to_backfill(
        batch_size=limit,
    )
    log('backloading %s players...' % limit)
    for pid in ids_to_backfill:
        backload_player(match_cache, pid)
    log("saving")
    match_cache.save()
    log("done")
def backload_volatile_players():
    """Backfill one batch of volatile players.

    Builds a fresh MatchCache, backloads up to 600 volatile players
    into it, and persists the cache once at the end.
    """
    log('backloading volatile players')
    log('building cache')
    match_cache = MatchCache()
    limit = 600
    volatile_ids = load_volatile_player_ids(
        batch_size=limit,
    )
    log('backloading %s players...' % limit)
    for pid in volatile_ids:
        backload_player(match_cache, pid)
    log("saving")
    match_cache.save()
    log("done")
def _create_cache(self):
    """Build a MatchCache with its external data sources mocked out.

    Patches the cache module's download/lookup helpers so constructing
    a MatchCache performs no real network or database work, and asserts
    the global cache download is attempted exactly once.

    Returns:
        MatchCache: a cache instance backed by empty/mocked data.
    """
    # One multi-manager `with` replaces three nested blocks (idiomatic,
    # identical patch/unpatch ordering).
    with mock.patch(
            'py.src.match.model.cache.download_global_cache') as m_dw, \
        mock.patch(
            'py.src.match.model.cache.get_player_ticks') as m_pt, \
        mock.patch(
            'py.src.match.model.cache.get_global_ranked_match_cache'
        ) as m_gc:
        m_pt.return_value = {}
        m_gc.return_value = None
        cache = MatchCache()
        self.assertEqual(1, m_dw.call_count)
    return cache
def batch_query_match_history(session, pids):
    """Fetch match histories for *pids* in chunks of 50 and cache them.

    Persists every fetched match, records when each queried player was
    last updated, and rebuilds the match cache in one transaction.

    Returns True if any chunk-level query reported an error.
    """
    all_pids = list(pids)
    count = len(all_pids)
    chunk = 50
    per_player = {}
    had_error = False
    log("Begin querying %s players" % count)
    start = 0
    while start < count:
        end = start + chunk
        if end > count:
            end = count
        log('Attempting to query players %s-%s of %s' % (start, end, count))
        err, fetched, fetched_by_player = _bulk_query_match_history(
            session, all_pids[start:end])
        had_error = had_error or err
        save_match_list(fetched)
        per_player.update(fetched_by_player)
        start = end
    set_player_updated_at(per_player.keys())
    with DATABASE.atomic():
        cache = MatchCache()
        for pid, pid_matches in per_player.items():
            cache.process_matches(pid, pid_matches)
        cache.save()
    return had_error