def save_all_metadata():
    """Bulk-export match metadata from PyLogsTF to disk in JSON chunks.

    Pages through ``PyLogsTF.get_match_metadata`` starting at match id 1
    and page 2000, accumulating entries into a dict.  Whenever the dict
    reaches the chunk threshold it is serialized with ``json.dumps`` and
    written via ``save_to_disk.save_match`` under the ``meta_data``
    folder; any final partial chunk is flushed after the last page.
    """
    last_id = 1
    is_done = False
    page = 2000  # starting page — presumably resuming a prior import; TODO confirm
    big_dict = {}
    # Entries to accumulate before flushing one chunk to disk.
    chunk_size = 1009
    while not is_done:
        # Formatted single string prints identically on Python 2 and 3
        # (the old `print 'page', page` statement was Python-2-only).
        print('page %s' % page)
        result, is_done = PyLogsTF.get_match_metadata(last_id, page=page)
        # items() instead of Python-2-only iteritems(); works on both.
        for match_id, meta_data in result.items():
            big_dict[match_id] = meta_data
            # len() replaces the manual dict_size counter the original kept.
            if len(big_dict) >= chunk_size:
                # NOTE(review): the chunk file is keyed only by `page`, so
                # two flushes landing on the same page would overwrite each
                # other — confirm pages never exceed chunk_size entries.
                save_to_disk.save_match('initial_import%s' % page,
                                        json.dumps(big_dict),
                                        folder='meta_data')
                big_dict = {}
        page += 1
    # Flush whatever is left after the final page.
    if big_dict:
        save_to_disk.save_match('initial_import%s' % page,
                                json.dumps(big_dict),
                                folder='meta_data')
def save_all_matches(last_file_path):
    """Save every match from id 0 through the newest, then record progress.

    Asks PyLogsTF for the newest match id, saves the full range in
    parallel, and persists that id to *last_file_path* so subsequent
    incremental runs know where to resume.
    """
    newest = PyLogsTF.latest_match()
    parallel_save(0, newest)
    set_last_match_saved(last_file_path, newest)
def run(last_file_path):
    """Incrementally save matches added since the previous run.

    Reads the last-saved match id from *last_file_path*, saves every
    match between it and the newest one via ``save_to_disk.save_match``,
    then records the new high-water mark back to *last_file_path*.
    """
    previously_saved = get_last_match_saved(last_file_path)
    newest = PyLogsTF.latest_match()
    save_new_matches(previously_saved, newest, save_to_disk.save_match)
    set_last_match_saved(last_file_path, newest)
def save_one_match(match_id):
    """Fetch one match from PyLogsTF and save it to disk, best-effort.

    Failures are logged and swallowed so one bad match cannot abort a
    bulk import.  This preserves the original best-effort behavior but
    replaces the bare ``except: pass``, which also silently ate
    KeyboardInterrupt/SystemExit and hid every error.
    """
    import logging  # local import so this fix is self-contained
    try:
        save_to_disk.save_match(match_id, PyLogsTF.get(match_id))
    except Exception:
        # Narrowed from a bare except; record the failure instead of
        # discarding it invisibly.
        logging.exception('failed to save match %s', match_id)