# Module-level imports these helpers need (Match's module path is assumed;
# LoLException comes from the legacy riotwatcher 1.x client used here):
import json
import os
import pickle
import time

from django.conf import settings
from riotwatcher import LoLException

from item_analyzer.models import Match


def _load_matches(self, w, filename, start_index=0, num=10000):
    print("Loading matches from: '{}'".format(filename))
    i = 0
    abs_path = os.path.join(settings.BASE_DIR, 'item_analyzer', 'dataset', filename)
    cache_dir = os.path.join(settings.BASE_DIR, 'item_analyzer', 'dataset', 'cache')
    os.makedirs(cache_dir, exist_ok=True)  # make sure the pickle cache exists
    with open(abs_path, 'r') as mfile:
        matches = json.load(mfile)
    if start_index:
        # The original sliced without assigning the result, which was a no-op.
        matches = matches[start_index:]
    for match_id in matches:
        print('Loading match: {}'.format(match_id))
        while True:
            if w.can_make_request():
                if i == num:
                    return
                try:
                    match_dict = w.get_match(match_id, include_timeline=True)
                    # Parse to validate the payload; only the raw dict is cached.
                    Match.from_dict(match_dict)
                    with open(os.path.join(cache_dir, '{}.pkl'.format(match_id)), 'wb') as f:
                        pickle.dump(match_dict, f)
                    i += 1
                    break
                except LoLException as le:
                    print('League says: {}'.format(le.error))
                    time.sleep(2)
            else:
                print('Too many queries!')
                time.sleep(1)
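# A minimal companion sketch: reading one cached payload back out of the
# pickle cache written above. The helper name _load_cached_match is
# hypothetical; the path and dict-pickle format mirror _load_matches.
def _load_cached_match(self, match_id):
    cache_dir = os.path.join(settings.BASE_DIR, 'item_analyzer', 'dataset', 'cache')
    with open(os.path.join(cache_dir, '{}.pkl'.format(match_id)), 'rb') as f:
        match_dict = pickle.load(f)
    return Match.from_dict(match_dict)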
def _load_match(self, filename):
    print("Loading match from: '{}'".format(filename))
    abs_path = os.path.join(settings.BASE_DIR, 'item_analyzer', 'dataset', filename)
    with open(abs_path, 'r') as mfile:
        match_data = json.load(mfile)
    match = Match.from_dict(match_data)
    match.save()
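# A minimal sketch of driving these helpers from a Django management command.
# The Command wiring, the 'matches.json' filename, and the placeholder API key
# are assumptions for illustration; only the legacy riotwatcher client calls
# (can_make_request, get_match) are taken from the methods above, which are
# assumed to be defined on this Command class.
from django.core.management.base import BaseCommand
from riotwatcher import RiotWatcher


class Command(BaseCommand):
    help = 'Fetch and cache matches listed in a dataset id file.'

    def handle(self, *args, **options):
        w = RiotWatcher('<your-riot-api-key>')  # placeholder key
        # Stop after 100 matches for a quick smoke run.
        self._load_matches(w, 'matches.json', num=100)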