def get_or_fetch_stage_file(tourney_id_to_fetch: str, stage_id_to_fetch: str, force: bool = False) -> Optional[dict]:
    """Get the cached Battlefy stage file for the given tourney/stage ids, fetching and saving it first if needed."""
    if not tourney_id_to_fetch or not stage_id_to_fetch:
        raise ValueError(f'get_or_fetch_stage_file: Expected ids. {tourney_id_to_fetch=} {stage_id_to_fetch=}')

    _stages = get_stage_ids_for_tourney(tourney_id_to_fetch, force=force)
    _stages = {stage_id for stage_id in _stages if is_valid_battlefy_id(stage_id)}
    assert stage_id_to_fetch in _stages

    _stage_dir = join(STAGES_SAVE_DIR, str(tourney_id_to_fetch))
    _stage_path = join(_stage_dir, f'{stage_id_to_fetch}-battlefy.json')
    if force or not isfile(_stage_path):
        _stage_contents = fetch_address(STAGE_INFO_FETCH_ADDRESS_FORMAT.format(stage_id=stage_id_to_fetch))
        if len(_stage_contents) == 0:
            print(f'ERROR get_or_fetch_stage_file: Nothing exists at {tourney_id_to_fetch=} / {stage_id_to_fetch=}')
            return None

        # Save the data
        if not exists(_stage_dir):
            makedirs(_stage_dir)
        save_as_json_to_file(_stage_path, _stage_contents)
        print(f'OK! (Saved read stage to {_stage_path})')
    else:
        _stage_contents = load_json_from_file(_stage_path)

    if isinstance(_stage_contents, list):
        _stage_contents = _stage_contents[0]
    return _stage_contents
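# Example usage of the get-or-fetch pattern above (illustrative sketch only; the ids below are
# placeholders, not real Battlefy ids). The call returns the cached stage json if it is already
# on disk, otherwise it downloads and saves the file before returning it:
#     stage = get_or_fetch_stage_file('<battlefy-tourney-id>', '<battlefy-stage-id>')
#     if stage is not None:
#         print(stage.keys())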
def enumerate_latest_snapshot_sources_file() -> Generator[Source, None, None]:
    file = get_latest_snapshot_sources_file()
    if file:
        print(f'Loading sources from {file}')
        loaded = load_json_from_file(file)
        for d in loaded:
            yield Source.from_dict(d)
    else:
        print('Sources file not found.')
def load_latest_snapshot_sources_file() -> Optional[List[Source]]:
    file = get_latest_snapshot_sources_file()
    if file:
        print(f'Loading sources from {file}')
        loaded = load_json_from_file(file)
        return [Source.from_dict(d) for d in loaded]
    else:
        print('Sources file not found.')
        return None
def load_latest_snapshot_teams_file() -> Optional[List[Team]]:
    file = get_latest_snapshot_teams_file()
    if file:
        print(f'Loading teams from {file}')
        loaded = load_json_from_file(file)
        return [Team.from_dict(d) for d in loaded]
    else:
        print('Teams file not found.')
        return None
def load_latest_snapshot_players_file() -> Optional[List[Player]]:
    file = get_latest_snapshot_players_file()
    if file:
        print(f'Loading players from {file}')
        loaded = load_json_from_file(file)
        return [Player.from_dict(d) for d in loaded]
    else:
        print('Players file not found.')
        return None
def get_or_fetch_tourney_teams_file(tourney_id_to_fetch: str, force: bool = False) -> Optional[List[dict]]:
    if not exists(TOURNEY_TEAMS_SAVE_DIR):
        makedirs(TOURNEY_TEAMS_SAVE_DIR)

    filename: str = f'{tourney_id_to_fetch}.json'
    matched_tourney_files = glob.glob(join(TOURNEY_TEAMS_SAVE_DIR, f'*{filename}'))
    full_path = matched_tourney_files[0] if len(matched_tourney_files) else join(TOURNEY_TEAMS_SAVE_DIR, filename)

    if force or not isfile(full_path):
        teams_contents = fetch_address(TEAMS_FETCH_ADDRESS_FORMAT % tourney_id_to_fetch)
        if len(teams_contents) == 0:
            print(f'ERROR get_or_fetch_tourney_teams_file: Nothing exists at {tourney_id_to_fetch=}.')
            return None

        # To name this file, we need the tourney file that goes with it.
        info_contents = get_or_fetch_tourney_info_file(tourney_id_to_fetch, force=force)
        if '_id' in info_contents and 'slug' in info_contents and 'startTime' in info_contents:
            start_time: datetime = isoparse(info_contents['startTime'])
            filename = f'{start_time.strftime("%Y-%m-%d")}-{info_contents["slug"]}-' \
                       f'{tourney_id_to_fetch}.json'
            full_path = join(TOURNEY_TEAMS_SAVE_DIR, filename)
        else:
            print(f"Couldn't name the downloaded tourney teams file as the tourney info is incomplete: "
                  f"{'_id' in info_contents=} "
                  f"{'slug' in info_contents=} "
                  f"{'startTime' in info_contents=}")

        save_as_json_to_file(full_path, teams_contents)
        print(f'OK! (Saved read tourney teams file to {full_path})')

        if force:
            # We just downloaded, so no need to force-get the stage ids again,
            # but do force-refresh each stage file.
            for stage_id in get_stage_ids_for_tourney(tourney_id_to_fetch, force=False):
                get_or_fetch_stage_file(tourney_id_to_fetch, stage_id, force=True)
    else:
        teams_contents = load_json_from_file(full_path)
    return teams_contents
def get_or_fetch_tourney_info_file(tourney_id_to_fetch: str, force: bool = False) -> Optional[dict]:
    if not exists(TOURNEY_INFO_SAVE_DIR):
        makedirs(TOURNEY_INFO_SAVE_DIR)

    filename: str = f'{tourney_id_to_fetch}.json'
    matched_tourney_files = glob.glob(join(TOURNEY_INFO_SAVE_DIR, f'*{filename}'))
    full_path = matched_tourney_files[0] if len(matched_tourney_files) else join(TOURNEY_INFO_SAVE_DIR, filename)

    if force or not isfile(full_path):
        tourney_contents = fetch_address(TOURNAMENT_INFO_FETCH_ADDRESS_FORMAT.format(tourney_id=tourney_id_to_fetch))
        if len(tourney_contents) == 0:
            print(f'ERROR get_or_fetch_tourney_info_file: Nothing exists at {tourney_id_to_fetch=}.')
            return None

        if isinstance(tourney_contents, list):
            tourney_contents = tourney_contents[0]

        if '_id' in tourney_contents and 'slug' in tourney_contents and 'startTime' in tourney_contents:
            start_time: datetime = isoparse(tourney_contents['startTime'])
            filename = f'{start_time.strftime("%Y-%m-%d")}-{tourney_contents["slug"]}-' \
                       f'{tourney_id_to_fetch}.json'
            full_path = join(TOURNEY_INFO_SAVE_DIR, filename)
        else:
            print(f"Couldn't name the downloaded tourney info file: "
                  f"{'_id' in tourney_contents=} "
                  f"{'slug' in tourney_contents=} "
                  f"{'startTime' in tourney_contents=}")

        save_as_json_to_file(full_path, tourney_contents)
        print(f'OK! (Saved read tourney info file to {full_path})')
    else:
        tourney_contents = load_json_from_file(full_path)
        if isinstance(tourney_contents, list):
            tourney_contents = tourney_contents[0]
    return tourney_contents
def get_or_fetch_challonge_tourney_info_file_combined(combined_tourney_name_to_fetch: str) -> Optional[dict]:
    """
    Get or fetch the specified tournament's information file given by the fully-qualified tournament name.

    :param combined_tourney_name_to_fetch: The organisation name and its tournament id, e.g. "inkleagues-SXD8"
    :return: The tournament information json
    """
    if not exists(TOURNEY_INFO_SAVE_DIR):
        makedirs(TOURNEY_INFO_SAVE_DIR)

    filename: str = f'{combined_tourney_name_to_fetch}.json'
    matched_tourney_files = glob.glob(join(TOURNEY_INFO_SAVE_DIR, f'*{filename}'))
    full_path = matched_tourney_files[0] if len(matched_tourney_files) else join(TOURNEY_INFO_SAVE_DIR, filename)

    if not isfile(full_path):
        tourney_contents = challonge.tournaments.show(combined_tourney_name_to_fetch)
        print(tourney_contents)
        if isinstance(tourney_contents, str):
            tourney_contents = json.loads(tourney_contents)
        if len(tourney_contents) == 0:
            print(f'ERROR get_or_fetch_challonge_tourney_info_file_combined: '
                  f'Nothing exists at {combined_tourney_name_to_fetch=}.')
            return None
        if isinstance(tourney_contents, list):
            tourney_contents = tourney_contents[0]

        # Handle tournament contents...
        if 'id' in tourney_contents and 'name' in tourney_contents and 'started_at' in tourney_contents:
            tourney_name = tourney_contents["name"].replace(" ", "-")
            start_time: datetime = tourney_contents['started_at']
            filename = f'{start_time.strftime("%Y-%m-%d")}-{tourney_name}-{combined_tourney_name_to_fetch}.json'
            full_path = join(TOURNEY_INFO_SAVE_DIR, filename)
            save_as_json_to_file(full_path, tourney_contents)
            print(f'OK! (Saved read tourney info file to {full_path})')

            # Retrieve the participants for the tournament.
            team_contents = challonge.participants.index(tourney_contents["id"])
            if isinstance(team_contents, str):
                team_contents = json.loads(team_contents)
            full_path = join(TOURNEY_TEAMS_SAVE_DIR, filename)
            save_as_json_to_file(full_path, team_contents)
            print(f'OK! (Saved read teams file to {full_path})')

            # Retrieve the matches (stage results) for the tournament.
            parent_dir = join(STAGES_SAVE_DIR, combined_tourney_name_to_fetch)
            if not exists(parent_dir):
                makedirs(parent_dir)
            full_path = join(parent_dir, filename)
            save_as_json_to_file(full_path, challonge.matches.index(tourney_contents["id"]))
            print(f'OK! (Saved read stages results file to {full_path})')
        else:
            print(f"Couldn't name the downloaded tourney info file. Not using: "
                  f"{'id' in tourney_contents=} "
                  f"{'name' in tourney_contents=} "
                  f"{'started_at' in tourney_contents=}")
    else:
        tourney_contents = load_json_from_file(full_path)
        if isinstance(tourney_contents, list):
            tourney_contents = tourney_contents[0]
    return tourney_contents
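# Example usage (illustrative sketch only): "inkleagues-SXD8" is the combined-name example from the
# docstring above (organisation slug plus tournament id). Challonge credentials are assumed to have
# been configured beforehand (e.g. via challonge.set_credentials):
#     info = get_or_fetch_challonge_tourney_info_file_combined('inkleagues-SXD8')
#     if info:
#         print(info['name'])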
def full_rebuild(skip_pauses: bool = False):
    # Plan of attack:
    # THIS IS A FULL REBUILD and we shouldn't have to do this every time.
    # 1. Get all the tourney ids
    # 2. Update the sources.yaml list
    # 3. Rebuild the database -- we could implement a partial update using what we have already
    # 4. Add in placements -- again, if we kept what's already there, we'd only be adding to new tourneys
    # 5. Calculate ELO -- again, calculating only the new bits

    # 1. Tourney ids
    do_fetch_tourney_ids = ask("Fetch new tourney ids? [Y/N]")
    if do_fetch_tourney_ids:
        full_tourney_ids = _phase_1()
        print("Phase 1 done.")
    else:
        full_tourney_ids = load_json_from_file("Phase 1 Ids.json")

    # 2. Update the sources list
    # Current sources:
    sources_contents = _load_sources_file()
    # Sources now that we've pulled in the tourney files:
    updated_tourney_ids = set()
    updated_tourney_paths = set()
    for tourney_id in full_tourney_ids:
        # Search the sources yaml; skip ids that are already listed.
        filename = tourney_id + ".json"
        if not any(line.endswith(filename) for line in sources_contents):
            matched_tourney_teams_files = glob.glob(join(TOURNEY_TEAMS_SAVE_DIR, f'*{filename}'))
            if len(matched_tourney_teams_files) == 1:
                relative_path = relpath(matched_tourney_teams_files[0], start=SLAPP_DATA_FOLDER)
                if not relative_path.startswith('.'):
                    relative_path = '.\\' + relative_path
                updated_tourney_paths.add(relative_path)
                updated_tourney_ids.add(tourney_id)
            else:
                print(f"ERROR: Found an updated tourney file but a unique file wasn't downloaded for it: "
                      f"{tourney_id=}, {len(matched_tourney_teams_files)=}")
                print("Re-attempting download...")
                if get_or_fetch_tourney_teams_file(tourney_id):
                    print("Success!")
                    matched_tourney_teams_files = glob.glob(join(TOURNEY_TEAMS_SAVE_DIR, f'*{filename}'))
                    if len(matched_tourney_teams_files) == 1:
                        relative_path = relpath(matched_tourney_teams_files[0], start=SLAPP_DATA_FOLDER)
                        updated_tourney_paths.add(relative_path)
                        updated_tourney_ids.add(tourney_id)
                    else:
                        print(f"ERROR: Reattempt failed. Please debug. "
                              f"{tourney_id=}, {len(matched_tourney_teams_files)=}")
                else:
                    print(f"ERROR: Reattempt failed. Skipping file. "
                          f"{tourney_id=}, {len(matched_tourney_teams_files)=}")

    # Now update the yaml.
    # Take care of those pesky exceptions to the rule:
    # Sendou goes first (but only if not dated)
    if 'Sendou.json' in sources_contents[0]:
        sendou_str = sources_contents[0]
        sources_contents.remove(sources_contents[0])
    else:
        sendou_str = None

    # statink folder is special
    if './statink' in sources_contents:
        statink_present = True
        sources_contents.remove('./statink')
    else:
        statink_present = False

    # Twitter goes last (but only if not dated)
    if 'Twitter.json' in sources_contents[-1]:
        twitter_str = sources_contents[-1]
        sources_contents.remove(sources_contents[-1])
    else:
        twitter_str = None

    # Add in the new updates
    for updated_path in updated_tourney_paths:
        sources_contents.append(updated_path)

    # Replace backslashes with forward slashes
    sources_contents = [line.replace('\\', '/') for line in sources_contents]

    # Sort by order.
    sources_contents.sort()

    # Add the exceptions back in to the correct places
    if sendou_str:
        sources_contents.insert(0, sendou_str)
    if statink_present:
        sources_contents.insert(1, './statink')
    if twitter_str:
        sources_contents.append(twitter_str)

    # Remove blank lines
    sources_contents = list(filter(lambda x: not is_none_or_whitespace(x), sources_contents))

    new_sources_file_path = join(SLAPP_DATA_FOLDER, 'sources_new.yaml')
    save_text_to_file(path=new_sources_file_path, content='\n'.join(sources_contents))
    print(f"Phase 2 done. {updated_tourney_ids=}")

    # 3. Rebuild
    # If yes, call --rebuild [path]
    do_rebuild = True
    if not skip_pauses:
        do_rebuild = ask("Is a rebuild needed?")
    if do_rebuild:
        _phase_3(new_sources_file_path)
    print("Phase 3 done.")

    # 4. Add in the placements
    if not skip_pauses:
        pause(True)
    update_sources_with_placements()
    print("Phase 4 done.")

    # 5. Calculate ELO
    if not skip_pauses:
        pause(True)
    update_sources_with_skills(clear_current_skills=True)
    print("Phase 5 done, complete!")
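# Example usage (illustrative sketch only): skip the confirmation pauses between phases;
# note that phase 1 still asks whether to fetch new tourney ids.
#     full_rebuild(skip_pauses=True)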
file = os.path.basename(file_path)
parent_dir = os.path.abspath(os.path.join(file_path, os.pardir))

# Skip files already filed away under the 'old' or 'statink' folders.
if parent_dir.rstrip(os.path.sep).endswith(os.path.sep + 'old'):
    continue
if parent_dir.rstrip(os.path.sep).endswith(os.path.sep + 'statink'):
    continue

# Snapshot files live directly in the Slapp data folder.
if file.startswith("Snapshot-"):
    destination_path = join(SLAPP_DATA_FOLDER, file)
    if file_path != destination_path:
        os.rename(file_path, destination_path)
    continue

json_contents = load_json_from_file(file_path)
if json_contents and isinstance(json_contents, list):
    json_contents = json_contents[0]

if '_id' in json_contents and \
        'userID' in json_contents and \
        'customFields' in json_contents:
    # This is a tourney teams file.
    if not exists(TOURNEY_TEAMS_SAVE_DIR):
        os.makedirs(TOURNEY_TEAMS_SAVE_DIR)
    destination_path = join(TOURNEY_TEAMS_SAVE_DIR, file)
    if conditional_move(file_path, destination_path):
        continue
elif '_id' in json_contents and \
                                 password=os.getenv("DATABASE_PASSWORD"))

# Create a cursor
cursor = connection.cursor()

# Cool, we're connected, let's transfer.
# Create tables
print('Creating Tables')
create_tables(cursor)

players_snapshot_path: str = input('Players snapshot file? (Enter to skip)').replace('"', '')
if len(players_snapshot_path) > 0:
    assert isfile(players_snapshot_path)
    print('✔ Is a file.')
    players_snapshot = load_json_from_file(players_snapshot_path)
    print(f'Processing {len(players_snapshot)} players.')
    for i, _ in enumerate(players_snapshot):
        this_id = players_snapshot[i]['Id']
        this_names = players_snapshot[i]['Names']
        this_teams = players_snapshot[i]['Teams']
        this_sources = players_snapshot[i]['Sources']
        this_discord_name = players_snapshot[i]['DiscordName']
        this_friend_code = players_snapshot[i]['FriendCode']
        execute_str = "INSERT INTO players (id, names, teams, sources, discord_name, friend_code) " \
                      "VALUES (%s, %s, %s, %s, %s, %s);"
        cursor.execute(execute_str, (
            this_id,
            this_names,