def team_overview(
    season_str: str = season_str,
    year: int = year,
    filename: str = "lists/team_overview.txt",
) -> None:
    """
    Creates a formatted forum post for the team overview thread.
    """
    week: int = config.getint("weekly info", "current-week")
    with session_scope() as session:
        teams = Season.get_season_from_database(season_str, year, session).teams
        with open(filename, "w", encoding="utf-8") as f:
            f.write(f"Team List - FAL {season_str.capitalize()} {year}\n\n\n")
            for team in sorted(teams, key=lambda t: t.name.lower()):
                # Query all the anime on the team for this week
                base_query = (
                    session.query(TeamWeeklyAnime)
                    .filter(TeamWeeklyAnime.team_id == team.id)
                    .filter(TeamWeeklyAnime.week == week)
                )
                active_anime = base_query.filter(TeamWeeklyAnime.bench == 0).all()
                bench_anime = base_query.filter(TeamWeeklyAnime.bench == 1).all()

                print(f"writing {team.name} to overview")
                f.write(f"{team.name}\n---------------------------------\n")

                # List all active series on the team
                for anime in sorted(active_anime, key=lambda a: a.anime.name.lower()):
                    f.write(f"{anime.anime.name}\n")
                f.write("\n")

                # List all bench series on the team
                for anime in sorted(bench_anime, key=lambda a: a.anime.name.lower()):
                    f.write(f"{anime.anime.name}\n")
                f.write("\n\n")
            f.write("[/spoiler]")

def team_dist(
    season_str: str = season_str,
    year: int = year,
    filename: str = "lists/team_dist.txt",
) -> None:
    """
    Creates a statistic of the team distribution (how many people, and who,
    chose the same team).

    This function can also be used during the game to obtain the team
    distribution of the current week.
    """
    week: int = config.getint("weekly info", "current-week")
    filename = add_week_to_filename(filename, week)

    split_teams: Dict[Tuple[int, ...], List[Team]] = {}
    nonsplit_teams: Dict[Tuple[int, ...], List[Team]] = {}
    active_teams: Dict[Tuple[int, ...], List[Team]] = {}

    with session_scope() as session:
        teams = Season.get_season_from_database(season_str, year, session).teams
        for i, team in enumerate(teams, 1):
            # Query all the anime on the team for this week
            base_query = (
                session.query(TeamWeeklyAnime.anime_id)
                .filter(TeamWeeklyAnime.team_id == team.id)
                .filter(TeamWeeklyAnime.week == week)
            )
            series: List[int] = base_query.all()
            active: List[int] = base_query.filter(TeamWeeklyAnime.bench == 0).all()
            bench: List[int] = base_query.filter(TeamWeeklyAnime.bench == 1).all()

            # Split and sort the team so the active series come first
            s_team: Tuple[int, ...] = tuple(sorted(active) + sorted(bench))
            n_team: Tuple[int, ...] = tuple(sorted(series))
            a_team: Tuple[int, ...] = tuple(sorted(active))

            # Add team name to inverse dictionary (key: sorted list of series)
            if s_team not in split_teams:
                split_teams[s_team] = []
            split_teams[s_team].append(team)
            if n_team not in nonsplit_teams:
                nonsplit_teams[n_team] = []
            nonsplit_teams[n_team].append(team)
            if a_team not in active_teams:
                active_teams[a_team] = []
            active_teams[a_team].append(team)

            print(f"Processed team {i} - {team}")

        same_series_diff_team_dist, n_list_non = get_dist(nonsplit_teams)
        same_series_and_team_dist, n_list_split = get_dist(split_teams)
        teams_with_same_active_team, n_list_act = get_dist(active_teams)

        with open(filename, "w", encoding="utf-8") as f:
            write_teams_to_file(
                f, same_series_and_team_dist, n_list_split, SAME_SPLIT_TEXT
            )
            write_teams_to_file(
                f, same_series_diff_team_dist, n_list_non, SAME_NONSPLIT_TEXT
            )
            write_teams_to_file(
                f, teams_with_same_active_team, n_list_act, SAME_ACTIVE_TEXT
            )
            f.write("[/list]")

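# get_dist and write_teams_to_file are helpers defined elsewhere in this module.
# A hypothetical sketch of what get_dist might do, assuming it inverts the
# "same line-up -> teams" mapping into a distribution keyed by how many teams
# share a line-up (the name _get_dist_sketch and the return shape are
# assumptions, not the project's actual code):
def _get_dist_sketch(
    teams_by_lineup: Dict[Tuple[int, ...], List[Team]]
) -> Tuple[Dict[int, List[List[Team]]], List[int]]:
    dist: Dict[int, List[List[Team]]] = {}
    for team_group in teams_by_lineup.values():
        # Group the team lists by their size (number of teams sharing a line-up)
        dist.setdefault(len(team_group), []).append(team_group)
    # Group sizes sorted largest first, so the report starts with the most-shared line-ups
    n_list = sorted(dist.keys(), reverse=True)
    return dist, n_list
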
def test_query_team_weekly_anime(team_id, anime_ids, name, bench_indices):
    with session_scope(True) as session:
        anime_list = (
            session.query(TeamWeeklyAnime)
            .order_by(TeamWeeklyAnime.anime_id)
            .filter(TeamWeeklyAnime.team_id == team_id)
            .all()
        )
        for i, anime in enumerate(anime_list):
            if i in bench_indices:
                # anime.bench == b'\x01', so we use ord() to convert it to int
                assert ord(anime.bench) == 1
            assert anime.team.name == name
            assert anime.anime_id == anime_ids[i]

def test_retrieve_teams():
    with session_scope(True) as session:
        query = session.query(Season).filter(Season.id == 1)
        season = query.one()
        assert len(season.teams) == 3

        sorted_teams = sorted(season.teams, key=lambda team: team.id)
        assert sorted_teams[0].name == "kei-clone"
        assert sorted_teams[1].name == "abhinavk99"
        assert sorted_teams[2].name == "Congress"

def test_query_ptw(date, ptw_counts):
    with session_scope(True) as session:
        query = (
            session.query(PlanToWatch)
            .filter(PlanToWatch.date == date)
            .filter(PlanToWatch.anime_id.in_(ptw_counts.keys()))
        )
        ptw_entries = query.all()
        assert len(ptw_entries) == 2
        for ptw_entry in ptw_entries:
            assert ptw_entry.anime_id in ptw_counts.keys()
            assert ptw_entry.date == date
            assert ptw_entry.count == ptw_counts[ptw_entry.anime_id]

def test_query_anime(
    id, name, season_id, alias, restricted, eligible, season_of_year, year
):
    with session_scope(True) as session:
        query = session.query(Anime).filter(Anime.id == id)
        anime = query.one()
        assert anime.name == name
        assert anime.season_id == season_id
        assert anime.alias == alias
        assert anime.restricted == restricted
        assert anime.eligible == eligible
        assert anime.season.season_of_year == season_of_year
        assert anime.season.year == year

def headcount(
    season_str: str = season_str,
    year: int = year,
    filename: str = "lists/team_headcount.txt",
) -> None:
    """
    Creates a formatted forum post for the headcount thread.
    """
    with session_scope() as session:
        teams = Season.get_season_from_database(season_str, year, session).teams
        with open(filename, "w", encoding="utf-8") as f:
            f.write(HEADCOUNT_INTRO_TEXT.format(season_str.capitalize(), year))
            # Output participant names alphabetically
            for team in sorted(teams, key=lambda t: t.name.lower()):
                f.write(f"[b]{team.name}[/b]\n")
            f.write(HEADCOUNT_CONC_TEXT.format(len(teams)))

def ptw_counter() -> None:
    # Ensure season is lowercase string and year is integer
    season_of_year = config["season info"]["season"].lower()
    year = int(config["season info"]["year"])
    today = date.today()

    # Database workflow
    with session_scope() as session:
        anime_list = Season.get_season_from_database(
            season_of_year, year, session
        ).anime
        print(f"Length of list of anime: {len(anime_list)}")

        # Store PTW of each anime in a list of tuples
        ptw = get_ptw_info(anime_list)
        pprint(ptw)
        output_ptw_info(season_of_year, year, ptw)

        print("Adding PTW entries to PTW table")
        for entry in ptw:
            ptw_count = int(entry.ptw_count.replace(",", ""))
            add_ptw_to_database(entry.id, today, ptw_count, session)

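# add_ptw_to_database is a helper defined elsewhere in this module. A minimal
# sketch of what it might do, assuming PlanToWatch rows are keyed on
# (anime_id, date) so re-running on the same day simply updates the stored
# count (the name _add_ptw_to_database_sketch and this behaviour are
# assumptions, not the project's actual helper):
def _add_ptw_to_database_sketch(anime_id: int, ptw_date: date, count: int, session) -> None:
    # merge() inserts a new row or updates the existing one for this key
    session.merge(PlanToWatch(anime_id=anime_id, date=ptw_date, count=count))
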
def load_aces(input_lines: Iterable[str]) -> None:
    """Takes in an iterable of "<teamname> <anime to ace>" inputs.

    Parses these inputs and sets the anime for the team to ace for the week,
    if the anime has not been previously aced on that team and the score for
    that anime hasn't hit the cutoff.
    """
    season_of_year = config.get("season info", "season").lower()
    year = config.getint("season info", "year")
    week = config.getint("weekly info", "current-week")

    with session_scope() as session:
        season = Season.get_season_from_database(season_of_year, year, session)
        for line in input_lines:
            teamname, animename = line.split(" ", 1)
            team = Team.get_team_from_database(teamname, season, session)
            anime = Anime.get_anime_from_database_by_name(animename.strip(), session)
            assert anime
            if not team_anime_aced_already(team, anime, session):
                this_week_team_anime = (
                    session.query(TeamWeeklyAnime)
                    .filter(
                        TeamWeeklyAnime.anime_id == anime.id,
                        TeamWeeklyAnime.team_id == team.id,
                        TeamWeeklyAnime.week == week,
                    )
                    .one()
                )
                if ace_already_loaded_this_week(team, week, session):
                    print(
                        f"{team.name} tried to ace {anime.name}, "
                        "but already has an anime aced this week"
                    )
                elif this_week_team_anime.bench == 1:
                    print(f"{team.name} tried to ace {anime.name}, but it was benched")
                else:
                    this_week_team_anime.ace = 1
            else:
                print(
                    f"{team.name} tried to ace {anime.name}, "
                    "but it has already been aced"
                )

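# team_anime_aced_already and ace_already_loaded_this_week are defined elsewhere
# in this module. Hypothetical sketches of the checks they perform, assuming the
# TeamWeeklyAnime.ace flag set above is the source of truth; the score-cutoff
# check mentioned in the docstring is not shown here (these are assumptions,
# not the project's actual queries):
def _team_anime_aced_already_sketch(team: Team, anime: Anime, session) -> bool:
    # Has this anime carried the ace flag on this team in any week so far?
    return bool(
        session.query(TeamWeeklyAnime)
        .filter(
            TeamWeeklyAnime.team_id == team.id,
            TeamWeeklyAnime.anime_id == anime.id,
            TeamWeeklyAnime.ace == 1,
        )
        .count()
    )


def _ace_already_loaded_this_week_sketch(team: Team, week: int, session) -> bool:
    # Does the team already have an anime aced in the current week?
    return bool(
        session.query(TeamWeeklyAnime)
        .filter(
            TeamWeeklyAnime.team_id == team.id,
            TeamWeeklyAnime.week == week,
            TeamWeeklyAnime.ace == 1,
        )
        .count()
    )
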
def load_teams(registration_data: Sequence[str]) -> None:
    """Takes the contents of registration.txt (read into a list already)
    and marshalls them into the database"""
    assert (
        config.getint("weekly info", "current-week") <= 1
    ), "Cannot add teams after week 1"

    # group the contents of the input registration file into separate teams,
    # loaded into TeamLines objects
    accumulated_team_input: List[str] = []
    team_lines_list: List[TeamLines] = []
    for line_num, line in enumerate(registration_data, 1):
        if line.strip() == "":
            assert (
                accumulated_team_input
            ), f"Hit a line of whitespace at line {line_num} but no team was assembled"
            team_lines_list.append(slice_up_team_input(accumulated_team_input))
            accumulated_team_input = []
        else:
            accumulated_team_input.append(line)
    # one more time in case we don't have a trailing whitespace line
    if accumulated_team_input:
        team_lines_list.append(slice_up_team_input(accumulated_team_input))

    # take the TeamLines objects and load them into the database
    with session_scope() as session:
        current_season = Season.get_season_from_database(
            config["season info"]["season"],
            config.getint("season info", "year"),
            session,
        )
        for team_lines in team_lines_list:
            print(f"Adding {team_lines.teamname} to database")
            team = Team.get_team_from_database(
                team_lines.teamname, current_season, session
            )
            add_anime_to_team(team, team_lines.active, 0, session)
            add_anime_to_team(team, team_lines.bench, 1, session)

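# TeamLines and slice_up_team_input are defined elsewhere in this module. A
# hypothetical sketch of the shape they might have, assuming a registration
# block is one team name followed by five active picks and two bench picks
# (the pick counts, the dataclass layout, and the _Sketch names are assumptions,
# not taken from the code above):
@dataclasses.dataclass  # assumes `import dataclasses` at module level
class _TeamLinesSketch:
    teamname: str
    active: List[str]
    bench: List[str]


def _slice_up_team_input_sketch(team_input: List[str]) -> "_TeamLinesSketch":
    lines = [line.strip() for line in team_input]
    # First line names the team; the rest are the picks in registration order
    return _TeamLinesSketch(teamname=lines[0], active=lines[1:6], bench=lines[6:8])
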
def collect_series() -> None:
    config = configparser.ConfigParser()
    config.read("config.ini")

    # Ensure season is lowercase string and year is integer
    season_of_year = config["season info"]["season"].lower()
    year = int(config["season info"]["year"])

    series_dict = get_series(season_of_year, year)

    # text files workflow
    series = series_dict.items()
    print(len(series))
    output_series(series, "series.txt")
    output_series_titles(series_dict.values(), "series_sorted.txt")

    # database workflow
    print("adding anime to database")
    with session_scope() as session:
        season = Season.get_season_from_database(season_of_year, year, session)
        for anime_id, anime_name in series_dict.items():
            Anime.add_anime_to_database(anime_id, anime_name, season, session)

def team_stats(
    season_str: str = season_str,
    year: int = year,
    filename: str = "lists/team_stats.txt",
) -> None:
    """
    Creates a statistic of the title distribution for the team overview thread.

    This function can also be used during the game to obtain the distribution
    of the current week.
    """
    week: int = config.getint("weekly info", "current-week")
    filename = add_week_to_filename(filename, week)

    with session_scope() as session:
        season: Season = Season.get_season_from_database(season_str, year, session)

        # Query anime name and number of times it is on a team this week
        base_query = (
            session.query(Anime.name, func.count("*"))
            .join(TeamWeeklyAnime.anime)
            .order_by(func.count("*").desc(), Anime.name)
            .filter(TeamWeeklyAnime.week == week)
            .filter(Anime.season_id == season.id)
            .group_by(Anime.name)
        )
        # Group the counts by the name
        anime_counts: List[Tuple[str, int]] = base_query.all()
        # Filter to only get active counts, grouped by name
        active_counts: Dict[str, int] = dict(
            base_query.filter(TeamWeeklyAnime.bench == 0).all()
        )

        print(f"Anime Counts:\n{anime_counts}")
        print(f"Active Counts:\n{active_counts}")

        with open(filename, "w", encoding="utf-8") as f:
            f.write(TEAM_STATS_TEXT)
            # Output counts for each anime
            for i, (anime, count) in enumerate(anime_counts, 1):
                active_count = active_counts.get(anime, 0)
                f.write(f"{i} - {anime}: {count} ({active_count})\n")

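# add_week_to_filename (also used by team_dist above) is defined elsewhere in
# this module. A minimal sketch of the behaviour assumed here, turning
# "lists/team_stats.txt" into "lists/team_stats_week_3.txt" for week 3 (the
# exact naming scheme and the _sketch name are assumptions):
def _add_week_to_filename_sketch(filename: str, week: int) -> str:
    stem, _, ext = filename.rpartition(".")
    return f"{stem}_week_{week}.{ext}"
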
def calculate_team_scores() -> None:
    """
    For every team "this week" in this season, calculate its points based on
    the criteria of the week.
    """
    season_of_year = config.get("season info", "season").lower()
    year = config.getint("season info", "year")
    week = config.getint("weekly info", "current-week")

    with session_scope() as session:
        teams = Season.get_season_from_database(season_of_year, year, session).teams
        assert isinstance(teams, list)
        for team in teams:
            this_week_points = TeamWeeklyPoints(team_id=team.id, week=week)
            session.add(this_week_points)
            add_team_anime_scores_and_ace_to_weekly_points(this_week_points, session)
            this_week_points.total_points = calculate_team_total_score(team, session)

        for count, team_id in get_team_scores_counts_this_week(week, session):
            if count == 1 and not already_got_high_bonus(team_id, session):
                top_unique_awarded = (
                    session.query(TeamWeeklyPoints)
                    .filter(
                        TeamWeeklyPoints.week == week,
                        TeamWeeklyPoints.team_id == team_id,
                    )
                    .one()
                )
                top_unique_awarded.weekly_points += config.getint(
                    "scoring info", "highest-unique"
                )
                top_unique_awarded.total_points += config.getint(
                    "scoring info", "highest-unique"
                )
                top_unique_awarded.is_highest = 1
                break

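# already_got_high_bonus and get_team_scores_counts_this_week are defined
# elsewhere. A hypothetical sketch of the first check, assuming the is_highest
# flag set above marks every past award of the highest-unique bonus (an
# assumption, not the project's actual query):
def _already_got_high_bonus_sketch(team_id: int, session) -> bool:
    # Has this team already received the highest-unique-score bonus in an earlier week?
    return bool(
        session.query(TeamWeeklyPoints)
        .filter(TeamWeeklyPoints.team_id == team_id, TeamWeeklyPoints.is_highest == 1)
        .count()
    )
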
def team_ages() -> None:
    """
    Potentially helps spot alt accounts.
    """
    jikan = jikanpy.Jikan()
    season_of_year = config.get("season info", "season")
    year = config.getint("season info", "year")

    with session_scope() as session:
        teams = Season.get_season_from_database(season_of_year, year, session).teams
        for team in cast(Iterable[Team], teams):
            if not team.mal_join_date:
                print(f"Getting join date for {team.name}")
                assert team.name
                try:
                    team.mal_join_date = parse(jikan.user(team.name)["joined"])
                    session.commit()
                    # time.sleep(config.get('jikanpy', 'request-interval'))
                except Exception as e:
                    print(
                        f"Ran into issues figuring out join date with {team.name}: {str(e)}"
                    )
                    continue

def execute(self) -> None:
    with session_scope() as session:
        season = Season.get_or_create(
            season_of_year=self.season_of_year,
            year=self.year,
            session=session,
        )
        self._execute(session, season)

def populate_anime_weekly_stats(
    simulcast_lines: Optional[Iterable[str]] = None,
    licenses_lines: Optional[Iterable[str]] = None,
) -> None:
    """
    Populates the AnimeWeeklyStat table with a row for each anime using data from Jikan.
    """
    season_of_year = config.get("season info", "season").lower()
    year = config.getint("season info", "year")
    week = config.getint("weekly info", "current-week")

    if is_week_to_calculate("scoring.simulcast", week) and simulcast_lines is None:
        raise ValueError(f"simulcast file is required for week {week}")
    if is_week_to_calculate("scoring.license", week) and licenses_lines is None:
        raise ValueError(f"licenses file is required for week {week}")

    with session_scope() as session:
        anime_simulcast_region_counts = get_anime_simulcast_region_counts(
            simulcast_lines, session
        )
        licensed_anime = get_licensed_anime(licenses_lines, session)
        season = Season.get_season_from_database(season_of_year, year, session)
        anime_list = cast(Iterable[Anime], season.anime)

        anime_ids_collected = [
            row[0]
            for row in session.query(AnimeWeeklyStat.anime_id)
            .join(Anime)
            .filter(AnimeWeeklyStat.week == week)
            .filter(Anime.season_id == season.id)
            .all()
        ]
        if anime_ids_collected:
            action = input(
                "At least some anime stats have been collected for this week"
                " already. How should we proceed (overwrite/collect-missing/abort)?"
            )
            if action == "collect-missing":
                anime_list = (
                    anime for anime in anime_list if anime.id not in anime_ids_collected
                )
            elif action == "overwrite":
                pass
            else:
                return

        # for each anime, get the number of teams that have it on active
        anime_active_counts = dict(
            session.query(Anime.id, func.count("*"))
            .join(TeamWeeklyAnime.anime)
            .filter(TeamWeeklyAnime.week == week)
            .filter(Anime.season_id == season.id)
            .filter(TeamWeeklyAnime.bench == 0)
            .group_by(Anime.id)
            .all()
        )

        double_score_max_num_teams = math.floor(
            config.getint("scoring info", "percent-ownership-for-double-score")
            / 100
            * len(cast(Sequence[Team], season.teams))
        )

        # casting until update in sqlalchemy-stubs
        for anime in cast(List[Anime], anime_list):
            print(f"Populating stats for {anime}")
            try:
                stat_data = get_anime_stats_from_jikan(anime)
            except jikanpy.exceptions.APIException as e:
                print(
                    f"Jikan servers did not handle our request very well, skipping: {e}"
                )
                continue

            if (
                is_week_to_calculate("scoring.simulcast", week)
                and anime.id not in anime_simulcast_region_counts
            ):
                print(
                    f"{anime.id}-{anime.name} doesn't have an entry in simulcast file"
                )

            stat_data.week = week
            stat_data.anime_id = anime.id
            stat_data.total_points = calculate_anime_weekly_points(
                stat_data,
                anime_active_counts.get(anime.id, 0),
                double_score_max_num_teams,
                anime_simulcast_region_counts.get(anime.id, 0),
                anime.id in licensed_anime,
            )

            anime_weekly_stat = AnimeWeeklyStat()
            for key, value in dataclasses.asdict(stat_data).items():
                setattr(anime_weekly_stat, key, value)

            session.merge(anime_weekly_stat)
            time.sleep(config.getint("jikanpy", "request-interval"))

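# is_week_to_calculate is defined elsewhere in this module. A hypothetical
# sketch of the check, assuming a criterion like "scoring.simulcast" names a
# config section and option holding a comma-separated list of weeks in which
# that criterion is scored (the config layout and the _sketch name are
# assumptions, not the project's actual configuration):
def _is_week_to_calculate_sketch(criterion: str, week: int) -> bool:
    section, option = criterion.split(".", 1)
    weeks = config.get(section, option, fallback="")
    return week in {int(w) for w in weeks.split(",") if w.strip()}
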