def test_when_add_member_from_wrong_type_return_exception(self):
    member = Player("Pesho")
    member_type_string = "Acer"
    team = Team("Wolf")

    with self.assertRaises(TypeError):
        team.add_team_members([member, member_type_string])
def test_team_jsonable(self):
    player1 = Player("Marto")
    player2 = Player("Pesho")
    team1 = Team("Wolf")
    team1.add_team_members([player1, player2])

    self.assertEqual(
        team1.to_json(),
        json.dumps(
            {
                "Wolf": [
                    {"Marto": {"points": 0, "announcements": [], "cards": []}},
                    {"Pesho": {"points": 0, "announcements": [], "cards": []}},
                ]
            },
            cls=OurEncoder))
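# Hedged sketch (assumption, not the project's real encoder): OurEncoder is passed above as the
# `cls` argument to json.dumps, so something along these lines would let nested Player/Team/
# Round/Game objects serialise themselves. The to_dict hook name is hypothetical.
import json


class OurEncoderSketch(json.JSONEncoder):
    def default(self, obj):
        # Delegate to the object's own plain-data representation if it has one
        to_dict = getattr(obj, "to_dict", None)
        if callable(to_dict):
            return to_dict()
        # Otherwise fall back to the base class, which raises the usual TypeError
        return super().default(obj)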
def test_parse_match_with_tie():
    match_url = "https://www.over.gg/11948/ldn-vs-shd-overwatch-league-2019-season-play-in-sf"
    fields = overggparser.parse_match(match_url)

    match = Match(id=7269,
                  start=datetime.strptime("2019-08-31 15:00:00", "%Y-%m-%d %H:%M:%S"),
                  url=match_url,
                  home=Team("London Spitfire", "US"),
                  away=Team("Shanghai Dragons", "US"))
    overggparser.merge_fields_into_match(fields, match)

    assert len(match.maps) == 8
    assert match.maps[0].name == "Busan"
    assert match.maps[0].winner == Winner.HOME
    assert match.maps[1].name == "Numbani"
    assert match.maps[1].winner == Winner.HOME
    assert match.maps[2].name == "Hanamura"
    assert match.maps[2].winner == Winner.AWAY
    assert match.maps[3].name == "Watchpoint: Gibraltar"
    assert match.maps[3].winner == Winner.HOME
    assert match.maps[4].name == "Lijiang Tower"
    assert match.maps[4].winner == Winner.AWAY
    assert match.maps[5].name == "King's Row"
    assert match.maps[5].winner == Winner.TIED
    assert match.maps[6].name == "Dorado"
    assert match.maps[6].winner == Winner.AWAY
    assert match.maps[7].name == "Ilios"
    assert match.maps[7].winner == Winner.HOME
def test_when_add_more_members_from_2_exception(self):
    member1 = Player("Pesho")
    member2 = Player("Gosho")
    member3 = Player("Thosho")
    team = Team("Wolf")

    with self.assertRaises(ValueError):
        team.add_team_members([member1, member2, member3])
def test_when_add_two_members_to_team_return_correctly(self):
    member1 = Player("Pesho")
    member2 = Player("Gosho")
    team = Team("Wolf")
    team.add_team_members([member1, member2])

    self.assertEqual(len(team.players), 2)
    self.assertIn(member1, team.players)
    self.assertIn(member2, team.players)
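# Hedged sketch (assumption, not the project's actual classes) of the Player/Team behaviour the
# membership tests above rely on: only Player instances may be added, and a team holds at most
# two players; anything else raises TypeError or ValueError respectively.
class PlayerSketch:
    def __init__(self, name):
        self.name = name
        self.points = 0
        self.announcements = []
        self.cards = []


class TeamSketch:
    def __init__(self, name):
        self.name = name
        self.players = []

    def add_team_members(self, members):
        if not all(isinstance(member, PlayerSketch) for member in members):
            raise TypeError("team members must be Player instances")
        if len(self.players) + len(members) > 2:
            raise ValueError("a team can have at most two players")
        self.players.extend(members)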
def test_parse_match():
    match_url = "https://www.over.gg/12004/nyxl-vs-atl-overwatch-league-2019-season-p-offs-sf"
    fields = overggparser.parse_match(match_url)

    match = Match(id=7322,
                  start=datetime.strptime("2019-09-08 18:00:00", "%Y-%m-%d %H:%M:%S"),
                  url=match_url,
                  home=Team("New York Excelsior", "US"),
                  away=Team("Atlanta Reign", "US"))
    overggparser.merge_fields_into_match(fields, match)

    assert match.home.name == "New York Excelsior"
    assert match.away.name == "Atlanta Reign"
    assert match.state == GameState.COMPLETE
    assert match.home_score == 4
    assert match.away_score == 2
    assert match.winner == "New York Excelsior"
    assert match.competition == "Overwatch League 2019 Season"
    assert match.competition_url == "https://www.over.gg/event/266/overwatch-league-2019-season"
    assert match.stage == "Playoffs: Semifinals"

    expected_streams = [
        Stream("https://www.twitch.tv/overwatchleague", "ENG"),
        Stream("https://www.twitch.tv/overwatchleague_kr", "KR"),
        Stream("https://www.twitch.tv/overwatchleague_fr", "FR")
    ]
    for match_stream in match.streams:
        found = False
        for expected_stream in expected_streams:
            if match_stream == expected_stream and match_stream.language == expected_stream.language:
                found = True
                break
        assert found

    assert len(match.maps) == 6
    assert match.maps[0].name == "Busan"
    assert match.maps[0].winner == Winner.HOME
    assert match.maps[1].name == "King's Row"
    assert match.maps[1].winner == Winner.HOME
    assert match.maps[2].name == "Hanamura"
    assert match.maps[2].winner == Winner.AWAY
    assert match.maps[3].name == "Rialto"
    assert match.maps[3].winner == Winner.HOME
    assert match.maps[4].name == "Lijiang Tower"
    assert match.maps[4].winner == Winner.AWAY
    assert match.maps[5].name == "Numbani"
    assert match.maps[5].winner == Winner.HOME
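# Hedged sketch (assumption): the Winner and GameState values compared against in the two
# over.gg parser tests above, modelled as enums with only the members those tests use. The
# real module may define more states and different underlying values.
from enum import Enum


class WinnerSketch(Enum):
    HOME = "home"
    AWAY = "away"
    TIED = "tied"


class GameStateSketch(Enum):
    COMPLETE = "complete"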
def load_tourny_teams(year):
    teams = []
    with open('data/Big_Dance_CSV.csv', 'r') as teams1_file:
        readCSV = csv.reader(teams1_file, delimiter=',')
        #Skip the header row
        header = next(readCSV)
        for row in readCSV:
            #Record region_num, region, seed, and team name for each team
            r_year = row[0]
            round = row[1]
            if (int(r_year) == year and int(round) == 1):
                region_number = row[2]
                region_name = row[3]
                seed = row[4]
                score = row[5]
                team = row[6]
                # https://api.sportradar.us/ncaamb/{access_level}/{version}/{language_code}/league/hierarchy.{format}?api_key={your_api_key}
                team = Team(r_year, round, region_number, region_name, seed, score, team)
                teams.append(team)

    #Read through the file again, this time getting the opponent team from each row
    with open('data/Big_Dance_CSV.csv', 'r') as teams2_file:
        readCSV = csv.reader(teams2_file, delimiter=',')
        #Skip the header row
        header = next(readCSV)
        for row in readCSV:
            #Record region_num, region, seed, and team name for each opponent
            r_year = row[0]
            round = row[1]
            if (int(r_year) == year and int(round) == 1):
                region_number = row[2]
                region_name = row[3]
                seed = row[7]
                score = row[8]
                team = row[9]
                team = Team(r_year, round, region_number, region_name, seed, score, team)
                teams.append(team)

    #Confirm there are 64 teams loaded into the tournament
    assert len(teams) == 64, "Wrong number of teams. Check team name spelling"
    return teams
def load_teams_by_round(tourny_round):
    teams = []
    #`now` is assumed to be a module-level datetime (e.g. datetime.now())
    for year in range(2019, now.year + 1):
        with open('data/Big_Dance_CSV.csv', 'r') as teams1_file:
            readCSV = csv.reader(teams1_file, delimiter=',')
            #Skip the header row
            header = next(readCSV)
            for row in readCSV:
                #Record region_num, region, seed, and team name for each team
                r_year = row[0]
                round = row[1]
                if (int(r_year) == year and int(round) == tourny_round):
                    region_number = row[2]
                    region_name = row[3]
                    seed = row[4]
                    score = row[5]
                    team = row[6]
                    team = Team(r_year, round, region_number, region_name, seed, score, team)
                    teams.append(team)

        #Read through the file again, this time getting the opponent team from each row
        with open('data/Big_Dance_CSV.csv', 'r') as teams2_file:
            readCSV = csv.reader(teams2_file, delimiter=',')
            #Skip the header row
            header = next(readCSV)
            for row in readCSV:
                #Record region_num, region, seed, and team name for each opponent
                r_year = row[0]
                round = row[1]
                if (int(r_year) == year and int(round) == tourny_round):
                    region_number = row[2]
                    region_name = row[3]
                    seed = row[7]
                    score = row[8]
                    team = row[9]
                    team = Team(r_year, round, region_number, region_name, seed, score, team)
                    teams.append(team)

    #Confirm there are the right number of teams in each round
    assert len(teams) == int(64 / 2**(tourny_round-1)), "Wrong number of teams. Check team name spelling"
    return teams
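# Hedged sketch (assumption, since the actual file is not shown) of the data/Big_Dance_CSV.csv
# column layout implied by the positional indexing in the two loaders above. The sample values
# are hypothetical.
sample_row = ["2019", "1", "1", "East", "1", "85", "Team A", "16", "62", "Team B"]
(year, tourny_round, region_number, region_name,
 seed, score, team_name,
 opp_seed, opp_score, opp_team_name) = sample_row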
def test_game_jsonable(self):
    player1 = Player("Marto")
    player2 = Player("Pesho")
    player3 = Player("Nasko")
    player4 = Player("Petko")
    team1 = Team("Wolf")
    team2 = Team("Lion")
    team1.add_team_members([player1, player2])
    team2.add_team_members([player3, player4])
    round1 = Round("All trumps", 1)
    round1.add_round_members([team1, team2])
    game = Game([team1, team2], 1)
    game.rounds.append(round1)

    self.assertEqual(
        game.to_json(),
        json.dumps(
            {
                "game 1": [{
                    "round 1": [
                        {
                            "Wolf": [
                                {"Marto": {"points": 0, "announcements": [], "cards": []}},
                                {"Pesho": {"points": 0, "announcements": [], "cards": []}},
                            ]
                        },
                        {
                            "Lion": [
                                {"Nasko": {"points": 0, "announcements": [], "cards": []}},
                                {"Petko": {"points": 0, "announcements": [], "cards": []}},
                            ]
                        },
                    ]
                }]
            },
            cls=OurEncoder))
def get_upcoming_events(events):
    data = retry_request(static.OVER_GG_API, 5, 60)
    if data is None:
        return False

    for match_table in data['matches']:
        match = Match(id=match_table['id'],
                      start=datetime.utcfromtimestamp(int(match_table['timestamp'])),
                      url=match_table['match_link'],
                      home=Team(match_table['teams'][0]['name'],
                                match_table['teams'][0]['country']),
                      away=Team(match_table['teams'][1]['name'],
                                match_table['teams'][1]['country']))

        fits_event = False
        for event in events:
            if event.match_fits(match.start, match_table['event_name'], match.id):
                event.add_match(match)
                fits_event = True
                break

        if not fits_event:
            rank, competition = mappings.get_competition(match_table['event_name'])
            if competition is None:
                if match_table['event_name'] not in static.missing_competition:
                    static.missing_competition.add(match_table['event_name'])
                    log.warning(
                        f"Upcoming event not mapped: {match_table['event_name']} : {str(match.start)}"
                    )
            elif static.utcnow() + timedelta(hours=competition.event_build_hours_ahead) > \
                    match.start > \
                    static.utcnow() - timedelta(hours=10):
                log.debug(
                    f"Found new upcoming event: {match_table['event_name']} : {str(match.start)}"
                )
                event = Event(competition=competition)
                event.add_match(match)
                events.append(event)
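# Hedged sketch of a retry_request helper consistent with the call above (assumption: the
# project's real helper is not shown, and the second and third arguments are read here as
# "attempts" and "seconds to wait between attempts").
import time
import requests


def retry_request_sketch(url, attempts, delay_seconds):
    for _ in range(attempts):
        try:
            response = requests.get(url, timeout=10)
            if response.status_code == 200:
                #Parsed JSON is what get_upcoming_events indexes into as data['matches']
                return response.json()
        except requests.RequestException:
            pass
        time.sleep(delay_seconds)
    #Mirrors the "data is None" early return above
    return None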
def test_create_new_game(self):
    player1 = Player("Marto")
    player2 = Player("Pesho")
    player3 = Player("Nasko")
    player4 = Player("Petko")
    team1 = Team("Wolf")
    team2 = Team("Lion")
    team1.add_team_members([player1, player2])
    team2.add_team_members([player3, player4])
    game = Game([team1, team2], 1)

    self.assertEqual(game.rounds, [])
    self.assertEqual(game.game_number, 1)
    self.assertEqual(game.winner, False)
    self.assertEqual(game.current_round, 1)
def load_teams(year):
    #Website with March Madness tournament brackets
    url = "https://www.sports-reference.com/cbb/postseason/" + str(year) + "-ncaa.html"
    response = get(url)

    #Get region names (east, west, midwest, south most years, though the names change in some years)
    bracket_names_soup = SoupStrainer('div', class_='switcher filter')
    soup = BeautifulSoup(response.text, 'html.parser', parse_only=bracket_names_soup)
    bracket_names = soup.find_all('a')
    regions = []
    for a_tag in bracket_names:
        regions.append(a_tag.text.lower())
    # regions = ['east', 'west', 'south', 'midwest']

    tourny_teams = []
    r32 = []
    sweet_sixteen = []
    elite_eight = []

    for region in regions[0:4]:
        #Only extract teams from one region at a time
        stripped_region = region.replace(" ", "")
        stripped_region = stripped_region.replace(".", "")
        region_soup = SoupStrainer(id=stripped_region)
        soup = BeautifulSoup(response.text, 'html.parser', parse_only=region_soup)

        a_tags = []
        for brackets in soup.find_all('div', class_='team16'):
            for a in brackets.find_all('a'):
                a_tags.append(a)

        #Indices represent where in the list of a_tags the team names are
        #(other a tags represent scores and the location of the game)
        team_indices = [
            0, 2, 5, 7, 10, 12, 15, 17, 20, 22, 25, 27, 30, 32, 35, 37
        ]
        #East and West regions have 2 play-in games each, taking up 8 indices (2 games * 2 teams * 2 scores)

        seed_index_counter = 0
        seeds = [1, 16, 8, 9, 5, 12, 4, 13, 6, 11, 3, 14, 7, 10, 2, 15]
        for i in team_indices:
            r_year = year
            region_name = region
            name = a_tags[i].text
            #Team's score in a round shows up one <a> tag after the team name
            score = a_tags[i + 1].text

            #Get a team's stats
            stats = {}
            stat_names = []
            stat_values = []
            #Link to the team's page that has their stats, rosters, etc.
            stats_url = "https://www.sports-reference.com/" + a_tags[i]["href"]
            stats_response = get(stats_url)
            team_stats_soup = SoupStrainer(id="team_stats")
            #Only focus on the team stat table (other tables show the roster, conference stats, etc.)
            team_stats_soup = BeautifulSoup(stats_response.text, 'html.parser',
                                            parse_only=team_stats_soup)

            #Get the names of the reported stats (e.g. G = games, MP = minutes played, etc.)
            team_stats_table = team_stats_soup.find_all('th')
            #Append Games played
            stat_names.append(team_stats_table[1].text)
            #Skip Minutes Played (blank some years) then append the other stat names
            for th in team_stats_table[3:-4]:
                stat_names.append(th.text)

            #Get the values associated with each stat for a given team
            team_stats = team_stats_soup.find_all('td')
            #Append Games played
            stat_values.append(team_stats[0].text)
            #Skip Minutes Played (blank some years) then append the other stat values
            for td in team_stats[2:len(stat_names) + 1]:
                stat_values.append(td.text)

            #Combine the stat names and values (dictionary with names as keys, values as values)
            for i in range(0, len(stat_names) - 1):
                #Convert stats to floating point numbers, unless the stat is blank
                try:
                    stats[stat_names[i]] = float(stat_values[i])
                except:
                    stats[stat_names[i]] = 0

            #Add per game stat names
            per_game_stat_names = [
                "FG", "FGA", "2P", "2PA", "3P", "3PA", "FT", "FTA", "ORB",
                "DRB", "TRB", "AST", "STL", "BLK", "TOV", "PF"
            ]
            for per_game_stat_name in per_game_stat_names:
                stat_names.append(per_game_stat_name + "/G")
            #Add per game stats
            for per_game_stat_name in per_game_stat_names:
                per_game_stat_value = stats[per_game_stat_name] / stats["G"]
                stat_values.append(per_game_stat_value)

            #Combine the stat names and values again (now including the per game stats)
            for i in range(0, len(stat_names)):
                #Convert stats to floating point numbers, unless the stat is blank
                try:
                    stats[stat_names[i]] = float(stat_values[i])
                except:
                    stats[stat_names[i]] = 0

            #Create team classes with all the corresponding info
            team = Team(r_year, region_name, name, score, stats,
                        seeds[seed_index_counter])
            tourny_teams.append(team)
            seed_index_counter += 1

        #Later rounds start further down the region's list of <a> tags
        r32_indices = [i + 40 for i in team_indices[0:len(team_indices) // 2]]
        sweet_sixteen_indices = [
            i + 60 for i in team_indices[0:len(team_indices) // 4]
        ]
        elite_eight_indices = [
            i + 70 for i in team_indices[0:len(team_indices) // 8]
        ]
        for i in r32_indices:
            name = a_tags[i].text
            r32.append(name)
        for i in sweet_sixteen_indices:
            name = a_tags[i].text
            sweet_sixteen.append(name)
        for i in elite_eight_indices:
            name = a_tags[i].text
            elite_eight.append(name)

    #Extract the Final Four, championship game, and champion from the national bracket
    final_four_soup = SoupStrainer(id="national")
    soup = BeautifulSoup(response.text, 'html.parser', parse_only=final_four_soup)
    a_tags = soup.find_all('a')  #to get team names
    final_four = []
    championship = []
    champion = []
    final_four_indices = team_indices[0:4]
    championship_indices = team_indices[4:6]
    champion_index = team_indices[6]
    for i in final_four_indices:
        name = a_tags[i].text
        final_four.append(name)
    for i in championship_indices:
        name = a_tags[i].text
        championship.append(name)
    champion.append(a_tags[champion_index].text)

    return (regions[0:4], tourny_teams, stat_names, r32, sweet_sixteen,
            elite_eight, final_four, championship, champion)
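# Hypothetical usage sketch for load_teams above (2019 is an arbitrary example year; note that
# the call performs live requests against sports-reference.com).
if __name__ == "__main__":
    (regions, tourny_teams, stat_names, r32, sweet_sixteen,
     elite_eight, final_four, championship, champion) = load_teams(2019)
    print(len(tourny_teams), "tournament teams loaded for regions:", regions)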
def test_when_create_team_then_all_fields_initialized_correctly(self):
    team = Team("Wolf")

    self.assertEqual(team.name, "Wolf")
    self.assertEqual(team.players, [])
def create_team(names, aptitudes, matches, binary, ids):
    team = []
    for i in range(len(names)):
        team.append(Team(names[i], aptitudes[i], matches[i], binary[i], ids[i]))
    return team
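# Hypothetical usage of create_team above (assumption: the five arguments are parallel lists,
# one entry per team, matching the positional Team(...) constructor used in this snippet).
names = ["Alpha", "Beta"]
aptitudes = [70, 65]
matches = [0, 0]
binary = [0, 1]
ids = [1, 2]
teams = create_team(names, aptitudes, matches, binary, ids)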
def load_teams_by_round(year, load_round):
    #Website with March Madness tournament brackets
    url = "https://www.sports-reference.com/cbb/postseason/" + str(year) + "-ncaa.html"
    response = get(url)

    regions = ['east', 'west', 'south', 'midwest']
    tourny_teams = []

    #Indices represent where in the list of a_tags the team names are
    #(other a tags represent scores and the location of the game)
    m_s_team_indices = [
        0, 2, 5, 7, 10, 12, 15, 17, 20, 22, 25, 27, 30, 32, 35, 37
    ]
    #East and West regions have 2 play-in games each, taking up 8 extra indices
    #(2 games * 2 teams * 2 scores)
    buffer = 8
    e_w_team_indices = [i + buffer for i in m_s_team_indices]

    #Each later round keeps only the first part of the index list and starts further down the <a> tags
    number_of_teams = len(m_s_team_indices) // (2**load_round)
    team_indices = m_s_team_indices[0:number_of_teams]
    team_indices = [i + (80 // (2**load_round)) for i in team_indices]

    for region in regions[0:2]:
        #Only extract teams from one region at a time
        region_soup = SoupStrainer(id=region)
        soup = BeautifulSoup(response.text, 'html.parser', parse_only=region_soup)
        a_tags = soup.find_all('a')  #to get team names

        temp_counter = 0
        for a in a_tags:
            print(temp_counter, a.text)
            temp_counter += 1

        #Each round starts 40 <a> tags after the first team in the previous round
        print(len(e_w_team_indices))
        print(load_round)
        print(2**load_round)
        print(number_of_teams)
        print(e_w_team_indices)
        # 80 / 1 = 40
        # 80 / 2 = 40
        # 80 / 4 = 20
        # 80 / 8 = 10
        # 80 / 16 = 5
        # 1 ---> 2 ---> 3 ---> 4
        # 40 --> 60 --> 70 --> 75
        for i in e_w_team_indices:
            r_year = year
            region_name = region
            name = a_tags[i].text
            #Team's score in a round shows up one <a> tag after the team name
            score = a_tags[i + 1].text
            #Create team classes with all the corresponding info
            team = Team(r_year, region_name, name, score, [], 0)
            tourny_teams.append(team)

    return (tourny_teams)
def load_tourny_teams(year):
    #Get NCAA Men's Division I Basketball Tournament (aka March Madness) tournament id
    tournaments_url = 'https://api.sportradar.us/ncaamb/trial/v7/en/tournaments/' + str(year) + '/pst/schedule.xml?api_key=qg6gaqr9f4x4b5utrynfp4jx'
    march_madness_id = ''
    #Get the data in XML format
    tournaments_xml = requests.get(tournaments_url)
    print(tournaments_xml.content)
    #Capture the start of the XML DOM
    tournaments_root = ET.fromstring(tournaments_xml.content)
    #For each tournament tag in the DOM, if name = March Madness capture the id
    print(year)
    print(tournaments_root)
    print(tournaments_root.tag)
    print(tournaments_root.attrib)
    for tournament in tournaments_root[0]:
        if (tournament.attrib['name'] == 'NCAA Men\'s Division I Basketball Tournament'):
            march_madness_id = tournament.attrib['id']
            break
    #Need to wait a second before calling the API again
    time.sleep(1)

    #Get the teams (along with region and seed) in March Madness
    tourny_teams_url = 'https://api.sportradar.us/ncaamb/trial/v4/en/tournaments/' + march_madness_id + '/summary.xml?api_key=qtcxhk2mzua35x23m5hh96d8'
    tourny_teams = []
    tourny_teams_xml = requests.get(tourny_teams_url)
    tourny_teams_root = ET.fromstring(tourny_teams_xml.content)
    #Check that we are looking at the right tournament
    if (tourny_teams_root.attrib['name'] == 'NCAA Men\'s Division I Basketball Tournament'):
        for bracket in tourny_teams_root:
            region = bracket.attrib['name'][:-1 * len(' Regional')]
            for team in bracket:
                team_id = team.attrib['id']
                team_name = team.attrib['market']
                seed = team.attrib['seed']
                team = Team(year, team_id, team_name, region, seed)
                tourny_teams.append(team)
    else:
        print('Wrong tournament id')
    #Need to wait a second before calling the API again
    time.sleep(1)

    #Get the regular season stat names
    for team in tourny_teams:
        stats = []
        stats_url = 'https://api.sportradar.us/ncaamb/trial/v4/en/seasons/' + str(year) + '/REG/teams/' + team.id + '/statistics.xml?api_key=qtcxhk2mzua35x23m5hh96d8'
        stats_xml = requests.get(stats_url)
        stats_root = ET.fromstring(stats_xml.content)
        #season --> team_id --> team_records --> overall
        if stats_root.tag != 'h1':
            stats = list(stats_root[0][0][0][0].attrib.keys())
        #Need to wait a second before calling the API again
        time.sleep(1)

    #Get the regular season stats
    for team in tourny_teams:
        stats_url = 'https://api.sportradar.us/ncaamb/trial/v4/en/seasons/' + str(year) + '/REG/teams/' + team.id + '/statistics.xml?api_key=qtcxhk2mzua35x23m5hh96d8'
        stats_xml = requests.get(stats_url)
        stats_root = ET.fromstring(stats_xml.content)
        for i in range(0, len(stats)):
            team.add_stats(team, stats[i], stats_root[0][0][0][0].attrib[stats[i]])
        #Need to wait a second before calling the API again
        time.sleep(0.1)

    return (tourny_teams, stats)