def sim_and_rank_seasons(lineup, num_rotations=2):
    """Simulate full seasons for random lineup rotations and rank them.

    For each of ``num_rotations`` random shuffles of ``lineup``, all 9
    cyclic shifts of that order are played as 162-game seasons. The
    resulting Season objects are returned sorted by runs per game.
    """
    seasons = []
    working_lineup = lineup[:]
    total = num_rotations * 9
    for rotation in range(num_rotations):
        # Fresh random batting order for this rotation.
        shuffle(working_lineup)
        # Walk through all 9 cyclic shifts of the shuffled order.
        for shift in range(9):
            # Move the leadoff batter to the end of the order.
            working_lineup.append(working_lineup.pop(0))
            season = Season(lineup=deepcopy(list(working_lineup)), num_games=162)
            season.play_season()
            seasons.append(season)
            # Progress indicator so users know the simulation is alive.
            sys.stdout.write("\rSeasons Simulated: %d/%d" % (shift + rotation * 9 + 1, total))
            sys.stdout.flush()
    # NOTE(review): sorts ascending by runs/game — the original comment
    # said "descending order" but the code has always sorted ascending.
    seasons.sort(key=lambda s: s.get_runs_per_game())
    sys.stdout.write("\n")
    return seasons
def add_season(self, location='', number=0):
    """Create a Season, register it under its number, and return it.

    If ``number`` is 0 it is derived from the last two characters of
    ``location`` (assumed to end in a two-digit season number).
    """
    if number == 0:
        number = int(location[-2:])
    new_season = Season(location=location, s_nr=number)
    new_season.episodes = {}
    self.seasons[number] = new_season
    return new_season
def main(reps): s = Season(TEAM_FILE,SCHEDULE_FILE, CONFERENCES_FILE, LAST_PLAYED_WEEK) #s2 = copy.deepcopy(s) #mc = s.makeMC(LAST_PLAYED_WEEK) #m = np.matrix(mc) #print(m) #n = copy.deepcopy(m) #for i in xrange(100): # print("Iteration %d" %i) # n = n*m # print(n) #print(s.makeMC(LAST_PLAYED_WEEK)) #o = np.sum(n, axis=0) #print(o.tolist()) #training / testing #for t in [8,10,12,14]: # print("%d -> %d" %(t,t+1)) # s.training([2], # [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.20, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.30, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.40, 0.5 ,0.5, 0.7, 0.8, 0.9, 1], # [0, 0.1, 0.20, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.30, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1], # t,t+1) #for t in [9,11,13]: # for setup in [[2, 0.04, 0.3], [2, 0.01, 0.35], [2, 0, 0.29], [2, 0.38, 0.24]]: # print("%d -> %d, %s, %f" %(t,t+1, setup, s.training([setup[0]], [setup[1]], [setup[2]], t, t+1))) random.seed(42) for i in range(10): seed = random.randint(0,99999) print("---------------------------%d------------------------------------" %i) s.runSimulation(seed,reps, 9, 0.04, 0.3)
def sim_and_rank_seasons(lineup, num_rotations=2):
    """Play 162-game seasons for every cyclic shift of random lineups.

    Shuffles a copy of ``lineup`` ``num_rotations`` times; for each
    shuffle all nine rotations of the batting order are simulated.
    Returns the Season objects sorted by runs per game.
    """
    results = []
    batting_order = lineup[:]
    rotation = 0
    while rotation < num_rotations:
        # New random ordering for this batch of nine rotations.
        shuffle(batting_order)
        for offset in range(9):
            # Rotate: first batter moves to the back of the order.
            batting_order.append(batting_order.pop(0))
            sim = Season(lineup=deepcopy(list(batting_order)), num_games=162)
            sim.play_season()
            results.append(sim)
            # Live progress so long runs don't look hung.
            done = offset + rotation * 9 + 1
            sys.stdout.write("\rSeasons Simulated: %d/%d" % (done, num_rotations * 9))
            sys.stdout.flush()
        rotation += 1
    # Ascending sort by runs per game (matches the original behavior).
    results.sort(key=lambda sim: sim.get_runs_per_game())
    sys.stdout.write("\n")
    return results
def main():
    """Read teams and games from the workbook, run the season, write results."""
    workbook = 'Analytics_Attachment.xlsx'  # files
    teams = read_teams(workbook)
    games = read_games(workbook)
    season = Season(teams, games)
    season.run()
    # Persist the results produced by the run.
    write_outlist(season.get_outlist())
def rbi_sim_and_rank_seasons(lineup, trials_per_pos=2, name="David Oritz"):
    """Simulate seasons with the named player at each of the 9 lineup spots.

    For every batting position 0-8, the named player is inserted into
    ``trials_per_pos`` random shuffles of the remaining lineup and a
    162-game season is played. Returns a list of 9 lists of Season
    objects, indexed by the player's batting position.

    Exits the process if the name is missing from the lineup or appears
    more than once.
    """
    lineup_copy = lineup[:]
    this_player = None
    this_player_index = -1
    # One bucket of seasons per batting position (0-8).
    ordered_seasons = [[] for _ in range(9)]
    # Locate (and later remove) the player under test. Scan the WHOLE
    # lineup so duplicate names are actually detected: the original code
    # broke out of the loop on the first match, which made the duplicate
    # error branch unreachable.
    for i, p in enumerate(lineup_copy):
        if p.get_name() == name:
            if this_player is None:
                this_player = p
                this_player_index = i
            else:  # means duplicate player names!
                print("Error: cannot use duplicate names in lineup!")
                sys.exit()
    if this_player is None and this_player_index == -1:
        print("Player name not in lineup!")
        sys.exit()
    lineup_copy.pop(this_player_index)
    # test player at 1-9 spots of lineup
    counter = 0
    for lineup_pos in range(9):
        for i in range(trials_per_pos):
            shuffle(lineup_copy)  # randomize lineup
            lineup_copy.insert(lineup_pos, this_player)  # insert this player in appropriate spot
            s = Season(lineup=deepcopy(list(lineup_copy)), num_games=162)
            s.play_season()
            ordered_seasons[lineup_pos].append(s)
            # Remove the player again before the next shuffle.
            for i, p in enumerate(lineup_copy):
                if p.get_name() == name:
                    del lineup_copy[i]
                    break
            # print to terminal so people don't freak out waiting for simulations to end
            sys.stdout.write("\rSeasons Simulated: %d/%d" % (counter + 1, trials_per_pos * 9))
            sys.stdout.flush()
            counter += 1
    sys.stdout.write("\n")
    return ordered_seasons
def parseSeasons():
    """Parse per-season player stats for 2000-2017 and return the players.

    Reads 'season/<year>Stats.txt' for basic stats and
    'season/<year>AdvancedStats.txt' for PER/BPM/VORP, accumulating
    seasons onto NbaPlayer objects and grouping each season by the
    player's age in ``all_seasons_by_age``.

    :return: list of player objects with their seasons attached.
    """
    # NOTE: time.clock() was removed in Python 3.8; time.perf_counter()
    # is the modern replacement if this module moves to Python 3.
    start = time.clock()
    first_year = 2000
    last_year = 2017
    min_minutes_played = 10.0
    min_games = 20
    players = []
    all_seasons_by_age = dict()
    for year in range(first_year, last_year + 1):
        this_season = Season()
        try:
            sorted_players = this_season.calcSeason('season/' + str(year) + 'Stats.txt', year, min_minutes_played, min_games)
        except IndexError:
            # Malformed stats file: report the year and skip it. (The
            # original fell through and reused the previous iteration's
            # sorted_players — a NameError on the very first year.)
            print(str(year))
            continue
        for player in sorted_players:
            seasons = player.getSeason(year)
            # Group this season under the player's age.
            hold = dict()
            if int(seasons.age) in all_seasons_by_age:
                hold = all_seasons_by_age[int(seasons.age)]
            hold[seasons] = player
            all_seasons_by_age[int(seasons.age)] = hold
            if player in players:
                player_season = players[players.index(player)]
                player_season.addPSeason(year, player)
                players[players.index(player)] = player_season
            else:
                players.append(player)
    # Second pass: attach advanced stats (PER / BPM / VORP) per year.
    for year in range(first_year, last_year + 1):
        advanced_file = open('season/' + str(year) + 'AdvancedStats.txt', 'r')
        for line in advanced_file:
            advanced_stats = line.split('\t')
            player_name = advanced_stats[AdvancedReferenceColumns.name.value]
            # Probe object used only for equality lookup by name.
            fake_player = NbaPlayer(player_name, 'PG', 1990, {})
            if fake_player not in players:
                continue
            player_index = players.index(fake_player)
            if not players[player_index].hasSeason(year):
                continue
            player_season = players[player_index].getSeason(year)
            player_season.setPer(float(advanced_stats[AdvancedReferenceColumns.per.value]))
            player_season.setBpm(float(advanced_stats[AdvancedReferenceColumns.bpm.value]))
            player_season.setVorp(float(advanced_stats[AdvancedReferenceColumns.vorp.value]))
            player = players[player_index]
            player.addSeason(year, player_season)
            players[player_index] = player
        advanced_file.close()
    end = time.clock()
    print('time: ' + str(end - start))
    return players
def next_incomplete_season(self):
    """Fetches the next incomplete season for this circuit.

    Asks the user to create a new one if either the previous season
    is complete or does not exist.

    :return: the next incomplete season.
    """
    current = self.current_season
    if current is not None and not current.complete:
        return current
    while True:
        default_name = 'season%d' % (len(self.seasons) + 1)
        name = next_string('Enter a new season name', default_name)
        if name is None:
            continue
        if self.seasons.find(name) is not None:
            print('A season by that name already exists')
            continue
        previous = self.current_season
        men_stats = self.create_season_stats(name, self.men)
        women_stats = self.create_season_stats(name, self.women)
        men_board = self.create_scoreboard(men_stats)
        women_board = self.create_scoreboard(women_stats)
        # New seasons always start incomplete (complete=False).
        fresh = Season(self, previous, name, False, men_stats,
                       women_stats, men_board, women_board)
        self.current_season = fresh
        self.seasons.insert(name.lower(), fresh)
        self.ordered_seasons.append(fresh)
        return fresh
def getStaffeln(self):
    """Return this show's seasons, sorted by name, skipping hidden directories."""
    seasons = [
        Season(self.path + "/" + entry, entry)
        for entry in os.listdir(self.path)
        if not entry.startswith(".")
    ]
    return sorted(seasons, key=lambda s: s.getName())
def getSeason():
    """Fetch the current season from the API and upsert it into the database."""
    session = getSession()
    try:
        response = getRequest()
        if response is not None:
            payload = response.json()
            season_id = payload['Season']
            existing = session.query(Season).filter(
                Season.Season == season_id).scalar()
            # Keep only keys that map to actual Season table columns.
            record = Season(**{key: value
                               for key, value in payload.items()
                               if key in Season.__table__.columns})
            if existing is None:
                session.add(record)
            else:
                existing = session.merge(record)
            session.commit()
            print(record)
    except Exception as e:
        # Broad catch: best-effort sync job, just log and carry on.
        print("[Errno {0}] ".format(e))
def seasons(self):
    """Return a list of all seasons of the show.

    Lazily populated: the server is queried on first access and the
    result cached in ``self.seasons_`` for subsequent calls.
    """
    if not self.seasons_:
        found = []
        for node in self.server.query(self.key):
            # Only elements explicitly typed as seasons count.
            if node.attrib.get('type') == 'season':
                found.append(Season(node, self.server))
        self.seasons_ = found
    return self.seasons_
def rbi_sim_and_rank_seasons(lineup, trials_per_pos=2, name="David Oritz"):
    """Simulate seasons with the named player at each of the 9 lineup spots.

    For every batting position 0-8, the named player is inserted into
    ``trials_per_pos`` random shuffles of the remaining lineup and a
    162-game season is played. Returns a list of 9 lists of Season
    objects, indexed by the player's batting position.

    Exits the process if the name is missing from the lineup or appears
    more than once.
    """
    lineup_copy = lineup[:]
    this_player = None
    this_player_index = -1
    # One bucket of seasons per batting position (0-8).
    ordered_seasons = [[] for _ in range(9)]
    # Locate (and later remove) the player under test. Scan the WHOLE
    # lineup so duplicate names are actually detected: the original code
    # broke out of the loop on the first match, which made the duplicate
    # error branch unreachable.
    for i, p in enumerate(lineup_copy):
        if p.get_name() == name:
            if this_player is None:
                this_player = p
                this_player_index = i
            else:  # means duplicate player names!
                print("Error: cannot use duplicate names in lineup!")
                sys.exit()
    if this_player is None and this_player_index == -1:
        print("Player name not in lineup!")
        sys.exit()
    lineup_copy.pop(this_player_index)
    # test player at 1-9 spots of lineup
    counter = 0
    for lineup_pos in range(9):
        for i in range(trials_per_pos):
            shuffle(lineup_copy)  # randomize lineup
            lineup_copy.insert(lineup_pos, this_player)  # insert this player in appropriate spot
            s = Season(lineup=deepcopy(list(lineup_copy)), num_games=162)
            s.play_season()
            ordered_seasons[lineup_pos].append(s)
            # Remove the player again before the next shuffle.
            for i, p in enumerate(lineup_copy):
                if p.get_name() == name:
                    del lineup_copy[i]
                    break
            # print to terminal so people don't freak out waiting for simulations to end
            sys.stdout.write("\rSeasons Simulated: %d/%d" % (counter + 1, trials_per_pos * 9))
            sys.stdout.flush()
            counter += 1
    sys.stdout.write("\n")
    return ordered_seasons
def addSeason(self, seasonNum, weight, partOneEp=-1):
    """Register a season under ``seasonNum`` with the given sampling weight.

    Weights below 1 are clamped to 1 (with a warning). Marks the season
    collection as dirty via ``self._newSeason``.
    """
    assert isinstance(weight, int)
    if weight < 1:
        print("WARNING: weights must be integers 1 or larger, setting to 1!")
    weight = max(weight, 1)
    self.seasons[seasonNum] = Season(self.dir, seasonNum, weight, partOneEp)
    self._newSeason = True
def load_circuit():
    """Loads a circuit from resources file, then loads all its progress
    via the previous sessions outputs.

    :return: the newly loaded circuit.
    """
    circuit = Circuit()
    load_tournament_types(circuit.tournament_types)
    load_ranking_points(circuit.ranking_points)
    load_circuit_players('men', circuit.men)
    load_circuit_players('women', circuit.women)
    circuit.men_scoreboard = load_circuit_player_scoreboard(circuit.men)
    circuit.women_scoreboard = load_circuit_player_scoreboard(circuit.women)
    progress_path = '%s/progress.csv' % OUTPUT
    if not os.path.isfile(progress_path):
        # No saved progress yet: return the freshly built circuit.
        return circuit
    previous_season = None
    with open(progress_path, 'r') as progress_file:
        for line in progress_file:
            fields = parse_csv_line(line)
            name = fields[0]
            complete = parse_bool(fields[1])
            men_stats = load_season_player_stats(name, 'men', circuit.men)
            women_stats = load_season_player_stats(name, 'women', circuit.women)
            men_board = load_season_player_scoreboard(men_stats)
            women_board = load_season_player_scoreboard(women_stats)
            season = Season(circuit, previous_season, name, complete,
                            men_stats, women_stats, men_board, women_board)
            season.tournaments = load_tournaments(season)
            circuit.seasons.insert(name, season)
            circuit.ordered_seasons.append(season)
            circuit.current_season = season
            # Each season links back to the one loaded before it.
            previous_season = season
    return circuit
def getMediaObject(self):
    """Build the concrete media wrapper matching this item's type.

    Returns False when the type is not one of episode/movie/show/season.
    """
    media_type = self.type
    if media_type == "episode":
        return Episode(self.tag, self.server)
    if media_type == "movie":
        return Movie(self.tag, self.server)
    if media_type == "show":
        return Show(self.tag, self.server)
    if media_type == "season":
        return Season(self.tag, self.server)
    return False
def getAllSeasons(self):
    """Load every row of the ``seasons`` table as Season objects.

    Returns False when no database connection is available.
    """
    if not self.connection:
        return False
    cur = self.connection.cursor()
    cur.execute("SELECT * FROM seasons")
    self.connection.commit()
    rows = cur.fetchall()
    cur.close()
    return [Season.fromDB(row) for row in rows]
def main():
    """Scrape NHL penalty data for every season and write per-game files.

    Skips years listed in constants.BROKEN_YEARS and any game whose
    output file already exists on disk.
    """
    print(f"{stringy_now()}: Started scraping", flush=True)
    start = time.time()
    # All seasons in range, excluding years known to have broken data.
    years = [year for year in range(constants.FIRST_YEAR, constants.PRESENT_YEAR + 1)
             if year not in constants.BROKEN_YEARS]
    for year in years:
        season = Season(year)
        maybe_make_directory(season)
        print("Getting schedule")
        schedule = get(f"https://statsapi.web.nhl.com/api/v1/schedule?season={season.season_id()}").json()
        # NOTE(review): looks like leftover debug code — fetches one fixed
        # game id and overwrites response.json every iteration; confirm intent.
        response = get_pbp_data(2018020778)
        with open("response.json", "w") as outfile:
            json.dump(response, outfile)
        regular_game_ids = []
        playoff_game_ids = []
        print("Organizing dates")
        # Split the schedule into regular-season ("R") and playoff ("P") games.
        dates = schedule["dates"]
        for date in dates:
            games = date["games"]
            for game in games:
                if game["gameType"] == "R":
                    regular_game_ids.append(game["gamePk"])
                elif game["gameType"] == "P":
                    playoff_game_ids.append(game["gamePk"])
        # Download play-by-play penalties for each regular-season game
        # that is not already on disk.
        for regular_game_id in regular_game_ids:
            regular_game_filename = f"penalty_data/{season.season_id()}/{regular_game_id}.txt"
            if os.path.isfile(regular_game_filename):
                print(f"Skipping: {regular_game_id}")
            else:
                print(f"Getting game: {regular_game_id}")
                pbp_data = get_pbp_data(regular_game_id)
                penalties = get_penalties(pbp_data)
                seralized_penalties = [str(penalty) for penalty in penalties]
                write_file(seralized_penalties, regular_game_filename)
        # Same treatment for playoff games.
        for playoff_game_id in playoff_game_ids:
            playoff_game_filename = f"penalty_data/{season.season_id()}/{playoff_game_id}.txt"
            if os.path.isfile(playoff_game_filename):
                print(f"Skipping: {playoff_game_id}")
            else:
                print(f"Getting game: {playoff_game_id}")
                pbp_data = get_pbp_data(playoff_game_id)
                penalties = get_penalties(pbp_data)
                seralized_penalties = [str(penalty) for penalty in penalties]
                write_file(seralized_penalties, playoff_game_filename)
    end = time.time()
    print(f"Total time: {end-start}", flush=True)
def main():
    """Generate season pages and game info for the chosen league and year(s)."""
    #**** Choose year ******
    for year in range(2021, 2022):
        #**** Choose Sport ******
        # Exactly one league takes effect; later assignments override
        # earlier ones, so "Boys Basketball" is the active choice here.
        league = "Football"
        league = "Boys Basketball"
        # league = "Girls Basketball"
        #**** Create Instance *****
        season = Season(league, year)
        # Check if year path exists; create the output directory if not.
        if not os.path.exists(season.get_year_path()):
            os.makedirs(season.get_year_path())
        #**** Write HTML to File
        write_HTML(season)
        #**** Create Game Infos
        create_game_infos(season)
        print("Year: ", year)
def main(start, end):
    """Load driver CSVs for seasons ``start``..``end`` (inclusive), score each
    driver with a reverse-weighted finish metric, and print the ranking.

    Relies on module-level ``seasons`` and ``drivers`` dicts being in scope.
    """
    for year_int in range(start, end + 1):
        year = str(year_int)
        seasons[year] = Season(year)
        path = "drivers_data/" + year + ".csv"
        print(path)
        with open(path, newline='', encoding='utf-8') as csvfile:
            reader = csv.DictReader(csvfile)
            # Columns after the first three (and before the last) are race names.
            raw_races = reader.fieldnames[3:-1]
            races = []
            for race in raw_races:
                # Skip blank placeholder columns.
                if len(race) != 0 and race != " ":
                    seasons[year].add_race(race)
                    races.append(race)
            for row in reader:
                driver = row["Driver"].lstrip(" ").rstrip(" ")
                if driver not in drivers:
                    drivers[driver] = Driver(driver, year)
                seasons[year].add_driver(driver)
                for race in races:
                    # print(year + ", " + driver + ", " + race + " = " + row[race])
                    seasons[year].add_result(race, driver, row[race])
        for driver in seasons[year].drivers:
            seasons[year].calc_drivers_avg_finish(drivers[driver])
    # Score every driver, then rank descending by score.
    worst = {}
    for name, driver in drivers.items():
        worst[name] = driver.calc_number_weighted_reverse_finish()
    sorted_worst = {}  # NOTE(review): appears unused — confirm before removing
    sorted_keys = sorted(worst, key=worst.get, reverse=True)
    count = 1
    for name in sorted_keys:
        score = "{:.3f}".format(worst[name])
        if (worst[name] > 0):
            print((str(count).ljust(2)) + ".: " + name.ljust(20) + " Score: " + score + " Races: " + str(drivers[name].get_number_races()))
            # NOTE(review): indentation reconstructed — count presumably
            # increments only for printed (score > 0) drivers; confirm.
            count += 1
    print(str(count) + " Drivers Finished 1 Race. " + str(len(worst)) + " Drivers Overall")
from season import Season

# Demo script (Python 2): rank a season with the Massey and Colley methods.
season = Season()
season.year = 2014  # default season is 2015
season.massey()  # Massey ranking method, unweighted, criteria is point diffential
for team in season.rating:
    print team
# or, if you want, you can get fancier and weight the massey or colley
season.colley("log")  # colley logarithmically weighted in time
season.massey("linear")  # massey linearly weighted in time
# massey exponentially weighted in time, with total yards differential as the ranking criteria
season.massey("exponential", "total_yards")
#this overrides the previous reverse values, but it doesn't matter because we're not interpreting those values yet. mutual_map('N', -1) mutual_map('W', 0) mutual_map('L', 2) scores = [] csv_files = ['11-12.csv', '12-13.csv', '13-14.csv', '14-15.csv', '15-16.csv'] for test_sea in csv_files: test_data = [] test_target = [] data = [] target = [] loocv = set(csv_files) - set([test_sea]) sea = Season(test_sea) for date, home, away, res, bet in sea.matches: home_standing, home_remain = sea.standing(home, date) away_standing, away_remain = sea.standing(away, date) #data.append( [ home_standing, home_remain, c2i[sea.lastGame(home,date)], away_standing, away_remain, c2i[sea.lastGame(away,date)], sea.points_on_date(home,date)-sea.points_on_date(away,date) ] ) test_data.append([ home_standing, c2i[sea.lastGame(home, date)], away_standing, c2i[sea.lastGame(away, date)], sea.points_on_date(home, date) - sea.points_on_date(away, date) ]) feature_names = [ 'Home standing', 'Home last game', 'Away standing', 'Away last game', 'Point difference' ] test_target.append(c2i[res]) for csv in loocv:
def test_teams_url(self):
    """The 'teams' link of a soccer season resolves to the expected API URL."""
    s = Season(testdata[4])
    url = s.getLinksUrl('teams')
    # Removed a dead `pass` statement that followed the assertion.
    self.assertEqual(url, 'http://api.football-data.org/v1/soccerseasons/398/teams')
def main():
    """Initial bulk import: read league/season/team/match JSON dumps from
    ../data/init and upsert them into Postgres via the DAO.

    NOTE(review): formatting reconstructed from a whitespace-mangled source;
    nesting of the trailing execute/print section is the most plausible
    reading — verify against the original repository.
    """
    # Wait for the data fetcher to ask resources from Football API
    # And for the DB to be up and ready
    time.sleep(60*5)
    #time.sleep(15)
    config = configparser.ConfigParser()
    config.read('../config/config.ini')
    dao = None
    if "POSTGRESQL" in config:
        dao = initPGConn(config)
        if not dao:
            print("No connection could be established")
            return
        else:
            print("Connected...")
    else:
        print("There are no postgre options in config.ini")
        return
    print("Finding new files to upload ...")
    # Map league id -> set of season years, parsed from file names shaped
    # league_{ID}_season_{YEAR}_...
    competitions_list = {}
    with os.scandir('../data/init') as entries:
        for entry in entries:
            #league_{ID}_season_{YEAR}_...
            s = entry.name.replace('.json', '').split('_')
            if 'league' in s and not s[1] in competitions_list:
                competitions_list[s[1]] = set()
            if 'season' in s and s[3] not in competitions_list[s[1]]:
                competitions_list[s[1]].add(s[3])
    if not competitions_list:
        return
    # Preload existing rows from the DB into id-keyed maps.
    areas_map = {}
    leagues_map = {}
    teams_map = {}
    areas_map = {}
    for area in dao.getAllAreas():
        if not areas_map.get(area.get_id()):
            areas_map[area.get_id()] = area
    leagues_map = {}
    for league in dao.getAllLeagues():
        if not leagues_map.get(league.get_id()):
            leagues_map[league.get_id()] = league
    teams_map = {}
    for team in dao.getAllTeams():
        if not teams_map.get(team.get_id()):
            teams_map[team.get_id()] = team
    print("Found {} leagues:".format(len(competitions_list)))
    for key, value in competitions_list.items():
        print("League {} with {} seasons".format(key, value))
    for league_id in competitions_list.keys():
        # Open the league's JSON file
        with open('../data/init/league_{}.json'.format(league_id), 'r') as file:
            print("Opened json file for League {}".format(league_id))
            data = file.read()
            deserialized_data = json.loads(data)
            league = League.fromJson(data)
            print("League object created, ID: {} name: {}".format(league.get_id(), league.attributes['name']))
            area = Area.fromJson(json.dumps(deserialized_data['area']))
            print("Area object created, ID: {} name: {}".format(area.get_id(), area.attributes['name']))
            if not area.get_id() in areas_map:
                areas_map[area.get_id()] = area
                dao.scheduleAsyncQuery(area.create())
                print("Area does not exist, running CREATE query")
            elif areas_map.get(area.get_id) != area:
                dao.scheduleAsyncQuery(area.update())
                print("Area does exist, running UPDATE query")
            seasons_map = {}
            for season in dao.getAllSeasons():
                if not seasons_map.get(season.get_id()):
                    seasons_map[season.get_id()] = season
            dao.executeAsync()
            stats_query = ''
            # Loop over each season of this league
            for s in deserialized_data['seasons'][:len(competitions_list[league_id])]:
                new_season = False
                season = Season.fromJson(json.dumps(s))
                print("Season object created, startDate: {}".format(season.attributes['start_date']))
                # Update the seasons map
                if not season.get_id() in seasons_map:
                    #### NB #########
                    # A new season has started: the TEAMS must be saved
                    new_season = True
                    seasons_map[season.get_id()] = season
                    dao.scheduleAsyncQuery(season.create())
                    print("Season does not exist, running CREATE query")
                elif seasons_map.get(season.get_id) != season:
                    dao.scheduleAsyncQuery(season.update())
                    print("Season does exist, running UPDATE query")
                dao.executeAsync()
                matches_map = {}
                for match in dao.getAllMatches(season.get_id()):
                    if not matches_map.get(match.get_id()):
                        matches_map[match.get_id()] = match
                # For each season, inspect the teams
                with open('../data/init/league_{}_season_{}_teams.json'.format(league_id, season.attributes['start_date'][:4]), 'r') as team_file:
                    print("Opened json file for Teams in season {}".format(season.attributes['start_date'][:4]))
                    team_data = team_file.read()
                    team_deserialized_data = json.loads(team_data)
                    print("Found {} teams".format(len(team_deserialized_data['teams'])))
                    new_teams_counter = 0; old_teams_counter = 0
                    for t in team_deserialized_data['teams']:
                        team = Team.fromJson(json.dumps(t))
                        if not t['area']['id'] in areas_map:
                            print("Team {} name {} is in a different Area from League".format(team.get_id(), team.attributes['name']))
                            area = Area.fromJson(json.dumps(t['area']))
                            print("Area object created, ID: {} name: {}".format(area.get_id(), area.attributes['name']))
                            areas_map[area.get_id()] = area
                            dao.scheduleAsyncQuery(area.create())
                        if not team.get_id() in teams_map:
                            teams_map[team.get_id()] = team
                            dao.scheduleAsyncQuery(team.create())
                            new_teams_counter += 1
                        else:
                            dao.scheduleAsyncQuery(team.update())
                            old_teams_counter += 1
                        if new_season:
                            stats_query += 'INSERT INTO team_league (league_id, team_id, season_id) VALUES ({}, {}, {});'.format(league.get_id(), team.get_id(), season.get_id())
                    print("Found {} new teams and {} old teams".format(new_teams_counter, old_teams_counter))
                    dao.executeAsync()
                # And the matches
                with open('../data/init/league_{}_season_{}_matches.json'.format(league_id, season.attributes['start_date'][:4]), 'r') as match_file:
                    print("Opened json file for Matches in season {}".format(season.attributes['start_date'][:4]))
                    match_data = match_file.read()
                    match_deserialized_data = json.loads(match_data)
                    print("Found {} matches".format(len(match_deserialized_data['matches'])))
                    new_matches_counter = 0; old_matches_counter = 0
                    for m in match_deserialized_data['matches']:
                        match = Match.fromJson(json.dumps(m))
                        if not match.get_id() in matches_map:
                            matches_map[match.get_id()] = match
                            dao.scheduleAsyncQuery(match.create())
                        elif matches_map.get(match.get_id()) != match:
                            dao.scheduleAsyncQuery(match.update())
                    print("Found {} new matches and {} old matches".format(new_matches_counter, old_matches_counter))
                    dao.executeAsync()
            if not league.get_id() in leagues_map:
                leagues_map[league.get_id()] = league
                dao.scheduleAsyncQuery(league.create())
                print("League found and created-> ID: {} name: {}".format(league.attributes['league_id'], league.attributes['name']))
            print("Now executing queries...")
            if dao.executeAsync():
                print("Succeded!")
            else:
                print("mmmmmmmm")
            if new_season:
                print("Executing stats queries...")
                if dao.executeQuery(stats_query):
                    print("Succeded!")
                else:
                    print("mmmmmmmm")
                with open('../queries/team_stats_view.sql', 'r') as sql_file:
                    dao.executeQuery(sql_file.read())
                with open('../queries/team_standings_view.sql', 'r') as sql_file:
                    dao.executeQuery(sql_file.read())
from season import Season

# Paths to the 2015 run-expectancy tables and heatmaps used as priors.
run_exp_hits = 'data/run_exp_hits_2015.pickle'
run_exp_count = 'data/run_exp_count_2015.pickle'
heatmap = 'data/heatmaps_2015.pickle'

# Process the raw 2016 CSV into the processed feature file.
season = Season(run_exp_hits, run_exp_count, lastSeason=heatmap)
filename = 'data/2016.csv'
output_file = 'data/2016_processed.csv'
season.process_file(filename, output_file)
home_standing, home_remain = sea.standing(home,date) away_standing, away_remain = sea.standing(away,date) return self.i2c[self.clf.predict([[ home_standing, self.c2i[sea.lastGame(home,date)], away_standing, self.c2i[sea.lastGame(away,date)], sea.points_on_date(home,date)-sea.points_on_date(away,date) ]])[0]] def strID(self): return "Decision Tree Predictor" csv_files = ['11-12.csv','12-13.csv','13-14.csv','14-15.csv','15-16.csv'] number_predictors = 6 number_games = 380 tot_acc = [0]*number_predictors acc_OT = [[0]*number_games]*number_predictors totalMarginOfVic = [0]*10 for csv in csv_files: sea = Season(csv) trainingData = [] for train_csv in list(set(csv_files) - set([csv])): trainingData.append(Season(csv)) predictors = [HighPointsPredictor(),HomeTeamPredictor(),AwayTeamPredictor(),LastGamePredictor(),BettingOddsPredictor(),DecisionTreePredictor(trainingData)] for date,home,away,res,bet in sea.matches: for pred in predictors: pred.test(sea,home,away,date,res) #hpp.test(sea,home,away,date,res) #htp.test(sea,home,away,date,res) #atp.test(sea,home,away,date,res) #lgp.test(sea,home,away,date,res) #bop.test(sea,home,away,date,res) #dct.test(sea,home,away,date,res)
from season import Season s = Season("division.csv", "games.csv") s.play_season() s.league.standings(["Golden State Warriors", 'Los Angeles Lakers'], display=True) ##s.league.standings(s.league.get_conference('West') + s.league.get_conference('East'), display=True) #s.league.standings(s.league.get_division(s.league.teams['Houston Rockets'].division),display=True) #print(s.league.get_division('Southwest')) #print(s.league.is_division_leader('San Antonio Spurs')) ##s.league.global_standings() ##print(s.league.standings(s.league.get_conference('West') + s.league.get_conference('East'), display=True, option='West')) ##print(s.league.split_ties(s.league.standings(s.league.get_conference('West') + s.league.get_conference('East'), display=True, option='West'))) print( s.league.h2h_sort([('Golden State Warriors', 0.8170731708317073), ('San Antonio Spurs', 0.8170731707317073), ('Houston Rockets', 0.8170731707317073), ('LA Clippers', 0.8170731707317073), ('Utah Jazz', 0.6219512195121951), ('Oklahoma City Thunder', 0.573170731707317), ('Memphis Grizzlies', 0.524390243902439), ('Portland Trail Blazers', 0.5), ('Denver Nuggets', 0.4878048780487805), ('New Orleans Pelicans', 0.4146341463414634), ('Dallas Mavericks', 0.4024390243902439), ('Sacramento Kings', 0.3902439024390244), ('Minnesota Timberwolves', 0.3780487804878049), ('Los Angeles Lakers', 0.3170731707317073), ('Phoenix Suns', 0.2926829268292683)],
def main():
    """Incremental update: read updated league/match JSON files from
    ../data/updates and sync them into Postgres via the DAO.

    NOTE(review): formatting reconstructed from a whitespace-mangled source
    (the assignment `seasons_map[season.get_id()] = season` was split across
    the chunk boundary); verify nesting against the original repository.
    """
    config = configparser.ConfigParser()
    config.read('../config/config.ini')
    dao = None
    if "POSTGRESQL" in config:
        dao = initPGConn(config)
        if not dao:
            print("No connection could be established")
            return
        else:
            print("Connected...")
    else:
        print("There are no postgre options in config.ini")
        return
    print("Finding new files to upload ...")
    # League ids parsed from update file names (league_{ID}_...).
    competitions_list = set()
    with os.scandir('../data/updates') as entries:
        for entry in entries:
            s = entry.name.replace('.json', '').split('_')
            if 'league' in s and not s[1] in competitions_list:
                competitions_list.add(s[1])
    if not competitions_list:
        return
    today = date.today()
    # Fill the id-keyed maps from the DB.
    areas_map = {}
    for area in dao.getAllAreas():
        if not areas_map.get(area.get_id()):
            areas_map[area.get_id()] = area
    leagues_map = {}
    for league in dao.getAllLeagues():
        if not leagues_map.get(league.get_id()):
            leagues_map[league.get_id()] = league
    teams_map = {}
    for team in dao.getAllTeams():
        if not teams_map.get(team.get_id()):
            teams_map[team.get_id()] = team
    for league_id in competitions_list:
        # Open the league's file
        with open('../data/updates/league_{}.json'.format(league_id), 'r') as file:
            data = file.read()
            deserialized_data = json.loads(data)
            league = League.fromJson(data)
            area = Area.fromJson(json.dumps(deserialized_data['area']))
            if not area.get_id() in areas_map:
                areas_map[area.get_id()] = area
                dao.scheduleAsyncQuery(area.create())
            elif areas_map.get(area.get_id) != area:
                dao.scheduleAsyncQuery(area.update())
            seasons_map = {}
            for season in dao.getAllSeasons():
                if not seasons_map.get(season.get_id()):
                    seasons_map[season.get_id()] = season
            stats_query = ''
            # Loop over each season (for now only the first)
            for s in deserialized_data['seasons'][:1]:
                new_season = False
                season = Season.fromJson(json.dumps(s))
                # Update the seasons map
                if not season.get_id() in seasons_map:
                    #### NB #########
                    # A new season has started: the TEAMS must be saved
                    # (or re-run init.py)
                    new_season = True
                    seasons_map[season.get_id()] = season
                    dao.scheduleAsyncQuery(season.create())
                elif seasons_map.get(season.get_id) != season:
                    dao.scheduleAsyncQuery(season.update())
                matches_map = {}
                for match in dao.getAllMatches(season.get_id()):
                    if not matches_map.get(match.get_id()):
                        matches_map[match.get_id()] = match
                if new_season:
                    with open(
                            '../data/init/league_{}_season_{}_teams.json'.
                            format(league_id,
                                   season.attributes['start_date'][:4]),
                            'r') as team_file:
                        team_data = team_file.read()
                        team_deserialized_data = json.loads(team_data)
                        for t in team_deserialized_data['teams']:
                            team = Team.fromJson(json.dumps(t))
                            if not team.get_id() in teams_map:
                                teams_map[team.get_id()] = team
                                dao.scheduleAsyncQuery(team.create())
                            else:
                                dao.scheduleAsyncQuery(team.update())
                            stats_query += 'INSERT INTO team_league (league_id, team_id, season_id) VALUES ({}, {}, {});'.format(
                                league.get_id(), team.get_id(), season.get_id())
                # And the matches
                #league_ID_season_YEAR_updated_matches_DATE
                with open(
                        '../data/updates/league_{}_season_{}_updated_matches_{}.json'
                        .format(league_id, season.attributes['start_date'][:4],
                                today.strftime("%Y_%m_%d")),
                        'r') as match_file:
                    match_data = match_file.read()
                    match_deserialized_data = json.loads(match_data)
                    for m in match_deserialized_data['matches']:
                        match = Match.fromJson(json.dumps(m))
                        if not match.get_id() in matches_map:
                            matches_map[match.get_id()] = match
                            dao.scheduleAsyncQuery(match.create())
                        elif matches_map.get(match.get_id) != match:
                            dao.scheduleAsyncQuery(match.update())
            if not league.get_id() in leagues_map:
                leagues_map[league.get_id()] = league
                dao.scheduleAsyncQuery(league.create())
            elif leagues_map.get(league.get_id) != league:
                dao.scheduleAsyncQuery(league.update())
            print("League found and created-> ID: {} name: {}".format(
                league.attributes['league_id'], league.attributes['name']))
    print("Executing queries...")
    if dao.executeAsync():
        print("Succeded!")
    else:
        print("mmmmmmmm")
def __init__(self):
    # Compose the environment from its four parts; each set_* call
    # presumably returns the configured component instance — confirm
    # against the Wave/Season/Board/Wind implementations.
    self.wave = Wave().set_wave()
    self.season = Season().set_season()
    self.board = Board().set_board()
    self.wind = Wind().set_wind()
def parse_seasons(driver, player_dict):
    """Scrape every "RLCS Season ..." page linked from the current Liquipedia page.

    For each season link this visits the page, builds the team roster via
    parse_teams(), collects group-stage and playoff Match records, and wraps
    everything in a Season object.

    Args:
        driver: selenium WebDriver already positioned on the season index page.
        player_dict: passed through to parse_teams() and Season; presumably a
            mapping of player names to player objects — confirm against parse_teams.

    Returns:
        list of Season objects, one per scraped link, in page order.
    """
    edge_ef_driver = EventFiringWebDriver(driver, LiquipediaNavListener())
    season_titles = []
    link_texts = []
    seasons = []
    # quick hack for cleaning the list... bottom of page contains redundant links!
    link_elements = edge_ef_driver.find_elements_by_partial_link_text("RLCS Season")
    for link_element in link_elements:
        # Only links of the form "RLCS Season X - <stage>" are real season pages.
        if '-' in link_element.wrapped_element.text:
            # Get season title
            season_title = link_element.wrapped_element.text
            season_titles.append(season_title)
            link_texts.append(link_element.get_attribute("href"))
    season_num = 0
    for link in link_texts:
        # A "Finals" stage page has no group stage, only a bracket.
        is_final = "Finals" == season_titles[season_num].split('-')[1].strip()
        edge_ef_driver.get(link)
        time.sleep(20)  # crude wait for the page to render / rate limiting
        # Get teams
        teamcards = edge_ef_driver.find_elements_by_class_name("teamcard")
        team_dict = parse_teams(teamcards, season_titles[season_num], player_dict)
        # Get matches
        matches = []
        if not is_final:
            # Group stage
            tables = edge_ef_driver.find_elements_by_class_name("matchlist table table-bordered collapsible")
            for table in tables:
                table_entries = table.find_elements_by_class_name("match-row")
                # NOTE(review): find_elements_* returns a list, and a list has no
                # .wrapped_element — this looks like it should be the singular
                # find_element_by_tag_name; confirm this branch actually executes.
                event = table.find_elements_by_tag_name("th").wrapped_element.text
                # NOTE(review): loop variable `match` is never used below; every
                # iteration re-reads the fixed indices table_entries[0..3], so the
                # same fixture is processed once per row. Verify against the page
                # DOM whether the indices should be relative to `match` instead.
                for match in table_entries:
                    team1 = team_dict.get(
                        table_entries[0].find_element_by_tag_name("span").get_attribute("data-highlightingclass"))
                    team2 = team_dict.get(
                        table_entries[3].find_element_by_tag_name("span").get_attribute("data-highlightingclass"))
                    team1_wins = table_entries[1].wrapped_element.text.lstrip()
                    team2_wins = table_entries[2].wrapped_element.text.lstrip()
                    # Skip walkover/forfeit markers, otherwise record one Match per
                    # game each side won.
                    # NOTE(review): only team1_wins is checked for "W"/"FF"; if
                    # team2_wins holds such a marker, int() below raises ValueError.
                    if not team1_wins == "W" and not team1_wins == "FF":
                        for num in range(int(team1_wins)):
                            matches.append(Match(team1=team1, team2=team2, victor=team1,
                                                 season_title=season_titles[season_num], event=event))
                        for num in range(int(team2_wins)):
                            matches.append(Match(team1=team1, team2=team2, victor=team2,
                                                 season_title=season_titles[season_num], event=event))
            # Playoffs
            bracket_web_elements = edge_ef_driver.find_elements_by_class_name("bracket-column-matches")
            bracket_headers = [bracket.find_elements_by_class_name("bracket-header")
                               for bracket in bracket_web_elements]
            # Titles containing "Season 7/8/9" get the alternate bracket flag —
            # presumably a different page layout for those seasons; confirm in
            # make_brackets.
            if re.search(r"Season [789]", season_titles[season_num]):
                bracket_tuples = make_brackets(bracket_web_elements, bracket_headers, True)
            else:
                bracket_tuples = make_brackets(bracket_web_elements, bracket_headers, False)
            get_bracket_matches(season_titles[season_num], team_dict, bracket_tuples, matches)
        else:
            # Finals page: bracket only, no group stage.
            bracket_web_elements = edge_ef_driver.find_elements_by_class_name("bracket-column-matches")
            bracket_headers = [bracket.find_elements_by_class_name("bracket-header")
                               for bracket in bracket_web_elements]
            bracket_tuples = make_brackets(bracket_web_elements, bracket_headers, False)
            get_bracket_matches(season_titles[season_num], team_dict, bracket_tuples, matches)
        season = Season(season_titles[season_num], player_dict, set(list(team_dict.values())), matches)
        seasons.append(season)
        edge_ef_driver.back()
        season_num += 1
        time.sleep(5)
    return seasons
#this overrides the previous reverse values, but it doesn't matter because we're not interpreting those values yet. mutual_map('N',-1) mutual_map('W',0) mutual_map('L',2) scores = [] csv_files = ['11-12.csv','12-13.csv','13-14.csv','14-15.csv','15-16.csv'] for test_sea in csv_files: test_data = [] test_target = [] data = [] target = [] loocv = set(csv_files) - set([test_sea]) sea = Season(test_sea) for date,home,away,res,bet in sea.matches: home_standing, home_remain = sea.standing(home,date) away_standing, away_remain = sea.standing(away,date) #data.append( [ home_standing, home_remain, c2i[sea.lastGame(home,date)], away_standing, away_remain, c2i[sea.lastGame(away,date)], sea.points_on_date(home,date)-sea.points_on_date(away,date) ] ) test_data.append( [ home_standing, c2i[sea.lastGame(home,date)], away_standing, c2i[sea.lastGame(away,date)], sea.points_on_date(home,date)-sea.points_on_date(away,date) ] ) feature_names = ['Home standing','Home last game', 'Away standing','Away last game','Point difference'] test_target.append(c2i[res]) for csv in loocv: sea = Season(csv) for date,home,away,res, bet in sea.matches: home_standing, home_remain = sea.standing(home,date) away_standing, away_remain = sea.standing(away,date) #data.append( [ home_standing, home_remain, c2i[sea.lastGame(home,date)], away_standing, away_remain, c2i[sea.lastGame(away,date)], sea.points_on_date(home,date)-sea.points_on_date(away,date) ] ) data.append( [ home_standing, c2i[sea.lastGame(home,date)], away_standing, c2i[sea.lastGame(away,date)], sea.points_on_date(home,date)-sea.points_on_date(away,date) ] ) feature_names = ['Home standing','Home last game', 'Away standing','Away last game','Point difference']
"""Rank small forwards ("SF") for the 2013 season by a weighted z-score."""
import os
import sys
from season import Season
from nbaplayer import NbaPlayer
from score import Score

year = 2013

# Build the sorted player pool for the season from the raw stats file.
season_calc = Season()
sortedPlayers = season_calc.calcSeason('season/thisStats.txt', year, 3, 1, 2, 28, 23, 22, 24, 25, 26, 10, 19, 11, 7)

# Score every small forward: the weighted z-score is the primary score and the
# unweighted z-score is carried along as the extra column.
scores = []
for player in sortedPlayers:
    player_season = player.getSeason(year)
    if "SF" in player.position:
        weighted = player_season.zScore(1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0)
        scores.append(Score(player.name, weighted, player_season.zScore()))

scores = sorted(scores)
for entry in scores:
    print(entry.name + "\t" + str(entry.score) + "\t" + str(entry.extra))
def read_info():
    """Reads in all of the files apart from matches and returns: void.

    Populates the module-level `seasons` list from TOURNAMENT INFO.csv, then
    attaches tournaments, divisions and players read from the other CSVs.
    Relies on module-level `my_path`, `data_paths`, `seasons`, `ex`, `csv`
    and the Season/Tournament/Division/Player classes.
    """
    matchFileTour = os.path.join(my_path, ".."+data_paths+"TOURNAMENT INFO.csv")
    DivInfo = os.path.join(my_path, ".."+data_paths+"DIVISION INFO.csv")
    ranking_points = os.path.join(my_path, ".."+data_paths+"RANKING POINTS.csv")
    # Collect the distinct ranking-point values, kept sorted for binary_search.
    ranking_list = []
    with open(ranking_points, 'r') as csvFile:
        reader = csv.DictReader(csvFile)
        for row in reader:
            temp = int(row["Tournament Ranking Points"])
            # NOTE(review): int() never returns None (it raises on bad input),
            # so this guard is always true — was a missing-value check intended
            # before the conversion?
            if temp is not None:
                ranking_list.sort()
                # binary_search returning None is used as "not present".
                if (ex.binary_search(ranking_list, temp) == None):
                    ranking_list.append(temp)
    ranking_list.sort()
    # Find out the number of columns in the tournament info csv
    number_columns = 0
    with open(matchFileTour, 'r') as f1:
        csvlines = csv.reader(f1, delimiter=',')
        for lineNum, line in enumerate(csvlines):
            if lineNum == 0:
                number_columns = (len(line))
                break
            break
    # Find all of the seasons in the file and load them into seasons
    season_list = []
    with open(matchFileTour, 'r') as csvFile:
        reader = csv.DictReader(csvFile)
        for row in reader:
            temp = row["Season"]
            if temp is not None:
                if(ex.binary_search(season_list,temp) == None):
                    season_list.append(temp)
    for i in season_list:
        seasons.append(Season(i,ranking_list))
    # Load in all tournaments to their respective seasons
    # Also finds which places get prize money
    for i in seasons:
        with open(matchFileTour, 'r') as csvFile:
            reader = csv.DictReader(csvFile)
            for row in reader:
                if(row["Season"] == i.get_name()):
                    temp = []
                    row_name = "Place "
                    # The first 3 columns are not prize places, hence -3.
                    number_places = (number_columns - 3)
                    for x in range(0,number_places):
                        temp.append(float(row[row_name+str(x+1)].replace(',','')))
                    new_list = list(set(temp)) # Find unique elements in list
                    i.add_tournament(Tournament(row["Tournament"],row["Difficulty"],new_list))
    # Load in divisions for each tournament
    for x in seasons:
        for j in x.get_tournaments():
            with open(DivInfo, 'r') as csvFile:
                reader = csv.DictReader(csvFile)
                for row in reader:
                    if (row["Season"] == x.get_name()) and (row["Tournament"] ==
                            j.get_name()):
                        j.add_division(Division(row["Division"],row["Best Of"],row["Players"]))
    # Add players to seasons
    list_all_divisions = []
    for x in seasons:
        for j in x.get_tournaments():
            for k in j.get_divisions():
                # NOTE(review): a list literal is never None, so this guard is
                # always true — possibly `k.get_player_type() is not None` was
                # intended.
                if list_all_divisions is not None:
                    # Find all of the divisions in the season
                    list_all_divisions.sort()
                    if(ex.binary_search(list_all_divisions,k.get_player_type()) == None):
                        list_all_divisions.append(str(k.get_player_type()))
    # NOTE(review): this loop uses `x` leaked from the loop above, i.e. only the
    # LAST season receives participants and number_tournaments — confirm whether
    # it should run per season instead.
    for i in list_all_divisions:
        with open(os.path.join(my_path, ".."+data_paths+"PLAYERS "+i+".csv"), 'r') as csvFile:
            reader = csv.DictReader(csvFile)
            temp = []
            for row in reader:
                player = Player(row["Player"],i)
                player.set_number_tournaments(x.number_tournaments())
                temp.append(player)
            x.add_participants(temp)
def FromJSONDirectory(cls, directory, simulations, experiments):
    """Alternate constructor: load both season and standings data from *directory*."""
    season = Season.FromJSONDirectory(directory)
    standings = Standings.FromJSONDirectory(directory)
    return cls(season, standings, simulations, experiments)
"""Print every player's plain z-score for the 2014 projected season."""
import os
import sys
from season import Season
from nbaplayer import NbaPlayer
from score import Score

year = 2014

# Build the sorted player pool for the projected season.
season = Season()
sortedPlayers = season.calcSeason("season/nextSeason.txt", year, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1)

# One Score per player, keyed on the unweighted z-score of that year's season.
scores = sorted(Score(p.name, p.getSeason(year).zScore()) for p in sortedPlayers)

for entry in scores:
    print(str(entry.name) + "\t" + str(entry.score))
if tag not in headers: value_index = headers.index(tag) print(tag + ' found in csv file') except: print(tag + ' not found :(') column = {} for h in headers: column[h] = [] for row in reader: for h, v in zip(headers, row): column[h].append(v) return column if __name__ == '__main__': url = 'http://www.football-data.co.uk/mmz4281/1516/E0.csv' download_file(url) raw_data = load_data() season = Season(raw_data) print('----------------') print('Example team:') season.teams[random.randint(0, 20)].print() print('Example week:') season.weeks[random.randint(0, 20)].print() print('Example fixture:') season.weeks[random.randint(0, 38)].fixtures[random.randint(0, 10)].print() print('Example table:') season.weeks[random.randint(0, 20)].table.print()
# if len(west_copy)>1: # west_copy_1, east_copy_1=s2.league.standings(s2.league.teams,display=False); # # print(' ') # print('New copy') # for i in range(len(west)): # print("{0} \t {1:20} \t {2:3f}".format(i+1, west_copy_1[i][0], west_copy_1[i][1])); # print('East copy') # for i in range(len(east)): # print("{0} \t {1:20} \t {2:3f}".format(i+1, east_copy_1[i][0], east_copy_1[i][1])); ## print(elimed) return elimed s = Season("division.csv", "games.csv"); for game in s.games: s.play_game(game); check_for_elim(s, "East", game.date); check_for_elim(s, "West", game.date); with open('stephen_curry.csv','w') as f: f.write("Teams, Date Eliminated\n") for team in sorted(s.league.teams.keys()): if s.league.teams[team].eliminated: f.write(s.league.teams[team].name + ',' + s.league.teams[team].date_elim + '\n') else: f.write(s.league.teams[team].name + ', Playoffs\n')
class SeasonEnum(Enum):
    """Closed set of the four seasons, each bound to a configured Season value.

    NOTE(review): the meaning of the three numeric arguments is not visible
    here — confirm against Season.__init__ before documenting them further.
    Member order (SPRING, SUMMER, WINTER, FALL) is preserved as-is since Enum
    definition order is observable to iteration.
    """
    SPRING = Season('Spring', 8, 3, 4)
    SUMMER = Season('Summer', 6, 0, 3)
    WINTER = Season('Winter', 11, 5, 5)
    FALL = Season('Fall', 9, 3, 4)
# NOTE: Python 2 syntax (print statements). Fragment of a draft-analysis
# script; avgScores, goalScores, numPlayersLeft, tpmAvg, paolo and players are
# defined earlier, outside this excerpt.

# Remaining gap for category index 8 — presumably three-pointers made, given
# tpmAvg is subtracted here; confirm against the category layout.
avgScores[8] = goalScores[8] - ((numPlayersLeft * tpmAvg) + paolo.scores[8])
print "Average Scores left: " + str(avgScores)

# Weight each of the 9 categories by how far short of its goal we still are:
# more than 20% of the goal remaining -> double weight, more than 10% -> 1.5x.
mods = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
for i in range(0, 9):
    if avgScores[i] / goalScores[i] > 0.2:
        mods[i] = 2.0
    elif avgScores[i] / goalScores[i] > 0.1:
        mods[i] = 1.5

# Print the 20 most desirable players under the adjusted weights.
des = paolo.getDesirabilities(players, mods)
for i in range(0, 20):
    print des[i][0] + "- " + str(des[i][1])

dr = DraftRanks()
year = 2013
se = Season()
sortedPlayers = se.calcSeason("season/thisStats2.txt", year, 3, 1, 2, 28, 23, 22, 24, 25, 26, 10, 19, 11, 7)
players = {}

# Set up the stats for calculating when a stat is low
# turn these into arrays
pts = {}
ast = {}
reb = {}
stl = {}
blk = {}
tos = {}
fgp = {}
ftp = {}
tpm = {}
ptsAvg = 0.0
astAvg = 0.0
def test_is_premier_league(self): s = Season(testdata[4]) self.assertTrue(s.isLeague('Premier League')) self.assertEqual(s.getLeague(),'PL') print 'getLeague returned : ',s.getLeague() pass
"""Collect 2000-2014 player seasons, grouped by player age and by player."""
import time
from AdvancedReferenceColumns import AdvancedReferenceColumns
from season import Season
from nbaplayer import NbaPlayer
from score import Score

# NOTE(review): time.clock() was removed in Python 3.8 — switch to
# time.perf_counter() if this runs on a modern interpreter.
start = time.clock()

first_year = 2000
last_year = 2014
min_minutes_played = 10.0
min_games = 20

players = []
# Maps int(age) -> {season_object: player} for every qualifying player-season.
all_seasons_by_age = dict()

for year in range(first_year, last_year + 1):
    this_season = Season()
    sorted_players = this_season.calcSeason('season/' + str(year) + 'Stats.txt', year,
                                            min_minutes_played, min_games)
    for player in sorted_players:
        seasons = player.getSeason(year)
        # Group this player-season under the player's age for that year.
        hold = all_seasons_by_age.get(int(seasons.age), dict())
        hold[seasons] = player
        all_seasons_by_age[int(seasons.age)] = hold
        if player in players:
            # Merge this year's data into the already-tracked player object.
            # (Fix: the original called players.index(player) twice — O(n) each —
            # and then re-assigned the same object back into the list.)
            players[players.index(player)].addPSeason(year, player)
        else:
            players.append(player)
# teamnamelist.append(teamlist.teamlist[ele].name) #print(sb.get_win_lose_remain_total('Cleveland Cavaliers', teamnamelist)) #print(sb.get_div_list('Boston Celtics')) #conf_list = sb.get_conf_list('Boston Celtics') #conf_list.sort() #print(conf_list) #li = sb.sort_by_lose(teamnamelist) #print(li) #print(sb.must_div_lead('Orlando Magic')) Se = Season(teamlist, gamelist) Se.run() ''' d1=datetime.datetime.strptime('3/20/2017','%m/%d/%Y') sb = ScoreBoard(teamlist, gamelist, d1) sb.win_all('Minnesota Timberwolves') sb.win_all('Golden State Warriors') sb.win_all('San Antonio Spurs') sb.win_all('LA Clippers') sb.win_all('Houston Rockets') sb.win_all('Memphis Grizzlies') sb.win_all('Oklahoma City Thunder') sb.win_all('Utah Jazz') teamnamelist = list()
"""Rank point guards ("PG") for the 2013 season by a weighted z-score."""
import os
import sys
from season import Season
from nbaplayer import NbaPlayer
from score import Score

year = 2013

# Build the sorted player pool for the season from the raw stats file.
season_calc = Season()
sortedPlayers = season_calc.calcSeason('season/nextSeason.txt', year, 0, 0, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1)

# Score every point guard: the weighted z-score is the primary score and the
# unweighted z-score is carried along as the extra column.
scores = []
for player in sortedPlayers:
    player_season = player.getSeason(year)
    if "PG" in player.position:
        weighted = player_season.zScore(1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0)
        scores.append(Score(player.name, weighted, player_season.zScore()))

scores = sorted(scores)
for entry in scores:
    print(entry.name + "\t" + str(entry.score) + "\t" + str(entry.extra))
def FromJSON(cls, season_file, standings_file, simulations, experiments):
    """Alternate constructor: build from separate season and standings JSON files."""
    season = Season.FromJSON(season_file)
    standings = Standings.FromJSON(standings_file)
    return cls(season, standings, simulations, experiments)