def run_pipeline():
    output_fname = util.get_daily_lineup_out_fname()
    now = get_curr_date()

    if not os.path.exists(output_fname):
        team_depth_charts = depth_chart.scrape_depth_chart()
        games = schedule_parser.get_games_on_date(datetime.datetime.strftime(now, "%m/%d/%Y"))
        daily_lineup = []

        for game in games:
            home = game["home"]
            away = game["away"]
            date = game["date"]
            home_depth_data = team_depth_charts[home]
            away_depth_data = team_depth_charts[away]
            daily_lineup_data = {
                "date": date,
                "home": home,
                "away": away,
                "home_depth": home_depth_data,
                "away_depth": away_depth_data,
            }
            daily_lineup.append(daily_lineup_data)

            # now_str = datetime.datetime.strftime(now, '%m-%d-%Y')
            # output_filename = "pipeline_out/daily_lineups_" + now_str + ".json"
        with open(output_fname, "w") as f:
            f.write(json.dumps(daily_lineup, sort_keys=True, indent=4))
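For orientation, here is a hypothetical sketch of one entry in the daily-lineup JSON written above. The keys mirror the dictionary built in the loop; the team names and depth-chart values are placeholders, since the exact structure returned by depth_chart.scrape_depth_chart() is not shown in this snippet.

# Hypothetical example of a single daily_lineup entry; the depth values are
# placeholders, not the real scraper output.
example_entry = {
    "date": "01/15/2020",
    "home": "Celtics",
    "away": "Lakers",
    "home_depth": {"PG": ["Starter A", "Backup B"]},  # placeholder structure
    "away_depth": {"PG": ["Starter C", "Backup D"]},  # placeholder structure
}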
Example #2
	def get_last_n_stats(self, player_name, n=5, query_stats=None):
		if not query_stats:
			query_stats = self.DEFAULT_QUERY_STATS

		today = get_curr_date()

		# @TODO: Design a way so that you can easily tell what type a column is in your database.
		# Right now, we're explicitly checking for the date so that we don't convert it to float
		# representation.
		# Build the comma-separated column list for the SELECT clause.
		select_stats = ', '.join(query_stats)

		template = 'SELECT ' + select_stats + ' FROM player_stats WHERE name = ? AND date <= ? ORDER BY date DESC LIMIT ?'
		results = self.cursor.execute(template, [player_name, today, n]).fetchall()
		# Running total of the last selected column, printed as a quick sanity check.
		total = 0
		for row in results:
			total += row[-1]
		print(total / n)
		return results
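One way to act on the @TODO above is to keep an explicit column-name-to-type registry next to the query helpers, so date and opponent columns are never cast to float. The sketch below is an assumption for illustration; COLUMN_TYPES, numeric_columns, and the listed stat names are hypothetical and not part of the original code.

# Hypothetical sketch of a column-type registry; names and entries are assumptions.
COLUMN_TYPES = {
	'Date': str,
	'opp_team': str,
	'points': float,
	'rebounds': float,
	'assists': float,
}

def numeric_columns(query_stats, column_types=COLUMN_TYPES):
	# Keep only the columns registered as float so averaging never touches dates or team names.
	return [stat for stat in query_stats if column_types.get(stat) is float]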
Example #3
	def get_last_n_averaged_stats(self, player_name, n=5, query_stats=None):
		if not query_stats:
			query_stats = self.DEFAULT_QUERY_STATS

		today = get_curr_date()
		last_n_stats = np.array(self.get_last_n_stats(player_name, n, query_stats))
		indices = [i for i in range(len(query_stats)) if query_stats[i] != 'Date' and query_stats[i] != 'opp_team']
		last_n_stats = last_n_stats[:, indices].astype(float)
		avgs = list(np.mean(last_n_stats, axis=0))
		avgs = [round(avg, 2) for avg in avgs]
		print(avgs)
		return avgs
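The averaging above hinges on slicing the non-numeric columns out before calling np.mean. A standalone illustration of that slice, with made-up rows and column names, is sketched below.

import numpy as np

# Made-up rows for illustration; 'Date' and 'opp_team' are dropped before averaging.
query_stats = ['Date', 'opp_team', 'points', 'rebounds']
rows = [('01-10-2020', 'BOS', '27', '8'), ('01-12-2020', 'LAL', '31', '10')]

stats = np.array(rows)
indices = [i for i in range(len(query_stats))
           if query_stats[i] != 'Date' and query_stats[i] != 'opp_team']
numeric = stats[:, indices].astype(float)
print([round(avg, 2) for avg in np.mean(numeric, axis=0)])  # [29.0, 9.0]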

		"""
Example #4
	def refresh(self):
		self.today = get_curr_date()
		self.now = get_curr_time()
		self.load_data()
		self.analyze_games()
"""
Constructs a mapping where the keys are player names, and values are their Fanduel-unique attributes,
such as salary data and average fantasy point values.
"""

from util import get_curr_date
import util
from datetime import datetime
import pandas
import json
import os

now = datetime.strftime(get_curr_date(), '%m-%d-%Y')
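The module docstring above describes the mapping this pipeline builds. As a rough, hypothetical illustration of its shape (the player names, attribute keys, and values below are placeholders, not the actual Fanduel CSV columns):

# Hypothetical shape of the player mapping described in the module docstring;
# keys and values are placeholders, not real Fanduel data.
example_player_mapping = {
	"Player A": {"salary": 9200, "avg_fantasy_points": 45.3},
	"Player B": {"salary": 6100, "avg_fantasy_points": 28.7},
}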


def run_pipeline():
	pipeline_in = util.get_fd_pipe_in_fname()
	pipeline_out = util.get_fd_pipe_out_fname()

	if os.path.exists(pipeline_out):
		# Reuse the cached pipeline output if it has already been written.
		with open(pipeline_out, 'r') as f:
			return json.loads(f.read())

	player_mapping = {}

	dataframe = pandas.read_csv(pipeline_in)
	nrows = len(dataframe)

	for c in range(nrows):