def team_cumul_mileage():
    """Daily cumulative team mileage, rendered as a Google Visualization data table."""
    teams = db.session.query(Team).all()  # @UndefinedVariable

    q = text("""
             select team_id, ride_date, points,
                 (@total_points := @total_points + points) AS cumulative_points,
                 (@total_distance := @total_distance + points) AS cumulative_distance
             from daily_scores, (select @total_points := 0, @total_distance := 0) AS vars
             where team_id = :team_id
             order by ride_date;
             """)
    # NOTE: cumulative_distance accumulates the points column as written; see the sketch
    # after this function.

    cols = [{'id': 'date', 'label': 'Date', 'type': 'date'}]
    for team in teams:
        cols.append({'id': 'team_{0}'.format(team.id), 'label': team.name, 'type': 'number'})

    start_date = parse_competition_timestamp(app.config['BAFS_START_DATE'])
    start_date = start_date.replace(tzinfo=None)

    tpl_dict = dict([(dt.strftime('%Y-%m-%d'), None)
                     for dt in rrule.rrule(rrule.DAILY, dtstart=start_date, until=datetime.now())])

    # Query for each team and build up a {team_id: {date: cumulative_distance}} structure.
    daily_cumul = defaultdict(dict)

    for team in teams:
        # Ensure that we have keys for every day (even if there were no rides for that day).
        daily_cumul[team.id] = copy.copy(tpl_dict)
        for row in db.engine.execute(q, team_id=team.id).fetchall():  # @UndefinedVariable
            daily_cumul[team.id][row['ride_date'].strftime('%Y-%m-%d')] = row['cumulative_distance']

        # Fill in any None gaps with the previous non-None value.
        prev_value = 0
        for datekey in sorted(tpl_dict.keys()):
            if daily_cumul[team.id][datekey] is None:
                daily_cumul[team.id][datekey] = prev_value
            else:
                prev_value = daily_cumul[team.id][datekey]

    rows = []
    for datekey in sorted(tpl_dict.keys()):
        cells = [{'v': parse_competition_timestamp(datekey).date()}]
        for team in teams:
            cells.append({'v': daily_cumul[team.id][datekey]})
        rows.append({'c': cells})

    return gviz_api_jsonify({'cols': cols, 'rows': rows})
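# Minimal sketch of what the distance-based running total above presumably intends,
# assuming daily_scores also carries a per-day `distance` column (an assumption; that
# column does not appear anywhere in the code above).
from sqlalchemy import text

team_cumul_q_sketch = text("""
    select team_id, ride_date, points, distance,
        (@total_points := @total_points + points) AS cumulative_points,
        (@total_distance := @total_distance + distance) AS cumulative_distance
    from daily_scores, (select @total_points := 0, @total_distance := 0) AS vars
    where team_id = :team_id
    order by ride_date;
    """)

# On MySQL 8+ the same running totals could be expressed with window functions, e.g.
# sum(distance) over (partition by team_id order by ride_date), avoiding the
# user-variable idiom; the variable form is kept above to match the existing queries.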
def execute(self, options, args):
    """Fetch and store rides for all athletes that have an access token and a configured team."""
    sess = db.session

    if options.start_date:
        start = parse_competition_timestamp(options.start_date)
        self.logger.info("Fetching rides newer than {0}".format(start))
    else:
        start = None
        self.logger.info("Fetching all rides (since competition start)")

    end_date = parse_competition_timestamp(app.config["BAFS_END_DATE"])
    grace_days = app.config["BAFS_UPLOAD_GRACE_PERIOD_DAYS"]
    grace_delta = timedelta(days=grace_days)

    if (datetime.now(utc) > (end_date + grace_delta)) and not options.force:
        raise CommandError(
            "Current time is after competition end date + grace period, not syncing rides. "
            "(Use --force to override.)"
        )

    if options.rewrite:
        self.logger.info("Rewriting existing ride data.")

    # We iterate over all of our athletes that have access tokens.  (We can't fetch anything
    # for those that don't.)
    q = sess.query(model.Athlete)
    q = q.filter(model.Athlete.access_token != None)

    if options.athlete_id:
        q = q.filter(model.Athlete.id == options.athlete_id)

    # Also only fetch athletes that have teams configured.  This may not be strictly
    # necessary, but this is a team competition, so there is not a lot of value in pulling
    # in data for those without teams.
    # (The way the athlete sync works, athletes will only be configured for a single team
    # that is one of the configured competition teams.)
    q = q.filter(model.Athlete.team_id != None)

    for athlete in q.all():
        assert isinstance(athlete, model.Athlete)
        self.logger.info("Fetching rides for athlete: {0}".format(athlete))
        try:
            self._write_rides(start, end_date, athlete=athlete, rewrite=options.rewrite)
        except InvalidAuthorizationToken:
            self.logger.error("Invalid authorization token for {} (removing)".format(athlete))
            athlete.access_token = None
            sess.add(athlete)
            sess.commit()
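# The end-of-competition guard above mixes configuration lookups with the datetime
# comparison, which makes it awkward to unit-test. A minimal, hypothetical refactor is
# sketched below; the helper name and the use of pytz.utc (the apparent source of `utc`
# in the surrounding module) are assumptions, not part of the original code.
from datetime import datetime, timedelta

from pytz import utc


def past_upload_window(end_date, grace_days, now=None):
    """Return True once `end_date` plus the upload grace period has passed."""
    now = now if now is not None else datetime.now(utc)
    return now > (end_date + timedelta(days=grace_days))

# The command body would then reduce to:
#     if past_upload_window(end_date, grace_days) and not options.force:
#         raise CommandError(...)
# and the cutoff logic can be exercised in tests by passing an explicit `now`.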
def team_weekly_points():
    """Total points per team per competition week, rendered as a Google Visualization data table."""
    teams = db.session.query(Team).all()  # @UndefinedVariable

    week_q = text("""
             select sum(DS.points) as total_score
             from daily_scores DS
             join teams T on T.id = DS.team_id
             where T.id = :team_id and week(DS.ride_date) = :week
             ;
             """)

    cols = [{'id': 'week', 'label': 'Week No.', 'type': 'string'}]
    for t in teams:
        cols.append({'id': 'team_{0}'.format(t.id), 'label': t.name, 'type': 'number'})

    # This is a really inefficient way to do this (one query per team per week), but it's
    # also super simple. And I'm feeling lazy :)  (A single grouped query is sketched after
    # this function.)
    start_date = parse_competition_timestamp(app.config['BAFS_START_DATE'])
    start_date = start_date.replace(tzinfo=None)

    week_r = rrule.rrule(rrule.WEEKLY, dtstart=start_date, until=datetime.now())

    rows = []
    for i, dt in enumerate(week_r):
        week_no = dt.date().isocalendar()[1]  # isocalendar weeks are 1-based, whereas mysql's WEEK() counts from 0

        # Competition always starts at week 1, regardless of isocalendar week no.
        cells = [{'v': 'Week {0}'.format(i + 1), 'f': 'Week {0}'.format(i + 1)}]
        for t in teams:
            total_score = db.engine.execute(week_q, team_id=t.id, week=week_no - 1).scalar()  # @UndefinedVariable
            if total_score is None:
                total_score = 0
            cells.append({'v': total_score, 'f': '{0:.2f}'.format(total_score)})
        rows.append({'c': cells})

    return gviz_api_jsonify({'cols': cols, 'rows': rows})
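# The per-team, per-week scalar queries above issue len(teams) * len(weeks) round trips.
# Below is a sketch of a single grouped query that returns the same totals in one pass;
# it uses only the daily_scores/teams columns referenced above, and the helper name is
# illustrative.
from collections import defaultdict

from sqlalchemy import text

weekly_totals_q = text("""
    select T.id as team_id, week(DS.ride_date) as week_no, sum(DS.points) as total_score
    from daily_scores DS
    join teams T on T.id = DS.team_id
    group by T.id, week(DS.ride_date)
    ;
    """)


def weekly_totals(engine):
    """Return {team_id: {week_no: total_score}} from a single round trip."""
    totals = defaultdict(dict)
    for row in engine.execute(weekly_totals_q).fetchall():
        totals[row['team_id']][row['week_no']] = float(row['total_score'])
    return totals

# team_weekly_points() could then look up totals[t.id].get(week_no - 1, 0) inside its
# loop instead of executing week_q once per cell.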
def user_daily_points(athlete_id):
    """Daily points for a single athlete, rendered as a Google Visualization data table."""
    day_q = text("""
             select DS.points
             from daily_scores DS
             where DAYOFYEAR(DS.ride_date) = :yday and DS.athlete_id = :id
             ;
             """)

    cols = [{'id': 'day', 'label': 'Day No.', 'type': 'string'},
            {'id': 'athlete_{0}'.format(athlete_id), 'label': '', 'type': 'number'}]

    # This is a really inefficient way to do this (one query per day), but it's also super
    # simple. And I'm feeling lazy :)  (A date-keyed alternative is sketched after this
    # function.)
    start_date = parse_competition_timestamp(app.config['BAFS_START_DATE'])
    start_date = start_date.replace(tzinfo=None)

    day_r = rrule.rrule(rrule.DAILY, dtstart=start_date, until=datetime.now())

    rows = []
    for i, dt in enumerate(day_r):
        # tm_yday is 1-based, which matches MySQL's DAYOFYEAR(), so no offset is needed here.
        day_no = dt.timetuple().tm_yday

        cells = [{'v': '{0}'.format(dt.strftime('%b %d')), 'f': '{0}'.format(dt.strftime('%m/%d'))}]

        points = db.engine.execute(day_q, id=athlete_id, yday=day_no).scalar()  # @UndefinedVariable
        if points is None:
            points = 0
        cells.append({'v': points, 'f': '{0:.2f}'.format(points)})
        rows.append({'c': cells})

    return gviz_api_jsonify({'cols': cols, 'rows': rows})
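# Matching on DAYOFYEAR() works while the competition stays inside one calendar year, but
# the loop above already has the full date in hand, so the lookup can be keyed on the
# date itself and skip the day-number bookkeeping. A sketch follows; it assumes
# ride_date is a DATE column, as the other queries against daily_scores suggest.
from sqlalchemy import text

day_by_date_q = text("""
    select DS.points
    from daily_scores DS
    where DS.ride_date = :ride_date and DS.athlete_id = :id
    ;
    """)

# Inside the loop over day_r, the per-day lookup would become:
#     points = db.engine.execute(day_by_date_q, id=athlete_id, ride_date=dt.date()).scalar()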