def get(self, event_key):
    df = DatafeedTba()
    event = Event.get_by_id(event_key)
    match_filetypes = df.getVideos(event)
    if match_filetypes:
        matches_to_put = []
        for match in event.matches:
            if match.tba_videos != match_filetypes.get(match.key_name, []):
                match.tba_videos = match_filetypes.get(match.key_name, [])
                match.dirty = True
                matches_to_put.append(match)

        MatchManipulator.createOrUpdate(matches_to_put)

        tbavideos = match_filetypes.items()
    else:
        logging.info("No tbavideos found for event " + event.key_name)
        tbavideos = []

    template_values = {
        'tbavideos': tbavideos,
    }

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/tba_videos_get.html')
        self.response.out.write(template.render(path, template_values))

def get(self, year):
    year_event_keys = Event.query(Event.year == int(year)).fetch(1000, keys_only=True)

    final_match_keys = []
    for event_key in year_event_keys:
        final_match_keys.extend(Match.query(Match.event == event_key, Match.comp_level == 'f').fetch(100, keys_only=True))

    match_keys_to_repair = []
    for match_key in final_match_keys:
        key_name = match_key.id()
        if '_f0m' in key_name:
            match_keys_to_repair.append(match_key)

    deleted_keys = []
    matches_to_repair = ndb.get_multi(match_keys_to_repair)
    for match in matches_to_repair:
        deleted_keys.append(match.key)

        event = ndb.get_multi([match.event])[0]
        match.set_number = 1
        match.key = ndb.Key(Match, Match.renderKeyName(
            event.key.id(),
            match.comp_level,
            match.set_number,
            match.match_number))

    MatchManipulator.createOrUpdate(matches_to_repair)
    MatchManipulator.delete_keys(deleted_keys)

    template_values = {'deleted_keys': deleted_keys,
                       'new_matches': matches_to_repair}

    path = os.path.join(os.path.dirname(__file__), '../templates/math/final_matches_repair_do.html')
    self.response.out.write(template.render(path, template_values))

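# A minimal sketch of the key repair above, assuming TBA's standard elimination
# match key format '<event>_<comp_level><set>m<match>' (consistent with the
# '2016nytr_{}m{}' keys built elsewhere in this file): a finals match stored
# with set_number 0 is re-keyed with set_number 1 and the old entity deleted.
#
#   old key: '2012ct_f0m1'
#   Match.renderKeyName('2012ct', 'f', 1, 1)  # -> '2012ct_f1m1' (new key)
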
def _process_request(self, request, event_key):
    event = Event.get_by_id(event_key)
    year = int(event_key[:4])

    matches = []
    for match in JSONMatchesParser.parse(request.body, year):
        match = Match(
            id=Match.renderKeyName(
                event.key.id(),
                match.get("comp_level", None),
                match.get("set_number", 0),
                match.get("match_number", 0)),
            event=event.key,
            year=event.year,
            set_number=match.get("set_number", 0),
            match_number=match.get("match_number", 0),
            comp_level=match.get("comp_level", None),
            team_key_names=match.get("team_key_names", None),
            alliances_json=match.get("alliances_json", None),
            score_breakdown_json=match.get("score_breakdown_json", None),
            time_string=match.get("time_string", None),
            time=match.get("time", None),
        )

        if (not match.time or match.time == "") and match.time_string:
            # We can calculate the real time from the time string
            logging.debug("Calculating time!")
            MatchHelper.add_match_times(event, [match])

        matches.append(match)

    MatchManipulator.createOrUpdate(matches)

    self.response.out.write(json.dumps({'Success': "Matches successfully updated"}))

def get(self, event_key):
    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)

    if not event.remap_teams:
        return

    event.prepAwardsMatchesTeams()

    # Remap matches
    EventHelper.remapteams_matches(event.matches, event.remap_teams)
    MatchManipulator.createOrUpdate(event.matches)

    # Remap alliance selections
    if event.alliance_selections:
        EventHelper.remapteams_alliances(event.alliance_selections, event.remap_teams)
    # Remap rankings
    if event.rankings:
        EventHelper.remapteams_rankings(event.rankings, event.remap_teams)
    if event.details and event.details.rankings2:
        EventHelper.remapteams_rankings2(event.details.rankings2, event.remap_teams)
    EventDetailsManipulator.createOrUpdate(event.details)

    # Remap awards
    EventHelper.remapteams_awards(event.awards, event.remap_teams)
    AwardManipulator.createOrUpdate(event.awards, auto_union=False)

def post(self):
    self._require_admin()

    event_key = self.request.get('event_key')
    matches_csv = self.request.get('matches_csv')
    matches, _ = OffseasonMatchesParser.parse(matches_csv)

    event = Event.get_by_id(event_key)
    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match.get("comp_level", None),
            match.get("set_number", 0),
            match.get("match_number", 0)),
        event=event.key,
        year=event.year,
        set_number=match.get("set_number", 0),
        match_number=match.get("match_number", 0),
        comp_level=match.get("comp_level", None),
        team_key_names=match.get("team_key_names", None),
        alliances_json=match.get("alliances_json", None))
        for match in matches]
    MatchManipulator.createOrUpdate(matches)

    self.redirect('/admin/event/{}'.format(event_key))

def post(self, match_key):
    self._require_admin()

    alliances_json = self.request.get("alliances_json")
    alliances = json.loads(alliances_json)
    youtube_videos = json.loads(self.request.get("youtube_videos"))
    team_key_names = list()

    for alliance in alliances:
        team_key_names.extend(alliances[alliance].get('teams', None))

    match = Match(
        id=match_key,
        event=Event.get_by_id(self.request.get("event_key_name")).key,
        set_number=int(self.request.get("set_number")),
        match_number=int(self.request.get("match_number")),
        comp_level=self.request.get("comp_level"),
        team_key_names=team_key_names,
        alliances_json=alliances_json,
        # no_auto_update = str(self.request.get("no_auto_update")).lower() == "true", #TODO
    )
    match = MatchManipulator.createOrUpdate(match)
    match.youtube_videos = youtube_videos
    match.dirty = True  # hacky
    MatchManipulator.createOrUpdate(match)

    self.redirect("/admin/match/" + match.key_name)

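# A sketch of the alliances_json shape this handler expects, matching the
# literal used in the test further down (a score of -1 means "unplayed"):
#
#   {"red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]},
#    "blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}}
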
def post(self):
    self._require_admin()

    event_key = self.request.get('event_key')
    matches_csv = self.request.get('matches_csv')
    matches = OffseasonMatchesParser.parse(matches_csv)

    event = Event.get_by_id(event_key)
    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match.get("comp_level", None),
            match.get("set_number", 0),
            match.get("match_number", 0)),
        event=event.key,
        game=Match.FRC_GAMES_BY_YEAR.get(event.year, "frc_unknown"),
        set_number=match.get("set_number", 0),
        match_number=match.get("match_number", 0),
        comp_level=match.get("comp_level", None),
        team_key_names=match.get("team_key_names", None),
        alliances_json=match.get("alliances_json", None))
        for match in matches]
    MatchManipulator.createOrUpdate(matches)

    try:
        FirebasePusher.updated_event(event.key_name)
    except:
        logging.warning("Enqueuing Firebase push failed!")

    self.redirect('/admin/event/{}'.format(event_key))

def post(self):
    self._require_admin()

    additions = json.loads(self.request.get("youtube_additions_json"))
    match_keys, youtube_videos = zip(*additions["videos"])
    matches = ndb.get_multi([ndb.Key(Match, match_key) for match_key in match_keys])

    matches_to_put = []
    results = {"existing": [], "bad_match": [], "added": []}
    for (match, match_key, youtube_video) in zip(matches, match_keys, youtube_videos):
        if match:
            if youtube_video not in match.youtube_videos:
                match.youtube_videos.append(youtube_video)
                match.dirty = True  # hacky
                matches_to_put.append(match)
                results["added"].append(match_key)
            else:
                results["existing"].append(match_key)
        else:
            results["bad_match"].append(match_key)

    MatchManipulator.createOrUpdate(matches_to_put)

    self.template_values.update({
        "results": results,
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/videos_add.html')
    self.response.out.write(template.render(path, self.template_values))

def add_year(event_key):
    logging.info(event_key)
    matches = event_key.get().matches
    if matches:
        for match in matches:
            match.year = int(match.event.id()[:4])
            match.dirty = True
            MatchManipulator.createOrUpdate(match)

def test_createOrUpdate(self):
    MatchManipulator.createOrUpdate(self.old_match)
    self.assertOldMatch(Match.get_by_id("2012ct_qm1"))
    self.assertEqual(Match.get_by_id("2012ct_qm1").alliances_json, """{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""")
    MatchManipulator.createOrUpdate(self.new_match)
    self.assertMergedMatch(Match.get_by_id("2012ct_qm1"))

def test_2017scmb_sequence(self):
    event = Event(
        id='2017scmb',
        event_short='scmb',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    event_code = 'scmb'

    file_prefix = 'frc-api-response/v2.0/2017/schedule/{}/playoff/hybrid/'.format(event_code)

    context = ndb.get_context()
    result = context.urlfetch('https://www.googleapis.com/storage/v1/b/bucket/o?bucket=tbatv-prod-hrd.appspot.com&prefix={}'.format(file_prefix)).get_result()

    for item in json.loads(result.content)['items']:
        filename = item['name']
        time_str = filename.replace(file_prefix, '').replace('.json', '').strip()
        file_time = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
        query_time = file_time + datetime.timedelta(seconds=30)
        MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=query_time).getMatches('2017{}'.format(event_code)), run_post_update_hook=False)
    MatchHelper.deleteInvalidMatches(event.matches, event)

    qf_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'qf').fetch()
    self.assertEqual(len(qf_matches), 11)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 4)

    f_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'f').fetch()
    self.assertEqual(len(f_matches), 3)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').alliances['red']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').alliances['blue']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').score_breakdown['red']['totalPoints'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').score_breakdown['blue']['totalPoints'], 305)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').alliances['red']['score'], 213)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').alliances['blue']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').score_breakdown['red']['totalPoints'], 213)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').score_breakdown['blue']['totalPoints'], 305)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').alliances['red']['score'], 312)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').score_breakdown['red']['totalPoints'], 312)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').score_breakdown['blue']['totalPoints'], 255)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').alliances['red']['score'], 310)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').alliances['blue']['score'], 306)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').score_breakdown['red']['totalPoints'], 310)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').score_breakdown['blue']['totalPoints'], 306)

def get(self, event_key):
    df = DatafeedUsfirst()

    event = Event.get_by_id(event_key)
    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event))

    if new_matches:
        for match in new_matches:
            if hasattr(match, 'dirty') and match.dirty:
                # Enqueue push notification
                try:
                    FirebasePusher.updated_event(event.key_name)
                except:
                    logging.warning("Enqueuing Firebase push failed!")
                # Enqueue task to calculate matchstats
                taskqueue.add(
                    url='/tasks/math/do/event_matchstats/' + event.key_name,
                    method='GET')
                break

    template_values = {
        'matches': new_matches,
    }

    path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(template.render(path, template_values))

def store_match(data):
    match = Match(id=data['key'])
    match.event = ndb.Key(Event, data['event_key'])
    match.year = int(data['key'][:4])
    match.comp_level = data['comp_level']
    match.set_number = data['set_number']
    match.match_number = data['match_number']
    if data.get('time'):
        match.time = datetime.datetime.fromtimestamp(int(data['time']))
    if data.get('actual_time'):
        match.actual_time = datetime.datetime.fromtimestamp(int(data['actual_time']))
    if data.get('predicted_time'):
        match.predicted_time = datetime.datetime.fromtimestamp(int(data['predicted_time']))
    if data.get('post_result_time'):
        match.post_result_time = datetime.datetime.fromtimestamp(int(data['post_result_time']))
    match.score_breakdown_json = json.dumps(data['score_breakdown'])

    for alliance in ['red', 'blue']:
        data['alliances'][alliance]['teams'] = data['alliances'][alliance].pop('team_keys')
        data['alliances'][alliance]['surrogates'] = data['alliances'][alliance].pop('surrogate_team_keys')
    match.alliances_json = json.dumps(data['alliances'])

    return MatchManipulator.createOrUpdate(match)

def store_match(data):
    match = Match(id=data['key'])
    match.event = ndb.Key(Event, data['event_key'])
    match.year = int(data['key'][:4])
    match.comp_level = data['comp_level']
    match.set_number = data['set_number']
    match.match_number = data['match_number']
    if data.get('time'):
        match.time = datetime.datetime.fromtimestamp(int(data['time']))
    if data.get('actual_time'):
        match.actual_time = datetime.datetime.fromtimestamp(int(data['actual_time']))
    if data.get('predicted_time'):
        match.predicted_time = datetime.datetime.fromtimestamp(int(data['predicted_time']))
    if data.get('post_result_time'):
        match.post_result_time = datetime.datetime.fromtimestamp(int(data['post_result_time']))
    match.score_breakdown_json = json.dumps(data['score_breakdown'])

    team_key_names = []
    for alliance in ['red', 'blue']:
        team_key_names += data['alliances'][alliance]['team_keys']
        data['alliances'][alliance]['teams'] = data['alliances'][alliance].pop('team_keys')
        data['alliances'][alliance]['surrogates'] = data['alliances'][alliance].pop('surrogate_team_keys')
    match.alliances_json = json.dumps(data['alliances'])
    match.team_key_names = team_key_names

    youtube_videos = []
    for video in data['videos']:
        if video['type'] == 'youtube':
            youtube_videos.append(video['key'])
    match.youtube_videos = youtube_videos

    return MatchManipulator.createOrUpdate(match)

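# A minimal sketch of the input dict store_match above expects. The key names
# are taken from the dict accesses in the function; the values here are made
# up for illustration and would need a real datastore context to persist:
data = {
    'key': '2017flwp_qm1',
    'event_key': '2017flwp',
    'comp_level': 'qm',
    'set_number': 1,
    'match_number': 1,
    'time': 1488573000,          # Unix timestamps; falsy values are skipped
    'actual_time': 1488573120,
    'predicted_time': None,
    'post_result_time': None,
    'score_breakdown': {'red': {'totalPoints': 255}, 'blue': {'totalPoints': 165}},
    'alliances': {
        'red': {'score': 255, 'team_keys': ['frc1', 'frc2', 'frc3'], 'surrogate_team_keys': []},
        'blue': {'score': 165, 'team_keys': ['frc4', 'frc5', 'frc6'], 'surrogate_team_keys': []},
    },
    'videos': [{'type': 'youtube', 'key': 'dQw4w9WgXcQ'}],
}
store_match(data)
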
def send_upcoming_match_notification(cls, match, event):
    users = PushHelper.get_users_subscribed_to_match(match, NotificationType.UPCOMING_MATCH)
    keys = PushHelper.get_client_ids_for_users(users)

    if match.set_number == 1 and match.match_number == 1:
        # First match of a new type, send level starting notifications
        start_users = PushHelper.get_users_subscribed_to_match(match, NotificationType.LEVEL_STARTING)
        start_keys = PushHelper.get_client_ids_for_users(start_users)
        level_start = CompLevelStartingNotification(match, event)
        level_start.send(start_keys)

    # Send upcoming match notification
    notification = UpcomingMatchNotification(match, event)
    notification.send(keys)
    match.push_sent = True  # Make sure we don't send updates for this match again
    match.dirty = True
    from helpers.match_manipulator import MatchManipulator
    MatchManipulator.createOrUpdate(match)

def predict_future_matches(cls, played_matches, unplayed_matches, timezone, is_live):
    """
    Add match time predictions for future matches
    """
    last_match = played_matches[-1] if played_matches else None
    next_match = unplayed_matches[0] if unplayed_matches else None

    if not next_match:
        # Nothing to predict
        return

    last_match_day = cls.as_local(last_match.time, timezone).day if last_match else None
    average_cycle_time = cls.compute_average_cycle_time(played_matches, next_match, timezone)
    last = last_match

    # Only predict up to 20 matches in the future on the same day
    for i in range(0, min(20, len(unplayed_matches))):
        match = unplayed_matches[i]
        scheduled_time = cls.as_local(match.time, timezone)
        if scheduled_time.day != last_match_day and last_match_day is not None:
            # Stop, once we exhaust all unplayed matches on this day
            break

        # For the first iteration, base the predictions off the newest known actual start time
        # Otherwise, use the predicted start time of the previously processed match
        last_predicted = None
        if last_match:
            last_predicted = cls.as_local(last_match.actual_time if i == 0 else last.predicted_time, timezone)
        if last_predicted and average_cycle_time:
            predicted = last_predicted + datetime.timedelta(seconds=average_cycle_time)
        else:
            predicted = match.time

        # Never predict a match to happen more than 2 minutes ahead of schedule or in the past
        # However, if the event is not live (we're running the job manually for a single event),
        # then allow predicted times to be in the past.
        now = datetime.datetime.now(timezone) + cls.MAX_IN_PAST if is_live else cls.as_local(cls.EPOCH, timezone)
        earliest_possible = cls.as_local(match.time + datetime.timedelta(minutes=-2), timezone)
        match.predicted_time = max(cls.as_utc(predicted), cls.as_utc(earliest_possible), cls.as_utc(now))
        last = match

    MatchManipulator.createOrUpdate(unplayed_matches)

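# A toy walk-through of the clamping rule above, using naive datetimes
# (values are illustrative, not from real event data): the prediction is the
# last actual start plus the average cycle time, but never more than 2 minutes
# ahead of schedule and never in the past.
import datetime

last_actual = datetime.datetime(2017, 3, 4, 10, 0)
scheduled = datetime.datetime(2017, 3, 4, 10, 10)
now = datetime.datetime(2017, 3, 4, 10, 2)
average_cycle_time = 420  # seconds

predicted = last_actual + datetime.timedelta(seconds=average_cycle_time)  # 10:07
earliest_possible = scheduled - datetime.timedelta(minutes=2)             # 10:08
print max(predicted, earliest_possible, now)  # 10:08 -- clamped, never >2 min early
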
def test_2017flwp(self):
    event = Event(
        id='2017flwp',
        event_short='flwp',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=datetime.datetime(2017, 3, 4, 21, 22)).getMatches('2017flwp'))
    MatchHelper.deleteInvalidMatches(event.matches, event)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 5)
    old_match = Match.get_by_id('2017flwp_sf1m3')
    self.assertNotEqual(old_match, None)
    self.assertEqual(old_match.alliances['red']['score'], 255)
    self.assertEqual(old_match.alliances['blue']['score'], 255)
    self.assertEqual(old_match.score_breakdown['red']['totalPoints'], 255)
    self.assertEqual(old_match.score_breakdown['blue']['totalPoints'], 255)

    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=datetime.datetime(2017, 3, 4, 21, 35)).getMatches('2017flwp'))
    MatchHelper.deleteInvalidMatches(event.matches, event)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 6)
    new_match = Match.get_by_id('2017flwp_sf1m3')
    self.assertNotEqual(new_match, None)
    self.assertEqual(old_match.alliances, new_match.alliances)
    self.assertEqual(old_match.score_breakdown, new_match.score_breakdown)

    tiebreaker_match = Match.get_by_id('2017flwp_sf1m4')
    self.assertNotEqual(tiebreaker_match, None)
    self.assertEqual(tiebreaker_match.alliances['red']['score'], 165)
    self.assertEqual(tiebreaker_match.alliances['blue']['score'], 263)
    self.assertEqual(tiebreaker_match.score_breakdown['red']['totalPoints'], 165)
    self.assertEqual(tiebreaker_match.score_breakdown['blue']['totalPoints'], 263)

def get(self, event_key):
    df = DatafeedFMSAPI('v2.0')
    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event_key))

    template_values = {
        'matches': new_matches,
    }

    path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(template.render(path, template_values))

def _process_request(self, request, event_key):
    try:
        match_videos = json.loads(request.body)
    except Exception:
        self._errors = json.dumps({"Error": "Invalid JSON. Please check input."})
        self.abort(400)

    matches_to_put = []
    for partial_match_key, youtube_id in match_videos.items():
        match_key = '{}_{}'.format(event_key, partial_match_key)
        match = Match.get_by_id(match_key)
        if match is None:
            self._errors = json.dumps({"Error": "Match {} does not exist!".format(match_key)})
            self.abort(400)

        if youtube_id not in match.youtube_videos:
            match.youtube_videos.append(youtube_id)
            match.dirty = True  # This is hacky -fangeugene 2014-10-26
            matches_to_put.append(match)

    MatchManipulator.createOrUpdate(matches_to_put)

def get(self, event_key):
    df = DatafeedFMSAPI('v2.0', save_response=True)
    updated_matches = []
    for m1 in df.getMatches(event_key):
        m2 = m1.key.get()
        # Only update if teams and scores are equal
        if m2 and (m1.alliances['red']['teams'] == m2.alliances['red']['teams'] and
                m1.alliances['blue']['teams'] == m2.alliances['blue']['teams'] and
                m1.alliances['red']['score'] == m2.alliances['red']['score'] and
                m1.alliances['blue']['score'] == m2.alliances['blue']['score']):
            old_alliances = m2.alliances
            old_alliances['red']['dqs'] = m1.alliances['red']['dqs']
            old_alliances['blue']['dqs'] = m1.alliances['blue']['dqs']
            m2.alliances_json = json.dumps(old_alliances)
            updated_matches.append(m2)
        else:
            logging.warning("Match not equal: {}".format(m1.key.id()))
    MatchManipulator.createOrUpdate(updated_matches)
    self.response.out.write("DONE")

def get(self, event_key):
    df = DatafeedFMSAPI('v2.0')
    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event_key))

    template_values = {
        'matches': new_matches,
    }

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_matches_get.html')
        self.response.out.write(template.render(path, template_values))

def _process_request(self, request, event_key):
    try:
        match_videos = json.loads(request.body)
    except Exception:
        self._errors = json.dumps({"Error": "Invalid JSON. Please check input."})
        self.abort(400)

    matches_to_put = []
    for partial_match_key, youtube_id in match_videos.items():
        match_key = '{}_{}'.format(event_key, partial_match_key)
        match = Match.get_by_id(match_key)
        if match is None:
            self._errors = json.dumps({"Error": "Match {} does not exist!".format(match_key)})
            self.abort(400)

        if youtube_id not in match.youtube_videos:
            match.youtube_videos.append(youtube_id)
            matches_to_put.append(match)

    MatchManipulator.createOrUpdate(matches_to_put)

    self.response.out.write(json.dumps({'Success': "Match videos successfully updated"}))

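# A sketch of the request body the handler above parses: a JSON object mapping
# partial match keys to YouTube video IDs. The partial key is joined with the
# event key, so for event '2014casj' the first entry below targets
# '2014casj_qm1' (the video IDs here are made up):
#
#   {"qm1": "aFZy8iibMD0", "sf1m1": "RpSgUrsghv4"}
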
def post(self):
    trusted_api_secret = Sitevar.get_by_id("trusted_api.secret")
    if trusted_api_secret is None:
        raise Exception("Missing sitevar: trusted_api.secret. Can't accept YouTube Videos.")

    secret = self.request.get('secret', None)
    if secret is None:
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "No secret given"}))
        return

    if str(trusted_api_secret.values_json) != str(secret):
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "Incorrect secret"}))
        return

    match_key = self.request.get('match_key', None)
    if match_key is None:
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "No match_key given"}))
        return

    youtube_id = self.request.get('youtube_id', None)
    if youtube_id is None:
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "No youtube_id given"}))
        return

    match = Match.get_by_id(match_key)
    if match is None:
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "Match {} does not exist!".format(match_key)}))
        return

    if youtube_id not in match.youtube_videos:
        match.youtube_videos.append(youtube_id)
        match.dirty = True  # This is so hacky. -fangeugene 2014-03-06
        MatchManipulator.createOrUpdate(match)

def _process_request(self, request, event_key):
    event = Event.get_by_id(event_key)
    year = int(event_key[:4])

    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match.get("comp_level", None),
            match.get("set_number", 0),
            match.get("match_number", 0)),
        event=event.key,
        game=Match.FRC_GAMES_BY_YEAR.get(event.year, "frc_unknown"),
        set_number=match.get("set_number", 0),
        match_number=match.get("match_number", 0),
        comp_level=match.get("comp_level", None),
        team_key_names=match.get("team_key_names", None),
        alliances_json=match.get("alliances_json", None),
        score_breakdown_json=match.get("score_breakdown_json", None),
        time_string=match.get("time_string", None),
        time=match.get("time", None),
    ) for match in JSONMatchesParser.parse(request.body, year)]

    MatchManipulator.createOrUpdate(matches)

def get(self, event_key):
    df = DatafeedOffseason()

    event = Event.get_by_id(event_key)
    url = self.request.get('url')

    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event, url))

    template_values = {
        'matches': new_matches,
    }

    path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/offseason_matches_get.html')
    self.response.out.write(template.render(path, template_values))

def add_surrogates(cls, event):
    """
    If a team has more scheduled matches than other teams, then the third match
    is a surrogate. Valid for 2008 and up, don't compute for offseasons.
    """
    if event.year < 2008 or event.event_type_enum not in EventType.SEASON_EVENT_TYPES:
        return

    qual_matches = cls.organizeMatches(event.matches)['qm']
    if not qual_matches:
        return

    # Find surrogate teams
    match_counts = defaultdict(int)
    for match in qual_matches:
        for alliance_color in ['red', 'blue']:
            for team in match.alliances[alliance_color]['teams']:
                match_counts[team] += 1
    num_matches = min(match_counts.values())
    surrogate_teams = set()
    for k, v in match_counts.items():
        if v > num_matches:
            surrogate_teams.add(k)

    # Add surrogate info
    num_played = defaultdict(int)
    for match in qual_matches:
        for alliance_color in ['red', 'blue']:
            match.alliances[alliance_color]['surrogates'] = []
            for team in match.alliances[alliance_color]['teams']:
                num_played[team] += 1
                if team in surrogate_teams and num_played[team] == 3:
                    match.alliances[alliance_color]['surrogates'].append(team)
        match.alliances_json = json.dumps(match.alliances)

    MatchManipulator.createOrUpdate(qual_matches)

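# A toy example of the surrogate rule in add_surrogates above: most teams are
# scheduled for 10 quals, so a team with 11 scheduled matches is a surrogate
# in its 3rd appearance (team keys and counts are made up):
match_counts = {'frc1': 11, 'frc2': 10, 'frc3': 10}
num_matches = min(match_counts.values())  # 10
surrogate_teams = set(t for t, n in match_counts.items() if n > num_matches)
print surrogate_teams  # set(['frc1']) -- listed in 'surrogates' for its 3rd match
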
def store_match(data):
    match = Match(id=data['key'])
    match.event = ndb.Key(Event, data['event_key'])
    match.year = int(data['key'][:4])
    match.comp_level = data['comp_level']
    match.set_number = data['set_number']
    match.match_number = data['match_number']
    match.score_breakdown_json = json.dumps(data['score_breakdown'])

    for alliance in ['red', 'blue']:
        data['alliances'][alliance]['teams'] = data['alliances'][alliance].pop('team_keys')
        data['alliances'][alliance]['surrogates'] = data['alliances'][alliance].pop('surrogate_team_keys')
    match.alliances_json = json.dumps(data['alliances'])

    return MatchManipulator.createOrUpdate(match)

class ApiTrustedEventMatchesUpdate(ApiTrustedBaseController):
    """
    Creates/updates matches
    """
    REQUIRED_AUTH_TYPES = {AuthType.EVENT_MATCHES}

    def _process_request(self, request, event_key):
        event = Event.get_by_id(event_key)
        year = int(event_key[:4])

        matches = []
        needs_time = []
        for match in JSONMatchesParser.parse(request.body, year):
            match = Match(
                id=Match.renderKeyName(
                    event.key.id(),
                    match.get("comp_level", None),
                    match.get("set_number", 0),
                    match.get("match_number", 0)),
                event=event.key,
                year=event.year,
                set_number=match.get("set_number", 0),
                match_number=match.get("match_number", 0),
                comp_level=match.get("comp_level", None),
                team_key_names=match.get("team_key_names", None),
                alliances_json=match.get("alliances_json", None),
                score_breakdown_json=match.get("score_breakdown_json", None),
                time_string=match.get("time_string", None),
                time=match.get("time", None),
            )

            if (not match.time or match.time == "") and match.time_string:
                # We can calculate the real time from the time string
                needs_time.append(match)
            matches.append(match)

        if needs_time:
            try:
                logging.debug("Calculating time!")
                MatchHelper.add_match_times(event, needs_time)
            except Exception, e:
                logging.error("Failed to calculate match times")

        if event.remap_teams:
            EventHelper.remapteams_matches(matches, event.remap_teams)

        MatchManipulator.createOrUpdate(matches)

        self.response.out.write(json.dumps({'Success': "Matches successfully updated"}))

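# A sketch of the per-match dict this controller consumes from
# JSONMatchesParser.parse, inferred from the .get() calls above (the parser's
# exact output format isn't shown here; values are illustrative):
#
#   {"comp_level": "qm", "set_number": 1, "match_number": 1,
#    "team_key_names": ["frc1", "frc2", "frc3", "frc4", "frc5", "frc6"],
#    "alliances_json": "...", "score_breakdown_json": "...",
#    "time_string": "9:00 AM", "time": None}
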
def get(self, event_key):
    df = DatafeedFMSAPI('v2.0', save_response=True)
    new_matches = MatchManipulator.createOrUpdate(
        MatchHelper.deleteInvalidMatches(df.getMatches(event_key), Event.get_by_id(event_key)))

    template_values = {
        'matches': new_matches,
    }

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_matches_get.html')
        self.response.out.write(template.render(path, template_values))

def accept_suggestions(self, suggestions):
    if len(suggestions) < 1:
        return None

    matches = map(lambda match_future: match_future.get_result(),
                  [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions])

    pairs = zip(matches, suggestions)
    for match, suggestion in pairs:
        self._accept_suggestion(match, suggestion)

    matches, suggestions = zip(*pairs)
    matches = MatchManipulator.createOrUpdate(list(matches))
    return matches

def store_match(data):
    match = Match(id=data['key'])
    match.event = ndb.Key(Event, data['event_key'])
    match.year = int(data['key'][:4])
    match.comp_level = data['comp_level']
    match.set_number = data['set_number']
    match.match_number = data['match_number']
    if data.get('time'):
        match.time = datetime.datetime.fromtimestamp(int(data['time']))
    if data.get('actual_time'):
        match.actual_time = datetime.datetime.fromtimestamp(int(data['actual_time']))
    if data.get('predicted_time'):
        match.predicted_time = datetime.datetime.fromtimestamp(int(data['predicted_time']))
    if data.get('post_result_time'):
        match.post_result_time = datetime.datetime.fromtimestamp(int(data['post_result_time']))
    match.score_breakdown_json = json.dumps(data['score_breakdown'])

    team_key_names = []
    for alliance in ['red', 'blue']:
        team_key_names += data['alliances'][alliance]['team_keys']
        data['alliances'][alliance]['teams'] = data['alliances'][alliance].pop('team_keys')
        data['alliances'][alliance]['surrogates'] = data['alliances'][alliance].pop('surrogate_team_keys')
        data['alliances'][alliance]['dqs'] = data['alliances'][alliance].pop('dq_team_keys')
    match.alliances_json = json.dumps(data['alliances'])
    match.team_key_names = team_key_names

    youtube_videos = []
    for video in data['videos']:
        if video['type'] == 'youtube':
            youtube_videos.append(video['key'])
    match.youtube_videos = youtube_videos

    return MatchManipulator.createOrUpdate(match)

def get(self, event_key):
    df = DatafeedUsfirst()

    event = Event.get_by_id(event_key)
    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event))

    try:
        FirebasePusher.updated_event(event.key_name)
    except:
        logging.warning("Enqueuing Firebase push failed!")

    # Enqueue task to calculate matchstats
    taskqueue.add(
        url='/tasks/math/do/event_matchstats/' + event.key_name,
        method='GET')

    template_values = {
        'matches': new_matches,
    }

    path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(template.render(path, template_values))

def get(self, event_key):
    df = DatafeedFMSAPI('v2.0', save_response=True)
    event = Event.get_by_id(event_key)

    matches = MatchHelper.deleteInvalidMatches(
        df.getMatches(event_key),
        Event.get_by_id(event_key)
    )

    if event and event.remap_teams:
        EventHelper.remapteams_matches(matches, event.remap_teams)

    new_matches = MatchManipulator.createOrUpdate(matches)

    template_values = {
        'matches': new_matches,
    }

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_matches_get.html')
        self.response.out.write(template.render(path, template_values))

def get(self, event_key):
    df = DatafeedUsfirst()

    event = Event.get_by_id(event_key)
    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event))

    try:
        last_matches = MatchHelper.recentMatches(new_matches, 1)
        upcoming_matches = MatchHelper.upcomingMatches(new_matches, 8)
    except:
        logging.warning("Computing last/upcoming matches for Firebase failed!")
    try:
        FirebasePusher.updateEvent(event, last_matches, upcoming_matches)
    except:
        logging.warning("Enqueuing Firebase push failed!")

    template_values = {
        'matches': new_matches,
    }

    path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(template.render(path, template_values))

def post(self, match_key):
    self._require_admin()

    alliances_json = self.request.get("alliances_json")
    alliances = json.loads(alliances_json)
    team_key_names = list()

    for alliance in alliances:
        team_key_names.extend(alliances[alliance].get('teams', None))

    match = Match(
        id=match_key,
        event=Event.get_by_id(self.request.get("event_key_name")).key,
        game=self.request.get("game"),
        set_number=int(self.request.get("set_number")),
        match_number=int(self.request.get("match_number")),
        comp_level=self.request.get("comp_level"),
        team_key_names=team_key_names,
        alliances_json=alliances_json,
        # no_auto_update = str(self.request.get("no_auto_update")).lower() == "true", #TODO
    )
    match = MatchManipulator.createOrUpdate(match)

    self.redirect("/admin/match/" + match.key_name)

def predict_future_matches(cls, event_key, played_matches, unplayed_matches, timezone, is_live):
    """
    Add match time predictions for future matches
    """
    to_log = '--------------------------------------------------\n'
    to_log += "[TIME PREDICTIONS] Current time: {}\n".format(datetime.datetime.now())
    to_log += "[TIME PREDICTIONS] Current event: {}\n".format(event_key)

    last_match = played_matches[-1] if played_matches else None
    next_match = unplayed_matches[0] if unplayed_matches else None

    if last_match:
        to_log += "[TIME PREDICTIONS] Last Match: {}, Actual Time: {}, Schedule: {} - {}, Predicted: {} - {}\n"\
            .format(last_match.key_name,
                    cls.as_local(last_match.actual_time, timezone),
                    cls.as_local(last_match.time, timezone),
                    last_match.schedule_error_str,
                    cls.as_local(last_match.predicted_time, timezone),
                    last_match.prediction_error_str)

    if next_match:
        to_log += "[TIME PREDICTIONS] Next Match: {}, Schedule: {}, Last Predicted: {}\n"\
            .format(next_match.key_name,
                    cls.as_local(next_match.time, timezone),
                    cls.as_local(next_match.predicted_time, timezone))

    if len(played_matches) >= 2:
        two_ago = played_matches[-2]
        cycle = last_match.actual_time - two_ago.actual_time
        s = int(cycle.total_seconds())
        to_log += '[TIME PREDICTIONS] Last Cycle: {:02}:{:02}:{:02}\n'.format(
            s // 3600, s % 3600 // 60, s % 60)

    if not next_match:
        # Nothing to predict
        return

    last_match_day = cls.as_local(last_match.time, timezone).day if last_match else None
    average_cycle_time = cls.compute_average_cycle_time(played_matches, next_match, timezone)
    last = last_match

    # Only write logs if this is the first time after a new match is played
    memcache_key = "time_prediction:last_match:{}".format(event_key)
    last_played = memcache.get(memcache_key)
    write_logs = False
    if last_match and last_match.key_name != last_played:
        write_logs = True
        memcache.set(memcache_key, last_match.key_name, 60 * 60 * 24)

    if average_cycle_time:
        average_cycle_time = int(average_cycle_time)
        to_log += "[TIME PREDICTIONS] Average Cycle Time: {:02}:{:02}:{:02}\n".format(
            average_cycle_time // 3600, average_cycle_time % 3600 // 60, average_cycle_time % 60)

    # Run predictions for all unplayed matches on this day
    for i in range(0, len(unplayed_matches)):
        match = unplayed_matches[i]
        scheduled_time = cls.as_local(match.time, timezone)
        if scheduled_time.day != last_match_day and last_match_day is not None:
            # Stop, once we exhaust all unplayed matches on this day
            if i == 0:
                write_logs = False
            break

        # For the first iteration, base the predictions off the newest known actual start time
        # Otherwise, use the predicted start time of the previously processed match
        last_predicted = None
        if last_match:
            last_predicted = cls.as_local(last_match.actual_time if i == 0 else last.predicted_time, timezone)
        if last_predicted and average_cycle_time:
            predicted = last_predicted + datetime.timedelta(seconds=average_cycle_time)
            predicted = predicted.replace(second=0)  # Round down to the nearest minute
        else:
            predicted = match.time

        # Never predict a match to happen more than 2 minutes ahead of schedule or in the past
        # However, if the event is not live (we're running the job manually for a single event),
        # then allow predicted times to be in the past.
        now = datetime.datetime.now(timezone) + cls.MAX_IN_PAST if is_live else cls.as_local(cls.EPOCH, timezone)
        earliest_possible = cls.as_local(match.time + datetime.timedelta(minutes=cls.MAX_SCHEDULE_OFFSET), timezone)
        match.predicted_time = max(cls.as_utc(predicted), cls.as_utc(earliest_possible), cls.as_utc(now))
        last = match

    MatchManipulator.createOrUpdate(unplayed_matches)

    # Log to cloudstorage, but only if we have something new
    if not write_logs:
        return
    log_dir = '/tbatv-prod-hrd.appspot.com/tba-logging/match-time-predictions/'
    log_file = '{}.txt'.format(event_key)
    full_path = log_dir + log_file

    existing_contents = ''
    if full_path in set([f.filename for f in cloudstorage.listbucket(log_dir)]):
        with cloudstorage.open(full_path, 'r') as existing_file:
            existing_contents = existing_file.read()
    with cloudstorage.open(full_path, 'w') as new_file:
        new_file.write(existing_contents + to_log)

def step(self):
    event = Event.get_by_id('2016nytr')

    if self._step == 0:  # Qual match schedule added
        for match in copy.deepcopy(self._all_matches['qm']):
            for alliance in ['red', 'blue']:
                match.alliances[alliance]['score'] = -1
            match.alliances_json = json.dumps(match.alliances)
            match.score_breakdown_json = None
            match.actual_time = None
            MatchManipulator.createOrUpdate(match)
        self._step += 1
    elif self._step == 1:  # After each qual match
        MatchManipulator.createOrUpdate(self._played_matches['qm'][self._substep])
        if self._substep < len(self._played_matches['qm']) - 1:
            self._substep += 1
        else:
            self._step += 1
            self._substep = 0
        EventDetailsManipulator.createOrUpdate(EventDetails(id='2016nytr'))
    elif self._step == 2:  # After alliance selections
        EventDetailsManipulator.createOrUpdate(EventDetails(
            id='2016nytr',
            alliance_selections=self._alliance_selections_without_backup))
        self._step += 1
    elif self._step == 3:  # QF schedule added
        for match in copy.deepcopy(self._all_matches['qf']):
            for alliance in ['red', 'blue']:
                match.alliances[alliance]['score'] = -1
            match.alliances_json = json.dumps(match.alliances)
            match.score_breakdown_json = None
            match.actual_time = None
            MatchManipulator.createOrUpdate(match)
        self._step += 1
    elif self._step == 4:  # After each QF match
        new_match = MatchHelper.play_order_sort_matches(self._played_matches['qf'])[self._substep]
        MatchManipulator.createOrUpdate(new_match)

        if not self._batch_advance:
            win_counts = {
                'red': 0,
                'blue': 0,
            }
            for i in xrange(new_match.match_number):
                win_counts[Match.get_by_id(
                    Match.renderKeyName(
                        new_match.event.id(),
                        new_match.comp_level,
                        new_match.set_number,
                        i+1)).winning_alliance] += 1
            for alliance, wins in win_counts.items():
                if wins == 2:
                    s = new_match.set_number
                    if s in {1, 2}:
                        self._advancement_alliances['sf1']['red' if s == 1 else 'blue'] = new_match.alliances[alliance]['teams']
                    elif s in {3, 4}:
                        self._advancement_alliances['sf2']['red' if s == 3 else 'blue'] = new_match.alliances[alliance]['teams']
                    else:
                        raise Exception("Invalid set number: {}".format(s))

                    for match_set, alliances in self._advancement_alliances.items():
                        if match_set.startswith('sf'):
                            for i in xrange(3):
                                for match in copy.deepcopy(self._all_matches['sf']):
                                    key = '2016nytr_{}m{}'.format(match_set, i+1)
                                    if match.key.id() == key:
                                        for color in ['red', 'blue']:
                                            match.alliances[color]['score'] = -1
                                            match.alliances[color]['teams'] = alliances.get(color, [])
                                        match.alliances_json = json.dumps(match.alliances)
                                        match.score_breakdown_json = None
                                        match.actual_time = None
                                        MatchManipulator.createOrUpdate(match)

        if self._substep < len(self._played_matches['qf']) - 1:
            self._substep += 1
        else:
            self._step += 1 if self._batch_advance else 2
            self._substep = 0
    elif self._step == 5:  # SF schedule added
        if self._batch_advance:
            for match in copy.deepcopy(self._all_matches['sf']):
                for alliance in ['red', 'blue']:
                    match.alliances[alliance]['score'] = -1
                match.alliances_json = json.dumps(match.alliances)
                match.score_breakdown_json = None
                match.actual_time = None
                MatchManipulator.createOrUpdate(match)
            self._step += 1
    elif self._step == 6:  # After each SF match
        new_match = MatchHelper.play_order_sort_matches(self._played_matches['sf'])[self._substep]
        MatchManipulator.createOrUpdate(new_match)

        if not self._batch_advance:
            win_counts = {
                'red': 0,
                'blue': 0,
            }
            for i in xrange(new_match.match_number):
                win_counts[Match.get_by_id(
                    Match.renderKeyName(
                        new_match.event.id(),
                        new_match.comp_level,
                        new_match.set_number,
                        i+1)).winning_alliance] += 1
            for alliance, wins in win_counts.items():
                if wins == 2:
                    self._advancement_alliances['f1']['red' if new_match.set_number == 1 else 'blue'] = new_match.alliances[alliance]['teams']

                    for match_set, alliances in self._advancement_alliances.items():
                        if match_set.startswith('f'):
                            for i in xrange(3):
                                for match in copy.deepcopy(self._all_matches['f']):
                                    key = '2016nytr_{}m{}'.format(match_set, i+1)
                                    if match.key.id() == key:
                                        for color in ['red', 'blue']:
                                            match.alliances[color]['score'] = -1
                                            match.alliances[color]['teams'] = alliances.get(color, [])
                                        match.alliances_json = json.dumps(match.alliances)
                                        match.score_breakdown_json = None
                                        match.actual_time = None
                                        MatchManipulator.createOrUpdate(match)

        # Backup robot introduced
        if self._substep == 3:
            EventDetailsManipulator.createOrUpdate(EventDetails(
                id='2016nytr',
                alliance_selections=self._event_details.alliance_selections))

        if self._substep < len(self._played_matches['sf']) - 1:
            self._substep += 1
        else:
            self._step += 1 if self._batch_advance else 2
            self._substep = 0
    elif self._step == 7:  # F schedule added
        if self._batch_advance:
            for match in copy.deepcopy(self._all_matches['f']):
                for alliance in ['red', 'blue']:
                    match.alliances[alliance]['score'] = -1
                match.alliances_json = json.dumps(match.alliances)
                match.score_breakdown_json = None
                match.actual_time = None
                MatchManipulator.createOrUpdate(match)
            self._step += 1
    elif self._step == 8:  # After each F match
        MatchManipulator.createOrUpdate(
            MatchHelper.play_order_sort_matches(self._played_matches['f'])[self._substep])
        if self._substep < len(self._played_matches['f']) - 1:
            self._substep += 1
        else:
            self._step += 1
            self._substep = 0

    ndb.get_context().clear_cache()  # Re-fetch event matches
    event = Event.get_by_id('2016nytr')
    MatchHelper.deleteInvalidMatches(event.matches, event)
    ndb.get_context().clear_cache()

    self._update_rankings()

def accept_suggestion(self, match, suggestion):
    if "youtube_videos" in suggestion.contents:
        match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"])

    return MatchManipulator.createOrUpdate(match)

def test_2017flwp_sequence(self):
    event = Event(
        id='2017flwp',
        event_short='flwp',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    event_code = 'flwp'

    file_prefix = 'frc-api-response/v2.0/2017/schedule/{}/playoff/hybrid/'.format(event_code)

    context = ndb.get_context()
    result = context.urlfetch('https://www.googleapis.com/storage/v1/b/bucket/o?bucket=tbatv-prod-hrd.appspot.com&prefix={}'.format(file_prefix)).get_result()

    for item in json.loads(result.content)['items']:
        filename = item['name']
        time_str = filename.replace(file_prefix, '').replace('.json', '').strip()
        file_time = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
        query_time = file_time + datetime.timedelta(seconds=30)
        MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=query_time).getMatches('2017{}'.format(event_code)), run_post_update_hook=False)
    MatchHelper.deleteInvalidMatches(event.matches)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 7)

    self.assertEqual(Match.get_by_id('2017flwp_sf1m1').alliances['red']['score'], 305)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m1').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m1').score_breakdown['red']['totalPoints'], 305)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m1').score_breakdown['blue']['totalPoints'], 255)

    self.assertEqual(Match.get_by_id('2017flwp_sf1m2').alliances['red']['score'], 165)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m2').alliances['blue']['score'], 258)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m2').score_breakdown['red']['totalPoints'], 165)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m2').score_breakdown['blue']['totalPoints'], 258)

    self.assertEqual(Match.get_by_id('2017flwp_sf1m3').alliances['red']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m3').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m3').score_breakdown['red']['totalPoints'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m3').score_breakdown['blue']['totalPoints'], 255)

    self.assertEqual(Match.get_by_id('2017flwp_sf1m4').alliances['red']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m4').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m4').score_breakdown['red']['totalPoints'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m4').score_breakdown['blue']['totalPoints'], 255)

    self.assertEqual(Match.get_by_id('2017flwp_sf1m5').alliances['red']['score'], 165)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m5').alliances['blue']['score'], 263)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m5').score_breakdown['red']['totalPoints'], 165)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m5').score_breakdown['blue']['totalPoints'], 263)

def createCompleteQuals(self):
    comp_level = "qm"
    set_number = 1
    complete = True

    matches = [self.buildTestMatch(comp_level, set_number, match_number, complete)
               for match_number in range(1, 11)]
    MatchManipulator.createOrUpdate(matches)

def get(self, event_key):
    if tba_config.CONFIG["env"] == "prod":  # disable in prod for now
        logging.error("Tried to restore {} from CSV in prod! No can do.".format(event_key))
        return

    event = Event.get_by_id(event_key)

    # alliances
    result = urlfetch.fetch(self.ALLIANCES_URL.format(event.year, event_key, event_key))
    if result.status_code != 200:
        logging.warning('Unable to retrieve url: ' + (self.ALLIANCES_URL.format(event.year, event_key, event_key)))
    else:
        data = result.content.replace('frc', '')
        alliance_selections = CSVAllianceSelectionsParser.parse(data)

        event_details = EventDetails(
            id=event_key,
            alliance_selections=alliance_selections
        )
        EventDetailsManipulator.createOrUpdate(event_details)

    # awards
    result = urlfetch.fetch(self.AWARDS_URL.format(event.year, event_key, event_key))
    if result.status_code != 200:
        logging.warning('Unable to retrieve url: ' + (self.AWARDS_URL.format(event.year, event_key, event_key)))
    else:
        # convert into expected input format
        data = StringIO.StringIO()
        writer = csv.writer(data, delimiter=',')
        for row in csv.reader(StringIO.StringIO(result.content), delimiter=','):
            writer.writerow([event.year, event.event_short, row[1], row[2].replace('frc', ''), row[3]])

        awards = []
        for award in CSVAwardsParser.parse(data.getvalue()):
            awards.append(Award(
                id=Award.render_key_name(event.key_name, award['award_type_enum']),
                name_str=award['name_str'],
                award_type_enum=award['award_type_enum'],
                year=event.year,
                event=event.key,
                event_type_enum=event.event_type_enum,
                team_list=[ndb.Key(Team, 'frc{}'.format(team_number)) for team_number in award['team_number_list']],
                recipient_json_list=award['recipient_json_list']
            ))
        AwardManipulator.createOrUpdate(awards)

    # matches
    result = urlfetch.fetch(self.MATCHES_URL.format(event.year, event_key, event_key))
    if result.status_code != 200:
        logging.warning('Unable to retrieve url: ' + (self.MATCHES_URL.format(event.year, event_key, event_key)))
    else:
        data = result.content.replace('frc', '').replace('{}_'.format(event_key), '')
        match_dicts, _ = OffseasonMatchesParser.parse(data)
        matches = [
            Match(
                id=Match.renderKeyName(
                    event.key.id(),
                    match.get("comp_level", None),
                    match.get("set_number", 0),
                    match.get("match_number", 0)),
                event=event.key,
                year=event.year,
                set_number=match.get("set_number", 0),
                match_number=match.get("match_number", 0),
                comp_level=match.get("comp_level", None),
                team_key_names=match.get("team_key_names", None),
                alliances_json=match.get("alliances_json", None)
            )
            for match in match_dicts]
        MatchManipulator.createOrUpdate(matches)

    # rankings
    result = urlfetch.fetch(self.RANKINGS_URL.format(event.year, event_key, event_key))
    if result.status_code != 200:
        logging.warning('Unable to retrieve url: ' + (self.RANKINGS_URL.format(event.year, event_key, event_key)))
    else:
        # convert into expected input format
        rankings = list(csv.reader(StringIO.StringIO(result.content), delimiter=','))
        event_details = EventDetails(
            id=event_key,
            rankings=rankings
        )
        EventDetailsManipulator.createOrUpdate(event_details)

    self.response.out.write("Done restoring {}!".format(event_key))

def predict_future_matches(cls, event_key, played_matches, unplayed_matches, timezone, is_live):
    """
    Add match time predictions for future matches
    """
    to_log = '--------------------------------------------------\n'
    to_log += "[TIME PREDICTIONS] Current time: {}\n".format(datetime.datetime.now())
    to_log += "[TIME PREDICTIONS] Current event: {}\n".format(event_key)

    last_match = played_matches[-1] if played_matches else None
    next_match = unplayed_matches[0] if unplayed_matches else None

    if last_match:
        to_log += "[TIME PREDICTIONS] Last Match: {}, Actual Time: {}, Schedule: {} - {}, Predicted: {} - {}\n"\
            .format(last_match.key_name,
                    cls.as_local(last_match.actual_time, timezone),
                    cls.as_local(last_match.time, timezone),
                    last_match.schedule_error_str,
                    cls.as_local(last_match.predicted_time, timezone),
                    last_match.prediction_error_str)

    if next_match:
        to_log += "[TIME PREDICTIONS] Next Match: {}, Schedule: {}, Last Predicted: {}\n"\
            .format(next_match.key_name,
                    cls.as_local(next_match.time, timezone),
                    cls.as_local(next_match.predicted_time, timezone))

    if len(played_matches) >= 2:  # Just for some logging
        two_ago = played_matches[-2]
        if last_match.actual_time and two_ago.actual_time:
            cycle = last_match.actual_time - two_ago.actual_time
            s = int(cycle.total_seconds())
            to_log += '[TIME PREDICTIONS] Last Cycle: {:02}:{:02}:{:02}\n'.format(
                s // 3600, s % 3600 // 60, s % 60)

    if not next_match or (last_match and not last_match.time) or (last_match and not last_match.actual_time):
        # Nothing to predict
        return

    last_match_day = cls.as_local(last_match.time, timezone).day if last_match else None
    average_cycle_time = cls.compute_average_cycle_time(played_matches, next_match, timezone)
    last = last_match

    # Only write logs if this is the first time after a new match is played
    memcache_key = "time_prediction:last_match:{}".format(event_key)
    last_played = memcache.get(memcache_key)
    write_logs = False
    if last_match and last_match.key_name != last_played:
        write_logs = True
        memcache.set(memcache_key, last_match.key_name, 60 * 60 * 24)

    if average_cycle_time:
        average_cycle_time = int(average_cycle_time)
        to_log += "[TIME PREDICTIONS] Average Cycle Time: {:02}:{:02}:{:02}\n".format(
            average_cycle_time // 3600, average_cycle_time % 3600 // 60, average_cycle_time % 60)

    # Run predictions for all unplayed matches on this day and comp level
    last_comp_level = next_match.comp_level if next_match else None
    now = datetime.datetime.now(timezone) + cls.MAX_IN_PAST if is_live else cls.as_local(cls.EPOCH, timezone)
    first_unplayed_timedelta = None
    for i in range(0, len(unplayed_matches)):
        match = unplayed_matches[i]
        if not match.time:
            continue
        if first_unplayed_timedelta is None:
            first_unplayed_timedelta = now - cls.as_local(match.time, timezone)
            if first_unplayed_timedelta < datetime.timedelta(seconds=0):
                first_unplayed_timedelta = datetime.timedelta(seconds=0)

        scheduled_time = cls.as_local(match.time, timezone)
        if (scheduled_time.day != last_match_day and last_match_day is not None) \
                or last_comp_level != match.comp_level:
            if i == 0:
                write_logs = False
            # Use predicted = scheduled once we exhaust all unplayed matches on this day or move to a new comp level
            match.predicted_time = cls.as_utc(cls.as_local(match.time, timezone) + first_unplayed_timedelta)
            continue

        # For the first iteration, base the predictions off the newest known actual start time
        # Otherwise, use the predicted start time of the previously processed match
        last_predicted = None
        if last_match:
            cycle_time = average_cycle_time if average_cycle_time else 60 * 7  # Default to 7 min
            base_time = max(cls.as_local(last_match.actual_time, timezone),
                            now - datetime.timedelta(seconds=cycle_time))
            last_predicted = base_time if i == 0 else cls.as_local(last.predicted_time, timezone)
        if last_predicted and average_cycle_time:
            predicted = last_predicted + datetime.timedelta(seconds=average_cycle_time)
        else:
            # Shift predicted time by the amount the first match is behind
            predicted = cls.as_local(match.time, timezone) + first_unplayed_timedelta

        # Never predict a match to happen more than 15 minutes ahead of schedule or in the past
        # Except for playoff matches, which we allow to be any amount early (since all schedule
        # bets are off due to canceled tiebreaker matches).
        # However, if the event is not live (we're running the job manually for a single event),
        # then allow predicted times to be in the past.
        earliest_possible = cls.as_local(match.time + cls.MAX_SCHEDULE_OFFSET, timezone) \
            if match.comp_level not in Match.ELIM_LEVELS else cls.as_local(cls.EPOCH, timezone)
        match.predicted_time = max(cls.as_utc(predicted), cls.as_utc(earliest_possible))
        last = match
        last_comp_level = match.comp_level

    MatchManipulator.createOrUpdate(unplayed_matches)

    # Log to cloudstorage, but only if we have something new
    if not write_logs:
        return
    log_dir = '/tbatv-prod-hrd.appspot.com/tba-logging/match-time-predictions/'
    log_file = '{}.txt'.format(event_key)
    full_path = log_dir + log_file

    existing_contents = ''
    if full_path in set([f.filename for f in cloudstorage.listbucket(log_dir)]):
        with cloudstorage.open(full_path, 'r') as existing_file:
            existing_contents = existing_file.read()
    with cloudstorage.open(full_path, 'w') as new_file:
        new_file.write(existing_contents + to_log)

def post(self, event_key_id):
    self._require_admin()
    event = Event.get_by_id(event_key_id)
    event.prepAwardsMatchesTeams()

    remap_teams = {}
    for key, value in json.loads(self.request.get('remap_teams')).items():
        remap_teams['frc{}'.format(key)] = 'frc{}'.format(value)

    # Remap matches
    for match in event.matches:
        for old_team, new_team in remap_teams.items():
            # Update team key names
            for i, key in enumerate(match.team_key_names):
                if key == old_team:
                    match.dirty = True
                    # Check the number portion of the key; 'frc{}'.isdigit() is never True
                    if new_team[3:].isdigit():  # Only if non "B" teams
                        match.team_key_names[i] = new_team
                    else:
                        del match.team_key_names[i]
            # Update alliances
            for color in ['red', 'blue']:
                for i, key in enumerate(match.alliances[color]['teams']):
                    if key == old_team:
                        match.dirty = True
                        match.alliances[color]['teams'][i] = new_team
                        match.alliances_json = json.dumps(match.alliances)
    MatchManipulator.createOrUpdate(event.matches)

    # Remap alliance selections
    if event.alliance_selections:
        for row in event.alliance_selections:
            for choice in ['picks', 'declines']:
                for old_team, new_team in remap_teams.items():
                    for i, key in enumerate(row[choice]):
                        if key == old_team:
                            row[choice][i] = new_team

    # Remap rankings
    if event.rankings:
        for row in event.rankings:
            for old_team, new_team in remap_teams.items():
                if row[1] == old_team[3:]:
                    row[1] = new_team[3:]
    EventDetailsManipulator.createOrUpdate(event.details)

    # Remap awards
    for award in event.awards:
        for old_team, new_team in remap_teams.items():
            # Update team keys
            for i, key in enumerate(award.team_list):
                if key.id() == old_team:
                    award.dirty = True
                    if new_team[3:].isdigit():  # Only if non "B" teams
                        award.team_list[i] = ndb.Key(Team, new_team)
                    else:
                        del award.team_list[i]
            # Update recipient list
            for recipient in award.recipient_list:
                if str(recipient['team_number']) == old_team[3:]:
                    award.dirty = True
                    recipient['team_number'] = new_team[3:]
                    award.recipient_json_list = [json.dumps(r) for r in award.recipient_list]
    AwardManipulator.createOrUpdate(event.awards, auto_union=False)

    self.redirect("/admin/event/" + event.key_name)

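# A sketch of the remap_teams input the handler above parses (the mapping is
# made up): posting remap_teams = '{"9254": "254B"}' builds
# {'frc9254': 'frc254B'}, so 'frc9254' becomes 'frc254B' in match alliances,
# while team_key_names and award team keys for B teams are dropped, since
# '254B' is not all digits and B teams have no Team entity.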