def post(self):
    """Bulk-attach YouTube videos to matches from a posted JSON payload.

    Renders a results page listing which keys were added, already present,
    or referenced a nonexistent match.
    """
    self._require_admin()

    payload = json.loads(self.request.get("youtube_additions_json"))
    keys, videos = zip(*payload["videos"])
    fetched = ndb.get_multi([ndb.Key(Match, k) for k in keys])

    results = {"existing": [], "bad_match": [], "added": []}
    to_put = []
    for fetched_match, key, video in zip(fetched, keys, videos):
        if not fetched_match:
            results["bad_match"].append(key)
            continue
        if video in fetched_match.youtube_videos:
            results["existing"].append(key)
            continue
        fetched_match.youtube_videos.append(video)
        fetched_match.dirty = True  # hacky
        to_put.append(fetched_match)
        results["added"].append(key)

    MatchManipulator.createOrUpdate(to_put)

    self.template_values.update({
        "results": results,
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/videos_add.html')
    self.response.out.write(template.render(path, self.template_values))
def deleteInvalidMatches(self, match_list):
    """
    A match is invalid iff it is an elim match that has not been played
    and the same alliance already won in 2 match numbers in the same set.

    Returns match_list with invalid matches removed; also deletes them
    from the datastore (best-effort).
    """
    red_win_counts = defaultdict(int)  # key: <comp_level><set_number>
    blue_win_counts = defaultdict(int)  # key: <comp_level><set_number>
    for match in match_list:
        if match.has_been_played and match.comp_level in Match.ELIM_LEVELS:
            key = '{}{}'.format(match.comp_level, match.set_number)
            if match.winning_alliance == 'red':
                red_win_counts[key] += 1
            elif match.winning_alliance == 'blue':
                blue_win_counts[key] += 1

    return_list = []
    for match in match_list:
        if match.comp_level in Match.ELIM_LEVELS and not match.has_been_played:
            key = '{}{}'.format(match.comp_level, match.set_number)
            if red_win_counts[key] == 2 or blue_win_counts[key] == 2:
                try:
                    MatchManipulator.delete(match)
                    logging.warning("Deleting invalid match: %s" % match.key_name)
                except Exception:  # narrowed from bare except so SystemExit/KeyboardInterrupt propagate
                    logging.warning("Tried to delete invalid match, but failed: %s" % match.key_name)
                continue
        return_list.append(match)
    return return_list
def post(self, match_key):
    """Create or overwrite a match from the admin edit form, then redirect to it."""
    self._require_admin()

    alliances_json = self.request.get("alliances_json")
    score_breakdown_json = self.request.get("score_breakdown_json")
    # Ignore u'None' from form POST
    if score_breakdown_json == "None":
        score_breakdown_json = None

    # Fake JSON load of the score breakdown to ensure the JSON is proper before attempting to save to the DB
    if score_breakdown_json:
        json.loads(score_breakdown_json)

    alliances = json.loads(alliances_json)

    raw_tba = self.request.get("tba_videos")
    tba_videos = json.loads(raw_tba) if raw_tba else []
    raw_youtube = self.request.get("youtube_videos")
    youtube_videos = json.loads(raw_youtube) if raw_youtube else []

    team_key_names = []
    for color in alliances:
        team_key_names.extend(alliances[color].get('teams', None))

    match = Match(
        id=match_key,
        event=Event.get_by_id(self.request.get("event_key_name")).key,
        set_number=int(self.request.get("set_number")),
        match_number=int(self.request.get("match_number")),
        comp_level=self.request.get("comp_level"),
        team_key_names=team_key_names,
        alliances_json=alliances_json,
        score_breakdown_json=score_breakdown_json,
        tba_videos=tba_videos,
        youtube_videos=youtube_videos
        # no_auto_update = str(self.request.get("no_auto_update")).lower() == "true", #TODO
    )
    MatchManipulator.createOrUpdate(match, auto_union=False)
    self.redirect("/admin/match/" + match.key_name)
def post(self, match_key):
    """Create a match from the admin add form, then write its YouTube videos."""
    self._require_admin()

    alliances_json = self.request.get("alliances_json")
    alliances = json.loads(alliances_json)
    youtube_videos = json.loads(self.request.get("youtube_videos"))

    team_key_names = []
    for color in alliances:
        team_key_names.extend(alliances[color].get('teams', None))

    match = Match(
        id=match_key,
        event=Event.get_by_id(self.request.get("event_key_name")).key,
        set_number=int(self.request.get("set_number")),
        match_number=int(self.request.get("match_number")),
        comp_level=self.request.get("comp_level"),
        team_key_names=team_key_names,
        alliances_json=alliances_json,
        # no_auto_update = str(self.request.get("no_auto_update")).lower() == "true", #TODO
    )
    match = MatchManipulator.createOrUpdate(match)

    # Second write attaches the video list to the stored entity.
    match.youtube_videos = youtube_videos
    match.dirty = True  # hacky
    MatchManipulator.createOrUpdate(match)

    self.redirect("/admin/match/" + match.key_name)
def get(self, event_key):
    """Fetch TBA-hosted video filetypes for an event and persist any changes."""
    df = DatafeedTba()
    event = Event.get_by_id(event_key)
    match_filetypes = df.getVideos(event)

    if not match_filetypes:
        logging.info("No tbavideos found for event " + event.key_name)
        tbavideos = []
    else:
        changed = []
        for match in event.matches:
            new_videos = match_filetypes.get(match.key_name, [])
            if match.tba_videos != new_videos:
                match.tba_videos = new_videos
                match.dirty = True
                changed.append(match)
        MatchManipulator.createOrUpdate(changed)
        tbavideos = match_filetypes.items()

    template_values = {
        'tbavideos': tbavideos,
    }

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/tba_videos_get.html')
        self.response.out.write(template.render(path, template_values))
def deleteInvalidMatches(self, match_list):
    """
    A match is invalid iff it is an elim match that has not been played
    and the same alliance already won in 2 match numbers in the same set.

    Deletes invalid matches (best-effort) and returns the remaining ones.
    """
    red_win_counts = defaultdict(int)  # key: <comp_level><set_number>
    blue_win_counts = defaultdict(int)  # key: <comp_level><set_number>
    for match in match_list:
        if match.has_been_played and match.comp_level in Match.ELIM_LEVELS:
            key = '{}{}'.format(match.comp_level, match.set_number)
            if match.winning_alliance == 'red':
                red_win_counts[key] += 1
            elif match.winning_alliance == 'blue':
                blue_win_counts[key] += 1

    return_list = []
    for match in match_list:
        if match.comp_level in Match.ELIM_LEVELS and not match.has_been_played:
            key = '{}{}'.format(match.comp_level, match.set_number)
            if red_win_counts[key] == 2 or blue_win_counts[key] == 2:
                try:
                    MatchManipulator.delete(match)
                    logging.warning("Deleting invalid match: %s" % match.key_name)
                except Exception:  # narrowed from bare except: don't swallow interpreter-exit signals
                    logging.warning(
                        "Tried to delete invalid match, but failed: %s" % match.key_name)
                continue
        return_list.append(match)
    return return_list
def deleteInvalidMatches(self, match_list, event):
    """
    A match is invalid iff it is an elim match that has not been played
    and the same alliance already won in 2 match numbers in the same set.

    Respects the event's playoff type: round-robin semifinals are never
    deleted, and best-of-5 finals require 3 wins to clinch.
    """
    red_win_counts = defaultdict(int)  # key: <comp_level><set_number>
    blue_win_counts = defaultdict(int)  # key: <comp_level><set_number>
    for match in match_list:
        if match.has_been_played and match.comp_level in Match.ELIM_LEVELS:
            key = '{}{}'.format(match.comp_level, match.set_number)
            if match.winning_alliance == 'red':
                red_win_counts[key] += 1
            elif match.winning_alliance == 'blue':
                blue_win_counts[key] += 1

    return_list = []
    for match in match_list:
        if match.comp_level in Match.ELIM_LEVELS and not match.has_been_played:
            if event.playoff_type != PlayoffType.ROUND_ROBIN_6_TEAM or match.comp_level == 'f':  # Don't delete round robin semifinal matches
                key = '{}{}'.format(match.comp_level, match.set_number)
                n = 3 if event.playoff_type == PlayoffType.BO5_FINALS else 2
                if red_win_counts[key] == n or blue_win_counts[key] == n:
                    try:
                        MatchManipulator.delete(match)
                        logging.warning("Deleting invalid match: %s" % match.key_name)
                    except Exception:  # narrowed from bare except
                        logging.warning(
                            "Tried to delete invalid match, but failed: %s" % match.key_name)
                    continue
        return_list.append(match)
    return return_list
def get(self, event_key):
    """Re-apply an event's team remapping to matches, alliances, rankings, and awards.

    Responds 404 when the event does not exist; silently no-ops when the
    event has no remap_teams mapping configured.
    """
    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)

    if not event.remap_teams:
        return

    # Presumably pre-loads awards/matches/teams before the remap helpers run -- TODO confirm
    event.prepAwardsMatchesTeams()

    # Remap matches
    EventHelper.remapteams_matches(event.matches, event.remap_teams)
    MatchManipulator.createOrUpdate(event.matches)

    # Remap alliance selections
    if event.alliance_selections:
        EventHelper.remapteams_alliances(event.alliance_selections, event.remap_teams)

    # Remap rankings
    if event.rankings:
        EventHelper.remapteams_rankings(event.rankings, event.remap_teams)
    if event.details and event.details.rankings2:
        EventHelper.remapteams_rankings2(event.details.rankings2, event.remap_teams)
    EventDetailsManipulator.createOrUpdate(event.details)

    # Remap awards
    EventHelper.remapteams_awards(event.awards, event.remap_teams)
    # NOTE(review): auto_union=False looks intended to replace stored team lists
    # rather than merge them -- confirm against AwardManipulator semantics
    AwardManipulator.createOrUpdate(event.awards, auto_union=False)
def deleteInvalidMatches(self, match_list):
    """
    A match is invalid iff it is an elim match where the match number is 3
    and the same alliance won in match numbers 1 and 2 of the same set.

    Deletes invalid matches (best-effort) and returns the remaining ones.
    """
    matches_by_key = {}
    for match in match_list:
        matches_by_key[match.key_name] = match

    return_list = []
    for match in match_list:
        if match.comp_level in Match.ELIM_LEVELS and match.match_number == 3 and (not match.has_been_played):
            match_1 = matches_by_key.get(Match.renderKeyName(match.event.id(), match.comp_level, match.set_number, 1))
            match_2 = matches_by_key.get(Match.renderKeyName(match.event.id(), match.comp_level, match.set_number, 2))
            if match_1 is not None and match_2 is not None and\
               match_1.has_been_played and match_2.has_been_played and\
               match_1.winning_alliance == match_2.winning_alliance:
                try:
                    MatchManipulator.delete(match)
                    logging.warning("Deleting invalid match: %s" % match.key_name)
                except Exception:  # narrowed from bare except so SystemExit/KeyboardInterrupt propagate
                    logging.warning("Tried to delete invalid match, but failed: %s" % match.key_name)
                continue
        return_list.append(match)
    return return_list
def post(self, match_key):
    """Create or overwrite a match from admin form fields, then redirect to it."""
    self._require_admin()

    alliances_json = self.request.get("alliances_json")
    score_breakdown_json = self.request.get("score_breakdown_json")
    alliances = json.loads(alliances_json)

    tba_raw = self.request.get("tba_videos")
    tba_videos = json.loads(tba_raw) if tba_raw else []
    youtube_raw = self.request.get("youtube_videos")
    youtube_videos = json.loads(youtube_raw) if youtube_raw else []

    team_key_names = []
    for color in alliances:
        team_key_names.extend(alliances[color].get('teams', None))

    match = Match(
        id=match_key,
        event=Event.get_by_id(self.request.get("event_key_name")).key,
        set_number=int(self.request.get("set_number")),
        match_number=int(self.request.get("match_number")),
        comp_level=self.request.get("comp_level"),
        team_key_names=team_key_names,
        alliances_json=alliances_json,
        score_breakdown_json=score_breakdown_json,
        tba_videos=tba_videos,
        youtube_videos=youtube_videos
        # no_auto_update = str(self.request.get("no_auto_update")).lower() == "true", #TODO
    )
    MatchManipulator.createOrUpdate(match, auto_union=False)
    self.redirect("/admin/match/" + match.key_name)
def deleteInvalidMatches(self, match_list, event):
    """
    A match is invalid iff it is an elim match that has not been played
    and the same alliance already won in 2 match numbers in the same set.

    Respects the event's playoff type: round-robin semifinals are never
    deleted, and best-of-5 finals require 3 wins to clinch.
    """
    red_win_counts = defaultdict(int)  # key: <comp_level><set_number>
    blue_win_counts = defaultdict(int)  # key: <comp_level><set_number>
    for match in match_list:
        if match.has_been_played and match.comp_level in Match.ELIM_LEVELS:
            key = '{}{}'.format(match.comp_level, match.set_number)
            if match.winning_alliance == 'red':
                red_win_counts[key] += 1
            elif match.winning_alliance == 'blue':
                blue_win_counts[key] += 1

    return_list = []
    for match in match_list:
        if match.comp_level in Match.ELIM_LEVELS and not match.has_been_played:
            if event.playoff_type != PlayoffType.ROUND_ROBIN_6_TEAM or match.comp_level == 'f':  # Don't delete round robin semifinal matches
                key = '{}{}'.format(match.comp_level, match.set_number)
                n = 3 if event.playoff_type == PlayoffType.BO5_FINALS else 2
                if red_win_counts[key] == n or blue_win_counts[key] == n:
                    try:
                        MatchManipulator.delete(match)
                        logging.warning("Deleting invalid match: %s" % match.key_name)
                    except Exception:  # narrowed from bare except
                        logging.warning("Tried to delete invalid match, but failed: %s" % match.key_name)
                    continue
        return_list.append(match)
    return return_list
def post(self):
    """Import offseason matches from pasted CSV, store them, and notify Firebase.

    The Firebase push is best-effort; failure only logs a warning.
    """
    self._require_admin()
    event_key = self.request.get('event_key')
    matches_csv = self.request.get('matches_csv')
    matches = OffseasonMatchesParser.parse(matches_csv)
    event = Event.get_by_id(event_key)
    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match.get("comp_level", None),
            match.get("set_number", 0),
            match.get("match_number", 0)),
        event=event.key,
        game=Match.FRC_GAMES_BY_YEAR.get(event.year, "frc_unknown"),
        set_number=match.get("set_number", 0),
        match_number=match.get("match_number", 0),
        comp_level=match.get("comp_level", None),
        team_key_names=match.get("team_key_names", None),
        alliances_json=match.get("alliances_json", None))
        for match in matches]
    MatchManipulator.createOrUpdate(matches)

    try:
        FirebasePusher.updated_event(event.key_name)
    except Exception:  # narrowed from bare except; push is deliberately best-effort
        logging.warning("Enqueuing Firebase push failed!")

    self.redirect('/admin/event/{}'.format(event_key))
def post(self):
    """Import offseason matches from pasted CSV and store them for the event."""
    self._require_admin()
    event_key = self.request.get('event_key')
    event = Event.get_by_id(event_key)

    parsed_rows, _ = OffseasonMatchesParser.parse(self.request.get('matches_csv'))

    matches = []
    for row in parsed_rows:
        matches.append(Match(
            id=Match.renderKeyName(
                event.key.id(),
                row.get("comp_level", None),
                row.get("set_number", 0),
                row.get("match_number", 0)),
            event=event.key,
            year=event.year,
            set_number=row.get("set_number", 0),
            match_number=row.get("match_number", 0),
            comp_level=row.get("comp_level", None),
            team_key_names=row.get("team_key_names", None),
            alliances_json=row.get("alliances_json", None)))

    MatchManipulator.createOrUpdate(matches)
    self.redirect('/admin/event/{}'.format(event_key))
def post(self):
    """Attach YouTube videos to many matches at once from a posted JSON mapping."""
    self._require_admin()
    additions = json.loads(self.request.get("youtube_additions_json"))
    match_keys, youtube_videos = zip(*additions["videos"])
    matches = ndb.get_multi([ndb.Key(Match, mk) for mk in match_keys])

    results = {"existing": [], "bad_match": [], "added": []}
    matches_to_put = []
    for match, mk, video in zip(matches, match_keys, youtube_videos):
        if not match:
            results["bad_match"].append(mk)
        elif video in match.youtube_videos:
            results["existing"].append(mk)
        else:
            match.youtube_videos.append(video)
            match.dirty = True  # hacky
            matches_to_put.append(match)
            results["added"].append(mk)

    MatchManipulator.createOrUpdate(matches_to_put)

    self.template_values.update({
        "results": results,
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/videos_add.html')
    self.response.out.write(template.render(path, self.template_values))
def get(self, year):
    """Repair finals matches whose keys use set number 0 ('_f0m...').

    Finds all finals ('f') matches for the given year with a malformed
    '_f0m' key, rewrites them under set_number=1, saves the renamed
    copies, deletes the old keys, and renders a summary page.
    """
    year_event_keys = Event.query(Event.year == int(year)).fetch(1000, keys_only=True)

    # Collect finals-match keys across every event of the year.
    final_match_keys = []
    for event_key in year_event_keys:
        final_match_keys.extend(Match.query(Match.event == event_key, Match.comp_level == 'f').fetch(100, keys_only=True))

    # Only keys with the bogus set number 0 need repair.
    match_keys_to_repair = []
    for match_key in final_match_keys:
        key_name = match_key.id()
        if '_f0m' in key_name:
            match_keys_to_repair.append(match_key)

    deleted_keys = []
    matches_to_repair = ndb.get_multi(match_keys_to_repair)
    for match in matches_to_repair:
        # Remember the old key before rewriting it so the stale entity can be removed.
        deleted_keys.append(match.key)
        event = ndb.get_multi([match.event])[0]
        match.set_number = 1
        # Reassign the entity's key in place; the createOrUpdate below writes
        # it under the corrected name.
        match.key = ndb.Key(Match, Match.renderKeyName(
            event.key.id(),
            match.comp_level,
            match.set_number,
            match.match_number))

    # Write the renamed matches first, then delete the old entities.
    MatchManipulator.createOrUpdate(matches_to_repair)
    MatchManipulator.delete_keys(deleted_keys)

    template_values = {'deleted_keys': deleted_keys,
                       'new_matches': matches_to_repair}

    path = os.path.join(os.path.dirname(__file__), '../templates/math/final_matches_repair_do.html')
    self.response.out.write(template.render(path, template_values))
def deleteInvalidMatches(self, match_list):
    """
    A match is invalid iff it is an elim match where the match number is 3
    and the same alliance won in match numbers 1 and 2 of the same set.

    Deletes invalid matches (best-effort) and returns the remaining ones.
    """
    matches_by_key = {}
    for match in match_list:
        matches_by_key[match.key_name] = match

    return_list = []
    for match in match_list:
        if match.comp_level in Match.ELIM_LEVELS and match.match_number == 3 and (
                not match.has_been_played):
            match_1 = matches_by_key.get(
                Match.renderKeyName(match.event.id(), match.comp_level, match.set_number, 1))
            match_2 = matches_by_key.get(
                Match.renderKeyName(match.event.id(), match.comp_level, match.set_number, 2))
            if match_1 is not None and match_2 is not None and\
               match_1.has_been_played and match_2.has_been_played and\
               match_1.winning_alliance == match_2.winning_alliance:
                try:
                    MatchManipulator.delete(match)
                    logging.warning("Deleting invalid match: %s" % match.key_name)
                except Exception:  # narrowed from bare except
                    logging.warning(
                        "Tried to delete invalid match, but failed: %s" % match.key_name)
                continue
        return_list.append(match)
    return return_list
def createIncompleteQuals(self):
    """Build, persist, and return ten unplayed qualification matches (qm 11-20)."""
    matches = []
    for match_number in range(11, 21):
        matches.append(self.buildTestMatch("qm", 1, match_number, False))
    MatchManipulator.createOrUpdate(matches)
    return matches
def get(self, event_key, comp_level, to_delete):
    """Delete all (or only unplayed) matches at one comp level for an event."""
    self._require_admin()
    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)

    organized_matches = MatchHelper.organizeMatches(event.matches)
    if comp_level not in organized_matches:
        self.abort(400)
        return

    level_matches = organized_matches[comp_level]
    if to_delete == 'all':
        matches_to_delete = list(level_matches)
    elif to_delete == 'unplayed':
        matches_to_delete = [m for m in level_matches if not m.has_been_played]
    else:
        matches_to_delete = []

    delete_count = len(matches_to_delete)
    if matches_to_delete:
        MatchManipulator.delete(matches_to_delete)

    self.redirect("/admin/event/{}?deleted={}#matches".format(
        event_key, delete_count))
def _process_request(self, request, event_key):
    """Parse posted JSON matches for an event and upsert them into the datastore."""
    event = Event.get_by_id(event_key)
    year = int(event_key[:4])

    matches = []
    for parsed in JSONMatchesParser.parse(request.body, year):
        match = Match(
            id=Match.renderKeyName(
                event.key.id(),
                parsed.get("comp_level", None),
                parsed.get("set_number", 0),
                parsed.get("match_number", 0)),
            event=event.key,
            year=event.year,
            set_number=parsed.get("set_number", 0),
            match_number=parsed.get("match_number", 0),
            comp_level=parsed.get("comp_level", None),
            team_key_names=parsed.get("team_key_names", None),
            alliances_json=parsed.get("alliances_json", None),
            score_breakdown_json=parsed.get("score_breakdown_json", None),
            time_string=parsed.get("time_string", None),
            time=parsed.get("time", None),
        )

        if (not match.time or match.time == "") and match.time_string:
            # We can calculate the real time from the time string
            logging.debug("Calculating time!")
            MatchHelper.add_match_times(event, [match])

        matches.append(match)

    MatchManipulator.createOrUpdate(matches)
    self.response.out.write(json.dumps({'Success': "Matches successfully updated"}))
def _process_request(self, request, event_key):
    """Attach YouTube video ids from the request body to existing matches."""
    try:
        match_videos = json.loads(request.body)
    except Exception:
        self._errors = json.dumps(
            {"Error": "Invalid JSON. Please check input."})
        self.abort(400)

    matches_to_put = []
    for partial_match_key, youtube_id in match_videos.items():
        match_key = '{}_{}'.format(event_key, partial_match_key)
        match = Match.get_by_id(match_key)
        if match is None:
            self._errors = json.dumps(
                {"Error": "Match {} does not exist!".format(match_key)})
            self.abort(400)
        if youtube_id in match.youtube_videos:
            continue
        match.youtube_videos.append(youtube_id)
        match.dirty = True  # This is hacky -fangeugene 2014-10-26
        matches_to_put.append(match)

    MatchManipulator.createOrUpdate(matches_to_put)
    self.response.out.write(
        json.dumps({'Success': "Match videos successfully updated"}))
def test_createOrUpdate_no_auto_union(self):
    """With auto_union disabled, the second write replaces instead of merging."""
    MatchManipulator.createOrUpdate(self.old_match)

    stored = Match.get_by_id("2012ct_qm1")
    self.assertOldMatch(stored)
    self.assertEqual(
        stored.alliances_json,
        """{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""")

    MatchManipulator.createOrUpdate(self.new_match, auto_union=False)
    self.assertMergedMatch(Match.get_by_id("2012ct_qm1"), False)
def add_year(event_key):
    """Backfill Match.year (parsed from the event-key prefix) for one event's matches."""
    logging.info(event_key)
    matches = event_key.get().matches
    if not matches:
        return
    for match in matches:
        match.year = int(match.event.id()[:4])
        match.dirty = True
        MatchManipulator.createOrUpdate(match)
def test_createOrUpdate(self):
    """Default createOrUpdate merges the new match into the stored one."""
    MatchManipulator.createOrUpdate(self.old_match)

    stored = Match.get_by_id("2012ct_qm1")
    self.assertOldMatch(stored)
    self.assertEqual(
        stored.alliances_json,
        """{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""")

    MatchManipulator.createOrUpdate(self.new_match)
    self.assertMergedMatch(Match.get_by_id("2012ct_qm1"))
def _process_request(self, request, event_key):
    """Delete every match for an event; the body must echo the event key as confirmation."""
    if request.body != event_key:
        self._errors = json.dumps({"Error": "To delete all matches for this event, the body of the request must be the event key."})
        self.abort(400)

    match_keys = Match.query(Match.event == ndb.Key(Event, event_key)).fetch(keys_only=True)
    MatchManipulator.delete_keys(match_keys)

    self.response.out.write(json.dumps({'Success': "All matches for {} deleted".format(event_key)}))
def createCompleteQuals(self):
    """Build and persist ten played qualification matches (qm 1-10)."""
    matches = [self.buildTestMatch("qm", 1, n, True) for n in range(1, 11)]
    MatchManipulator.createOrUpdate(matches)
def test_createOrUpdate(self):
    """Default createOrUpdate merges the new match and keeps the score breakdown."""
    MatchManipulator.createOrUpdate(self.old_match)

    stored = Match.get_by_id("2012ct_qm1")
    self.assertOldMatch(stored)
    self.assertEqual(
        stored.alliances_json,
        """{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""")
    self.assertEqual(stored.score_breakdown['red']['auto'], 20)

    MatchManipulator.createOrUpdate(self.new_match)
    self.assertMergedMatch(Match.get_by_id("2012ct_qm1"), True)
def predict_future_matches(cls, played_matches, unplayed_matches, timezone, is_live):
    """
    Add match time predictions for future matches

    Mutates match.predicted_time on up to 20 unplayed matches (same day
    only) and persists the whole unplayed list.  `played_matches` and
    `unplayed_matches` are assumed to be in chronological order -- TODO confirm.
    """
    last_match = played_matches[-1] if played_matches else None
    next_match = unplayed_matches[0] if unplayed_matches else None
    if not next_match:
        # Nothing to predict
        return
    last_match_day = cls.as_local(last_match.time, timezone).day if last_match else None
    average_cycle_time = cls.compute_average_cycle_time(played_matches, next_match, timezone)
    last = last_match
    # Only predict up to 20 matches in the future on the same day
    for i in range(0, min(20, len(unplayed_matches))):
        match = unplayed_matches[i]
        scheduled_time = cls.as_local(match.time, timezone)
        if scheduled_time.day != last_match_day and last_match_day is not None:
            # Stop, once we exhaust all unplayed matches on this day
            break

        # For the first iteration, base the predictions off the newest known actual start time
        # Otherwise, use the predicted start time of the previously processed match
        last_predicted = None
        if last_match:
            last_predicted = cls.as_local(last_match.actual_time if i == 0 else last.predicted_time, timezone)
        if last_predicted and average_cycle_time:
            predicted = last_predicted + datetime.timedelta(seconds=average_cycle_time)
        else:
            # No usable history: fall back to the scheduled time as-is.
            predicted = match.time

        # Never predict a match to happen more than 2 minutes ahead of schedule or in the past
        # However, if the event is not live (we're running the job manually for a single event),
        # then allow predicted times to be in the past.
        # NOTE: the conditional expression binds the whole right-hand side:
        # (now + MAX_IN_PAST) when live, else the epoch lower bound.
        now = datetime.datetime.now(timezone) + cls.MAX_IN_PAST if is_live else cls.as_local(cls.EPOCH, timezone)
        earliest_possible = cls.as_local(match.time + datetime.timedelta(minutes=-2), timezone)
        match.predicted_time = max(cls.as_utc(predicted), cls.as_utc(earliest_possible), cls.as_utc(now))
        last = match

    MatchManipulator.createOrUpdate(unplayed_matches)
def test_2017scmb_sequence(self):
    """Replay archived 2017scmb playoff schedule snapshots in time order and
    check that deleteInvalidMatches converges on the correct bracket.

    Fetches every stored FMS API response for the event from Cloud Storage,
    applies each at a simulated time 30s after it was captured, and then
    verifies final match counts and the qf4 tiebreaker scores.
    """
    event = Event(
        id='2017scmb',
        event_short='scmb',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    event_code = 'scmb'

    # List all archived API responses for this event's playoff hybrid schedule.
    file_prefix = 'frc-api-response/v2.0/2017/schedule/{}/playoff/hybrid/'.format(event_code)

    context = ndb.get_context()
    result = context.urlfetch('https://www.googleapis.com/storage/v1/b/bucket/o?bucket=tbatv-prod-hrd.appspot.com&prefix={}'.format(file_prefix)).get_result()

    for item in json.loads(result.content)['items']:
        filename = item['name']
        # Each filename encodes the capture timestamp; replay shortly after it.
        time_str = filename.replace(file_prefix, '').replace('.json', '').strip()
        file_time = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
        query_time = file_time + datetime.timedelta(seconds=30)
        MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=query_time).getMatches('2017{}'.format(event_code)), run_post_update_hook=False)
    MatchHelper.deleteInvalidMatches(event.matches, event)

    # Expected bracket shape after replaying the full sequence.
    qf_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'qf').fetch()
    self.assertEqual(len(qf_matches), 11)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 4)

    f_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'f').fetch()
    self.assertEqual(len(f_matches), 3)

    # qf4m1 was a tie (305-305), forcing the extra matches below.
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').alliances['red']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').alliances['blue']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').score_breakdown['red']['totalPoints'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').score_breakdown['blue']['totalPoints'], 305)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').alliances['red']['score'], 213)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').alliances['blue']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').score_breakdown['red']['totalPoints'], 213)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').score_breakdown['blue']['totalPoints'], 305)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').alliances['red']['score'], 312)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').score_breakdown['red']['totalPoints'], 312)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').score_breakdown['blue']['totalPoints'], 255)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').alliances['red']['score'], 310)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').alliances['blue']['score'], 306)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').score_breakdown['red']['totalPoints'], 310)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').score_breakdown['blue']['totalPoints'], 306)
def _process_request(self, request, event_key):
    """Delete the matches named in the request body (list of partial match keys)."""
    try:
        match_keys = json.loads(request.body)
    except Exception:
        self._errors = json.dumps({"Error": "'keys_to_delete' could not be parsed"})
        self.abort(400)

    keys_to_delete = {ndb.Key(Match, '{}_{}'.format(event_key, partial))
                      for partial in match_keys}
    MatchManipulator.delete_keys(keys_to_delete)

    self.response.out.write(json.dumps({'keys_deleted': [key.id().split('_')[1] for key in keys_to_delete]}))
def _process_request(self, request, event_key):
    """Delete the matches named in the request body and echo back the deleted keys."""
    try:
        partial_keys = json.loads(request.body)
    except Exception:
        self._errors = json.dumps({"Error": "'keys_to_delete' could not be parsed"})
        self.abort(400)

    keys_to_delete = set()
    for partial in partial_keys:
        keys_to_delete.add(ndb.Key(Match, '{}_{}'.format(event_key, partial)))
    MatchManipulator.delete_keys(keys_to_delete)

    deleted = [key.id().split('_')[1] for key in keys_to_delete]
    self.response.out.write(json.dumps({"keys_deleted": deleted}))
def post(self, match_key_id):
    """Admin-only: delete a single match and return to its event page."""
    self._require_admin()
    logging.warning("Deleting %s at the request of %s / %s" % (
        match_key_id,
        self.user_bundle.user.user_id(),
        self.user_bundle.user.email()))

    match = Match.get_by_id(match_key_id)
    event_key_id = match.event.id()
    MatchManipulator.delete(match)

    self.redirect("/admin/event/%s?deleted=%s" % (event_key_id, match_key_id))
def post(self, match_key_id):
    """Admin-only: delete a single match, logging who requested it."""
    self._require_admin()

    requester = self.user_bundle.user
    logging.warning("Deleting %s at the request of %s / %s" % (
        match_key_id, requester.user_id(), requester.email()))

    match = Match.get_by_id(match_key_id)
    event_key_id = match.event.id()
    MatchManipulator.delete(match)

    self.redirect("/admin/event/%s?deleted=%s" % (event_key_id, match_key_id))
def get(self, event_key):
    """Fetch matches from USFIRST for an event, persist them, and kick off
    downstream work (Firebase push, matchstats task) when anything changed.
    """
    df = DatafeedUsfirst()
    event = Event.get_by_id(event_key)
    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event))

    if new_matches:
        for match in new_matches:
            if hasattr(match, 'dirty') and match.dirty:
                # Enqueue push notification
                try:
                    FirebasePusher.updated_event(event.key_name)
                except Exception:  # narrowed from bare except; push is best-effort
                    logging.warning("Enqueuing Firebase push failed!")
                # Enqueue task to calculate matchstats
                taskqueue.add(
                    url='/tasks/math/do/event_matchstats/' + event.key_name,
                    method='GET')
                break  # one dirty match is enough to trigger the downstream work

    template_values = {
        'matches': new_matches,
    }

    path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(template.render(path, template_values))
def store_match(data):
    """Build a Match entity from an API-shaped dict and upsert it.

    Mutates data['alliances'] in place (renames team/surrogate key fields)
    before serializing it.
    """
    match = Match(id=data['key'])
    match.event = ndb.Key(Event, data['event_key'])
    match.year = int(data['key'][:4])
    match.comp_level = data['comp_level']
    match.set_number = data['set_number']
    match.match_number = data['match_number']

    # Optional epoch-seconds timestamps; only set when present and truthy.
    for attr in ('time', 'actual_time', 'predicted_time', 'post_result_time'):
        if data.get(attr):
            setattr(match, attr, datetime.datetime.fromtimestamp(int(data[attr])))

    match.score_breakdown_json = json.dumps(data['score_breakdown'])

    for alliance in ['red', 'blue']:
        data['alliances'][alliance]['teams'] = data['alliances'][alliance].pop('team_keys')
        data['alliances'][alliance]['surrogates'] = data['alliances'][alliance].pop('surrogate_team_keys')
    match.alliances_json = json.dumps(data['alliances'])

    return MatchManipulator.createOrUpdate(match)
def accept_suggestion(self, match, suggestion):
    """Merge a suggestion's video links into the match and persist it."""
    contents = suggestion.contents
    if "youtube_videos" in contents:
        match = self._merge_youtube_videos(match, contents["youtube_videos"])
    elif "internet_archive_videos" in contents:
        match = self._merge_internet_archive_videos(match, contents["internet_archive_videos"])
    return MatchManipulator.createOrUpdate(match)
def store_match(data):
    """Build a Match entity (including team keys and YouTube videos) from an
    API-shaped dict and upsert it.

    Mutates data['alliances'] in place (renames team/surrogate key fields)
    before serializing it.
    """
    match = Match(id=data['key'])
    match.event = ndb.Key(Event, data['event_key'])
    match.year = int(data['key'][:4])
    match.comp_level = data['comp_level']
    match.set_number = data['set_number']
    match.match_number = data['match_number']

    # Optional epoch-seconds timestamps; only set when present and truthy.
    for attr in ('time', 'actual_time', 'predicted_time', 'post_result_time'):
        if data.get(attr):
            setattr(match, attr, datetime.datetime.fromtimestamp(int(data[attr])))

    match.score_breakdown_json = json.dumps(data['score_breakdown'])

    team_key_names = []
    for alliance in ['red', 'blue']:
        team_key_names += data['alliances'][alliance]['team_keys']
        data['alliances'][alliance]['teams'] = data['alliances'][alliance].pop('team_keys')
        data['alliances'][alliance]['surrogates'] = data['alliances'][alliance].pop('surrogate_team_keys')
    match.alliances_json = json.dumps(data['alliances'])
    match.team_key_names = team_key_names

    match.youtube_videos = [video['key'] for video in data['videos']
                            if video['type'] == 'youtube']

    return MatchManipulator.createOrUpdate(match)
def store_match(data):
    """Build a Match entity from an API-shaped dict and upsert it.

    Mutates data['alliances'] in place (renames team/surrogate key fields)
    before serializing it.
    """
    match = Match(id=data['key'])
    match.event = ndb.Key(Event, data['event_key'])
    match.year = int(data['key'][:4])
    match.comp_level = data['comp_level']
    match.set_number = data['set_number']
    match.match_number = data['match_number']

    # Optional epoch-seconds timestamps.
    if data.get('time'):
        match.time = datetime.datetime.fromtimestamp(int(data['time']))
    if data.get('actual_time'):
        match.actual_time = datetime.datetime.fromtimestamp(int(data['actual_time']))
    if data.get('predicted_time'):
        match.predicted_time = datetime.datetime.fromtimestamp(int(data['predicted_time']))
    if data.get('post_result_time'):
        match.post_result_time = datetime.datetime.fromtimestamp(int(data['post_result_time']))

    match.score_breakdown_json = json.dumps(data['score_breakdown'])

    for color in ['red', 'blue']:
        alliance = data['alliances'][color]
        alliance['teams'] = alliance.pop('team_keys')
        alliance['surrogates'] = alliance.pop('surrogate_team_keys')
    match.alliances_json = json.dumps(data['alliances'])

    return MatchManipulator.createOrUpdate(match)
def post(self, event_key_id):
    """Delete an event along with its matches and event-team links.

    Logs the requesting user, removes up to 5000 matches and 5000
    EventTeams tied to the event, deletes the event itself, then redirects
    back to the admin event list.
    """
    requester = users.get_current_user()
    logging.warning("Deleting %s at the request of %s / %s" % (
        event_key_id,
        requester.user_id(),
        requester.email()))

    event = Event.get_by_id(event_key_id)

    doomed_matches = Match.query(Match.event == event.key).fetch(5000)
    MatchManipulator.delete(doomed_matches)

    doomed_event_teams = EventTeam.query(EventTeam.event == event.key).fetch(5000)
    EventTeamManipulator.delete(doomed_event_teams)

    EventManipulator.delete(event)

    self.redirect("/admin/events?deleted=%s" % event_key_id)
def send_upcoming_match_notification(cls, match, event):
    """Send an upcoming-match push notification for `match`.

    When the match is the first of its competition level (set 1, match 1),
    a level-starting notification is sent first. Afterwards the match is
    marked so the notification is not sent again.
    """
    subscribers = PushHelper.get_users_subscribed_to_match(
        match, NotificationType.UPCOMING_MATCH)
    upcoming_keys = PushHelper.get_client_ids_for_users(subscribers)

    if match.set_number == 1 and match.match_number == 1:
        # First match of a new comp level: also announce the level start.
        level_subscribers = PushHelper.get_users_subscribed_to_match(
            match, NotificationType.LEVEL_STARTING)
        level_keys = PushHelper.get_client_ids_for_users(level_subscribers)
        CompLevelStartingNotification(match, event).send(level_keys)

    # The upcoming-match notification itself.
    UpcomingMatchNotification(match, event).send(upcoming_keys)

    # Record that the push went out so this match is not notified again.
    match.push_sent = True
    match.dirty = True
    from helpers.match_manipulator import MatchManipulator
    MatchManipulator.createOrUpdate(match)
def post(self, event_key_id):
    """Admin-only: delete an event plus all of its matches and event-teams.

    Requires admin access, logs who asked for the deletion, then removes
    up to 5000 matches and 5000 EventTeams before deleting the event and
    redirecting to the admin event list.
    """
    self._require_admin()
    logging.warning("Deleting %s at the request of %s / %s" % (
        event_key_id,
        self.user_bundle.user.user_id(),
        self.user_bundle.user.email()))

    event = Event.get_by_id(event_key_id)

    doomed_matches = Match.query(Match.event == event.key).fetch(5000)
    MatchManipulator.delete(doomed_matches)

    doomed_event_teams = EventTeam.query(EventTeam.event == event.key).fetch(5000)
    EventTeamManipulator.delete(doomed_event_teams)

    EventManipulator.delete(event)

    self.redirect("/admin/events?deleted=%s" % event_key_id)
def test_2017flwp(self):
    """Regression test for replayed elimination ties at 2017flwp.

    After the first data pull, sf1m3 is a 255-255 tie. A later pull adds
    the sf1m4 tiebreaker; the original tied match must survive unchanged
    and deleteInvalidMatches must not drop valid matches.

    Fix: the datetime day/minute arguments were written as `04`, which is
    an octal literal in Python 2 and a SyntaxError in Python 3 (PEP 3127);
    they are plain `4` now.
    """
    event = Event(
        id='2017flwp',
        event_short='flwp',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    # First pull: the tied sf1m3 exists, no tiebreaker yet.
    MatchManipulator.createOrUpdate(DatafeedFMSAPI(
        'v2.0', sim_time=datetime.datetime(2017, 3, 4, 21, 22)).getMatches('2017flwp'))
    MatchHelper.deleteInvalidMatches(event.matches, event)

    sf_matches = Match.query(
        Match.event == ndb.Key(Event, '2017flwp'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 5)
    old_match = Match.get_by_id('2017flwp_sf1m3')
    self.assertNotEqual(old_match, None)
    self.assertEqual(old_match.alliances['red']['score'], 255)
    self.assertEqual(old_match.alliances['blue']['score'], 255)
    self.assertEqual(old_match.score_breakdown['red']['totalPoints'], 255)
    self.assertEqual(old_match.score_breakdown['blue']['totalPoints'], 255)

    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    # Second pull: the sf1m4 tiebreaker shows up; sf1m3 must be untouched.
    MatchManipulator.createOrUpdate(DatafeedFMSAPI(
        'v2.0', sim_time=datetime.datetime(2017, 3, 4, 21, 35)).getMatches('2017flwp'))
    MatchHelper.deleteInvalidMatches(event.matches, event)

    sf_matches = Match.query(
        Match.event == ndb.Key(Event, '2017flwp'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 6)
    new_match = Match.get_by_id('2017flwp_sf1m3')
    self.assertNotEqual(new_match, None)
    self.assertEqual(old_match.alliances, new_match.alliances)
    self.assertEqual(old_match.score_breakdown, new_match.score_breakdown)

    tiebreaker_match = Match.get_by_id('2017flwp_sf1m4')
    self.assertNotEqual(tiebreaker_match, None)
    self.assertEqual(tiebreaker_match.alliances['red']['score'], 165)
    self.assertEqual(tiebreaker_match.alliances['blue']['score'], 263)
    self.assertEqual(tiebreaker_match.score_breakdown['red']['totalPoints'], 165)
    self.assertEqual(tiebreaker_match.score_breakdown['blue']['totalPoints'], 263)
def predict_future_matches(cls, played_matches, unplayed_matches, timezone, is_live):
    """
    Add match time predictions for future matches.

    played_matches: matches already played, in order (last element is the
        most recent). unplayed_matches: upcoming matches in schedule order.
    timezone: tzinfo used to localize times. is_live: when True, predictions
        are clamped to not lie in the past.

    Mutates match.predicted_time on the first up-to-20 same-day unplayed
    matches and persists all unplayed matches via
    MatchManipulator.createOrUpdate. Returns None.
    """
    last_match = played_matches[-1] if played_matches else None
    next_match = unplayed_matches[0] if unplayed_matches else None
    if not next_match:
        # Nothing to predict
        return
    # Local calendar day of the most recent played match; predictions stop
    # once the schedule rolls to a different day.
    last_match_day = cls.as_local(last_match.time, timezone).day if last_match else None
    average_cycle_time = cls.compute_average_cycle_time(played_matches, next_match, timezone)
    last = last_match
    # Only predict up to 20 matches in the future on the same day
    for i in range(0, min(20, len(unplayed_matches))):
        match = unplayed_matches[i]
        scheduled_time = cls.as_local(match.time, timezone)
        if scheduled_time.day != last_match_day and last_match_day is not None:
            # Stop, once we exhaust all unplayed matches on this day
            break
        # For the first iteration, base the predictions off the newest known actual start time
        # Otherwise, use the predicted start time of the previously processed match
        last_predicted = None
        if last_match:
            last_predicted = cls.as_local(last_match.actual_time if i == 0 else last.predicted_time, timezone)
        if last_predicted and average_cycle_time:
            predicted = last_predicted + datetime.timedelta(seconds=average_cycle_time)
        else:
            # No baseline or no cycle-time estimate: fall back to the schedule.
            predicted = match.time
        # Never predict a match to happen more than 2 minutes ahead of schedule or in the past
        # However, if the event is not live (we're running the job manually for a single event),
        # then allow predicted times to be in the past.
        # NOTE(review): the conditional expression binds as
        # `(now(tz) + MAX_IN_PAST) if is_live else as_local(EPOCH, tz)`;
        # MAX_IN_PAST is presumably a small timedelta tolerance and EPOCH a
        # sentinel "far past" datetime -- confirm against the class constants.
        now = datetime.datetime.now(timezone) + cls.MAX_IN_PAST if is_live else cls.as_local(cls.EPOCH, timezone)
        earliest_possible = cls.as_local(match.time + datetime.timedelta(minutes=-2), timezone)
        # Clamp: predicted time is never earlier than (schedule - 2 min) nor
        # earlier than `now` (when live). All operands normalized to UTC.
        match.predicted_time = max(cls.as_utc(predicted), cls.as_utc(earliest_possible), cls.as_utc(now))
        last = match
    MatchManipulator.createOrUpdate(unplayed_matches)
def get(self, event_key):
    """Fetch matches for `event_key` from the FMS API datafeed, persist
    them, and render the results template."""
    datafeed = DatafeedFMSAPI()
    stored_matches = MatchManipulator.createOrUpdate(datafeed.getMatches(event_key))

    path = os.path.join(
        os.path.dirname(__file__),
        '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(
        template.render(path, {'matches': stored_matches}))
def get(self, event_key):
    """Backfill DQ lists onto stored matches from the FMS API.

    For each fetched match whose red/blue teams and scores agree with the
    stored copy, copy the 'dqs' lists into the stored alliances; anything
    missing or mismatched is logged and skipped. Writes "DONE" when
    finished.
    """
    df = DatafeedFMSAPI('v2.0', save_response=True)
    updated_matches = []
    for fetched in df.getMatches(event_key):
        stored = fetched.key.get()
        # Only update when teams and scores are equal.
        matches_up = stored and all(
            fetched.alliances[color][field] == stored.alliances[color][field]
            for color in ('red', 'blue')
            for field in ('teams', 'score'))
        if not matches_up:
            logging.warning("Match not equal: {}".format(fetched.key.id()))
            continue
        merged = stored.alliances
        merged['red']['dqs'] = fetched.alliances['red']['dqs']
        merged['blue']['dqs'] = fetched.alliances['blue']['dqs']
        stored.alliances_json = json.dumps(merged)
        updated_matches.append(stored)
    MatchManipulator.createOrUpdate(updated_matches)
    self.response.out.write("DONE")
def _process_request(self, request, event_key):
    """Attach YouTube video ids from the request body to existing matches.

    The body is a JSON object mapping partial match keys (e.g. 'qm1') to
    YouTube ids. Aborts with 400 on malformed JSON or an unknown match;
    already-present ids are skipped.
    """
    try:
        match_videos = json.loads(request.body)
    except Exception:
        self._errors = json.dumps({"Error": "Invalid JSON. Please check input."})
        self.abort(400)

    to_save = []
    for partial_match_key, youtube_id in match_videos.items():
        match_key = '{}_{}'.format(event_key, partial_match_key)
        match = Match.get_by_id(match_key)
        if match is None:
            self._errors = json.dumps({"Error": "Match {} does not exist!".format(match_key)})
            self.abort(400)

        if youtube_id in match.youtube_videos:
            continue  # already recorded; nothing to write
        match.youtube_videos.append(youtube_id)
        match.dirty = True  # This is hacky -fangeugene 2014-10-26
        to_save.append(match)

    MatchManipulator.createOrUpdate(to_save)
def get(self, event_key):
    """Pull matches for `event_key` from the v2.0 FMS API, store them, and
    render the results template."""
    datafeed = DatafeedFMSAPI('v2.0')
    stored_matches = MatchManipulator.createOrUpdate(datafeed.getMatches(event_key))

    path = os.path.join(
        os.path.dirname(__file__),
        '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(
        template.render(path, {'matches': stored_matches}))
def post(self):
    """Admin-only: delete matches whose datastore id disagrees with their
    computed key_name, then render a report of what was removed."""
    self._require_admin()
    event = Event.get_by_id(self.request.get("event_key_name"))

    doomed_matches = []
    doomed_key_names = []
    if event is not None:
        for match in Match.query(Match.event == event.key):
            # A mismatch means the entity was stored under a stale id.
            if match.key.id() != match.key_name:
                doomed_matches.append(match)
                doomed_key_names.append(match.key_name)
        MatchManipulator.delete(doomed_matches)

    self.template_values.update({
        "match_keys_deleted": doomed_key_names,
        "tried_delete": True
    })
    path = os.path.join(
        os.path.dirname(__file__),
        '../../templates/admin/matches_cleanup.html')
    self.response.out.write(template.render(path, self.template_values))
def _process_request(self, request, event_key):
    """Attach YouTube video ids from the request body to existing matches.

    The body is a JSON object mapping partial match keys (e.g. 'qm1') to
    YouTube ids. Aborts with 400 on malformed JSON or an unknown match,
    and writes a JSON success message when done.
    """
    try:
        match_videos = json.loads(request.body)
    except Exception:
        self._errors = json.dumps({"Error": "Invalid JSON. Please check input."})
        self.abort(400)

    matches_to_put = []
    for partial_match_key, youtube_id in match_videos.items():
        match_key = '{}_{}'.format(event_key, partial_match_key)
        match = Match.get_by_id(match_key)
        if match is None:
            self._errors = json.dumps({"Error": "Match {} does not exist!".format(match_key)})
            self.abort(400)

        if youtube_id not in match.youtube_videos:
            match.youtube_videos.append(youtube_id)
            # Consistency fix: the sibling video-update handlers mark
            # hand-edited matches dirty before createOrUpdate; this one
            # previously skipped the flag.
            match.dirty = True  # hacky
            matches_to_put.append(match)

    MatchManipulator.createOrUpdate(matches_to_put)

    self.response.out.write(json.dumps({'Success': "Match videos successfully updated"}))
def get(self, event_key):
    """Pull and store matches for `event_key` from the v2.0 FMS API;
    render the results template only for non-taskqueue requests."""
    datafeed = DatafeedFMSAPI('v2.0')
    stored_matches = MatchManipulator.createOrUpdate(datafeed.getMatches(event_key))

    # Task-queue invocations carry X-Appengine-Taskname; skip rendering then.
    if 'X-Appengine-Taskname' not in self.request.headers:
        path = os.path.join(
            os.path.dirname(__file__),
            '../templates/datafeeds/usfirst_matches_get.html')
        self.response.out.write(
            template.render(path, {'matches': stored_matches}))