def test_accept_new_key(self):
    """Accepting a suggestion with an overridden target key should attach
    the video to the overridden match (f1m2), not the originally
    suggested one.
    """
    self.loginUser()
    self.givePermission()
    suggestion_id = self.createSuggestion()
    form = self.getSuggestionForm()
    form.set('accept_keys[]', suggestion_id)
    # Override the suggestion's target match before accepting.
    form.set('key-{}'.format(suggestion_id), '2016necmp_f1m2')
    response = form.submit().follow()
    self.assertEqual(response.status_int, 200)

    # Make sure we mark the Suggestion as REVIEWED
    suggestion = Suggestion.get_by_id(suggestion_id)
    self.assertIsNotNone(suggestion)
    self.assertEqual(suggestion.review_state, Suggestion.REVIEW_ACCEPTED)

    # Make sure the video gets associated
    match = Match.get_by_id(self.match2.key_name)
    self.assertIsNotNone(match)
    self.assertIsNotNone(match.youtube_videos)
    self.assertTrue('H-54KMwMKY0' in match.youtube_videos)

    # Make sure we don't add it to the first match
    match = Match.get_by_id(self.match.key_name)
    self.assertIsNotNone(match)
    self.assertIsNotNone(match.youtube_videos)
    self.assertFalse('H-54KMwMKY0' in match.youtube_videos)
def testAcceptNewKey(self):
    """Accepting a suggestion with an overridden target key should attach
    the video to the overridden match (f1m2), not the originally
    suggested one.
    """
    self.loginUser()
    self.givePermission()
    suggestion_id = self.createSuggestion()
    form = self.getSuggestionForm()
    form.set('accept_keys[]', suggestion_id)
    # Override the suggestion's target match before accepting.
    form.set('key-{}'.format(suggestion_id), '2016necmp_f1m2')
    response = form.submit().follow()
    self.assertEqual(response.status_int, 200)

    # Make sure we mark the Suggestion as REVIEWED
    suggestion = Suggestion.get_by_id(suggestion_id)
    self.assertIsNotNone(suggestion)
    self.assertEqual(suggestion.review_state, Suggestion.REVIEW_ACCEPTED)

    # Make sure the video gets associated
    match = Match.get_by_id(self.match2.key_name)
    self.assertIsNotNone(match)
    self.assertIsNotNone(match.youtube_videos)
    self.assertTrue('H-54KMwMKY0' in match.youtube_videos)

    # Make sure we don't add it to the first match
    match = Match.get_by_id(self.match.key_name)
    self.assertIsNotNone(match)
    self.assertIsNotNone(match.youtube_videos)
    self.assertFalse('H-54KMwMKY0' in match.youtube_videos)
def test_createOrUpdate_no_auto_union(self):
    """With auto_union disabled, new list data should replace rather than
    union into the stored match."""
    match_id = "2012ct_qm1"
    old_alliances = (
        '{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, '
        '"red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}'
    )

    # First write: the old match is stored verbatim.
    MatchManipulator.createOrUpdate(self.old_match)
    self.assertOldMatch(Match.get_by_id(match_id))
    self.assertEqual(Match.get_by_id(match_id).alliances_json, old_alliances)

    # Second write without auto_union overwrites instead of merging.
    MatchManipulator.createOrUpdate(self.new_match, auto_union=False)
    self.assertMergedMatch(Match.get_by_id(match_id), False)
def test_createOrUpdate(self):
    """A second createOrUpdate for the same key should merge new data
    into the previously stored match."""
    match_id = "2012ct_qm1"
    old_alliances = (
        '{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, '
        '"red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}'
    )

    # Seed the datastore with the old match and verify it round-trips.
    MatchManipulator.createOrUpdate(self.old_match)
    self.assertOldMatch(Match.get_by_id(match_id))
    self.assertEqual(Match.get_by_id(match_id).alliances_json, old_alliances)

    # Apply the new match and verify the merged result.
    MatchManipulator.createOrUpdate(self.new_match)
    self.assertMergedMatch(Match.get_by_id(match_id))
def test_createOrUpdate(self):
    """A second createOrUpdate should merge new data into the stored
    match, preserving the existing score breakdown semantics."""
    match_id = "2012ct_qm1"
    old_alliances = (
        '{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, '
        '"red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}'
    )

    # Seed the datastore with the old match and verify it round-trips.
    MatchManipulator.createOrUpdate(self.old_match)
    self.assertOldMatch(Match.get_by_id(match_id))
    self.assertEqual(Match.get_by_id(match_id).alliances_json, old_alliances)
    self.assertEqual(
        Match.get_by_id(match_id).score_breakdown['red']['auto'], 20)

    # Apply the new match and verify the merged result.
    MatchManipulator.createOrUpdate(self.new_match)
    self.assertMergedMatch(Match.get_by_id(match_id), True)
def test_match_videos_add(self):
    """POSTing to the trusted match_videos/add endpoint should union the
    given YouTube ids into each match's existing video list."""
    self.video_auth.put()

    # qm1 already has one video; sf1m1 starts with none.
    match1 = Match(
        id="2014casj_qm1",
        alliances_json=
        """{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
        comp_level="qm",
        event=ndb.Key(Event, '2014casj'),
        year=2014,
        set_number=1,
        match_number=1,
        team_key_names=[
            u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'
        ],
        youtube_videos=["abcdef"])
    match1.put()

    match2 = Match(
        id="2014casj_sf1m1",
        alliances_json=
        """{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
        comp_level="sf",
        event=ndb.Key(Event, '2014casj'),
        year=2014,
        set_number=1,
        match_number=1,
        team_key_names=[
            u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'
        ],
    )
    match2.put()

    match_videos = {'qm1': 'aFZy8iibMD0', 'sf1m1': 'RpSgUrsghv4'}

    request_body = json.dumps(match_videos)
    request_path = '/api/trusted/v1/event/2014casj/match_videos/add'
    # Trusted API requests are signed: md5(secret + path + body).
    sig = md5.new('{}{}{}'.format('321tEsTsEcReT', request_path,
                                  request_body)).hexdigest()
    response = self.testapp.post(request_path,
                                 request_body,
                                 headers={
                                     'X-TBA-Auth-Id': 'tEsT_id_5',
                                     'X-TBA-Auth-Sig': sig
                                 },
                                 expect_errors=True)
    self.assertEqual(response.status_code, 200)

    # qm1 keeps its pre-existing video and gains the new one.
    self.assertEqual(set(Match.get_by_id('2014casj_qm1').youtube_videos),
                     {'abcdef', 'aFZy8iibMD0'})
    self.assertEqual(set(Match.get_by_id('2014casj_sf1m1').youtube_videos),
                     {'RpSgUrsghv4'})
def test_2017flwp(self):
    """Re-ingesting 2017flwp after a tied semifinal should keep the
    original sf1m3 result intact and add the replayed tiebreaker as
    sf1m4."""
    event = Event(id='2017flwp',
                  event_short='flwp',
                  year=2017,
                  event_type_enum=0,
                  timezone_id='America/New_York')
    event.put()

    # First fetch: simulated at 21:22 on 2017-03-04 (Python 2 accepts the
    # leading-zero literal 04), before the tiebreaker was played.
    MatchManipulator.createOrUpdate(
        DatafeedFMSAPI('v2.0', sim_time=datetime.datetime(
            2017, 3, 04, 21, 22)).getMatches('2017flwp'))
    MatchHelper.deleteInvalidMatches(event.matches)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'),
                             Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 5)

    # sf1m3 ended in a 255-255 tie.
    old_match = Match.get_by_id('2017flwp_sf1m3')
    self.assertNotEqual(old_match, None)
    self.assertEqual(old_match.alliances['red']['score'], 255)
    self.assertEqual(old_match.alliances['blue']['score'], 255)
    self.assertEqual(old_match.score_breakdown['red']['totalPoints'], 255)
    self.assertEqual(old_match.score_breakdown['blue']['totalPoints'], 255)

    ndb.get_context().clear_cache(
    )  # Prevent data from leaking between tests

    # Second fetch at 21:35, after the replayed tiebreaker was scored.
    MatchManipulator.createOrUpdate(
        DatafeedFMSAPI('v2.0', sim_time=datetime.datetime(
            2017, 3, 04, 21, 35)).getMatches('2017flwp'))
    MatchHelper.deleteInvalidMatches(event.matches)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'),
                             Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 6)

    # The tied sf1m3 must be preserved unchanged...
    new_match = Match.get_by_id('2017flwp_sf1m3')
    self.assertNotEqual(new_match, None)
    self.assertEqual(old_match.alliances, new_match.alliances)
    self.assertEqual(old_match.score_breakdown, new_match.score_breakdown)

    # ...and the replay shows up as a new sf1m4.
    tiebreaker_match = Match.get_by_id('2017flwp_sf1m4')
    self.assertNotEqual(tiebreaker_match, None)
    self.assertEqual(tiebreaker_match.alliances['red']['score'], 165)
    self.assertEqual(tiebreaker_match.alliances['blue']['score'], 263)
    self.assertEqual(
        tiebreaker_match.score_breakdown['red']['totalPoints'], 165)
    self.assertEqual(
        tiebreaker_match.score_breakdown['blue']['totalPoints'], 263)
def createMatchVideoYouTubeSuggestion(cls, author_account_key, youtube_id, match_key):
    """Create a YouTube match-video Suggestion for review.

    Returns one of: 'success', 'suggestion_exists', 'video_exists',
    'bad_match', 'bad_url'.
    """
    # Guard clauses replace the original nested conditionals.
    if not youtube_id:
        return 'bad_url'

    match = Match.get_by_id(match_key)
    if not match:
        return 'bad_match'
    if youtube_id in match.youtube_videos:
        return 'video_exists'

    year = match_key[:4]
    suggestion_id = Suggestion.render_media_key_name(year, 'match', match_key, 'youtube', youtube_id)
    existing = Suggestion.get_by_id(suggestion_id)
    # Only a still-pending duplicate blocks a new suggestion; a reviewed
    # one is overwritten.
    if existing and existing.review_state == Suggestion.REVIEW_PENDING:
        return 'suggestion_exists'

    new_suggestion = Suggestion(
        id=suggestion_id,
        author=author_account_key,
        target_key=match_key,
        target_model="match",
    )
    new_suggestion.contents = {"youtube_videos": [youtube_id]}
    new_suggestion.put()
    return 'success'
def _process_request(self, request, event_key):
    """Attach the posted {partial_match_key: youtube_id} map to the
    matches of `event_key`, then report success.

    Aborts 400 on malformed JSON or an unknown match key.
    """
    try:
        match_videos = json.loads(request.body)
    except Exception:
        self._errors = json.dumps(
            {"Error": "Invalid JSON. Please check input."})
        self.abort(400)

    updated_matches = []
    for partial_match_key, youtube_id in match_videos.items():
        match_key = '{}_{}'.format(event_key, partial_match_key)
        match = Match.get_by_id(match_key)
        if match is None:
            self._errors = json.dumps(
                {"Error": "Match {} does not exist!".format(match_key)})
            self.abort(400)
        # Skip videos that are already attached.
        if youtube_id in match.youtube_videos:
            continue
        match.youtube_videos.append(youtube_id)
        match.dirty = True  # This is hacky -fangeugene 2014-10-26
        updated_matches.append(match)

    MatchManipulator.createOrUpdate(updated_matches)
    self.response.out.write(
        json.dumps({'Success': "Match videos successfully updated"}))
def _process_request(self, request, event_key):
    """Validate and store Zebra MotionWorks tracking data for `event_key`.

    Each entry must belong to this event, reference an existing match,
    and list exactly the teams on that match's alliances (same order);
    otherwise the request is rejected with a 400 before anything is
    written.
    """
    to_put = []
    for zebra_data in JSONZebraMotionWorksParser.parse(request.body):
        match_key = zebra_data['key']

        # Check that match_key matches event_key
        if match_key.split('_')[0] != event_key:
            self._errors = json.dumps({"Error": "Match key {} does not match Event key {}!".format(match_key, event_key)})
            self.abort(400)

        # Check that match exists
        match = Match.get_by_id(match_key)
        if match is None:
            self._errors = json.dumps({"Error": "Match {} does not exist!".format(match_key)})
            self.abort(400)

        # Check that teams in Zebra data and teams in Match are the same
        for color in ['red', 'blue']:
            match_teams = match.alliances[color]['teams']
            zebra_teams = [team['team_key'] for team in zebra_data['alliances'][color]]
            if match_teams != zebra_teams:
                self._errors = json.dumps({"Error": "Match {} teams are not valid!".format(match_key)})
                self.abort(400)

        to_put.append(ZebraMotionWorks(id=match_key, event=ndb.Key(Event, event_key), data=zebra_data))

    # All entries validated; write them in one batch.
    ndb.put_multi(to_put)
def post(self, match_key):
    """Update the current user's myTBA favorite/subscription state for a
    match, then redirect back to the myTBA page.

    A present 'favorite' param adds a favorite (otherwise it is removed);
    'notification_types' params replace the subscription (otherwise it is
    removed).
    """
    self._require_login()
    self._require_registration()

    current_user_id = self.user_bundle.account.key.id()

    match = Match.get_by_id(match_key)
    # Fix: the fetched match was previously unused, so favorites and
    # subscriptions could be created for nonexistent match keys. 404 like
    # the corresponding GET handler does.
    if not match:
        self.abort(404)

    if self.request.get('favorite'):
        favorite = Favorite(
            parent=ndb.Key(Account, current_user_id),
            user_id=current_user_id,
            model_type=ModelType.MATCH,
            model_key=match_key
        )
        MyTBAHelper.add_favorite(favorite)
    else:
        MyTBAHelper.remove_favorite(current_user_id, match_key, ModelType.MATCH)

    subs = self.request.get_all('notification_types')
    if subs:
        subscription = Subscription(
            parent=ndb.Key(Account, current_user_id),
            user_id=current_user_id,
            model_type=ModelType.MATCH,
            model_key=match_key,
            notification_types=[int(s) for s in subs]
        )
        MyTBAHelper.add_subscription(subscription)
    else:
        MyTBAHelper.remove_subscription(current_user_id, match_key, ModelType.MATCH)

    self.redirect('/account/mytba?status=match_updated#my-matches')
def getMatchDetails(cls, match_key):
    """Return the API dict for `match_key`, or None if it doesn't exist.

    Results are served from memcache when available and cached for two
    hours (when memcache is enabled in config).
    """
    memcache_key = "api_match_details_%s" % match_key
    cached = memcache.get(memcache_key)
    if cached is not None:
        return cached

    match = Match.get_by_id(match_key)
    if match is None:
        return None

    match_dict = {
        "key": match.key_name,
        "event": match.event.id(),
        "competition_level": match.name,
        "set_number": match.set_number,
        "match_number": match.match_number,
        "team_keys": match.team_key_names,
        "alliances": json.loads(match.alliances_json),
        "videos": match.videos,
        "time_string": match.time_string,
        # Unix-epoch string, or None when the match has no scheduled time.
        "time": match.time.strftime("%s") if match.time is not None else None,
    }

    if tba_config.CONFIG["memcache"]:
        memcache.set(memcache_key, match_dict, 2 * 60 * 60)
    return match_dict
def createMatchVideoYouTubeSuggestion(cls, author_account_key, youtube_id, match_key):
    """Create a YouTube Match Video. Returns status (success, suggestion_exists, video_exists, bad_url)"""
    # Also returns 'bad_match' when match_key doesn't resolve.
    if youtube_id:
        match = Match.get_by_id(match_key)
        if not match:
            return 'bad_match'
        if youtube_id not in match.youtube_videos:
            year = match_key[:4]
            suggestion_id = Suggestion.render_media_key_name(
                year, 'match', match_key, 'youtube', youtube_id)
            suggestion = Suggestion.get_by_id(suggestion_id)
            # Create a fresh suggestion unless an identical one is still
            # pending review (a previously reviewed one is overwritten).
            if not suggestion or suggestion.review_state != Suggestion.REVIEW_PENDING:
                suggestion = Suggestion(
                    id=suggestion_id,
                    author=author_account_key,
                    target_key=match_key,
                    target_model="match",
                )
                suggestion.contents = {"youtube_videos": [youtube_id]}
                suggestion.put()
                return 'success'
            else:
                return 'suggestion_exists'
        else:
            return 'video_exists'
    else:
        return 'bad_url'
def get(self, match_key):
    """Render the myTBA favorite/subscription settings page for a match.

    404s for unknown match keys; defaults to "favorite" for users with no
    existing entry.
    """
    self._require_registration()

    match = Match.get_by_id(match_key)
    if not match:
        self.abort(404)

    user = self.user_bundle.account.key
    favorite = Favorite.query(Favorite.model_key == match_key,
                              Favorite.model_type == ModelType.MATCH,
                              ancestor=user).get()
    # NOTE(review): this filters Subscription with Favorite's properties;
    # presumably both models share these fields via a common base — confirm.
    subscription = Subscription.query(
        Favorite.model_key == match_key,
        Favorite.model_type == ModelType.MATCH,
        ancestor=user).get()

    if not favorite and not subscription:  # New entry; default to being a favorite
        is_favorite = True
    else:
        is_favorite = favorite is not None

    enabled_notifications = [
        (en, NotificationType.render_names[en])
        for en in NotificationType.enabled_match_notifications
    ]

    self.template_values['match'] = match
    self.template_values['is_favorite'] = is_favorite
    self.template_values['subscription'] = subscription
    self.template_values['enabled_notifications'] = enabled_notifications

    self.response.out.write(
        jinja2_engine.render('mytba_match.html', self.template_values))
def createMatchVideoInternetArchiveSuggestion(cls, author_account_key, archive_id, match_key):
    """Create an Internet Archive match-video Suggestion for review.

    Returns one of: 'success', 'suggestion_exists', 'video_exists',
    'bad_match', 'bad_url'.
    """
    # Guard clauses replace the original nested conditionals.
    if not archive_id:
        return 'bad_url'

    match = Match.get_by_id(match_key)
    if not match:
        return 'bad_match'
    if archive_id in match.internet_archive_videos:
        return 'video_exists'

    year = match_key[:4]
    suggestion_id = Suggestion.render_media_key_name(
        year, 'match', match_key, 'internet_archive', archive_id)
    existing = Suggestion.get_by_id(suggestion_id)
    # Only a still-pending duplicate blocks a new suggestion; a reviewed
    # one is overwritten.
    if existing and existing.review_state == Suggestion.REVIEW_PENDING:
        return 'suggestion_exists'

    new_suggestion = Suggestion(
        id=suggestion_id,
        author=author_account_key,
        target_key=match_key,
        target_model="match",
    )
    new_suggestion.contents = {
        "internet_archive_videos": [archive_id]
    }
    new_suggestion.put()
    return 'success'
def create_target_model(self, suggestion):
    """Apply `suggestion` to its target match (honoring any reviewer
    override of the target key); returns None if the match is missing."""
    override_field = 'key-{}'.format(suggestion.key.id())
    target_key = self.request.get(override_field, suggestion.target_key)

    match = Match.get_by_id(target_key)
    if match is None:
        return None
    return MatchSuggestionAccepter.accept_suggestion(match, suggestion)
def post(self, match_key):
    """Update the current user's myTBA favorite/subscription state for a
    match, then redirect back to the myTBA page.

    A present 'favorite' param adds a favorite (otherwise it is removed);
    'notification_types' params replace the subscription (otherwise it is
    removed).
    """
    self._require_registration()
    current_user_id = self.user_bundle.account.key.id()

    match = Match.get_by_id(match_key)
    # Fix: the fetched match was previously unused, so favorites and
    # subscriptions could be created for nonexistent match keys. 404 like
    # the corresponding GET handler does.
    if not match:
        self.abort(404)

    if self.request.get('favorite'):
        favorite = Favorite(parent=ndb.Key(Account, current_user_id),
                            user_id=current_user_id,
                            model_type=ModelType.MATCH,
                            model_key=match_key)
        MyTBAHelper.add_favorite(favorite)
    else:
        MyTBAHelper.remove_favorite(current_user_id, match_key,
                                    ModelType.MATCH)

    subs = self.request.get_all('notification_types')
    if subs:
        subscription = Subscription(
            parent=ndb.Key(Account, current_user_id),
            user_id=current_user_id,
            model_type=ModelType.MATCH,
            model_key=match_key,
            notification_types=[int(s) for s in subs])
        MyTBAHelper.add_subscription(subscription)
    else:
        MyTBAHelper.remove_subscription(current_user_id, match_key,
                                        ModelType.MATCH)

    self.redirect('/account/mytba?status=match_updated#my-matches')
def get(self, match_key):
    """Render the myTBA favorite/subscription settings page for a match.

    Requires login and registration; 404s for unknown match keys.
    Defaults to "favorite" for users with no existing entry.
    """
    self._require_login()
    self._require_registration()

    match = Match.get_by_id(match_key)
    if not match:
        self.abort(404)

    user = self.user_bundle.account.key
    favorite = Favorite.query(Favorite.model_key==match_key, Favorite.model_type==ModelType.MATCH, ancestor=user).get()
    # NOTE(review): this filters Subscription with Favorite's properties;
    # presumably both models share these fields via a common base — confirm.
    subscription = Subscription.query(Favorite.model_key==match_key, Favorite.model_type==ModelType.MATCH, ancestor=user).get()

    if not favorite and not subscription:  # New entry; default to being a favorite
        is_favorite = True
    else:
        is_favorite = favorite is not None

    enabled_notifications = [(en, NotificationType.render_names[en]) for en in NotificationType.enabled_match_notifications]

    self.template_values['match'] = match
    self.template_values['is_favorite'] = is_favorite
    self.template_values['subscription'] = subscription
    self.template_values['enabled_notifications'] = enabled_notifications

    self.response.out.write(jinja2_engine.render('mytba_match.html', self.template_values))
def get(self, match_key):
    """Render the admin match-details template for `match_key`."""
    template_values = {
        "match": Match.get_by_id(match_key),
    }
    template_path = os.path.join(
        os.path.dirname(__file__),
        '../../templates/admin/match_details.html')
    self.response.out.write(template.render(template_path, template_values))
def get(self, match_key):
    """Admin-only view: render the match-details template for `match_key`."""
    self._require_admin()

    match = Match.get_by_id(match_key)
    self.template_values.update({"match": match})

    template_path = os.path.join(
        os.path.dirname(__file__), '../../templates/admin/match_details.html')
    self.response.out.write(
        template.render(template_path, self.template_values))
def get(self, match_key):
    """Admin-only view: render the match-details page."""
    self._require_admin()
    self.template_values.update({
        "match": Match.get_by_id(match_key),
    })
    path = os.path.join(os.path.dirname(__file__),
                        '../../templates/admin/match_details.html')
    self.response.out.write(template.render(path, self.template_values))
def test_2017flwp(self):
    """Re-ingesting 2017flwp after a tied semifinal should keep the
    original sf1m3 result intact and add the replayed tiebreaker as
    sf1m4."""
    event = Event(
        id='2017flwp',
        event_short='flwp',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    # First fetch: simulated at 21:22 on 2017-03-04 (Python 2 accepts the
    # leading-zero literal 04), before the tiebreaker was played.
    MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=datetime.datetime(2017, 3, 04, 21, 22)).getMatches('2017flwp'))
    MatchHelper.deleteInvalidMatches(event.matches, event)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 5)

    # sf1m3 ended in a 255-255 tie.
    old_match = Match.get_by_id('2017flwp_sf1m3')
    self.assertNotEqual(old_match, None)
    self.assertEqual(old_match.alliances['red']['score'], 255)
    self.assertEqual(old_match.alliances['blue']['score'], 255)
    self.assertEqual(old_match.score_breakdown['red']['totalPoints'], 255)
    self.assertEqual(old_match.score_breakdown['blue']['totalPoints'], 255)

    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    # Second fetch at 21:35, after the replayed tiebreaker was scored.
    MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=datetime.datetime(2017, 3, 04, 21, 35)).getMatches('2017flwp'))
    MatchHelper.deleteInvalidMatches(event.matches, event)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 6)

    # The tied sf1m3 must be preserved unchanged...
    new_match = Match.get_by_id('2017flwp_sf1m3')
    self.assertNotEqual(new_match, None)
    self.assertEqual(old_match.alliances, new_match.alliances)
    self.assertEqual(old_match.score_breakdown, new_match.score_breakdown)

    # ...and the replay shows up as a new sf1m4.
    tiebreaker_match = Match.get_by_id('2017flwp_sf1m4')
    self.assertNotEqual(tiebreaker_match, None)
    self.assertEqual(tiebreaker_match.alliances['red']['score'], 165)
    self.assertEqual(tiebreaker_match.alliances['blue']['score'], 263)
    self.assertEqual(tiebreaker_match.score_breakdown['red']['totalPoints'], 165)
    self.assertEqual(tiebreaker_match.score_breakdown['blue']['totalPoints'], 263)
def get(self, type):
    """Send a test push notification of the given NotificationType to the
    current user's registered clients, then redirect to the webhook docs.

    `type` arrives as a URL path segment (string); invalid or
    unrecognized values just redirect without sending anything.
    """
    self._require_registration('/account/')
    user_id = self.user_bundle.account.key.id()
    logging.info("Sending for {}".format(type))

    try:
        type = int(type)
    except ValueError:
        # Not passed a valid int, just stop here
        logging.info("Invalid number passed")
        self.redirect('/apidocs/webhooks')
        return

    # Canned fixtures used for all test notifications.
    event = Event.get_by_id('2014necmp')
    match = Match.get_by_id('2014necmp_f1m1')
    district = District.get_by_id('2014ne')

    # Fix: `notification` was previously unbound when `type` hit a
    # not-implemented branch (MEDIA_POSTED, FINAL_RESULTS), raising
    # UnboundLocalError at the `if notification:` check below.
    notification = None
    if type == NotificationType.UPCOMING_MATCH:
        notification = UpcomingMatchNotification(match, event)
    elif type == NotificationType.MATCH_SCORE:
        notification = MatchScoreNotification(match)
    elif type == NotificationType.LEVEL_STARTING:
        notification = CompLevelStartingNotification(match, event)
    elif type == NotificationType.ALLIANCE_SELECTION:
        notification = AllianceSelectionNotification(event)
    elif type == NotificationType.AWARDS:
        notification = AwardsUpdatedNotification(event)
    elif type == NotificationType.MEDIA_POSTED:
        # Not implemented yet
        pass
    elif type == NotificationType.DISTRICT_POINTS_UPDATED:
        notification = DistrictPointsUpdatedNotification(district)
    elif type == NotificationType.SCHEDULE_UPDATED:
        notification = ScheduleUpdatedNotification(event, match)
    elif type == NotificationType.FINAL_RESULTS:
        # Not implemented yet
        pass
    elif type == NotificationType.MATCH_VIDEO:
        notification = MatchVideoNotification(match)
    elif type == NotificationType.EVENT_MATCH_VIDEO:
        notification = EventMatchVideoNotification(match)
    else:
        # Not passed a valid int, return
        self.redirect('/apidocs/webhooks')
        return

    keys = PushHelper.get_client_ids_for_users([user_id])
    logging.info("Keys: {}".format(keys))

    if notification:
        # This page should not push notifications to the firebase queue
        # Nor should its notifications be tracked in analytics
        notification.send(keys, push_firebase=False, track_call=False)

    self.redirect('/apidocs/webhooks')
def test_match_videos_add(self):
    """POSTing to the trusted match_videos/add endpoint should union the
    given YouTube ids into each match's existing video list."""
    self.video_auth.put()

    # qm1 already has one video; sf1m1 starts with none.
    match1 = Match(
        id="2014casj_qm1",
        alliances_json="""{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
        comp_level="qm",
        event=ndb.Key(Event, '2014casj'),
        year=2014,
        set_number=1,
        match_number=1,
        team_key_names=[u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
        youtube_videos=["abcdef"]
    )
    match1.put()

    match2 = Match(
        id="2014casj_sf1m1",
        alliances_json="""{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
        comp_level="sf",
        event=ndb.Key(Event, '2014casj'),
        year=2014,
        set_number=1,
        match_number=1,
        team_key_names=[u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
    )
    match2.put()

    match_videos = {'qm1': 'aFZy8iibMD0', 'sf1m1': 'RpSgUrsghv4'}

    request_body = json.dumps(match_videos)
    request_path = '/api/trusted/v1/event/2014casj/match_videos/add'
    # Trusted API requests are signed: md5(secret + path + body).
    sig = md5.new('{}{}{}'.format('321tEsTsEcReT', request_path, request_body)).hexdigest()
    response = self.testapp.post(request_path, request_body, headers={'X-TBA-Auth-Id': 'tEsT_id_5', 'X-TBA-Auth-Sig': sig}, expect_errors=True)
    self.assertEqual(response.status_code, 200)

    # qm1 keeps its pre-existing video and gains the new one.
    self.assertEqual(set(Match.get_by_id('2014casj_qm1').youtube_videos), {'abcdef', 'aFZy8iibMD0'})
    self.assertEqual(set(Match.get_by_id('2014casj_sf1m1').youtube_videos), {'RpSgUrsghv4'})
def post(self, match_key_id):
    """Admin action: delete a match and redirect to its event page."""
    self._require_admin()

    requester = self.user_bundle.user
    logging.warning("Deleting %s at the request of %s / %s" %
                    (match_key_id, requester.user_id(), requester.email()))

    match = Match.get_by_id(match_key_id)
    # Capture the parent event key before the entity goes away.
    event_key_id = match.event.id()
    MatchManipulator.delete(match)

    self.redirect("/admin/event/%s?deleted=%s" % (event_key_id, match_key_id))
def post(self, match_key_id):
    """Delete `match_key_id` (admin only) and bounce to the parent event."""
    self._require_admin()

    log_args = (match_key_id,
                self.user_bundle.user.user_id(),
                self.user_bundle.user.email())
    logging.warning("Deleting %s at the request of %s / %s" % log_args)

    match = Match.get_by_id(match_key_id)
    # Capture the parent event key before the entity goes away.
    event_key_id = match.event.id()
    MatchManipulator.delete(match)

    self.redirect("/admin/event/%s?deleted=%s" % (event_key_id, match_key_id))
def testRejectSuggestion(self):
    """Rejecting a suggestion marks it REVIEW_REJECTED and leaves the
    match's video list untouched."""
    self.loginUser()
    self.givePermission()
    suggestion_id = self.createSuggestion()
    form = self.getSuggestionForm()
    form.set('reject_keys[]', suggestion_id)
    response = form.submit().follow()
    self.assertEqual(response.status_int, 200)

    # Make sure we mark the Suggestion as REVIEWED
    suggestion = Suggestion.get_by_id(suggestion_id)
    self.assertIsNotNone(suggestion)
    self.assertEqual(suggestion.review_state, Suggestion.REVIEW_REJECTED)

    # Make sure the video does NOT get associated
    match = Match.get_by_id(self.match.key_name)
    self.assertIsNotNone(match)
    self.assertFalse(match.youtube_videos)
def test_accept_suggestion(self):
    """Accepting a suggestion marks it REVIEW_ACCEPTED and attaches the
    suggested video to the target match."""
    self.loginUser()
    self.givePermission()
    suggestion_id = self.createSuggestion()
    form = self.getSuggestionForm()
    # The accept/reject radio encodes the action and the suggestion id.
    form['accept_reject-{}'.format(suggestion_id)] = 'accept::{}'.format(
        suggestion_id)
    response = form.submit().follow()
    self.assertEqual(response.status_int, 200)

    # Make sure we mark the Suggestion as REVIEWED
    suggestion = Suggestion.get_by_id(suggestion_id)
    self.assertIsNotNone(suggestion)
    self.assertEqual(suggestion.review_state, Suggestion.REVIEW_ACCEPTED)

    # Make sure the video gets associated
    match = Match.get_by_id(self.match.key_name)
    self.assertIsNotNone(match)
    self.assertIsNotNone(match.youtube_videos)
    self.assertTrue('H-54KMwMKY0' in match.youtube_videos)
def testAcceptBadKey(self):
    """Accepting with an override key for a nonexistent match should leave
    the suggestion pending and the match untouched."""
    self.loginUser()
    self.givePermission()
    suggestion_id = self.createSuggestion()
    form = self.getSuggestionForm()
    form.set('accept_keys[]', suggestion_id)
    form.set('key-{}'.format(suggestion_id), '2016necmp_f1m3')  # This match doesn't exist
    response = form.submit().follow()
    self.assertEqual(response.status_int, 200)

    # Make sure we don't mark the Suggestion as REVIEWED
    suggestion = Suggestion.get_by_id(suggestion_id)
    self.assertIsNotNone(suggestion)
    self.assertEqual(suggestion.review_state, Suggestion.REVIEW_PENDING)

    # Make sure the video doesn't get associated
    match = Match.get_by_id(self.match.key_name)
    self.assertIsNotNone(match)
    self.assertIsNotNone(match.youtube_videos)
    self.assertFalse('H-54KMwMKY0' in match.youtube_videos)
def _process_request(self, request, event_key):
    """Attach the posted {partial_match_key: youtube_id} map to the
    matches of `event_key`, then report success.

    Aborts 400 on malformed JSON or an unknown match key.
    """
    try:
        match_videos = json.loads(request.body)
    except Exception:
        self._errors = json.dumps({"Error": "Invalid JSON. Please check input."})
        self.abort(400)

    matches_to_put = []
    for partial_match_key, youtube_id in match_videos.items():
        match_key = '{}_{}'.format(event_key, partial_match_key)
        match = Match.get_by_id(match_key)
        if match is None:
            self._errors = json.dumps({"Error": "Match {} does not exist!".format(match_key)})
            self.abort(400)

        if youtube_id not in match.youtube_videos:
            match.youtube_videos.append(youtube_id)
            match.dirty = True  # This is hacky -fangeugene 2014-10-26
            matches_to_put.append(match)

    MatchManipulator.createOrUpdate(matches_to_put)

    # Fix: the sibling implementations of this handler write a success
    # body; without it the client received an empty 200 response.
    self.response.out.write(
        json.dumps({'Success': "Match videos successfully updated"}))
def test_accept_bad_key(self):
    """Accepting with an override key for a nonexistent match should leave
    the suggestion pending and the match untouched."""
    self.loginUser()
    self.givePermission()
    suggestion_id = self.createSuggestion()
    form = self.getSuggestionForm()
    form.set('accept_keys[]', suggestion_id)
    form.set('key-{}'.format(suggestion_id), '2016necmp_f1m3')  # This match doesn't exist
    response = form.submit().follow()
    self.assertEqual(response.status_int, 200)

    # Make sure we don't mark the Suggestion as REVIEWED
    suggestion = Suggestion.get_by_id(suggestion_id)
    self.assertIsNotNone(suggestion)
    self.assertEqual(suggestion.review_state, Suggestion.REVIEW_PENDING)

    # Make sure the video doesn't get associated
    match = Match.get_by_id(self.match.key_name)
    self.assertIsNotNone(match)
    self.assertIsNotNone(match.youtube_videos)
    self.assertFalse('H-54KMwMKY0' in match.youtube_videos)
def _process_request(self, request, event_key):
    """Attach the posted {partial_match_key: youtube_id} map to the
    matches of `event_key`, then report success.

    Aborts 400 on malformed JSON or an unknown match key.
    """
    try:
        video_map = json.loads(request.body)
    except Exception:
        self._errors = json.dumps({"Error": "Invalid JSON. Please check input."})
        self.abort(400)

    changed = []
    for partial_key in video_map:
        youtube_id = video_map[partial_key]
        full_key = '{}_{}'.format(event_key, partial_key)
        target = Match.get_by_id(full_key)
        if target is None:
            self._errors = json.dumps({"Error": "Match {} does not exist!".format(full_key)})
            self.abort(400)
        # Only write matches that actually gained a video.
        if youtube_id not in target.youtube_videos:
            target.youtube_videos.append(youtube_id)
            changed.append(target)

    MatchManipulator.createOrUpdate(changed)
    self.response.out.write(json.dumps({'Success': "Match videos successfully updated"}))
def getMatchDetails(self, match_key):
    """
    Returns match details as a dict, or None if the match doesn't exist.

    Results are served from memcache when available and cached for two
    hours (when memcache is enabled in config).
    """
    memcache_key = "api_match_details_%s" % match_key
    match_dict = memcache.get(memcache_key)

    if match_dict is None:
        match = Match.get_by_id(match_key)
        # Fix: previously a missing match fell through to attribute access
        # below and raised AttributeError; return None instead, consistent
        # with the other match-details implementation in this codebase.
        if match is None:
            return None

        match_dict = {}
        match_dict["key"] = match.key_name
        match_dict["event"] = match.event.id()
        match_dict["competition_level"] = match.name
        match_dict["set_number"] = match.set_number
        match_dict["match_number"] = match.match_number
        match_dict["team_keys"] = match.team_key_names
        match_dict["alliances"] = json.loads(match.alliances_json)

        if tba_config.CONFIG["memcache"]:
            memcache.set(memcache_key, match_dict, (2 * (60 * 60)))

    return match_dict
def post(self):
    """Trusted endpoint: associate a YouTube video with a match.

    Expects `secret`, `match_key`, and `youtube_id` POST params; responds
    400 with a JSON error body on any validation failure.
    """
    trusted_api_secret = Sitevar.get_by_id("trusted_api.secret")
    if trusted_api_secret is None:
        raise Exception("Missing sitevar: trusted_api.secret. Can't accept YouTube Videos.")

    secret = self.request.get('secret', None)
    if secret is None:
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "No secret given"}))
        return

    # NOTE(review): compares str(values_json) to the raw secret — assumes
    # the sitevar stores the bare secret string; confirm.
    if str(trusted_api_secret.values_json) != str(secret):
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "Incorrect secret"}))
        return

    match_key = self.request.get('match_key', None)
    if match_key is None:
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "No match_key given"}))
        return

    youtube_id = self.request.get('youtube_id', None)
    if youtube_id is None:
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "No youtube_id given"}))
        return

    match = Match.get_by_id(match_key)
    if match is None:
        self.response.set_status(400)
        self.response.out.write(json.dumps({"400": "Match {} does not exist!".format(match_key)}))
        return

    # Only write when the video is actually new for this match.
    if youtube_id not in match.youtube_videos:
        match.youtube_videos.append(youtube_id)
        match.dirty = True  # This is so hacky. -fangeugene 2014-03-06
        MatchManipulator.createOrUpdate(match)
def test_2017flwp_sequence(self):
    """Replay the archived FMS API responses for 2017flwp in timestamp
    order and verify the final semifinal results, including the replayed
    tie matches (sf1m3/sf1m4 ties, sf1m5 tiebreaker)."""
    event = Event(
        id='2017flwp',
        event_short='flwp',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    event_code = 'flwp'
    file_prefix = 'frc-api-response/v2.0/2017/schedule/{}/playoff/hybrid/'.format(event_code)

    # List the archived response snapshots from Cloud Storage.
    context = ndb.get_context()
    result = context.urlfetch('https://www.googleapis.com/storage/v1/b/bucket/o?bucket=tbatv-prod-hrd.appspot.com&prefix={}'.format(file_prefix)).get_result()

    for item in json.loads(result.content)['items']:
        filename = item['name']
        # Snapshot filenames encode the capture timestamp; simulate a
        # fetch 30s after each capture.
        time_str = filename.replace(file_prefix, '').replace('.json', '').strip()
        file_time = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
        query_time = file_time + datetime.timedelta(seconds=30)
        MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=query_time).getMatches('2017{}'.format(event_code)), run_post_update_hook=False)
        # NOTE(review): collapsed source is ambiguous on whether this
        # cleanup ran per-iteration or once after the loop — confirm.
        MatchHelper.deleteInvalidMatches(event.matches, event)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 7)

    # Final state of the semifinal series after all snapshots.
    self.assertEqual(Match.get_by_id('2017flwp_sf1m1').alliances['red']['score'], 305)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m1').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m1').score_breakdown['red']['totalPoints'], 305)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m1').score_breakdown['blue']['totalPoints'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m2').alliances['red']['score'], 165)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m2').alliances['blue']['score'], 258)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m2').score_breakdown['red']['totalPoints'], 165)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m2').score_breakdown['blue']['totalPoints'], 258)
    # sf1m3 and sf1m4 both ended in 255-255 ties...
    self.assertEqual(Match.get_by_id('2017flwp_sf1m3').alliances['red']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m3').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m3').score_breakdown['red']['totalPoints'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m3').score_breakdown['blue']['totalPoints'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m4').alliances['red']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m4').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m4').score_breakdown['red']['totalPoints'], 255)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m4').score_breakdown['blue']['totalPoints'], 255)
    # ...and sf1m5 is the decisive tiebreaker.
    self.assertEqual(Match.get_by_id('2017flwp_sf1m5').alliances['red']['score'], 165)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m5').alliances['blue']['score'], 263)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m5').score_breakdown['red']['totalPoints'], 165)
    self.assertEqual(Match.get_by_id('2017flwp_sf1m5').score_breakdown['blue']['totalPoints'], 263)
def test_2017ncwin(self):
    """Simulate the 2017ncwin sf2 tiebreaker sequence at four points in time.

    sf2m2 ended in a 205-205 tie, so a fourth sf2 match (sf2m4) must be
    created by the tiebreaker logic at the final stage (7 sf matches total).

    NOTE: the original code wrote the day/minute literals with leading zeros
    (e.g. ``05``) — octal notation in Python 2 and a SyntaxError in Python 3;
    rewritten as plain decimals (same values).
    """
    event = Event(
        id='2017ncwin',
        event_short='ncwin',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    # Each stage: (simulated query time, expected sf match count,
    #              {match short key: (red score, blue score)}).
    # score_breakdown totalPoints must agree with the alliance scores.
    stages = [
        (datetime.datetime(2017, 3, 5, 21, 2), 6,
         {'sf2m1': (265, 150)}),
        (datetime.datetime(2017, 3, 5, 21, 30), 6,
         {'sf2m1': (265, 150), 'sf2m2': (205, 205)}),
        (datetime.datetime(2017, 3, 5, 21, 35), 6,
         {'sf2m1': (265, 150), 'sf2m2': (205, 205), 'sf2m3': (145, 265)}),
        (datetime.datetime(2017, 3, 5, 21, 51), 7,
         {'sf2m1': (265, 150), 'sf2m2': (205, 205), 'sf2m3': (145, 265), 'sf2m4': (180, 305)}),
    ]
    for sim_time, expected_count, expected_scores in stages:
        MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=sim_time).getMatches('2017ncwin'))
        MatchHelper.deleteInvalidMatches(event.matches, event)
        sf_matches = Match.query(Match.event == ndb.Key(Event, '2017ncwin'), Match.comp_level == 'sf').fetch()
        self.assertEqual(len(sf_matches), expected_count)
        for short_key, (red_score, blue_score) in expected_scores.items():
            match = Match.get_by_id('2017ncwin_{}'.format(short_key))
            self.assertEqual(match.alliances['red']['score'], red_score)
            self.assertEqual(match.alliances['blue']['score'], blue_score)
            self.assertEqual(match.score_breakdown['red']['totalPoints'], red_score)
            self.assertEqual(match.score_breakdown['blue']['totalPoints'], blue_score)
        ndb.get_context().clear_cache()  # Prevent data from leaking between stages
def _set_match(self, match_key):
    """Load the Match for *match_key* onto self.match, or 404.

    Bug fix: the error message previously interpolated ``self.match_key``,
    an attribute that is never assigned, so the not-found path raised
    AttributeError instead of returning a 404 body. Use the local
    ``match_key`` parameter instead.
    """
    self.match = Match.get_by_id(match_key)
    if self.match is None:
        self._errors = json.dumps(
            {"404": "%s match not found" % match_key})
        self.abort(404)
def parse(self, response):
    """Parse an FMS API hybrid schedule response into Match objects.

    Returns a tuple ``(parsed_matches, remapped_matches)`` where
    ``parsed_matches`` is a list of Match entities to write and
    ``remapped_matches`` maps original match key names to the key names
    actually used (keys change when a tie forces a tiebreaker match).
    """
    matches = response['Schedule']

    event_key = '{}{}'.format(self.year, self.event_short)
    event = Event.get_by_id(event_key)
    if event.timezone_id:
        event_tz = pytz.timezone(event.timezone_id)
    else:
        logging.warning(
            "Event {} has no timezone! Match times may be wrong.".format(
                event_key))
        event_tz = None

    parsed_matches = []
    remapped_matches = {}  # If a key changes due to a tiebreaker
    for match in matches:
        if 'tournamentLevel' in match:  # 2016+
            level = match['tournamentLevel']
        else:  # 2015
            level = match['level']
        comp_level = PlayoffType.get_comp_level(event.playoff_type, level, match['matchNumber'])
        set_number, match_number = PlayoffType.get_set_match_number(
            event.playoff_type, comp_level, match['matchNumber'])

        # Partition teams into alliances by station name (e.g. "Red 1").
        red_teams = []
        blue_teams = []
        red_surrogates = []
        blue_surrogates = []
        team_key_names = []
        null_team = False
        sorted_teams = sorted(
            match['Teams'], key=lambda team: team['station']
        )  # Sort by station to ensure correct ordering. Kind of hacky.
        for team in sorted_teams:
            if team['teamNumber'] is None:
                null_team = True
            team_key = 'frc{}'.format(team['teamNumber'])
            team_key_names.append(team_key)
            if 'Red' in team['station']:
                red_teams.append(team_key)
                if team['surrogate']:
                    red_surrogates.append(team_key)
            elif 'Blue' in team['station']:
                blue_teams.append(team_key)
                if team['surrogate']:
                    blue_surrogates.append(team_key)
        # Skip matches with missing teams and no result (incomplete API data).
        if null_team and match['scoreRedFinal'] is None and match[
                'scoreBlueFinal'] is None:
            continue

        alliances = {
            'red': {
                'teams': red_teams,
                'surrogates': red_surrogates,
                'score': match['scoreRedFinal']
            },
            'blue': {
                'teams': blue_teams,
                'surrogates': blue_surrogates,
                'score': match['scoreBlueFinal']
            },
        }

        if not match[
                'startTime']:  # no startTime means it's an unneeded rubber match
            continue

        # API times are event-local; convert to naive UTC when tz is known.
        time = datetime.datetime.strptime(match['startTime'].split('.')[0],
                                          TIME_PATTERN)
        if event_tz is not None:
            time = time - event_tz.utcoffset(time)

        actual_time_raw = match[
            'actualStartTime'] if 'actualStartTime' in match else None
        actual_time = None
        if actual_time_raw is not None:
            actual_time = datetime.datetime.strptime(
                actual_time_raw.split('.')[0], TIME_PATTERN)
            if event_tz is not None:
                actual_time = actual_time - event_tz.utcoffset(actual_time)

        post_result_time_raw = match.get('postResultTime')
        post_result_time = None
        if post_result_time_raw is not None:
            post_result_time = datetime.datetime.strptime(
                post_result_time_raw.split('.')[0], TIME_PATTERN)
            if event_tz is not None:
                post_result_time = post_result_time - event_tz.utcoffset(
                    post_result_time)

        key_name = Match.renderKeyName(event_key, comp_level, set_number,
                                       match_number)

        # Check for tiebreaker matches
        existing_match = Match.get_by_id(key_name)
        # Follow chain of existing matches
        while existing_match is not None and existing_match.tiebreak_match_key is not None:
            logging.info("Following Match {} to {}".format(
                existing_match.key.id(),
                existing_match.tiebreak_match_key.id()))
            existing_match = existing_match.tiebreak_match_key.get()
        # Check if last existing match needs to be tiebroken: it was played,
        # has no winner, and this API row's actual_time differs (new match).
        if existing_match and existing_match.comp_level != 'qm' and \
                existing_match.has_been_played and \
                existing_match.winning_alliance == '' and \
                existing_match.actual_time != actual_time and \
                not self.is_blank_match(existing_match):
            logging.warning("Match {} is tied!".format(
                existing_match.key.id()))

            # TODO: Only query within set if set_number ever gets indexed
            match_count = 0
            for match_key in Match.query(
                    Match.event == event.key,
                    Match.comp_level == comp_level).fetch(keys_only=True):
                _, match_key = match_key.id().split('_')
                if match_key.startswith('{}{}'.format(
                        comp_level, set_number)):
                    match_count += 1

            # Sanity check: Tiebreakers must be played after at least 3 matches, or 6 for finals
            if match_count < 3 or (match_count < 6 and comp_level == 'f'):
                logging.warning(
                    "Match supposedly tied, but existing count is {}! Skipping match."
                    .format(match_count))
                continue

            match_number = match_count + 1
            new_key_name = Match.renderKeyName(event_key, comp_level,
                                               set_number, match_number)
            remapped_matches[key_name] = new_key_name
            key_name = new_key_name

            # Point existing match to new tiebreaker match
            existing_match.tiebreak_match_key = ndb.Key(Match, key_name)
            parsed_matches.append(existing_match)

            logging.warning("Creating new match: {}".format(key_name))
        elif existing_match:
            # Key already remapped previously; reuse the existing match's key.
            remapped_matches[key_name] = existing_match.key.id()
            key_name = existing_match.key.id()
            match_number = existing_match.match_number

        parsed_matches.append(
            Match(
                id=key_name,
                event=event.key,
                year=event.year,
                set_number=set_number,
                match_number=match_number,
                comp_level=comp_level,
                team_key_names=team_key_names,
                time=time,
                actual_time=actual_time,
                post_result_time=post_result_time,
                alliances_json=json.dumps(alliances),
            ))

    if self.year == 2015:
        # Fix null teams in elims (due to FMS API failure, some info not complete)
        # Should only happen for sf and f matches
        organized_matches = MatchHelper.organizeMatches(parsed_matches)
        for level in ['sf', 'f']:
            playoff_advancement = MatchHelper.generatePlayoffAdvancement2015(
                organized_matches)
            if playoff_advancement[LAST_LEVEL[level]] != []:
                for match in organized_matches[level]:
                    if 'frcNone' in match.team_key_names:
                        if level == 'sf':
                            red_seed, blue_seed = QF_SF_MAP[
                                match.match_number]
                        else:
                            red_seed = 0
                            blue_seed = 1
                        # Fill alliances from the previous level's advancement.
                        red_teams = [
                            'frc{}'.format(t) for t in playoff_advancement[
                                LAST_LEVEL[level]][red_seed][0]
                        ]
                        blue_teams = [
                            'frc{}'.format(t) for t in playoff_advancement[
                                LAST_LEVEL[level]][blue_seed][0]
                        ]

                        alliances = match.alliances
                        alliances['red']['teams'] = red_teams
                        alliances['blue']['teams'] = blue_teams
                        match.alliances_json = json.dumps(alliances)
                        match.team_key_names = red_teams + blue_teams

        # Drop any matches that still have unresolved null teams.
        fixed_matches = []
        for key, matches in organized_matches.items():
            if key != 'num':
                for match in matches:
                    if 'frcNone' not in match.team_key_names:
                        fixed_matches.append(match)
        parsed_matches = fixed_matches

    return parsed_matches, remapped_matches
def _fetch_match(self, match_key):
    """Resolve a match key for a notification request.

    Writes the appropriate error message to the response and returns None
    when the key is empty or does not exist; otherwise returns the Match.
    """
    if match_key == "":
        logging.info("No match key")
        self.response.out.write("No match key specified!")
        return None
    match = Match.get_by_id(match_key)
    if match is None:
        logging.info("Invalid match key passed")
        self.response.out.write("Invalid match key!")
    return match

def post(self, type):
    """Send a test push notification of the given NotificationType.

    ``type`` arrives as a URL path component (string); it is converted to an
    int and dispatched to the matching notification class. Request params:
    event_key, match_key, district_key (each required only for the types
    that use them).

    Bug fix: ``notification`` is now initialized to None so the
    not-implemented types (MEDIA_POSTED, FINAL_RESULTS) no longer raise
    NameError at the ``if notification:`` check.
    """
    self._require_registration('/account/')

    event_key = self.request.get('event_key')
    match_key = self.request.get('match_key')
    district_key = self.request.get('district_key')
    user_id = self.user_bundle.account.key.id()

    logging.info("Sending for {}".format(type))
    try:
        type = int(type)
    except ValueError:
        # Not passed a valid int, just stop here
        logging.info("Invalid number passed")
        return

    # All types except district points require a valid event.
    event = None
    if type != NotificationType.DISTRICT_POINTS_UPDATED:
        if event_key == "":
            logging.info("No event key")
            self.response.out.write("No event key specified!")
            return
        event = Event.get_by_id(event_key)
        if event is None:
            logging.info("Invalid event key passed")
            self.response.out.write("Invalid event key!")
            return

    notification = None  # Stays None for not-implemented types
    if type == NotificationType.UPCOMING_MATCH:
        match = self._fetch_match(match_key)
        if match is None:
            return
        notification = UpcomingMatchNotification(match, event)
    elif type == NotificationType.MATCH_SCORE:
        match = self._fetch_match(match_key)
        if match is None:
            return
        notification = MatchScoreNotification(match)
    elif type == NotificationType.LEVEL_STARTING:
        match = self._fetch_match(match_key)
        if match is None:
            return
        notification = CompLevelStartingNotification(match, event)
    elif type == NotificationType.ALLIANCE_SELECTION:
        notification = AllianceSelectionNotification(event)
    elif type == NotificationType.AWARDS:
        notification = AwardsUpdatedNotification(event)
    elif type == NotificationType.MEDIA_POSTED:
        # Not implemented yet
        pass
    elif type == NotificationType.DISTRICT_POINTS_UPDATED:
        if district_key == "":
            logging.info("No district key")
            self.response.out.write("No district key specified!")
            return
        district = District.get_by_id(district_key)
        if district is None:
            logging.info("Invalid district key passed")
            self.response.out.write("Invalid district key!")
            return
        notification = DistrictPointsUpdatedNotification(district)
    elif type == NotificationType.SCHEDULE_UPDATED:
        match = self._fetch_match(match_key)
        if match is None:
            return
        notification = ScheduleUpdatedNotification(event, match)
    elif type == NotificationType.FINAL_RESULTS:
        # Not implemented yet
        pass
    elif type == NotificationType.MATCH_VIDEO:
        match = self._fetch_match(match_key)
        if match is None:
            return
        notification = MatchVideoNotification(match)
    elif type == NotificationType.EVENT_MATCH_VIDEO:
        match = self._fetch_match(match_key)
        if match is None:
            return
        notification = EventMatchVideoNotification(match)
    else:
        # Not passed a valid int, return
        return

    keys = PushHelper.get_client_ids_for_users([user_id])
    logging.info("Keys: {}".format(keys))

    if notification:
        # This page should not push notifications to the firebase queue
        # Nor should its notifications be tracked in analytics
        notification.send(keys, push_firebase=False, track_call=False)
    self.response.out.write("ok")
def update_bluezone(cls, live_events):
    """
    Find the current best match to watch.

    Currently favors showing something over nothing, is okay with switching
    TO a feed in the middle of a match, but avoids switching FROM a feed in
    the middle of a match.
    1. Get the earliest predicted unplayed match across all live events
    2. Get all matches that start within TIME_BUCKET of that match
    3. Switch to hottest match in that bucket unless MAX_TIME_PER_MATCH is
       hit (in which case blacklist for the future)
    4. Repeat

    Returns a fake Event entity carrying the chosen webcast.

    NOTE: relies on Python 2 ``filter`` returning a list (results are
    sorted, indexed, and truth-tested below).
    """
    now = datetime.datetime.now()
    logging.info("[BLUEZONE] Current time: {}".format(now))
    # to_log accumulates a human-readable trace, flushed to cloudstorage below.
    to_log = '--------------------------------------------------\n'
    to_log += "[BLUEZONE] Current time: {}\n".format(now)

    slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
    slack_url = None
    if slack_sitevar:
        slack_url = slack_sitevar.contents.get('bluezone', '')

    # Load persisted state: current/last match, blacklists, switch time.
    bluezone_config = Sitevar.get_or_insert('bluezone')
    logging.info("[BLUEZONE] Config (updated {}): {}".format(bluezone_config.updated, bluezone_config.contents))
    to_log += "[BLUEZONE] Config (updated {}): {}\n".format(bluezone_config.updated, bluezone_config.contents)
    current_match_key = bluezone_config.contents.get('current_match')
    last_match_key = bluezone_config.contents.get('last_match')
    current_match_predicted_time = bluezone_config.contents.get('current_match_predicted')
    if current_match_predicted_time:
        current_match_predicted_time = datetime.datetime.strptime(current_match_predicted_time, cls.TIME_PATTERN)
    current_match_switch_time = bluezone_config.contents.get('current_match_switch_time')
    if current_match_switch_time:
        current_match_switch_time = datetime.datetime.strptime(current_match_switch_time, cls.TIME_PATTERN)
    else:
        current_match_switch_time = now
    # Blacklists are stored as JSON lists; normalize to sets for membership tests.
    blacklisted_match_keys = bluezone_config.contents.get('blacklisted_matches', set())
    if blacklisted_match_keys:
        blacklisted_match_keys = set(blacklisted_match_keys)
    blacklisted_event_keys = bluezone_config.contents.get('blacklisted_events', set())
    if blacklisted_event_keys:
        blacklisted_event_keys = set(blacklisted_event_keys)

    current_match = Match.get_by_id(current_match_key) if current_match_key else None
    last_match = Match.get_by_id(last_match_key) if last_match_key else None

    logging.info("[BLUEZONE] live_events: {}".format([le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] live_events: {}\n".format([le.key.id() for le in live_events])
    live_events = filter(lambda e: e.webcast_status != 'offline', live_events)
    for event in live_events:
        # Fetch all matches and details asynchronously
        event.prep_matches()
        event.prep_details()
    logging.info("[BLUEZONE] Online live_events: {}".format([le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] Online live_events: {}\n".format([le.key.id() for le in live_events])
    upcoming_matches = cls.get_upcoming_matches(live_events)
    upcoming_matches = filter(lambda m: m.predicted_time is not None, upcoming_matches)
    upcoming_predictions = cls.get_upcoming_match_predictions(live_events)

    # (1, 2) Find earliest predicted unplayed match and all other matches
    # that start within TIME_BUCKET of that match
    upcoming_matches.sort(key=lambda match: match.predicted_time)
    potential_matches = []
    time_cutoff = None
    logging.info("[BLUEZONE] all upcoming matches sorted by predicted time: {}".format([um.key.id() for um in upcoming_matches]))
    to_log += "[BLUEZONE] all upcoming sorted by predicted time: {}\n".format([um.key.id() for um in upcoming_matches])
    for match in upcoming_matches:
        if match.predicted_time:
            if time_cutoff is None:
                # Earliest match anchors the bucket window.
                time_cutoff = match.predicted_time + cls.TIME_BUCKET
                potential_matches.append(match)
            elif match.predicted_time < time_cutoff:
                potential_matches.append(match)
            else:
                break  # Matches are sorted by predicted_time
    logging.info("[BLUEZONE] potential_matches sorted by predicted time: {}".format([pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by predicted time: {}\n".format([pm.key.id() for pm in potential_matches])

    # (3) Choose hottest match that's not blacklisted
    cls.calculate_match_hotness(potential_matches, upcoming_predictions)
    potential_matches.sort(key=lambda match: -match.hotness)
    logging.info("[BLUEZONE] potential_matches sorted by hotness: {}".format([pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by hotness: {}\n".format([pm.key.id() for pm in potential_matches])

    bluezone_matches = []
    new_blacklisted_match_keys = set()

    # If the current match hasn't finished yet, don't even bother
    cutoff_time = current_match_switch_time + cls.MAX_TIME_PER_MATCH
    logging.info("[BLUEZONE] Current match played? {}, now = {}, cutoff = {}".format(current_match.has_been_played if current_match else None, now, cutoff_time))
    to_log += "[BLUEZONE] Current match played? {}, now = {}, cutoff = {}\n".format(current_match.has_been_played if current_match else None, now, cutoff_time)
    if current_match and not current_match.has_been_played and now < cutoff_time \
            and current_match_key not in blacklisted_match_keys \
            and current_match.event_key_name not in blacklisted_event_keys:
        logging.info("[BLUEZONE] Keeping current match {}".format(current_match.key.id()))
        to_log += "[BLUEZONE] Keeping current match {}\n".format(current_match.key.id())
        bluezone_matches.append(current_match)

    for match in potential_matches:
        if len(bluezone_matches) >= 2:  # one current, one future
            break
        logging.info("[BLUEZONE] Trying potential match: {}".format(match.key.id()))
        to_log += "[BLUEZONE] Trying potential match: {}\n".format(match.key.id())
        if filter(lambda m: m.key.id() == match.key.id(), bluezone_matches):
            logging.info("[BLUEZONE] Match {} already chosen".format(match.key.id()))
            to_log += "[BLUEZONE] Match {} already chosen\n".format(match.key.id())
            continue
        if match.event_key_name in blacklisted_event_keys:
            logging.info("[BLUEZONE] Event {} is blacklisted, skipping...".format(match.event_key_name))
            to_log += "[BLUEZONE] Event {} is blacklisted, skipping...\n".format(match.event_key_name)
            continue
        if match.key.id() not in blacklisted_match_keys:
            if match.key.id() == current_match_key:
                if current_match_predicted_time and cutoff_time < now and len(potential_matches) > 1:
                    # We've been on this match too long
                    new_blacklisted_match_keys.add(match.key.id())
                    logging.info("[BLUEZONE] Adding match to blacklist: {}".format(match.key.id()))
                    to_log += "[BLUEZONE] Adding match to blacklist: {}\n".format(match.key.id())
                    logging.info("[BLUEZONE] scheduled time: {}, now: {}".format(current_match_predicted_time, now))
                    to_log += "[BLUEZONE] scheduled time: {}, now: {}\n".format(current_match_predicted_time, now)
                    OutgoingNotificationHelper.send_slack_alert(slack_url, "Blacklisting match {}. Predicted time: {}, now: {}".format(match.key.id(), current_match_predicted_time, now))
                else:
                    # We can continue to use this match
                    bluezone_matches.append(match)
                    logging.info("[BLUEZONE] Continuing to use match: {}".format(match.key.id()))
                    to_log += "[BLUEZONE] Continuing to use match: {}\n".format(match.key.id())
            else:
                # Found a new good match
                bluezone_matches.append(match)
                logging.info("[BLUEZONE] Found a good new match: {}".format(match.key.id()))
                to_log += "[BLUEZONE] Found a good new match: {}\n".format(match.key.id())
        else:
            logging.info("[BLUEZONE] Match already blacklisted: {}".format(match.key.id()))
            to_log += "[BLUEZONE] Match already blacklisted: {}\n".format(match.key.id())
            new_blacklisted_match_keys.add(match.key.id())

    if not bluezone_matches:
        logging.info("[BLUEZONE] No match selected")
        to_log += "[BLUEZONE] No match selected\n"

    logging.info("[BLUEZONE] All selected matches: {}".format([m.key.id() for m in bluezone_matches]))
    to_log += "[BLUEZONE] All selected matches: {}\n".format([m.key.id() for m in bluezone_matches])

    # (3) Switch to hottest match
    fake_event = cls.build_fake_event()
    if bluezone_matches:
        bluezone_match = bluezone_matches[0]
        real_event = filter(lambda x: x.key_name == bluezone_match.event_key_name, live_events)[0]
        # Create Fake event for return
        fake_event.webcast_json = json.dumps([real_event.current_webcasts[0]])

        if bluezone_match.key_name != current_match_key:
            current_match_switch_time = now
            logging.info("[BLUEZONE] Switching to: {}".format(bluezone_match.key.id()))
            to_log += "[BLUEZONE] Switching to: {}\n".format(bluezone_match.key.id())
            OutgoingNotificationHelper.send_slack_alert(slack_url, "It is now {}. Switching BlueZone to {}, scheduled for {} and predicted to be at {}.".format(now, bluezone_match.key.id(), bluezone_match.time, bluezone_match.predicted_time))

        if not current_match or current_match.has_been_played:
            last_match = current_match

        # Only need to update if things changed
        if bluezone_match.key_name != current_match_key or new_blacklisted_match_keys != blacklisted_match_keys:
            FirebasePusher.update_event(fake_event)
            bluezone_config.contents = {
                'current_match': bluezone_match.key.id(),
                'last_match': last_match.key.id() if last_match else '',
                'current_match_predicted': bluezone_match.predicted_time.strftime(cls.TIME_PATTERN),
                'blacklisted_matches': list(new_blacklisted_match_keys),
                'blacklisted_events': list(blacklisted_event_keys),
                'current_match_switch_time': current_match_switch_time.strftime(cls.TIME_PATTERN),
            }
            bluezone_config.put()

    # Log to cloudstorage (append by read-then-rewrite; GCS has no append).
    log_dir = '/tbatv-prod-hrd.appspot.com/tba-logging/'
    log_file = 'bluezone_{}.txt'.format(now.date())
    full_path = log_dir + log_file
    existing_contents = ''
    if full_path in set([f.filename for f in cloudstorage.listbucket(log_dir)]):
        with cloudstorage.open(full_path, 'r') as existing_file:
            existing_contents = existing_file.read()
    with cloudstorage.open(full_path, 'w') as new_file:
        new_file.write(existing_contents + to_log)

    bluezone_matches.insert(0, last_match)
    bluezone_matches = filter(lambda m: m is not None, bluezone_matches)
    FirebasePusher.replace_event_matches('bluezone', bluezone_matches)

    return fake_event
def step(self):
    """Advance the 2016nytr event simulation by one step.

    State machine over (self._step, self._substep): schedule-add steps run
    once; per-match steps replay one played match per call. When
    ``self._batch_advance`` is False, bracket alliances are advanced
    incrementally as sets are won, and the schedule-add steps are skipped
    (step jumps by 2).
    """
    event = Event.get_by_id('2016nytr')

    if self._step == 0:  # Qual match schedule added
        # Publish unplayed qual matches (score -1, no breakdown/actual time).
        for match in copy.deepcopy(self._all_matches['qm']):
            for alliance in ['red', 'blue']:
                match.alliances[alliance]['score'] = -1
            match.alliances_json = json.dumps(match.alliances)
            match.score_breakdown_json = None
            match.actual_time = None
            MatchManipulator.createOrUpdate(match)
        self._step += 1
    elif self._step == 1:  # After each qual match
        MatchManipulator.createOrUpdate(self._played_matches['qm'][self._substep])
        if self._substep < len(self._played_matches['qm']) - 1:
            self._substep += 1
        else:
            self._step += 1
            self._substep = 0
        EventDetailsManipulator.createOrUpdate(EventDetails(id='2016nytr'))
    elif self._step == 2:  # After alliance selections
        EventDetailsManipulator.createOrUpdate(EventDetails(
            id='2016nytr',
            alliance_selections=self._alliance_selections_without_backup
        ))
        self._step += 1
    elif self._step == 3:  # QF schedule added
        for match in copy.deepcopy(self._all_matches['qf']):
            for alliance in ['red', 'blue']:
                match.alliances[alliance]['score'] = -1
            match.alliances_json = json.dumps(match.alliances)
            match.score_breakdown_json = None
            match.actual_time = None
            MatchManipulator.createOrUpdate(match)
        self._step += 1
    elif self._step == 4:  # After each QF match
        new_match = MatchHelper.play_order_sort_matches(self._played_matches['qf'])[self._substep]
        MatchManipulator.createOrUpdate(new_match)
        if not self._batch_advance:
            # Count wins in this QF set; on 2 wins, advance alliance to SF.
            win_counts = {
                'red': 0,
                'blue': 0,
            }
            for i in xrange(new_match.match_number):
                win_counts[Match.get_by_id(
                    Match.renderKeyName(
                        new_match.event.id(),
                        new_match.comp_level,
                        new_match.set_number,
                        i+1)).winning_alliance] += 1
            for alliance, wins in win_counts.items():
                if wins == 2:
                    # QF sets 1/2 feed SF1; sets 3/4 feed SF2.
                    s = new_match.set_number
                    if s in {1, 2}:
                        self._advancement_alliances['sf1']['red' if s == 1 else 'blue'] = new_match.alliances[alliance]['teams']
                    elif s in {3, 4}:
                        self._advancement_alliances['sf2']['red' if s == 3 else 'blue'] = new_match.alliances[alliance]['teams']
                    else:
                        raise Exception("Invalid set number: {}".format(s))
                    # Publish/update the affected SF schedule slots.
                    for match_set, alliances in self._advancement_alliances.items():
                        if match_set.startswith('sf'):
                            for i in xrange(3):
                                for match in copy.deepcopy(self._all_matches['sf']):
                                    key = '2016nytr_{}m{}'.format(match_set, i+1)
                                    if match.key.id() == key:
                                        for color in ['red', 'blue']:
                                            match.alliances[color]['score'] = -1
                                            match.alliances[color]['teams'] = alliances.get(color, [])
                                        match.alliances_json = json.dumps(match.alliances)
                                        match.score_breakdown_json = None
                                        match.actual_time = None
                                        MatchManipulator.createOrUpdate(match)
        if self._substep < len(self._played_matches['qf']) - 1:
            self._substep += 1
        else:
            # Skip the SF-schedule step when advancing incrementally.
            self._step += 1 if self._batch_advance else 2
            self._substep = 0
    elif self._step == 5:  # SF schedule added
        if self._batch_advance:
            for match in copy.deepcopy(self._all_matches['sf']):
                for alliance in ['red', 'blue']:
                    match.alliances[alliance]['score'] = -1
                match.alliances_json = json.dumps(match.alliances)
                match.score_breakdown_json = None
                match.actual_time = None
                MatchManipulator.createOrUpdate(match)
            self._step += 1
    elif self._step == 6:  # After each SF match
        new_match = MatchHelper.play_order_sort_matches(self._played_matches['sf'])[self._substep]
        MatchManipulator.createOrUpdate(new_match)
        if not self._batch_advance:
            # Count wins in this SF set; on 2 wins, advance alliance to F.
            win_counts = {
                'red': 0,
                'blue': 0,
            }
            for i in xrange(new_match.match_number):
                win_counts[Match.get_by_id(
                    Match.renderKeyName(
                        new_match.event.id(),
                        new_match.comp_level,
                        new_match.set_number,
                        i+1)).winning_alliance] += 1
            for alliance, wins in win_counts.items():
                if wins == 2:
                    self._advancement_alliances['f1']['red' if new_match.set_number == 1 else 'blue'] = new_match.alliances[alliance]['teams']
                    for match_set, alliances in self._advancement_alliances.items():
                        if match_set.startswith('f'):
                            for i in xrange(3):
                                for match in copy.deepcopy(self._all_matches['f']):
                                    key = '2016nytr_{}m{}'.format(match_set, i+1)
                                    if match.key.id() == key:
                                        for color in ['red', 'blue']:
                                            match.alliances[color]['score'] = -1
                                            match.alliances[color]['teams'] = alliances.get(color, [])
                                        match.alliances_json = json.dumps(match.alliances)
                                        match.score_breakdown_json = None
                                        match.actual_time = None
                                        MatchManipulator.createOrUpdate(match)
        # Backup robot introduced
        if self._substep == 3:
            EventDetailsManipulator.createOrUpdate(EventDetails(
                id='2016nytr',
                alliance_selections=self._event_details.alliance_selections
            ))
        if self._substep < len(self._played_matches['sf']) - 1:
            self._substep += 1
        else:
            self._step += 1 if self._batch_advance else 2
            self._substep = 0
    elif self._step == 7:  # F schedule added
        if self._batch_advance:
            for match in copy.deepcopy(self._all_matches['f']):
                for alliance in ['red', 'blue']:
                    match.alliances[alliance]['score'] = -1
                match.alliances_json = json.dumps(match.alliances)
                match.score_breakdown_json = None
                match.actual_time = None
                MatchManipulator.createOrUpdate(match)
            self._step += 1
    elif self._step == 8:  # After each F match
        MatchManipulator.createOrUpdate(
            MatchHelper.play_order_sort_matches(
                self._played_matches['f'])[self._substep])
        if self._substep < len(self._played_matches['f']) - 1:
            self._substep += 1
        else:
            self._step += 1
            self._substep = 0

    ndb.get_context().clear_cache()

    # Re fetch event matches
    event = Event.get_by_id('2016nytr')
    MatchHelper.deleteInvalidMatches(event.matches, event)

    ndb.get_context().clear_cache()

    self._update_rankings()
def _set_match(self, match_key):
    """Load the Match for *match_key* onto self.match, or 404.

    Bug fix: the error message previously interpolated ``self.match_key``,
    which is never assigned, so the not-found path raised AttributeError
    instead of returning a 404 body. Use the local ``match_key`` parameter.
    """
    self.match = Match.get_by_id(match_key)
    if self.match is None:
        self._errors = json.dumps({"404": "%s match not found" % match_key})
        self.abort(404)
def test_accept_suggestions(self):
    """Accepting a video suggestion merges its videos into the match."""
    MatchSuggestionAccepter.accept_suggestion(self.match, self.suggestion)
    updated_match = Match.get_by_id("2012ct_qm1")
    self.assertIn("abcdef", updated_match.youtube_videos)
    self.assertIn("123456", updated_match.youtube_videos)
def parse(self, response):
    """
    Parse an FMS API hybrid-schedule response into Match models.

    Returns a tuple ``(parsed_matches, remapped_matches)`` where
    ``parsed_matches`` is a list of Match entities to write and
    ``remapped_matches`` maps the key the FMS data implied to the key
    actually used (changed when a tiebreaker match is detected).
    """
    matches = response['Schedule']

    event_key = '{}{}'.format(self.year, self.event_short)
    event = Event.get_by_id(event_key)
    if event.timezone_id:
        event_tz = pytz.timezone(event.timezone_id)
    else:
        # Without a timezone we cannot convert FMS local times to UTC.
        logging.warning("Event {} has no timezone! Match times may be wrong.".format(event_key))
        event_tz = None

    parsed_matches = []
    remapped_matches = {}  # If a key changes due to a tiebreaker
    # 2015 octofinals are detected from the human-readable description string.
    is_octofinals = len(matches) > 0 and 'Octofinal' in matches[0]['description']
    for match in matches:
        if 'tournamentLevel' in match:  # 2016+
            level = match['tournamentLevel']
        else:  # 2015
            level = match['level']
        comp_level = get_comp_level(self.year, level, match['matchNumber'], is_octofinals)
        set_number, match_number = get_set_match_number(self.year, comp_level, match['matchNumber'], is_octofinals)

        red_teams = []
        blue_teams = []
        red_surrogates = []
        blue_surrogates = []
        team_key_names = []
        null_team = False
        sorted_teams = sorted(match['Teams'], key=lambda team: team['station'])  # Sort by station to ensure correct ordering. Kind of hacky.
        for team in sorted_teams:
            if team['teamNumber'] is None:
                null_team = True
            team_key = 'frc{}'.format(team['teamNumber'])
            team_key_names.append(team_key)
            if 'Red' in team['station']:
                red_teams.append(team_key)
                if team['surrogate']:
                    red_surrogates.append(team_key)
            elif 'Blue' in team['station']:
                blue_teams.append(team_key)
                if team['surrogate']:
                    blue_surrogates.append(team_key)

        # Skip entirely unpopulated, unplayed matches (missing team + no scores).
        if null_team and match['scoreRedFinal'] is None and match['scoreBlueFinal'] is None:
            continue

        alliances = {
            'red': {
                'teams': red_teams,
                'surrogates': red_surrogates,
                'score': match['scoreRedFinal']
            },
            'blue': {
                'teams': blue_teams,
                'surrogates': blue_surrogates,
                'score': match['scoreBlueFinal']
            },
        }

        if not match['startTime']:  # no startTime means it's an unneeded rubber match
            continue

        # FMS times are event-local; subtract the UTC offset to store UTC.
        time = datetime.datetime.strptime(match['startTime'].split('.')[0], TIME_PATTERN)
        if event_tz is not None:
            time = time - event_tz.utcoffset(time)

        actual_time_raw = match['actualStartTime'] if 'actualStartTime' in match else None
        actual_time = None
        if actual_time_raw is not None:
            actual_time = datetime.datetime.strptime(actual_time_raw.split('.')[0], TIME_PATTERN)
            if event_tz is not None:
                actual_time = actual_time - event_tz.utcoffset(actual_time)

        post_result_time_raw = match.get('postResultTime')
        post_result_time = None
        if post_result_time_raw is not None:
            post_result_time = datetime.datetime.strptime(post_result_time_raw.split('.')[0], TIME_PATTERN)
            if event_tz is not None:
                post_result_time = post_result_time - event_tz.utcoffset(post_result_time)

        key_name = Match.renderKeyName(
            event_key,
            comp_level,
            set_number,
            match_number)

        # Check for tiebreaker matches
        existing_match = Match.get_by_id(key_name)
        # Follow chain of existing matches
        while existing_match is not None and existing_match.tiebreak_match_key is not None:
            logging.info("Following Match {} to {}".format(existing_match.key.id(), existing_match.tiebreak_match_key.id()))
            existing_match = existing_match.tiebreak_match_key.get()
        # Check if last existing match needs to be tiebroken:
        # played, elims, no winner, and a different actual start time.
        if existing_match and existing_match.comp_level != 'qm' and \
                existing_match.has_been_played and \
                existing_match.winning_alliance == '' and \
                existing_match.actual_time != actual_time and \
                not self.is_blank_match(existing_match):
            logging.warning("Match {} is tied!".format(existing_match.key.id()))

            # TODO: Only query within set if set_number ever gets indexed
            match_count = 0
            for match_key in Match.query(Match.event==event.key, Match.comp_level==comp_level).fetch(keys_only=True):
                _, match_key = match_key.id().split('_')
                if match_key.startswith('{}{}'.format(comp_level, set_number)):
                    match_count += 1
            # Sanity check: Tiebreakers must be played after at least 3 matches, or 6 for finals
            if match_count < 3 or (match_count < 6 and comp_level == 'f'):
                logging.warning("Match supposedly tied, but existing count is {}! Skipping match.".format(match_count))
                continue
            match_number = match_count + 1
            new_key_name = Match.renderKeyName(
                event_key,
                comp_level,
                set_number,
                match_number)
            remapped_matches[key_name] = new_key_name
            key_name = new_key_name

            # Point existing match to new tiebreaker match
            existing_match.tiebreak_match_key = ndb.Key(Match, key_name)
            parsed_matches.append(existing_match)

            logging.warning("Creating new match: {}".format(key_name))
        elif existing_match:
            # Reuse the existing key/number so updates land on the same entity.
            remapped_matches[key_name] = existing_match.key.id()
            key_name = existing_match.key.id()
            match_number = existing_match.match_number

        parsed_matches.append(Match(
            id=key_name,
            event=event.key,
            year=event.year,
            set_number=set_number,
            match_number=match_number,
            comp_level=comp_level,
            team_key_names=team_key_names,
            time=time,
            actual_time=actual_time,
            post_result_time=post_result_time,
            alliances_json=json.dumps(alliances),
        ))

    if self.year == 2015:
        # Fix null teams in elims (due to FMS API failure, some info not complete)
        # Should only happen for sf and f matches
        organized_matches = MatchHelper.organizeMatches(parsed_matches)
        for level in ['sf', 'f']:
            playoff_advancement = MatchHelper.generatePlayoffAdvancement2015(organized_matches)
            if playoff_advancement[LAST_LEVEL[level]] != []:
                for match in organized_matches[level]:
                    if 'frcNone' in match.team_key_names:
                        if level == 'sf':
                            red_seed, blue_seed = QF_SF_MAP[match.match_number]
                        else:
                            red_seed = 0
                            blue_seed = 1
                        red_teams = ['frc{}'.format(t) for t in playoff_advancement[LAST_LEVEL[level]][red_seed][0]]
                        blue_teams = ['frc{}'.format(t) for t in playoff_advancement[LAST_LEVEL[level]][blue_seed][0]]

                        alliances = match.alliances
                        alliances['red']['teams'] = red_teams
                        alliances['blue']['teams'] = blue_teams
                        match.alliances_json = json.dumps(alliances)
                        match.team_key_names = red_teams + blue_teams

        # Drop any matches that still have unresolved (frcNone) teams.
        fixed_matches = []
        for key, matches in organized_matches.items():
            if key != 'num':
                for match in matches:
                    if 'frcNone' not in match.team_key_names:
                        fixed_matches.append(match)
        parsed_matches = fixed_matches

    return parsed_matches, remapped_matches
def test_matches_update(self):
    """
    Exercise the trusted API matches/update and matches/delete endpoints:
    add matches one at a time, delete one, then verify the stored Match data.
    """
    self.matches_auth.put()

    update_request_path = '/api/trusted/v1/event/2014casj/matches/update'
    delete_request_path = '/api/trusted/v1/event/2014casj/matches/delete'

    def signed_post(path, body):
        # Trusted API auth: sig = md5(secret + request path + request body).
        sig = md5.new('{}{}{}'.format('321tEsTsEcReT', path, body)).hexdigest()
        return self.testapp.post(
            path, body,
            headers={'X-TBA-Auth-Id': 'tEsT_id_1', 'X-TBA-Auth-Sig': sig},
            expect_errors=True)

    # add one match
    matches = [{
        'comp_level': 'qm',
        'set_number': 1,
        'match_number': 1,
        'alliances': {
            'red': {'teams': ['frc1', 'frc2', 'frc3'], 'score': 25},
            'blue': {'teams': ['frc4', 'frc5', 'frc6'], 'score': 26},
        },
        'time_string': '9:00 AM',
        'time_utc': '2014-08-31T16:00:00',
    }]
    response = signed_post(update_request_path, json.dumps(matches))
    self.assertEqual(response.status_code, 200)

    db_matches = Match.query(Match.event == self.event.key).fetch(None)
    self.assertEqual(len(db_matches), 1)
    self.assertTrue('2014casj_qm1' in [m.key.id() for m in db_matches])

    # add another match
    matches = [{
        'comp_level': 'f',
        'set_number': 1,
        'match_number': 1,
        'alliances': {
            'red': {'teams': ['frc1', 'frc2', 'frc3'], 'score': 250},
            'blue': {'teams': ['frc4', 'frc5', 'frc6'], 'score': 260},
        },
        'time_string': '10:00 AM',
        'time_utc': '2014-08-31T17:00:00',
    }]
    response = signed_post(update_request_path, json.dumps(matches))
    self.assertEqual(response.status_code, 200)

    db_matches = Match.query(Match.event == self.event.key).fetch(None)
    self.assertEqual(len(db_matches), 2)
    self.assertTrue('2014casj_qm1' in [m.key.id() for m in db_matches])
    self.assertTrue('2014casj_f1m1' in [m.key.id() for m in db_matches])

    # add a match and delete a match
    matches = [{
        'comp_level': 'f',
        'set_number': 1,
        'match_number': 2,
        'alliances': {
            'red': {'teams': ['frc1', 'frc2', 'frc3'], 'score': 250},
            'blue': {'teams': ['frc4', 'frc5', 'frc6'], 'score': 260},
        },
        'score_breakdown': {
            'red': {'auto': 20, 'assist': 40, 'truss+catch': 20, 'teleop_goal+foul': 20},
            'blue': {'auto': 40, 'assist': 60, 'truss+catch': 10, 'teleop_goal+foul': 40},
        },
        'time_string': '11:00 AM',
        'time_utc': '2014-08-31T18:00:00',
    }]
    response = signed_post(update_request_path, json.dumps(matches))
    self.assertEqual(response.status_code, 200)

    keys_to_delete = ['qm1']
    response = signed_post(delete_request_path, json.dumps(keys_to_delete))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.json['keys_deleted'], ['qm1'])

    # qm1 is gone; both finals matches remain.
    # (The original ran this identical block twice back-to-back; deduplicated.)
    db_matches = Match.query(Match.event == self.event.key).fetch(None)
    self.assertEqual(len(db_matches), 2)
    self.assertTrue('2014casj_f1m1' in [m.key.id() for m in db_matches])
    self.assertTrue('2014casj_f1m2' in [m.key.id() for m in db_matches])

    # verify match data
    match = Match.get_by_id('2014casj_f1m2')
    self.assertEqual(match.time, datetime.datetime(2014, 8, 31, 18, 0))
    self.assertEqual(match.time_string, '11:00 AM')
    self.assertEqual(match.alliances['red']['score'], 250)
    self.assertEqual(match.score_breakdown['red']['truss+catch'], 20)
def test_2017scmb(self):
    """
    Replay the 2017scmb FMS API data at several sim times and verify the
    quarterfinal scores, including the qf4 tiebreaker appearing in the
    final sync (QF count goes from 12 to 13).

    The repeated per-match assertions of the original were factored into
    local helpers; leading-zero day literals (04) were normalized to 4
    (same value, and valid under Python 3).
    """
    event = Event(
        id='2017scmb',
        event_short='scmb',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    def sync_at(sim_time, expected_qf_count):
        # Pull matches as they existed at sim_time and check the QF count.
        MatchManipulator.createOrUpdate(
            DatafeedFMSAPI('v2.0', sim_time=sim_time).getMatches('2017scmb'))
        MatchHelper.deleteInvalidMatches(event.matches, event)
        qf_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'),
                                 Match.comp_level == 'qf').fetch()
        self.assertEqual(len(qf_matches), expected_qf_count)

    def assert_scores(short_key, red_score, blue_score):
        # Scores must agree between `alliances` and `score_breakdown`.
        match = Match.get_by_id('2017scmb_{}'.format(short_key))
        self.assertEqual(match.alliances['red']['score'], red_score)
        self.assertEqual(match.alliances['blue']['score'], blue_score)
        self.assertEqual(match.score_breakdown['red']['totalPoints'], red_score)
        self.assertEqual(match.score_breakdown['blue']['totalPoints'], blue_score)

    sync_at(datetime.datetime(2017, 3, 4, 19, 17), 12)
    assert_scores('qf4m1', 305, 305)
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    sync_at(datetime.datetime(2017, 3, 4, 19, 50), 12)
    assert_scores('qf4m1', 305, 305)
    assert_scores('qf4m2', 213, 305)
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    sync_at(datetime.datetime(2017, 3, 4, 20, 12), 12)
    assert_scores('qf4m1', 305, 305)
    assert_scores('qf4m2', 213, 305)
    assert_scores('qf4m3', 312, 255)
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    sync_at(datetime.datetime(2017, 3, 4, 20, 48), 13)
    assert_scores('qf4m1', 305, 305)
    assert_scores('qf4m2', 213, 305)
    assert_scores('qf4m3', 312, 255)
    assert_scores('qf4m4', 310, 306)
def test_2017ncwin(self):
    """
    Replay the 2017ncwin FMS API data at several sim times and verify the
    semifinal scores, including the sf2 tiebreaker appearing in the final
    sync (SF count goes from 6 to 7).

    The repeated per-match assertions of the original were factored into
    local helpers; leading-zero day literals (05) were normalized to 5
    (same value, and valid under Python 3).
    """
    event = Event(id='2017ncwin',
                  event_short='ncwin',
                  year=2017,
                  event_type_enum=0,
                  timezone_id='America/New_York')
    event.put()

    def sync_at(sim_time, expected_sf_count):
        # Pull matches as they existed at sim_time and check the SF count.
        MatchManipulator.createOrUpdate(
            DatafeedFMSAPI('v2.0', sim_time=sim_time).getMatches('2017ncwin'))
        # NOTE(review): called with a single argument here, but other tests
        # in this file call deleteInvalidMatches(matches, event) -- confirm
        # the expected signature. Call preserved as-is.
        MatchHelper.deleteInvalidMatches(event.matches)
        sf_matches = Match.query(Match.event == ndb.Key(Event, '2017ncwin'),
                                 Match.comp_level == 'sf').fetch()
        self.assertEqual(len(sf_matches), expected_sf_count)

    def assert_scores(short_key, red_score, blue_score):
        # Scores must agree between `alliances` and `score_breakdown`.
        match = Match.get_by_id('2017ncwin_{}'.format(short_key))
        self.assertEqual(match.alliances['red']['score'], red_score)
        self.assertEqual(match.alliances['blue']['score'], blue_score)
        self.assertEqual(match.score_breakdown['red']['totalPoints'], red_score)
        self.assertEqual(match.score_breakdown['blue']['totalPoints'], blue_score)

    sync_at(datetime.datetime(2017, 3, 5, 21, 2), 6)
    assert_scores('sf2m1', 265, 150)
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    sync_at(datetime.datetime(2017, 3, 5, 21, 30), 6)
    assert_scores('sf2m1', 265, 150)
    assert_scores('sf2m2', 205, 205)
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    sync_at(datetime.datetime(2017, 3, 5, 21, 35), 6)
    assert_scores('sf2m1', 265, 150)
    assert_scores('sf2m2', 205, 205)
    assert_scores('sf2m3', 145, 265)
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    sync_at(datetime.datetime(2017, 3, 5, 21, 51), 7)
    assert_scores('sf2m1', 265, 150)
    assert_scores('sf2m2', 205, 205)
    assert_scores('sf2m3', 145, 265)
    assert_scores('sf2m4', 180, 305)
def create_target_model(self, suggestion):
    """Apply an accepted match-video suggestion to its target Match."""
    target_match = Match.get_by_id(suggestion.target_key)
    return MatchSuggestionAccepter.accept_suggestion(target_match, suggestion)
def update_bluezone(cls, live_events):
    """
    Find the current best match to watch.

    Currently favors showing something over nothing, is okay with switching
    TO a feed in the middle of a match, but avoids switching FROM a feed in
    the middle of a match.
    1. Get the earliest predicted unplayed match across all live events
    2. Get all matches that start within TIME_BUCKET of that match
    3. Switch to hottest match in that bucket unless MAX_TIME_PER_MATCH is
       hit (in which case blacklist for the future)
    4. Repeat

    Returns a fake Event whose webcast points at the chosen match's event.
    Side effects: persists state to the 'bluezone' Sitevar, pushes updates
    to Firebase, and appends a log to cloudstorage.
    """
    now = datetime.datetime.now()
    logging.info("[BLUEZONE] Current time: {}".format(now))
    # `to_log` mirrors every logging call; flushed to cloudstorage at the end.
    to_log = '--------------------------------------------------\n'
    to_log += "[BLUEZONE] Current time: {}\n".format(now)

    slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
    slack_url = None
    if slack_sitevar:
        slack_url = slack_sitevar.contents.get('bluezone', '')

    # Load persisted selection state from the 'bluezone' Sitevar.
    bluezone_config = Sitevar.get_or_insert('bluezone')
    logging.info("[BLUEZONE] Config (updated {}): {}".format(
        bluezone_config.updated, bluezone_config.contents))
    to_log += "[BLUEZONE] Config (updated {}): {}\n".format(
        bluezone_config.updated, bluezone_config.contents)
    current_match_key = bluezone_config.contents.get('current_match')
    last_match_key = bluezone_config.contents.get('last_match')
    current_match_predicted_time = bluezone_config.contents.get(
        'current_match_predicted')
    if current_match_predicted_time:
        current_match_predicted_time = datetime.datetime.strptime(
            current_match_predicted_time, cls.TIME_PATTERN)
    current_match_switch_time = bluezone_config.contents.get(
        'current_match_switch_time')
    if current_match_switch_time:
        current_match_switch_time = datetime.datetime.strptime(
            current_match_switch_time, cls.TIME_PATTERN)
    else:
        current_match_switch_time = now
    # Blacklists are stored as lists in the Sitevar; normalize to sets.
    blacklisted_match_keys = bluezone_config.contents.get(
        'blacklisted_matches', set())
    if blacklisted_match_keys:
        blacklisted_match_keys = set(blacklisted_match_keys)
    blacklisted_event_keys = bluezone_config.contents.get(
        'blacklisted_events', set())
    if blacklisted_event_keys:
        blacklisted_event_keys = set(blacklisted_event_keys)

    current_match = Match.get_by_id(
        current_match_key) if current_match_key else None
    last_match = Match.get_by_id(
        last_match_key) if last_match_key else None

    logging.info("[BLUEZONE] live_events: {}".format(
        [le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] live_events: {}\n".format(
        [le.key.id() for le in live_events])
    # NOTE: relies on Python 2 filter() returning a list (sorted and
    # indexed below).
    live_events = filter(lambda e: e.webcast_status != 'offline', live_events)
    for event in live_events:
        # Fetch all matches and details asynchronously
        event.prep_matches()
        event.prep_details()
    logging.info("[BLUEZONE] Online live_events: {}".format(
        [le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] Online live_events: {}\n".format(
        [le.key.id() for le in live_events])
    upcoming_matches = cls.get_upcoming_matches(live_events)
    upcoming_matches = filter(lambda m: m.predicted_time is not None,
                              upcoming_matches)
    upcoming_predictions = cls.get_upcoming_match_predictions(live_events)

    # (1, 2) Find earliest predicted unplayed match and all other matches
    # that start within TIME_BUCKET of that match
    upcoming_matches.sort(key=lambda match: match.predicted_time)
    potential_matches = []
    time_cutoff = None
    logging.info(
        "[BLUEZONE] all upcoming matches sorted by predicted time: {}".format(
            [um.key.id() for um in upcoming_matches]))
    to_log += "[BLUEZONE] all upcoming sorted by predicted time: {}\n".format(
        [um.key.id() for um in upcoming_matches])
    for match in upcoming_matches:
        if match.predicted_time:
            if time_cutoff is None:
                # First (earliest) match defines the bucket window.
                time_cutoff = match.predicted_time + cls.TIME_BUCKET
                potential_matches.append(match)
            elif match.predicted_time < time_cutoff:
                potential_matches.append(match)
            else:
                break  # Matches are sorted by predicted_time
    logging.info(
        "[BLUEZONE] potential_matches sorted by predicted time: {}".format(
            [pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by predicted time: {}\n".format(
        [pm.key.id() for pm in potential_matches])

    # (3) Choose hottest match that's not blacklisted
    cls.calculate_match_hotness(potential_matches, upcoming_predictions)
    potential_matches.sort(key=lambda match: -match.hotness)
    logging.info(
        "[BLUEZONE] potential_matches sorted by hotness: {}".format(
            [pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by hotness: {}\n".format(
        [pm.key.id() for pm in potential_matches])

    bluezone_matches = []
    new_blacklisted_match_keys = set()

    # If the current match hasn't finished yet, don't even bother
    cutoff_time = current_match_switch_time + cls.MAX_TIME_PER_MATCH
    logging.info(
        "[BLUEZONE] Current match played? {}, now = {}, cutoff = {}".
        format(current_match.has_been_played if current_match else None,
               now, cutoff_time))
    to_log += "[BLUEZONE] Current match played? {}, now = {}, cutoff = {}\n".format(
        current_match.has_been_played if current_match else None, now,
        cutoff_time)
    if current_match and not current_match.has_been_played and now < cutoff_time \
            and current_match_key not in blacklisted_match_keys \
            and current_match.event_key_name not in blacklisted_event_keys:
        logging.info("[BLUEZONE] Keeping current match {}".format(
            current_match.key.id()))
        to_log += "[BLUEZONE] Keeping current match {}\n".format(
            current_match.key.id())
        bluezone_matches.append(current_match)

    for match in potential_matches:
        if len(bluezone_matches) >= 2:  # one current, one future
            break
        logging.info("[BLUEZONE] Trying potential match: {}".format(
            match.key.id()))
        to_log += "[BLUEZONE] Trying potential match: {}\n".format(
            match.key.id())
        if filter(lambda m: m.key.id() == match.key.id(), bluezone_matches):
            # Already selected (e.g. kept as the current match above).
            logging.info("[BLUEZONE] Match {} already chosen".format(
                match.key.id()))
            to_log += "[BLUEZONE] Match {} already chosen\n".format(
                match.key.id())
            continue
        if match.event_key_name in blacklisted_event_keys:
            logging.info(
                "[BLUEZONE] Event {} is blacklisted, skipping...".format(
                    match.event_key_name))
            to_log += "[BLUEZONE] Event {} is blacklisted, skipping...\n".format(
                match.event_key_name)
            continue
        if match.key.id() not in blacklisted_match_keys:
            if match.key.id() == current_match_key:
                if current_match_predicted_time and cutoff_time < now and len(
                        potential_matches) > 1:
                    # We've been on this match too long
                    new_blacklisted_match_keys.add(match.key.id())
                    logging.info(
                        "[BLUEZONE] Adding match to blacklist: {}".format(
                            match.key.id()))
                    to_log += "[BLUEZONE] Adding match to blacklist: {}\n".format(
                        match.key.id())
                    logging.info(
                        "[BLUEZONE] scheduled time: {}, now: {}".format(
                            current_match_predicted_time, now))
                    to_log += "[BLUEZONE] scheduled time: {}, now: {}\n".format(
                        current_match_predicted_time, now)
                    OutgoingNotificationHelper.send_slack_alert(
                        slack_url, "Blacklisting match {}. Predicted time: {}, now: {}"
                        .format(match.key.id(), current_match_predicted_time, now))
                else:
                    # We can continue to use this match
                    bluezone_matches.append(match)
                    logging.info(
                        "[BLUEZONE] Continuing to use match: {}".format(
                            match.key.id()))
                    to_log += "[BLUEZONE] Continuing to use match: {}\n".format(
                        match.key.id())
            else:
                # Found a new good match
                bluezone_matches.append(match)
                logging.info(
                    "[BLUEZONE] Found a good new match: {}".format(
                        match.key.id()))
                to_log += "[BLUEZONE] Found a good new match: {}\n".format(
                    match.key.id())
        else:
            logging.info("[BLUEZONE] Match already blacklisted: {}".format(
                match.key.id()))
            to_log += "[BLUEZONE] Match already blacklisted: {}\n".format(
                match.key.id())
            new_blacklisted_match_keys.add(match.key.id())

    if not bluezone_matches:
        logging.info("[BLUEZONE] No match selected")
        to_log += "[BLUEZONE] No match selected\n"

    logging.info("[BLUEZONE] All selected matches: {}".format(
        [m.key.id() for m in bluezone_matches]))
    to_log += "[BLUEZONE] All selected matches: {}\n".format(
        [m.key.id() for m in bluezone_matches])

    # (3) Switch to hottest match
    fake_event = cls.build_fake_event()
    if bluezone_matches:
        bluezone_match = bluezone_matches[0]
        real_event = filter(
            lambda x: x.key_name == bluezone_match.event_key_name,
            live_events)[0]
        # Create Fake event for return
        fake_event.webcast_json = json.dumps(
            [real_event.current_webcasts[0]])

        if bluezone_match.key_name != current_match_key:
            current_match_switch_time = now
            logging.info("[BLUEZONE] Switching to: {}".format(
                bluezone_match.key.id()))
            to_log += "[BLUEZONE] Switching to: {}\n".format(
                bluezone_match.key.id())
            OutgoingNotificationHelper.send_slack_alert(
                slack_url,
                "It is now {}. Switching BlueZone to {}, scheduled for {} and predicted to be at {}."
                .format(now, bluezone_match.key.id(), bluezone_match.time,
                        bluezone_match.predicted_time))
            if not current_match or current_match.has_been_played:
                last_match = current_match

        # Only need to update if things changed
        if bluezone_match.key_name != current_match_key or new_blacklisted_match_keys != blacklisted_match_keys:
            FirebasePusher.update_event(fake_event)
            bluezone_config.contents = {
                'current_match': bluezone_match.key.id(),
                'last_match': last_match.key.id() if last_match else '',
                'current_match_predicted': bluezone_match.predicted_time.strftime(cls.TIME_PATTERN),
                'blacklisted_matches': list(new_blacklisted_match_keys),
                'blacklisted_events': list(blacklisted_event_keys),
                'current_match_switch_time': current_match_switch_time.strftime(cls.TIME_PATTERN),
            }
            bluezone_config.put()

    # Log to cloudstorage
    log_dir = '/tbatv-prod-hrd.appspot.com/tba-logging/bluezone/'
    log_file = 'bluezone_{}.txt'.format(now.date())
    full_path = log_dir + log_file
    existing_contents = ''
    # Append semantics: read the day's log (if any) and rewrite with the
    # new entries added.
    if full_path in set(
            [f.filename for f in cloudstorage.listbucket(log_dir)]):
        with cloudstorage.open(full_path, 'r') as existing_file:
            existing_contents = existing_file.read()
    with cloudstorage.open(full_path, 'w') as new_file:
        new_file.write(existing_contents + to_log)

    # Push current + previous matches (dropping Nones) to the Firebase feed.
    bluezone_matches.insert(0, last_match)
    bluezone_matches = filter(lambda m: m is not None, bluezone_matches)
    FirebasePusher.replace_event_matches('bluezone', bluezone_matches)

    return fake_event
def test_2017flwp_sequence(self):
    """Replay archived FMS API hybrid-schedule responses for 2017flwp in
    chronological order and verify the semifinal results converge to the
    expected scores (including the tied/replayed matches).
    """
    event = Event(id='2017flwp',
                  event_short='flwp',
                  year=2017,
                  event_type_enum=0,
                  timezone_id='America/New_York')
    event.put()

    event_code = 'flwp'
    file_prefix = 'frc-api-response/v2.0/2017/schedule/{}/playoff/hybrid/'.format(
        event_code)

    # List every archived response file for this event's playoff schedule.
    context = ndb.get_context()
    result = context.urlfetch(
        'https://www.googleapis.com/storage/v1/b/bucket/o?bucket=tbatv-prod-hrd.appspot.com&prefix={}'
        .format(file_prefix)).get_result()

    # Replay each snapshot slightly after its capture time so the simulated
    # datafeed serves that snapshot's data.
    for item in json.loads(result.content)['items']:
        filename = item['name']
        time_str = filename.replace(file_prefix, '').replace('.json',
                                                             '').strip()
        file_time = datetime.datetime.strptime(time_str,
                                               "%Y-%m-%d %H:%M:%S.%f")
        query_time = file_time + datetime.timedelta(seconds=30)
        MatchManipulator.createOrUpdate(
            DatafeedFMSAPI('v2.0', sim_time=query_time).getMatches(
                '2017{}'.format(event_code)),
            run_post_update_hook=False)
    MatchHelper.deleteInvalidMatches(event.matches)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017flwp'),
                             Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 7)

    # (match key, expected red score, expected blue score); the score
    # breakdown totalPoints must agree with the alliance scores.
    expected_scores = [
        ('2017flwp_sf1m1', 305, 255),
        ('2017flwp_sf1m2', 165, 258),
        ('2017flwp_sf1m3', 255, 255),
        ('2017flwp_sf1m4', 255, 255),
        ('2017flwp_sf1m5', 165, 263),
    ]
    for match_key, red_score, blue_score in expected_scores:
        match = Match.get_by_id(match_key)
        self.assertEqual(match.alliances['red']['score'], red_score)
        self.assertEqual(match.alliances['blue']['score'], blue_score)
        self.assertEqual(match.score_breakdown['red']['totalPoints'],
                         red_score)
        self.assertEqual(match.score_breakdown['blue']['totalPoints'],
                         blue_score)
def _post_signed(self, path, request_body):
    """POST `request_body` to the trusted API `path` with the required
    auth headers, signing with md5(secret + path + body).

    Returns the webtest response (errors are not raised, so callers can
    assert on non-200 status codes).
    """
    sig = md5.new('{}{}{}'.format('321tEsTsEcReT', path,
                                  request_body)).hexdigest()
    return self.testapp.post(path,
                             request_body,
                             headers={
                                 'X-TBA-Auth-Id': 'tEsT_id_1',
                                 'X-TBA-Auth-Sig': sig
                             },
                             expect_errors=True)

def test_matches_update(self):
    """Exercise the trusted-API match endpoints end to end:
    update (add matches), delete (by partial key), and delete_all
    (which requires the event key as the request body), verifying
    datastore contents after each call.
    """
    self.matches_auth.put()

    update_request_path = '/api/trusted/v1/event/2014casj/matches/update'
    delete_request_path = '/api/trusted/v1/event/2014casj/matches/delete'
    delete_all_request_path = '/api/trusted/v1/event/2014casj/matches/delete_all'

    # add one match
    matches = [{
        'comp_level': 'qm',
        'set_number': 1,
        'match_number': 1,
        'alliances': {
            'red': {
                'teams': ['frc1', 'frc2', 'frc3'],
                'score': 25
            },
            'blue': {
                'teams': ['frc4', 'frc5', 'frc6'],
                'score': 26
            },
        },
        'time_string': '9:00 AM',
        'time_utc': '2014-08-31T16:00:00',
    }]
    response = self._post_signed(update_request_path, json.dumps(matches))
    self.assertEqual(response.status_code, 200)

    db_matches = Match.query(Match.event == self.event.key).fetch(None)
    self.assertEqual(len(db_matches), 1)
    self.assertTrue('2014casj_qm1' in [m.key.id() for m in db_matches])

    # add another match
    matches = [{
        'comp_level': 'f',
        'set_number': 1,
        'match_number': 1,
        'alliances': {
            'red': {
                'teams': ['frc1', 'frc2', 'frc3'],
                'score': 250
            },
            'blue': {
                'teams': ['frc4', 'frc5', 'frc6'],
                'score': 260
            },
        },
        'time_string': '10:00 AM',
        'time_utc': '2014-08-31T17:00:00',
    }]
    response = self._post_signed(update_request_path, json.dumps(matches))
    self.assertEqual(response.status_code, 200)

    db_matches = Match.query(Match.event == self.event.key).fetch(None)
    self.assertEqual(len(db_matches), 2)
    self.assertTrue('2014casj_qm1' in [m.key.id() for m in db_matches])
    self.assertTrue('2014casj_f1m1' in [m.key.id() for m in db_matches])

    # add a match and delete a match
    matches = [{
        'comp_level': 'f',
        'set_number': 1,
        'match_number': 2,
        'alliances': {
            'red': {
                'teams': ['frc1', 'frc2', 'frc3'],
                'score': 250
            },
            'blue': {
                'teams': ['frc4', 'frc5', 'frc6'],
                'score': 260
            },
        },
        'score_breakdown': {
            'red': {
                'auto': 20,
                'assist': 40,
                'truss+catch': 20,
                'teleop_goal+foul': 20
            },
            'blue': {
                'auto': 40,
                'assist': 60,
                'truss+catch': 10,
                'teleop_goal+foul': 40
            },
        },
        'time_string': '11:00 AM',
        'time_utc': '2014-08-31T18:00:00',
    }]
    response = self._post_signed(update_request_path, json.dumps(matches))
    self.assertEqual(response.status_code, 200)

    keys_to_delete = ['qm1']
    response = self._post_signed(delete_request_path,
                                 json.dumps(keys_to_delete))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.json['keys_deleted'], ['qm1'])

    # NOTE: the original test repeated this query/assert block twice
    # verbatim; the redundant copy has been removed.
    db_matches = Match.query(Match.event == self.event.key).fetch(None)
    self.assertEqual(len(db_matches), 2)
    self.assertTrue('2014casj_f1m1' in [m.key.id() for m in db_matches])
    self.assertTrue('2014casj_f1m2' in [m.key.id() for m in db_matches])

    # verify match data
    match = Match.get_by_id('2014casj_f1m2')
    self.assertEqual(match.time, datetime.datetime(2014, 8, 31, 18, 0))
    self.assertEqual(match.time_string, '11:00 AM')
    self.assertEqual(match.alliances['red']['score'], 250)
    self.assertEqual(match.score_breakdown['red']['truss+catch'], 20)

    # test delete all matches: an empty body must be rejected...
    response = self._post_signed(delete_all_request_path, '')
    self.assertEqual(response.status_code, 400)

    # ...and the event key as the body confirms the deletion.
    response = self._post_signed(delete_all_request_path, '2014casj')
    self.assertEqual(response.status_code, 200)

    db_matches = Match.query(Match.event == self.event.key).fetch(None)
    self.assertEqual(len(db_matches), 0)
def test_2017scmb(self):
    """Replay four timestamped FMS API snapshots for 2017scmb and verify
    the quarterfinal scores after each one, including the extra
    tiebreaker match (qf4m4) that appears in the final snapshot.
    """
    event = Event(id='2017scmb',
                  event_short='scmb',
                  year=2017,
                  event_type_enum=0,
                  timezone_id='America/New_York')
    event.put()

    # Expected (match key, red score, blue score) once each match has
    # been played; the breakdown totalPoints must match the scores.
    qf4m1 = ('2017scmb_qf4m1', 305, 305)
    qf4m2 = ('2017scmb_qf4m2', 213, 305)
    qf4m3 = ('2017scmb_qf4m3', 312, 255)
    qf4m4 = ('2017scmb_qf4m4', 310, 306)

    # (simulated fetch time, expected qf match count, cumulative results)
    snapshots = [
        (datetime.datetime(2017, 3, 4, 19, 17), 12, [qf4m1]),
        (datetime.datetime(2017, 3, 4, 19, 50), 12, [qf4m1, qf4m2]),
        (datetime.datetime(2017, 3, 4, 20, 12), 12,
         [qf4m1, qf4m2, qf4m3]),
        (datetime.datetime(2017, 3, 4, 20, 48), 13,
         [qf4m1, qf4m2, qf4m3, qf4m4]),
    ]

    for index, (sim_time, expected_count, expectations) in enumerate(
            snapshots):
        if index > 0:
            # Prevent data from leaking between tests
            ndb.get_context().clear_cache()

        MatchManipulator.createOrUpdate(
            DatafeedFMSAPI('v2.0',
                           sim_time=sim_time).getMatches('2017scmb'))
        MatchHelper.deleteInvalidMatches(event.matches)

        qf_matches = Match.query(
            Match.event == ndb.Key(Event, '2017scmb'),
            Match.comp_level == 'qf').fetch()
        self.assertEqual(len(qf_matches), expected_count)

        for match_key, red_score, blue_score in expectations:
            match = Match.get_by_id(match_key)
            self.assertEqual(match.alliances['red']['score'], red_score)
            self.assertEqual(match.alliances['blue']['score'], blue_score)
            self.assertEqual(match.score_breakdown['red']['totalPoints'],
                             red_score)
            self.assertEqual(match.score_breakdown['blue']['totalPoints'],
                             blue_score)