def get(self):
    """Render the admin API-status page from the 'apistatus' and 'trustedapi' sitevars."""
    self._require_admin()
    status_sitevar = Sitevar.get_or_insert('apistatus', values_json="{}")
    trusted_sitevar = Sitevar.get_or_insert(
        'trustedapi', values_json='{"enabled": true}')
    status = status_sitevar.contents
    android = status.get('android', None)
    ios = status.get('ios', None)

    def app_version(platform_status, field):
        # -1 is the sentinel shown when no status has been stored for the platform.
        return platform_status.get(field, -1) if platform_status else -1

    self.template_values.update({
        'max_year': status.get('max_season', 2016),
        'current_year': status.get('current_season', 2016),
        'android_latest_version': app_version(android, 'latest_app_version'),
        'android_min_version': app_version(android, 'min_app_version'),
        'ios_latest_version': app_version(ios, 'latest_app_version'),
        'ios_min_version': app_version(ios, 'min_app_version'),
        'enable_trustedapi': trusted_sitevar.contents,
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/apistatus.html')
    self.response.out.write(template.render(path, self.template_values))
def post(self):
    """Save the API status + trusted-API toggles submitted from the admin form."""
    self._require_admin()
    trusted_sitevar = Sitevar.get_or_insert('trustedapi')
    sitevar = Sitevar.get_or_insert('apistatus')
    old_value = sitevar.contents
    # Build the whole status payload in one literal instead of piecemeal assignment.
    status = {
        'max_season': int(self.request.get('max_year')),
        'current_season': int(self.request.get('current_year')),
        'android': {
            'latest_app_version': int(self.request.get('android_latest_version')),
            'min_app_version': int(self.request.get('android_min_version')),
        },
        'ios': {
            'latest_app_version': int(self.request.get('ios_latest_version')),
            'min_app_version': int(self.request.get('ios_min_version')),
        },
    }
    sitevar.contents = status
    sitevar.put()
    # Keys are AuthType ids; an unchecked box posts '' which bool() maps to False.
    trusted_status = {
        1: bool(self.request.get('enable_match_video')),
        2: bool(self.request.get('enable_event_teams')),
        3: bool(self.request.get('enable_event_matches')),
        4: bool(self.request.get('enable_event_rankings')),
        5: bool(self.request.get('enable_event_alliances')),
        6: bool(self.request.get('enable_event_awards')),
    }
    trusted_sitevar.contents = trusted_status
    trusted_sitevar.put()
    ApiStatusController.clear_cache_if_needed(old_value, status)
    self.redirect('/admin/apistatus')
def post(self):
    """Persist the auth/API secrets submitted from the admin auth-keys form."""
    self._require_admin()
    # (sitevar name, new contents) pairs, in the same order the form fields are read.
    updates = (
        ('google.secrets', {'api_key': self.request.get("google_secret")}),
        ('firebase.secrets', {'FIREBASE_SECRET': self.request.get("firebase_secret")}),
        ('fmsapi.secrets', {
            'username': self.request.get("fmsapi_user"),
            'authkey': self.request.get("fmsapi_secret"),
        }),
        ('mobile.clientIds', {
            'web': self.request.get("web_client_id"),
            'android': self.request.get("android_client_id"),
            'ios': self.request.get("ios_client_id"),
        }),
        ('gcm.serverKey', {'gcm_key': self.request.get("gcm_key")}),
    )
    for name, contents in updates:
        sitevar = Sitevar.get_or_insert(name)
        sitevar.contents = contents
        sitevar.put()
    self.redirect('/admin/authkeys')
def get(self):
    """Render the admin auth-keys form, prefilled from the secret sitevars.

    Bug fix: 'ios_client_id' was missing from the template values even though
    the POST handler stores an 'ios' entry in 'mobile.clientIds' (and the
    extended version of this handler renders it), so the saved iOS client id
    was never shown back in the form.
    """
    self._require_admin()
    google_secrets = Sitevar.get_or_insert('google.secrets')
    firebase_secrets = Sitevar.get_or_insert('firebase.secrets')
    fmsapi_secrets = Sitevar.get_or_insert('fmsapi.secrets')
    mobile_clientIds = Sitevar.get_or_insert('mobile.clientIds')
    gcm_serverKey = Sitevar.get_or_insert('gcm.serverKey')
    # Guard against sitevars whose contents are not dicts yet.
    fmsapi_keys = fmsapi_secrets.contents if fmsapi_secrets and isinstance(
        fmsapi_secrets.contents, dict) else {}
    clientIds = mobile_clientIds.contents if mobile_clientIds and isinstance(
        mobile_clientIds.contents, dict) else {}
    self.template_values.update({
        'google_secret': google_secrets.contents.get('api_key', "") if google_secrets else "",
        'firebase_secret': firebase_secrets.contents.get('FIREBASE_SECRET', "") if firebase_secrets else "",
        'fmsapi_user': fmsapi_keys.get('username', ''),
        'fmsapi_secret': fmsapi_keys.get('authkey', ''),
        'web_client_id': clientIds.get('web', ''),
        'android_client_id': clientIds.get('android', ''),
        'ios_client_id': clientIds.get('ios', ''),  # was missing before
        'gcm_key': gcm_serverKey.contents.get("gcm_key", "") if gcm_serverKey else "",
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/authkeys.html')
    self.response.out.write(template.render(path, self.template_values))
def post(self):
    """Accept an event media suggestion and announce it on the FUN Slack.

    Bug fix: the Slack message was built with a *byte* string template; if
    ``display_name`` contains non-ASCII characters, ``str.format`` would raise
    ``UnicodeEncodeError``/``UnicodeDecodeError`` in Python 2. Use a unicode
    literal and encode to UTF-8 explicitly, as the sibling handler already does.
    """
    self._require_registration()
    event_key = self.request.get("event_key")
    status, suggestion = SuggestionCreator.createEventMediaSuggestion(
        author_account_key=self.user_bundle.account.key,
        media_url=self.request.get("media_url"),
        event_key=event_key)
    if status == 'success':
        # Send an update to the FUN slack
        slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
        if slack_sitevar:
            slack_url = slack_sitevar.contents.get('fun', '')
            if slack_url:
                message_body = u"{0} ({1}) has suggested a video for <https://thebluealliance.com/event/{2}|{2}>: https://youtu.be/{3}.\nSee all suggestions at https://www.thebluealliance.com/suggest/event/media/review".format(
                    self.user_bundle.account.display_name,
                    self.user_bundle.account.email,
                    event_key,
                    suggestion.contents['foreign_key']).encode('utf-8')
                OutgoingNotificationHelper.send_slack_alert(
                    slack_url, message_body, [])
    self.redirect('/suggest/event/media?event_key=%s&status=%s' % (event_key, status))
def _update_live_events_helper(cls):
    """Collect the events that should currently appear as live.

    Returns a dict of event key id -> Event, combining this week's events that
    are happening now with any keys force-listed in the 'forced_live_events'
    sitevar (used to surface Champs before they formally start).

    Bug fix: ``ndb.get_multi`` returns ``None`` for keys with no stored entity,
    so a stale/mistyped key in the sitevar previously crashed with an
    AttributeError on ``event.webcast``; such entries are now skipped.
    """
    week_events = EventHelper.getWeekEvents()
    events_by_key = {}
    live_events = []
    for event in week_events:
        if event.now:
            event._webcast = event.current_webcasts  # Only show current webcasts
            for webcast in event.webcast:
                WebcastOnlineHelper.add_online_status_async(webcast)
            events_by_key[event.key.id()] = event
        if event.within_a_day:
            live_events.append(event)

    # To get Champ events to show up before they are actually going on
    forced_live_events = Sitevar.get_or_insert(
        'forced_live_events', values_json=json.dumps([]))
    for event in ndb.get_multi([ndb.Key('Event', ekey) for ekey in forced_live_events.contents]):
        if event is None:
            # Stale or invalid key in the sitevar; nothing to show for it.
            continue
        if event.webcast:
            for webcast in event.webcast:
                WebcastOnlineHelper.add_online_status_async(webcast)
        events_by_key[event.key.id()] = event

    # `live_events` feeds the (currently disabled) BlueZone block below.
    # # Add in the Fake TBA BlueZone event (watch for circular imports)
    # from helpers.bluezone_helper import BlueZoneHelper
    # bluezone_event = BlueZoneHelper.update_bluezone(live_events)
    # if bluezone_event:
    #     for webcast in bluezone_event.webcast:
    #         WebcastOnlineHelper.add_online_status_async(webcast)
    #     events_by_key[bluezone_event.key_name] = bluezone_event
    return events_by_key
def post(self):
    """Accept an event media suggestion and announce it on the FUN Slack."""
    self._require_registration()
    event_key = self.request.get("event_key")
    status, suggestion = SuggestionCreator.createEventMediaSuggestion(
        author_account_key=self.user_bundle.account.key,
        media_url=self.request.get("media_url"),
        event_key=event_key)
    if status == 'success':
        # Announce the new suggestion on the FUN slack, if a hook is configured.
        slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
        slack_url = slack_sitevar.contents.get('fun', '') if slack_sitevar else ''
        if slack_url:
            # Unicode template + explicit UTF-8 encode: display names may be non-ASCII.
            message_body = u"{0} ({1}) has suggested a video for <https://www.thebluealliance.com/event/{2}|{2}>: https://youtu.be/{3}.\nSee all suggestions at https://www.thebluealliance.com/suggest/event/media/review".format(
                self.user_bundle.account.display_name,
                self.user_bundle.account.email,
                event_key,
                suggestion.contents['foreign_key']).encode('utf-8')
            OutgoingNotificationHelper.send_slack_alert(slack_url, message_body, [])
    self.redirect('/suggest/event/media?event_key=%s&status=%s' % (event_key, status))
def update_champ_numbers(cls, match):
    """Fold one match's 2017 score breakdown into the running champ totals.

    Updates the 'champ_numbers' sitevar and pushes the new totals to Firebase
    via a deferred task.
    """
    champ_numbers_sitevar = Sitevar.get_or_insert(
        'champ_numbers',
        values_json=json.dumps({
            'kpa_accumulated': 0,
            'rotors_engaged': 0,
            'ready_for_takeoff': 0,
        }))
    totals = champ_numbers_sitevar.contents
    for color in ('red', 'blue'):
        breakdown = match.score_breakdown[color]
        totals['kpa_accumulated'] += breakdown['autoFuelPoints'] + breakdown['teleopFuelPoints']
        # Count only the highest rotor reached (engaging rotor N implies 1..N).
        for rotor_count in (4, 3, 2, 1):
            if breakdown['rotor{}Engaged'.format(rotor_count)]:
                totals['rotors_engaged'] += rotor_count
                break
        # 50 points per robot on the touchpad.
        totals['ready_for_takeoff'] += breakdown['teleopTakeoffPoints'] / 50
    champ_numbers_sitevar.contents = totals
    champ_numbers_sitevar.put()
    deferred.defer(
        cls._patch_data, 'champ_numbers', json.dumps(totals), _queue="firebase")
def blacklist(website):
    """Add `website` to the persistent website blacklist sitevar (no-op if present)."""
    sitevar = Sitevar.get_or_insert(
        'website_blacklist', values_json=json.dumps({'websites': []}))
    blacklisted = sitevar.contents.get('websites', [])
    if website not in blacklisted:
        blacklisted.append(website)
        sitevar.contents = {'websites': blacklisted}
        sitevar.put()
def get(self):
    """Render the admin auth-keys form, prefilled from the secret sitevars."""
    self._require_admin()
    google_secrets = Sitevar.get_or_insert('google.secrets')
    firebase_secrets = Sitevar.get_or_insert('firebase.secrets')
    fmsapi_secrets = Sitevar.get_or_insert('fmsapi.secrets')
    mobile_clientIds = Sitevar.get_or_insert('mobile.clientIds')
    gcm_serverKey = Sitevar.get_or_insert('gcm.serverKey')
    twitch_secrets = Sitevar.get_or_insert('twitch.secrets')
    livestream_secrets = Sitevar.get_or_insert('livestream.secrets')

    def lookup(sitevar, key):
        # Empty string when the sitevar is missing or has nothing stored yet.
        return sitevar.contents.get(key, "") if sitevar else ""

    def dict_contents(sitevar):
        # Contents only when they are actually a dict; {} otherwise.
        return sitevar.contents if sitevar and isinstance(sitevar.contents, dict) else {}

    fmsapi_keys = dict_contents(fmsapi_secrets)
    clientIds = dict_contents(mobile_clientIds)
    self.template_values.update({
        'google_secret': lookup(google_secrets, 'api_key'),
        'firebase_secret': lookup(firebase_secrets, 'FIREBASE_SECRET'),
        'fmsapi_user': fmsapi_keys.get('username', ''),
        'fmsapi_secret': fmsapi_keys.get('authkey', ''),
        'web_client_id': clientIds.get('web', ''),
        'android_client_id': clientIds.get('android', ''),
        'ios_client_id': clientIds.get('ios', ''),
        'gcm_key': lookup(gcm_serverKey, "gcm_key"),
        'twitch_secret': lookup(twitch_secrets, 'client_id'),
        'livestream_secret': lookup(livestream_secrets, 'api_key'),
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/authkeys.html')
    self.response.out.write(template.render(path, self.template_values))
def post(self, event_key):
    """Toggle per-event entries in the 'cmp_registration_hacks' sitevar.

    Each checkbox on the form either adds the event key to, or removes it
    from, one of the hack lists; the name override is keyed on the event.
    """
    self._require_admin()
    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)
    reg_sitevar = Sitevar.get_or_insert("cmp_registration_hacks", values_json="{}")

    def toggled(form_field, config_key):
        # Membership of event_key in the list under config_key follows the checkbox.
        entries = reg_sitevar.contents.get(config_key, [])
        if self.request.get(form_field):
            if event_key not in entries:
                entries.append(event_key)
            return entries
        return [e for e in entries if e != event_key]

    new_divisions_to_skip = toggled("event_sync_disable", "divisions_to_skip")
    new_start_day_to_last = toggled("set_start_day_to_last", "set_start_to_last_day")
    new_skip_eventteams = toggled("skip_eventteams", "skip_eventteams")

    new_name_overrides = reg_sitevar.contents.get("event_name_override", [])
    override_name = self.request.get("event_name_override")
    if override_name:
        if not any(o["event"] == event_key for o in new_name_overrides):
            new_name_overrides.append({"event": event_key, "name": override_name})
    else:
        new_name_overrides = [o for o in new_name_overrides if o["event"] != event_key]

    reg_sitevar.contents = {
        "divisions_to_skip": new_divisions_to_skip,
        "set_start_to_last_day": new_start_day_to_last,
        "skip_eventteams": new_skip_eventteams,
        "event_name_override": new_name_overrides,
    }
    reg_sitevar.put()
    self.redirect("/admin/event/{}".format(event_key))
def get(self):
    """Render the admin API-status page from the 'apistatus' and 'trustedapi' sitevars."""
    self._require_admin()
    status_sitevar = Sitevar.get_or_insert('apistatus', values_json="{}")
    trusted_sitevar = Sitevar.get_or_insert(
        'trustedapi', values_json='{"enabled": true}')
    status = status_sitevar.contents
    android = status.get('android', None)
    ios = status.get('ios', None)

    def app_version(platform_status, field):
        # -1 is the sentinel shown when no status has been stored for the platform.
        return platform_status.get(field, -1) if platform_status else -1

    self.template_values.update({
        'max_year': status.get('max_season', 2016),
        'current_year': status.get('current_season', 2016),
        'android_latest_version': app_version(android, 'latest_app_version'),
        'android_min_version': app_version(android, 'min_app_version'),
        'ios_latest_version': app_version(ios, 'latest_app_version'),
        'ios_min_version': app_version(ios, 'min_app_version'),
        'enable_trustedapi': trusted_sitevar.contents,
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/apistatus.html')
    self.response.out.write(template.render(path, self.template_values))
def _add_youtube_status_async(cls, webcast):
    # ndb tasklet-style helper (note the `yield`): fetches a YouTube video's
    # snippet via the Data API v3 to determine the webcast's status.
    # Assumes `webcast` is a dict whose 'channel' entry is a YouTube video id
    # -- TODO confirm against callers.
    google_secrets = Sitevar.get_or_insert('google.secrets')
    api_key = None
    if google_secrets and google_secrets.contents:
        api_key = google_secrets.contents.get('api_key')
    if api_key:
        try:
            url = 'https://www.googleapis.com/youtube/v3/videos?part=snippet&id={}&key={}'.format(webcast['channel'], api_key)
            rpc = urlfetch.create_rpc()
            result = yield urlfetch.make_fetch_call(rpc, url)
        except Exception, e:  # Python 2 syntax; `e` is unbound use here
            logging.error("URLFetch failed for: {}".format(url))
            # Bail out of the tasklet with no status on any fetch failure.
            raise ndb.Return(None)
    # NOTE(review): `result` is never read in this visible span -- the original
    # function most likely continues past this chunk to parse the response.
def _add_twitch_status_async(cls, webcast):
    # ndb tasklet-style helper (note the `yield`): queries the legacy Twitch
    # "kraken" streams API for the given channel's live status.
    # Assumes `webcast` is a dict with a 'channel' entry holding the Twitch
    # channel name -- TODO confirm against callers.
    twitch_secrets = Sitevar.get_or_insert('twitch.secrets')
    client_id = None
    if twitch_secrets and twitch_secrets.contents:
        client_id = twitch_secrets.contents.get('client_id')
    if client_id:
        try:
            url = 'https://api.twitch.tv/kraken/streams/{}?client_id={}'.format(webcast['channel'], client_id)
            rpc = urlfetch.create_rpc()
            result = yield urlfetch.make_fetch_call(rpc, url)
        except Exception, e:  # Python 2 syntax; `e` is unused here
            logging.error("URLFetch failed for: {}".format(url))
            # Bail out of the tasklet with no status on any fetch failure.
            raise ndb.Return(None)
    # NOTE(review): `result` is never read in this visible span -- the original
    # function most likely continues past this chunk to parse the response.
def post(self):
    """Persist the auth/API secrets submitted from the admin auth-keys form."""
    self._require_admin()
    # (sitevar name, new contents) pairs, in the same order the form fields are read.
    updates = (
        ('google.secrets', {'api_key': self.request.get("google_secret")}),
        ('firebase.secrets', {'FIREBASE_SECRET': self.request.get("firebase_secret")}),
        ('fmsapi.secrets', {
            'username': self.request.get("fmsapi_user"),
            'authkey': self.request.get("fmsapi_secret"),
        }),
        ('mobile.clientIds', {
            'web': self.request.get("web_client_id"),
            'android': self.request.get("android_client_id"),
            'ios': self.request.get("ios_client_id"),
        }),
        ('gcm.serverKey', {'gcm_key': self.request.get("gcm_key")}),
    )
    for name, contents in updates:
        sitevar = Sitevar.get_or_insert(name)
        sitevar.contents = contents
        sitevar.put()
    self.redirect('/admin/authkeys')
def _add_twitch_status_async(cls, webcast):
    # ndb tasklet-style helper (note the `yield`): queries the legacy Twitch
    # "kraken" streams API for the given channel's live status.
    # Assumes `webcast` is a dict with a 'channel' entry holding the Twitch
    # channel name -- TODO confirm against callers.
    twitch_secrets = Sitevar.get_or_insert('twitch.secrets')
    client_id = None
    if twitch_secrets and twitch_secrets.contents:
        client_id = twitch_secrets.contents.get('client_id')
    if client_id:
        try:
            url = 'https://api.twitch.tv/kraken/streams/{}?client_id={}'.format(
                webcast['channel'], client_id)
            rpc = urlfetch.create_rpc()
            result = yield urlfetch.make_fetch_call(rpc, url)
        except Exception, e:  # Python 2 syntax; `e` is unused here
            logging.error("URLFetch failed for: {}".format(url))
            # Bail out of the tasklet with no status on any fetch failure.
            raise ndb.Return(None)
    # NOTE(review): `result` is never read in this visible span -- the original
    # function most likely continues past this chunk to parse the response.
def post(self):
    """
    Configures scheduling a registration day in advance.

    Enqueues the requested year's event-details fetch every `interval`
    minutes for the whole requested day, and sets the 'turbo_mode' sitevar
    so caches matching that year expire quickly during the day.

    :param date_string: YYYY-mm-dd formatted day on which we poll faster
    :param event_year: The year of events to fetch
    :param interval: How many minutes between fetches
    """
    self._require_admin()
    start = datetime.strptime(self.request.get("date_string"), "%Y-%m-%d")
    event_year = int(self.request.get("event_year"))
    interval = int(self.request.get("interval"))

    minutes_per_day = 24 * 60
    now = datetime.now()
    for offset in xrange(0, minutes_per_day, interval):
        task_eta = start + timedelta(minutes=offset)
        if task_eta < now:
            continue  # Don't enqueue tasks in the past
        taskqueue.add(
            queue_name='datafeed',
            target='backend-tasks',
            url='/backend-tasks/get/event_list/{}'.format(event_year),
            eta=task_eta,
            method='GET')

    # Shorten cache expiration for this year's keys until the end of the day.
    end_timestamp = (start + timedelta(days=1) - datetime(1970, 1, 1)).total_seconds()
    turbo_sitevar = Sitevar.get_or_insert(
        'turbo_mode', description="Temporarily shorten cache expiration")
    turbo_sitevar.contents = {
        'regex': ".*{}.*".format(event_year),
        'valid_until': int(end_timestamp),
        'cache_length': 61,
    }
    turbo_sitevar.put()
    self.response.out.write(
        "Enqueued {} tasks to update {} events starting at {}".format(
            minutes_per_day / interval, event_year, start))
def post(self):
    """Save the API status + trusted-API toggles submitted from the admin form."""
    self._require_admin()
    trusted_sitevar = Sitevar.get_or_insert('trustedapi')
    sitevar = Sitevar.get_or_insert('apistatus')
    old_value = sitevar.contents
    # Build the whole status payload in one literal instead of piecemeal assignment.
    status = {
        'max_season': int(self.request.get('max_year')),
        'current_season': int(self.request.get('current_year')),
        'android': {
            'latest_app_version': int(self.request.get('android_latest_version')),
            'min_app_version': int(self.request.get('android_min_version')),
        },
        'ios': {
            'latest_app_version': int(self.request.get('ios_latest_version')),
            'min_app_version': int(self.request.get('ios_min_version')),
        },
    }
    sitevar.contents = status
    sitevar.put()
    # Keys are AuthType ids; an unchecked box posts '' which bool() maps to False.
    trusted_status = {
        1: bool(self.request.get('enable_match_video')),
        2: bool(self.request.get('enable_event_teams')),
        3: bool(self.request.get('enable_event_matches')),
        4: bool(self.request.get('enable_event_rankings')),
        5: bool(self.request.get('enable_event_alliances')),
        6: bool(self.request.get('enable_event_awards')),
    }
    trusted_sitevar.contents = trusted_status
    trusted_sitevar.put()
    ApiStatusController.clear_cache_if_needed(old_value, status)
    self.redirect('/admin/apistatus')
def post(self):
    """Globally enable or disable push notifications via the
    'notifications.enable' sitevar, and log who flipped the switch.

    Bug fix: the log messages read "push notificatios" (typo) and
    "push notification" (inconsistent singular); both now say
    "push notifications".
    """
    self._require_admin()
    user_id = self.user_bundle.account.key.id()
    action = self.request.get('enable')
    sitevar = Sitevar.get_or_insert('notifications.enable')
    if action == "true":
        sitevar.values_json = "true"
        logging.info("User {} enabled push notifications".format(user_id))
    else:
        sitevar.values_json = "false"
        logging.info("User {} disabled push notifications".format(user_id))
    sitevar.put()
    self.redirect('/admin/mobile')
def post(self):
    """
    Configures scheduling a registration day in advance.

    Enqueues the requested year's event-details fetch every `interval`
    minutes for the whole requested day, and sets the 'turbo_mode' sitevar
    so caches matching that year expire quickly during the day.

    :param date_string: YYYY-mm-dd formatted day on which we poll faster
    :param event_year: The year of events to fetch
    :param interval: How many minutes between fetches
    """
    self._require_admin()
    start = datetime.strptime(self.request.get("date_string"), "%Y-%m-%d")
    event_year = int(self.request.get("event_year"))
    interval = int(self.request.get("interval"))

    minutes_per_day = 24 * 60
    now = datetime.now()
    for offset in xrange(0, minutes_per_day, interval):
        task_eta = start + timedelta(minutes=offset)
        if task_eta < now:
            continue  # Don't enqueue tasks in the past
        taskqueue.add(
            queue_name='datafeed',
            target='backend-tasks',
            url='/backend-tasks/get/event_list/{}'.format(event_year),
            eta=task_eta,
            method='GET')

    # Shorten cache expiration for this year's keys until the end of the day.
    end_timestamp = (start + timedelta(days=1) - datetime(1970, 1, 1)).total_seconds()
    turbo_sitevar = Sitevar.get_or_insert(
        'turbo_mode', description="Temporarily shorten cache expiration")
    turbo_sitevar.contents = {
        'regex': ".*{}.*".format(event_year),
        'valid_until': int(end_timestamp),
        'cache_length': 61,
    }
    turbo_sitevar.put()
    self.response.out.write(
        "Enqueued {} tasks to update {} events starting at {}".format(
            minutes_per_day / interval, event_year, start))
def post(self):
    """Accept a team media suggestion; for GrabCAD models, post a rich
    notification (with accept/reject links) to the frcdesigns Slack."""
    self._require_registration()
    team_key = self.request.get("team_key")
    year_str = self.request.get("year")
    status, suggestion = SuggestionCreator.createTeamMediaSuggestion(
        author_account_key=self.user_bundle.account.key,
        media_url=self.request.get("media_url"),
        team_key=team_key,
        year_str=year_str)
    # Only CAD suggestions get the Slack treatment.
    if status == 'success' and suggestion.contents.get('media_type') == MediaType.GRABCAD:
        # Send an update to the frcdesigns slack
        slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
        if slack_sitevar:
            slack_url = slack_sitevar.contents.get('tbablog', '')
            if slack_url:
                model_details = json.loads(suggestion.contents['details_json'])
                # team_key[3:] strips the 'frc' prefix to get the team number.
                message_body = "{0} ({1}) has suggested a CAD model for team <https://thebluealliance.com/team/{2}/{3}|{2} in {3}>.".format(
                    self.user_bundle.account.display_name,
                    self.user_bundle.account.email,
                    team_key[3:],
                    year_str)
                # Slack attachment with a preview image and one-click review links.
                image_attachment = {
                    "footer": "<https://www.thebluealliance.com/suggest/cad/review|See all suggestions> on The Blue Alliance",
                    "fallback": "CAD model",
                    "title": model_details['model_name'],
                    "title_link": "https://grabcad.com/library/{}".format(suggestion.contents['foreign_key']),
                    # Swap GrabCAD's small card render for the large one.
                    "image_url": model_details['model_image'].replace('card.jpg', 'large.png'),
                    "fields": [
                        {
                            "title": "Accept",
                            "value": "<https://www.thebluealliance.com/suggest/cad/review?action=accept&id={}|Click Here>".format(suggestion.key.id()),
                            "short": True,
                        },
                        {
                            "title": "Reject",
                            "value": "<https://www.thebluealliance.com/suggest/cad/review?action=reject&id={}|Click Here>".format(suggestion.key.id()),
                            "short": True,
                        }
                    ],
                }
                SuggestionNotifier.send_slack_alert(slack_url, message_body, [image_attachment])
    self.redirect('/suggest/team/media?team_key=%s&year=%s&status=%s' % (team_key, year_str, status))
def _add_livestream_status_async(cls, webcast):
    # ndb tasklet-style helper (note the `yield`): queries the Livestream v2
    # API for an event's status, using HTTP Basic auth with the API key as
    # the username. Assumes `webcast` is a dict with 'channel' (account id)
    # and 'file' (event id) entries -- TODO confirm against callers.
    livestream_secrets = Sitevar.get_or_insert('livestream.secrets')
    api_key = None
    if livestream_secrets and livestream_secrets.contents:
        api_key = livestream_secrets.contents.get('api_key')
    if api_key:
        try:
            url = 'https://livestreamapis.com/v2/accounts/{}/events/{}'.format(webcast['channel'], webcast['file'])
            # Basic auth: "<api_key>:" base64-encoded; strip the trailing
            # newline that encodestring() appends.
            base64string = base64.encodestring('{}:'.format(api_key)).replace('\n','')
            headers = {
                'Authorization': 'Basic {}'.format(base64string)
            }
            rpc = urlfetch.create_rpc()
            result = yield urlfetch.make_fetch_call(rpc, url, headers=headers)
        except Exception, e:  # Python 2 syntax; `e` is unused here
            logging.error("URLFetch failed for: {}".format(url))
            # Bail out of the tasklet with no status on any fetch failure.
            raise ndb.Return(None)
    # NOTE(review): `result` is never read in this visible span -- the original
    # function most likely continues past this chunk to parse the response.
def post(self):
    """Dispatch gameday admin form actions (add/delete webcast or alias)."""
    self._require_admin()
    gd_sitevar = Sitevar.get_or_insert("gameday.special_webcasts")
    action = self.request.get("action")
    item = self.request.get("item")
    # (action, item) -> handler; unknown combinations fall through to the redirect.
    handlers = {
        ("add", "webcast"): self.add_special_webcast,
        ("delete", "webcast"): self.delete_special_webcast,
        ("add", "alias"): self.add_alias,
        ("delete", "alias"): self.delete_alias,
    }
    handler = handlers.get((action, item))
    if handler:
        handler(gd_sitevar)
    self.redirect("/admin/gameday")
    return
def post(self, event_key):
    """Toggle per-event entries in the 'cmp_registration_hacks' sitevar.

    Each checkbox on the form either adds the event key to, or removes it
    from, one of the hack lists; the name override is keyed on the event.
    """
    self._require_admin()
    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)
    reg_sitevar = Sitevar.get_or_insert("cmp_registration_hacks", values_json="{}")

    def toggled(form_field, config_key):
        # Membership of event_key in the list under config_key follows the checkbox.
        entries = reg_sitevar.contents.get(config_key, [])
        if self.request.get(form_field):
            if event_key not in entries:
                entries.append(event_key)
            return entries
        return [e for e in entries if e != event_key]

    new_divisions_to_skip = toggled("event_sync_disable", "divisions_to_skip")
    new_start_day_to_last = toggled("set_start_day_to_last", "set_start_to_last_day")
    new_skip_eventteams = toggled("skip_eventteams", "skip_eventteams")

    new_name_overrides = reg_sitevar.contents.get("event_name_override", [])
    override_name = self.request.get("event_name_override")
    if override_name:
        if not any(o["event"] == event_key for o in new_name_overrides):
            new_name_overrides.append({"event": event_key, "name": override_name})
    else:
        new_name_overrides = [o for o in new_name_overrides if o["event"] != event_key]

    reg_sitevar.contents = {
        "divisions_to_skip": new_divisions_to_skip,
        "set_start_to_last_day": new_start_day_to_last,
        "skip_eventteams": new_skip_eventteams,
        "event_name_override": new_name_overrides,
    }
    reg_sitevar.put()
    self.redirect("/admin/event/{}".format(event_key))
def _render(self):
    """Store web deploy metadata from the request body under the 'web' key
    of the 'apistatus' sitevar."""
    data = json.loads(self.request.body)
    # Payload field names match the stored keys one-to-one.
    web_info = {}
    for field in ('current_commit', 'commit_time', 'deploy_time', 'travis_job'):
        web_info[field] = data.get(field, '')
    status_sitevar = Sitevar.get_or_insert('apistatus', values_json='{}')
    contents = status_sitevar.contents
    contents['web'] = web_info
    status_sitevar.contents = contents
    status_sitevar.put()
def post(self):
    """Save the mobile app version status submitted from the admin form."""
    self._require_admin()
    sitevar = Sitevar.get_or_insert('apistatus')
    old_value = sitevar.contents
    # Assemble the full payload in one literal rather than key-by-key.
    status = {
        'max_season': int(self.request.get('max_year')),
        'android': {
            'latest_app_version': int(self.request.get('android_latest_version')),
            'min_app_version': int(self.request.get('android_min_version')),
        },
        'ios': {
            'latest_app_version': int(self.request.get('ios_latest_version')),
            'min_app_version': int(self.request.get('ios_min_version')),
        },
    }
    sitevar.contents = status
    sitevar.put()
    ApiStatusController.clear_cache_if_needed(old_value, status)
    self.redirect('/admin/apistatus')
def _fastpath_review(self):
    """One-click accept/reject of a robot (CAD) suggestion, driven by the
    `id` and `action` query parameters, with a Slack audit message."""
    self.verify_permissions()
    suggestion = Suggestion.get_by_id(self.request.get('id'))
    status = None
    if suggestion and suggestion.target_model == 'robot':
        if suggestion.review_state == Suggestion.REVIEW_PENDING:
            slack_message = None
            if self.request.get('action') == 'accept':
                self._process_accepted(suggestion.key.id())
                status = 'accepted'
                # reference_key[3:] strips the 'frc' prefix to get the team number.
                slack_message = "{0} ({1}) accepted the <https://grabcad.com/library/{2}|suggestion> for team <https://thebluealliance.com/team/{3}/{4}|{3} in {4}>".format(
                    self.user_bundle.account.display_name,
                    self.user_bundle.account.email,
                    suggestion.contents['foreign_key'],
                    suggestion.contents['reference_key'][3:],
                    suggestion.contents['year'])
            elif self.request.get('action') == 'reject':
                self._process_rejected(suggestion.key.id())
                status = 'rejected'
                slack_message = "{0} ({1}) rejected the <https://grabcad.com/library/{2}|suggestion> for team <https://thebluealliance.com/team/{3}/{4}|{3} in {4}>".format(
                    self.user_bundle.account.display_name,
                    self.user_bundle.account.email,
                    suggestion.contents['foreign_key'],
                    suggestion.contents['reference_key'][3:],
                    suggestion.contents['year'])
            if slack_message:
                slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
                if slack_sitevar:
                    slack_url = slack_sitevar.contents.get('tbablog', '')
                    OutgoingNotificationHelper.send_slack_alert(
                        slack_url, slack_message)
        else:
            status = 'already_reviewed'
    else:
        status = 'bad_suggestion'
    # status stays None (no redirect) when the action is neither accept nor reject.
    if status:
        self.redirect('/suggest/review?status={}'.format(status), abort=True)
def _fastpath_review(self):
    """One-click accept/reject of a robot (CAD) suggestion, driven by the
    `id` and `action` query parameters, with a Slack audit message."""
    self.verify_permissions()
    suggestion = Suggestion.get_by_id(self.request.get("id"))
    status = None
    if suggestion and suggestion.target_model == "robot":
        if suggestion.review_state == Suggestion.REVIEW_PENDING:
            slack_message = None
            if self.request.get("action") == "accept":
                self._process_accepted(suggestion.key.id())
                status = "accepted"
                # reference_key[3:] strips the 'frc' prefix to get the team number.
                slack_message = "{0} ({1}) accepted the <https://grabcad.com/library/{2}|suggestion> for team <https://thebluealliance.com/team/{3}/{4}|{3} in {4}>".format(
                    self.user_bundle.account.display_name,
                    self.user_bundle.account.email,
                    suggestion.contents["foreign_key"],
                    suggestion.contents["reference_key"][3:],
                    suggestion.contents["year"],
                )
            elif self.request.get("action") == "reject":
                self._process_rejected(suggestion.key.id())
                status = "rejected"
                slack_message = "{0} ({1}) rejected the <https://grabcad.com/library/{2}|suggestion> for team <https://thebluealliance.com/team/{3}/{4}|{3} in {4}>".format(
                    self.user_bundle.account.display_name,
                    self.user_bundle.account.email,
                    suggestion.contents["foreign_key"],
                    suggestion.contents["reference_key"][3:],
                    suggestion.contents["year"],
                )
            if slack_message:
                slack_sitevar = Sitevar.get_or_insert("slack.hookurls")
                if slack_sitevar:
                    slack_url = slack_sitevar.contents.get("tbablog", "")
                    SuggestionNotifier.send_slack_alert(slack_url, slack_message)
        else:
            status = "already_reviewed"
    else:
        status = "bad_suggestion"
    # status stays None (no redirect) when the action is neither accept nor reject.
    if status:
        self.redirect("/suggest/review?status={}".format(status), abort=True)
def _fastpath_review(self):
    """One-click accept/reject of a robot (CAD) suggestion, driven by the
    `id` and `action` query parameters, with a Slack audit message."""
    self.verify_permissions()
    suggestion = Suggestion.get_by_id(self.request.get('id'))
    status = None
    # Guard clauses: invalid target, then already-handled, then process.
    if not suggestion or suggestion.target_model != 'robot':
        status = 'bad_suggestion'
    elif suggestion.review_state != Suggestion.REVIEW_PENDING:
        status = 'already_reviewed'
    else:
        action = self.request.get('action')
        if action == 'accept':
            self._process_accepted(suggestion.key.id())
            status = 'accepted'
        elif action == 'reject':
            self._process_rejected(suggestion.key.id())
            status = 'rejected'
        if status in ('accepted', 'rejected'):
            # The accepted/rejected messages differ only in the verb, which
            # happens to equal `status`.
            slack_message = (
                "{0} ({1}) " + status + " the <https://grabcad.com/library/{2}|suggestion> for team <https://thebluealliance.com/team/{3}/{4}|{3} in {4}>"
            ).format(
                self.user_bundle.account.display_name,
                self.user_bundle.account.email,
                suggestion.contents['foreign_key'],
                suggestion.contents['reference_key'][3:],
                suggestion.contents['year']
            )
            slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
            if slack_sitevar:
                slack_url = slack_sitevar.contents.get('tbablog', '')
                OutgoingNotificationHelper.send_slack_alert(slack_url, slack_message)
    # status stays None (no redirect) when the action is neither accept nor reject.
    if status:
        self.redirect('/suggest/review?status={}'.format(status), abort=True)
def post(self):
    """Rewrite the 'landing_config' sitevar from the admin form: the landing
    type, every existing prop's submitted value, and an optional new prop."""
    self._require_admin()
    config = Sitevar.get_or_insert('landing_config')
    props = config.contents if config else {}
    new_props = {
        'current_landing': int(self.request.get('landing_type', LandingType.BUILDSEASON)),
    }
    # Carry over every existing prop with its (possibly edited) form value.
    for key in props:
        if key != 'current_landing':
            new_props[key] = self.request.get('prop_{}'.format(key), '')
    # Optionally add one brand-new prop.
    new_key = self.request.get('new_key', '')
    new_val = self.request.get('new_value', '')
    if new_key:
        new_props[new_key] = new_val
    config.contents = new_props
    config.put()
    self.redirect('/admin/main_landing')
def _render(self):
    """Record web deploy/build metadata from a JSON request body.

    Copies the known deploy-info fields into the 'apistatus' sitevar
    under the 'web' key, logging what was read.
    """
    payload = json.loads(self.request.body)

    # Pull each known field from the payload, defaulting to ''.
    web_info = {
        field: payload.get(field, '')
        for field in ('current_commit', 'commit_time', 'build_time',
                      'deploy_time', 'travis_job')
    }
    logging.info("READ: {}".format(json.dumps(web_info)))

    status_sitevar = Sitevar.get_or_insert('apistatus', values_json='{}')
    contents = status_sitevar.contents
    contents['web'] = web_info
    status_sitevar.contents = contents
    status_sitevar.put()
def _add_twitch_status_async(cls, webcast):
    """Query the Twitch Helix API for the live status of *webcast*.

    Presumably decorated as an ndb tasklet (it yields urlfetch RPCs and
    raises ndb.Return) -- the decorator is outside this view; confirm.
    Credentials come from the 'twitch.secrets' sitevar.
    """
    twitch_secrets = Sitevar.get_or_insert('twitch.secrets')
    client_id = None
    if twitch_secrets and twitch_secrets.contents:
        client_id = twitch_secrets.contents.get('client_id')
        client_secret = twitch_secrets.contents.get('client_secret')
    # Short-circuit means client_secret is never read when client_id is None
    if client_id and client_secret:
        # Get auth token (OAuth2 client-credentials flow)
        try:
            url = 'https://id.twitch.tv/oauth2/token?client_id={}&client_secret={}&grant_type=client_credentials'.format(client_id, client_secret)
            rpc = urlfetch.create_rpc()
            result = yield urlfetch.make_fetch_call(rpc, url, method='POST')
        except Exception, e:  # Python 2 except syntax
            logging.error("URLFetch failed for: {}".format(url))
            logging.error(e)
            raise ndb.Return(None)

        if result.status_code == 200:
            response = json.loads(result.content)
            token = response['access_token']
        else:
            logging.warning("Twitch auth failed with status code: {}".format(result.status_code))
            logging.warning(result.content)
            raise ndb.Return(None)

        # Get webcast status using the freshly-acquired bearer token
        try:
            url = 'https://api.twitch.tv/helix/streams?user_login={}'.format(webcast['channel'])
            rpc = urlfetch.create_rpc()
            result = yield urlfetch.make_fetch_call(rpc, url, headers={
                'Authorization': 'Bearer {}'.format(token),
                'Client-ID': client_id,
            })
        except Exception, e:
            logging.error("URLFetch failed for: {}".format(url))
            logging.error(e)
            raise ndb.Return(None)
        # NOTE(review): handling of this second result is not visible in
        # this chunk -- the function appears to continue past this view.
def update_bluezone(cls, live_events):
    """
    Find the current best match to watch.

    Currently favors showing something over nothing, is okay with
    switching TO a feed in the middle of a match, but avoids switching
    FROM a feed in the middle of a match.
    1. Get the earliest predicted unplayed match across all live events
    2. Get all matches that start within TIME_BUCKET of that match
    3. Switch to hottest match in that bucket unless MAX_TIME_PER_MATCH
       is hit (in which case blacklist it for the future)
    4. Repeat

    State is persisted across runs in the 'bluezone' sitevar; a run log
    is appended to cloudstorage. Returns a fake Event carrying the
    chosen webcast.
    """
    now = datetime.datetime.now()
    logging.info("[BLUEZONE] Current time: {}".format(now))
    to_log = '--------------------------------------------------\n'
    to_log += "[BLUEZONE] Current time: {}\n".format(now)

    slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
    slack_url = None
    if slack_sitevar:
        slack_url = slack_sitevar.contents.get('bluezone', '')

    # Load persisted state from the previous run.
    bluezone_config = Sitevar.get_or_insert('bluezone')
    logging.info("[BLUEZONE] Config (updated {}): {}".format(
        bluezone_config.updated, bluezone_config.contents))
    to_log += "[BLUEZONE] Config (updated {}): {}\n".format(
        bluezone_config.updated, bluezone_config.contents)
    current_match_key = bluezone_config.contents.get('current_match')
    last_match_key = bluezone_config.contents.get('last_match')
    current_match_predicted_time = bluezone_config.contents.get(
        'current_match_predicted')
    if current_match_predicted_time:
        current_match_predicted_time = datetime.datetime.strptime(
            current_match_predicted_time, cls.TIME_PATTERN)
    current_match_switch_time = bluezone_config.contents.get(
        'current_match_switch_time')
    if current_match_switch_time:
        current_match_switch_time = datetime.datetime.strptime(
            current_match_switch_time, cls.TIME_PATTERN)
    else:
        current_match_switch_time = now
    # Sitevars store lists; convert to sets for membership tests.
    blacklisted_match_keys = bluezone_config.contents.get(
        'blacklisted_matches', set())
    if blacklisted_match_keys:
        blacklisted_match_keys = set(blacklisted_match_keys)
    blacklisted_event_keys = bluezone_config.contents.get(
        'blacklisted_events', set())
    if blacklisted_event_keys:
        blacklisted_event_keys = set(blacklisted_event_keys)

    current_match = Match.get_by_id(
        current_match_key) if current_match_key else None
    last_match = Match.get_by_id(
        last_match_key) if last_match_key else None

    logging.info("[BLUEZONE] live_events: {}".format(
        [le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] live_events: {}\n".format(
        [le.key.id() for le in live_events])
    # NOTE: Python 2 filter() returns a list; this code relies on that.
    live_events = filter(lambda e: e.webcast_status != 'offline', live_events)
    for event in live_events:
        # Fetch all matches and details asynchronously
        event.prep_matches()
        event.prep_details()
    logging.info("[BLUEZONE] Online live_events: {}".format(
        [le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] Online live_events: {}\n".format(
        [le.key.id() for le in live_events])

    upcoming_matches = cls.get_upcoming_matches(live_events)
    upcoming_matches = filter(lambda m: m.predicted_time is not None, upcoming_matches)
    upcoming_predictions = cls.get_upcoming_match_predictions(live_events)

    # (1, 2) Find earliest predicted unplayed match and all other matches
    # that start within TIME_BUCKET of that match
    upcoming_matches.sort(key=lambda match: match.predicted_time)
    potential_matches = []
    time_cutoff = None
    logging.info(
        "[BLUEZONE] all upcoming matches sorted by predicted time: {}".format(
            [um.key.id() for um in upcoming_matches]))
    to_log += "[BLUEZONE] all upcoming sorted by predicted time: {}\n".format(
        [um.key.id() for um in upcoming_matches])
    for match in upcoming_matches:
        if match.predicted_time:
            if time_cutoff is None:
                # First (earliest) match defines the bucket boundary.
                time_cutoff = match.predicted_time + cls.TIME_BUCKET
                potential_matches.append(match)
            elif match.predicted_time < time_cutoff:
                potential_matches.append(match)
            else:
                break  # Matches are sorted by predicted_time
    logging.info(
        "[BLUEZONE] potential_matches sorted by predicted time: {}".format(
            [pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by predicted time: {}\n".format(
        [pm.key.id() for pm in potential_matches])

    # (3) Choose hottest match that's not blacklisted
    cls.calculate_match_hotness(potential_matches, upcoming_predictions)
    potential_matches.sort(key=lambda match: -match.hotness)
    logging.info(
        "[BLUEZONE] potential_matches sorted by hotness: {}".format(
            [pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by hotness: {}\n".format(
        [pm.key.id() for pm in potential_matches])

    bluezone_matches = []
    new_blacklisted_match_keys = set()

    # If the current match hasn't finished yet, don't even bother
    cutoff_time = current_match_switch_time + cls.MAX_TIME_PER_MATCH
    logging.info(
        "[BLUEZONE] Current match played? {}, now = {}, cutoff = {}".format(
            current_match.has_been_played if current_match else None,
            now, cutoff_time))
    to_log += "[BLUEZONE] Current match played? {}, now = {}, cutoff = {}\n".format(
        current_match.has_been_played if current_match else None, now, cutoff_time)
    if current_match and not current_match.has_been_played and now < cutoff_time \
            and current_match_key not in blacklisted_match_keys \
            and current_match.event_key_name not in blacklisted_event_keys:
        logging.info("[BLUEZONE] Keeping current match {}".format(
            current_match.key.id()))
        to_log += "[BLUEZONE] Keeping current match {}\n".format(
            current_match.key.id())
        bluezone_matches.append(current_match)

    for match in potential_matches:
        if len(bluezone_matches) >= 2:  # one current, one future
            break
        logging.info("[BLUEZONE] Trying potential match: {}".format(
            match.key.id()))
        to_log += "[BLUEZONE] Trying potential match: {}\n".format(
            match.key.id())
        if filter(lambda m: m.key.id() == match.key.id(), bluezone_matches):
            logging.info("[BLUEZONE] Match {} already chosen".format(
                match.key.id()))
            to_log += "[BLUEZONE] Match {} already chosen\n".format(
                match.key.id())
            continue
        if match.event_key_name in blacklisted_event_keys:
            logging.info(
                "[BLUEZONE] Event {} is blacklisted, skipping...".format(
                    match.event_key_name))
            to_log += "[BLUEZONE] Event {} is blacklisted, skipping...\n".format(
                match.event_key_name)
            continue
        if match.key.id() not in blacklisted_match_keys:
            if match.key.id() == current_match_key:
                if current_match_predicted_time and cutoff_time < now and len(
                        potential_matches) > 1:
                    # We've been on this match too long
                    new_blacklisted_match_keys.add(match.key.id())
                    logging.info(
                        "[BLUEZONE] Adding match to blacklist: {}".format(
                            match.key.id()))
                    to_log += "[BLUEZONE] Adding match to blacklist: {}\n".format(
                        match.key.id())
                    logging.info(
                        "[BLUEZONE] scheduled time: {}, now: {}".format(
                            current_match_predicted_time, now))
                    to_log += "[BLUEZONE] scheduled time: {}, now: {}\n".format(
                        current_match_predicted_time, now)
                    OutgoingNotificationHelper.send_slack_alert(
                        slack_url,
                        "Blacklisting match {}. Predicted time: {}, now: {}"
                        .format(match.key.id(), current_match_predicted_time, now))
                else:
                    # We can continue to use this match
                    bluezone_matches.append(match)
                    logging.info(
                        "[BLUEZONE] Continuing to use match: {}".format(
                            match.key.id()))
                    to_log += "[BLUEZONE] Continuing to use match: {}\n".format(
                        match.key.id())
            else:
                # Found a new good match
                bluezone_matches.append(match)
                logging.info(
                    "[BLUEZONE] Found a good new match: {}".format(
                        match.key.id()))
                to_log += "[BLUEZONE] Found a good new match: {}\n".format(
                    match.key.id())
        else:
            logging.info("[BLUEZONE] Match already blacklisted: {}".format(
                match.key.id()))
            to_log += "[BLUEZONE] Match already blacklisted: {}\n".format(
                match.key.id())
            new_blacklisted_match_keys.add(match.key.id())

    if not bluezone_matches:
        logging.info("[BLUEZONE] No match selected")
        to_log += "[BLUEZONE] No match selected\n"

    logging.info("[BLUEZONE] All selected matches: {}".format(
        [m.key.id() for m in bluezone_matches]))
    to_log += "[BLUEZONE] All selected matches: {}\n".format(
        [m.key.id() for m in bluezone_matches])

    # (3) Switch to hottest match
    fake_event = cls.build_fake_event()
    if bluezone_matches:
        bluezone_match = bluezone_matches[0]
        real_event = filter(
            lambda x: x.key_name == bluezone_match.event_key_name,
            live_events)[0]
        # Create Fake event for return
        fake_event.webcast_json = json.dumps(
            [real_event.current_webcasts[0]])

        if bluezone_match.key_name != current_match_key:
            current_match_switch_time = now
            logging.info("[BLUEZONE] Switching to: {}".format(
                bluezone_match.key.id()))
            to_log += "[BLUEZONE] Switching to: {}\n".format(
                bluezone_match.key.id())
            OutgoingNotificationHelper.send_slack_alert(
                slack_url,
                "It is now {}. Switching BlueZone to {}, scheduled for {} and predicted to be at {}."
                .format(now, bluezone_match.key.id(), bluezone_match.time, bluezone_match.predicted_time))
            # Remember the match we just left, unless it never finished.
            if not current_match or current_match.has_been_played:
                last_match = current_match

        # Only need to update if things changed
        if bluezone_match.key_name != current_match_key or new_blacklisted_match_keys != blacklisted_match_keys:
            FirebasePusher.update_event(fake_event)
            bluezone_config.contents = {
                'current_match': bluezone_match.key.id(),
                'last_match': last_match.key.id() if last_match else '',
                'current_match_predicted': bluezone_match.predicted_time.strftime(cls.TIME_PATTERN),
                'blacklisted_matches': list(new_blacklisted_match_keys),
                'blacklisted_events': list(blacklisted_event_keys),
                'current_match_switch_time': current_match_switch_time.strftime(cls.TIME_PATTERN),
            }
            bluezone_config.put()

    # Log to cloudstorage (read-modify-write: GCS objects are immutable,
    # so append by rewriting the whole day's file)
    log_dir = '/tbatv-prod-hrd.appspot.com/tba-logging/bluezone/'
    log_file = 'bluezone_{}.txt'.format(now.date())
    full_path = log_dir + log_file
    existing_contents = ''
    if full_path in set(
            [f.filename for f in cloudstorage.listbucket(log_dir)]):
        with cloudstorage.open(full_path, 'r') as existing_file:
            existing_contents = existing_file.read()
    with cloudstorage.open(full_path, 'w') as new_file:
        new_file.write(existing_contents + to_log)

    bluezone_matches.insert(0, last_match)
    bluezone_matches = filter(lambda m: m is not None, bluezone_matches)
    FirebasePusher.replace_event_matches('bluezone', bluezone_matches)

    return fake_event
def update_bluezone(cls, live_events):
    """
    Find the current best match to watch.

    Currently favors showing something over nothing, is okay with
    switching TO a feed in the middle of a match, but avoids switching
    FROM a feed in the middle of a match.
    1. Get the earliest predicted unplayed match across all live events
    2. Get all matches that start within TIME_BUCKET of that match
    3. Switch to hottest match in that bucket unless MAX_TIME_PER_MATCH
       is hit (in which case blacklist it for the future)
    4. Repeat

    State persists across runs in the 'bluezone' sitevar; a run log is
    appended to cloudstorage. Returns a fake Event carrying the chosen
    webcast. NOTE(review): this is a near-duplicate of another copy of
    update_bluezone in this file differing only in the cloudstorage
    log directory.
    """
    now = datetime.datetime.now()
    logging.info("[BLUEZONE] Current time: {}".format(now))
    to_log = '--------------------------------------------------\n'
    to_log += "[BLUEZONE] Current time: {}\n".format(now)

    slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
    slack_url = None
    if slack_sitevar:
        slack_url = slack_sitevar.contents.get('bluezone', '')

    # Load persisted state from the previous run.
    bluezone_config = Sitevar.get_or_insert('bluezone')
    logging.info("[BLUEZONE] Config (updated {}): {}".format(bluezone_config.updated, bluezone_config.contents))
    to_log += "[BLUEZONE] Config (updated {}): {}\n".format(bluezone_config.updated, bluezone_config.contents)
    current_match_key = bluezone_config.contents.get('current_match')
    last_match_key = bluezone_config.contents.get('last_match')
    current_match_predicted_time = bluezone_config.contents.get('current_match_predicted')
    if current_match_predicted_time:
        current_match_predicted_time = datetime.datetime.strptime(current_match_predicted_time, cls.TIME_PATTERN)
    current_match_switch_time = bluezone_config.contents.get('current_match_switch_time')
    if current_match_switch_time:
        current_match_switch_time = datetime.datetime.strptime(current_match_switch_time, cls.TIME_PATTERN)
    else:
        current_match_switch_time = now
    # Sitevars store lists; convert to sets for membership tests.
    blacklisted_match_keys = bluezone_config.contents.get('blacklisted_matches', set())
    if blacklisted_match_keys:
        blacklisted_match_keys = set(blacklisted_match_keys)
    blacklisted_event_keys = bluezone_config.contents.get('blacklisted_events', set())
    if blacklisted_event_keys:
        blacklisted_event_keys = set(blacklisted_event_keys)

    current_match = Match.get_by_id(current_match_key) if current_match_key else None
    last_match = Match.get_by_id(last_match_key) if last_match_key else None

    logging.info("[BLUEZONE] live_events: {}".format([le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] live_events: {}\n".format([le.key.id() for le in live_events])
    # NOTE: Python 2 filter() returns a list; this code relies on that.
    live_events = filter(lambda e: e.webcast_status != 'offline', live_events)
    for event in live_events:
        # Fetch all matches and details asynchronously
        event.prep_matches()
        event.prep_details()
    logging.info("[BLUEZONE] Online live_events: {}".format([le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] Online live_events: {}\n".format([le.key.id() for le in live_events])

    upcoming_matches = cls.get_upcoming_matches(live_events)
    upcoming_matches = filter(lambda m: m.predicted_time is not None, upcoming_matches)
    upcoming_predictions = cls.get_upcoming_match_predictions(live_events)

    # (1, 2) Find earliest predicted unplayed match and all other matches
    # that start within TIME_BUCKET of that match
    upcoming_matches.sort(key=lambda match: match.predicted_time)
    potential_matches = []
    time_cutoff = None
    logging.info("[BLUEZONE] all upcoming matches sorted by predicted time: {}".format([um.key.id() for um in upcoming_matches]))
    to_log += "[BLUEZONE] all upcoming sorted by predicted time: {}\n".format([um.key.id() for um in upcoming_matches])
    for match in upcoming_matches:
        if match.predicted_time:
            if time_cutoff is None:
                # First (earliest) match defines the bucket boundary.
                time_cutoff = match.predicted_time + cls.TIME_BUCKET
                potential_matches.append(match)
            elif match.predicted_time < time_cutoff:
                potential_matches.append(match)
            else:
                break  # Matches are sorted by predicted_time
    logging.info("[BLUEZONE] potential_matches sorted by predicted time: {}".format([pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by predicted time: {}\n".format([pm.key.id() for pm in potential_matches])

    # (3) Choose hottest match that's not blacklisted
    cls.calculate_match_hotness(potential_matches, upcoming_predictions)
    potential_matches.sort(key=lambda match: -match.hotness)
    logging.info("[BLUEZONE] potential_matches sorted by hotness: {}".format([pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by hotness: {}\n".format([pm.key.id() for pm in potential_matches])

    bluezone_matches = []
    new_blacklisted_match_keys = set()

    # If the current match hasn't finished yet, don't even bother
    cutoff_time = current_match_switch_time + cls.MAX_TIME_PER_MATCH
    logging.info("[BLUEZONE] Current match played? {}, now = {}, cutoff = {}".format(current_match.has_been_played if current_match else None, now, cutoff_time))
    to_log += "[BLUEZONE] Current match played? {}, now = {}, cutoff = {}\n".format(current_match.has_been_played if current_match else None, now, cutoff_time)
    if current_match and not current_match.has_been_played and now < cutoff_time \
            and current_match_key not in blacklisted_match_keys \
            and current_match.event_key_name not in blacklisted_event_keys:
        logging.info("[BLUEZONE] Keeping current match {}".format(current_match.key.id()))
        to_log += "[BLUEZONE] Keeping current match {}\n".format(current_match.key.id())
        bluezone_matches.append(current_match)

    for match in potential_matches:
        if len(bluezone_matches) >= 2:  # one current, one future
            break
        logging.info("[BLUEZONE] Trying potential match: {}".format(match.key.id()))
        to_log += "[BLUEZONE] Trying potential match: {}\n".format(match.key.id())
        if filter(lambda m: m.key.id() == match.key.id(), bluezone_matches):
            logging.info("[BLUEZONE] Match {} already chosen".format(match.key.id()))
            to_log += "[BLUEZONE] Match {} already chosen\n".format(match.key.id())
            continue
        if match.event_key_name in blacklisted_event_keys:
            logging.info("[BLUEZONE] Event {} is blacklisted, skipping...".format(match.event_key_name))
            to_log += "[BLUEZONE] Event {} is blacklisted, skipping...\n".format(match.event_key_name)
            continue
        if match.key.id() not in blacklisted_match_keys:
            if match.key.id() == current_match_key:
                if current_match_predicted_time and cutoff_time < now and len(potential_matches) > 1:
                    # We've been on this match too long
                    new_blacklisted_match_keys.add(match.key.id())
                    logging.info("[BLUEZONE] Adding match to blacklist: {}".format(match.key.id()))
                    to_log += "[BLUEZONE] Adding match to blacklist: {}\n".format(match.key.id())
                    logging.info("[BLUEZONE] scheduled time: {}, now: {}".format(current_match_predicted_time, now))
                    to_log += "[BLUEZONE] scheduled time: {}, now: {}\n".format(current_match_predicted_time, now)
                    OutgoingNotificationHelper.send_slack_alert(slack_url, "Blacklisting match {}. Predicted time: {}, now: {}".format(match.key.id(), current_match_predicted_time, now))
                else:
                    # We can continue to use this match
                    bluezone_matches.append(match)
                    logging.info("[BLUEZONE] Continuing to use match: {}".format(match.key.id()))
                    to_log += "[BLUEZONE] Continuing to use match: {}\n".format(match.key.id())
            else:
                # Found a new good match
                bluezone_matches.append(match)
                logging.info("[BLUEZONE] Found a good new match: {}".format(match.key.id()))
                to_log += "[BLUEZONE] Found a good new match: {}\n".format(match.key.id())
        else:
            logging.info("[BLUEZONE] Match already blacklisted: {}".format(match.key.id()))
            to_log += "[BLUEZONE] Match already blacklisted: {}\n".format(match.key.id())
            new_blacklisted_match_keys.add(match.key.id())

    if not bluezone_matches:
        logging.info("[BLUEZONE] No match selected")
        to_log += "[BLUEZONE] No match selected\n"

    logging.info("[BLUEZONE] All selected matches: {}".format([m.key.id() for m in bluezone_matches]))
    to_log += "[BLUEZONE] All selected matches: {}\n".format([m.key.id() for m in bluezone_matches])

    # (3) Switch to hottest match
    fake_event = cls.build_fake_event()
    if bluezone_matches:
        bluezone_match = bluezone_matches[0]
        real_event = filter(lambda x: x.key_name == bluezone_match.event_key_name, live_events)[0]
        # Create Fake event for return
        fake_event.webcast_json = json.dumps([real_event.current_webcasts[0]])

        if bluezone_match.key_name != current_match_key:
            current_match_switch_time = now
            logging.info("[BLUEZONE] Switching to: {}".format(bluezone_match.key.id()))
            to_log += "[BLUEZONE] Switching to: {}\n".format(bluezone_match.key.id())
            OutgoingNotificationHelper.send_slack_alert(slack_url, "It is now {}. Switching BlueZone to {}, scheduled for {} and predicted to be at {}.".format(now, bluezone_match.key.id(), bluezone_match.time, bluezone_match.predicted_time))
            # Remember the match we just left, unless it never finished.
            if not current_match or current_match.has_been_played:
                last_match = current_match

        # Only need to update if things changed
        if bluezone_match.key_name != current_match_key or new_blacklisted_match_keys != blacklisted_match_keys:
            FirebasePusher.update_event(fake_event)
            bluezone_config.contents = {
                'current_match': bluezone_match.key.id(),
                'last_match': last_match.key.id() if last_match else '',
                'current_match_predicted': bluezone_match.predicted_time.strftime(cls.TIME_PATTERN),
                'blacklisted_matches': list(new_blacklisted_match_keys),
                'blacklisted_events': list(blacklisted_event_keys),
                'current_match_switch_time': current_match_switch_time.strftime(cls.TIME_PATTERN),
            }
            bluezone_config.put()

    # Log to cloudstorage (read-modify-write: GCS objects are immutable,
    # so append by rewriting the whole day's file)
    log_dir = '/tbatv-prod-hrd.appspot.com/tba-logging/'
    log_file = 'bluezone_{}.txt'.format(now.date())
    full_path = log_dir + log_file
    existing_contents = ''
    if full_path in set([f.filename for f in cloudstorage.listbucket(log_dir)]):
        with cloudstorage.open(full_path, 'r') as existing_file:
            existing_contents = existing_file.read()
    with cloudstorage.open(full_path, 'w') as new_file:
        new_file.write(existing_contents + to_log)

    bluezone_matches.insert(0, last_match)
    bluezone_matches = filter(lambda m: m is not None, bluezone_matches)
    FirebasePusher.replace_event_matches('bluezone', bluezone_matches)

    return fake_event
def parse(self, response):
    """Parse an FRC Events API event-list response into TBA models.

    Returns (events, districts): a list of Event models and a list of
    District models referenced by those events. Championship-related
    quirks are patched via the 'cmp_registration_hacks' sitevar, and
    division <-> parent-event associations are wired up before return.
    """
    events = []
    districts = {}

    # Manual championship-registration workarounds, configured via sitevar.
    cmp_hack_sitevar = Sitevar.get_or_insert('cmp_registration_hacks')
    divisions_to_skip = cmp_hack_sitevar.contents.get('divisions_to_skip', []) \
        if cmp_hack_sitevar else []
    event_name_override = cmp_hack_sitevar.contents.get('event_name_override', []) \
        if cmp_hack_sitevar else []
    events_to_change_dates = cmp_hack_sitevar.contents.get('set_start_to_last_day', []) \
        if cmp_hack_sitevar else []

    for event in response['Events']:
        code = event['code'].lower()
        api_event_type = event['type'].lower()
        event_type = EventType.PRESEASON if code == 'week0' else self.EVENT_TYPES.get(api_event_type, None)
        if event_type is None and not self.event_short:
            logging.warn("Event type '{}' not recognized!".format(api_event_type))
            continue

        # Some event types should be marked as unofficial, so sync is disabled
        official = True
        if api_event_type in self.NON_OFFICIAL_EVENT_TYPES:
            official = False

        name = event['name']
        short_name = EventHelper.getShortName(name, district_code=event['districtCode'])
        district_enum = EventHelper.parseDistrictName(event['districtCode'].lower()) if event['districtCode'] else DistrictType.NO_DISTRICT
        district_key = District.renderKeyName(self.season, event['districtCode'].lower()) if event['districtCode'] else None
        address = event.get('address')
        venue = event['venue']
        city = event['city']
        state_prov = event['stateprov']
        country = event['country']
        start = datetime.datetime.strptime(event['dateStart'], self.DATE_FORMAT_STR)
        end = datetime.datetime.strptime(event['dateEnd'], self.DATE_FORMAT_STR)
        website = event.get('website')
        webcasts = [WebcastParser.webcast_dict_from_url(url) for url in event.get('webcasts', [])]

        # TODO read timezone from API

        # Special cases for district championship divisions:
        # rebuild short_name as '<initials> - <division>'
        if event_type == EventType.DISTRICT_CMP_DIVISION:
            split_name = name.split('-')
            short_name = '{} - {}'.format(
                ''.join(item[0].upper() for item in split_name[0].split()),
                split_name[-1].replace('Division', '').strip())

        # Special cases for champs
        if code in self.EVENT_CODE_EXCEPTIONS:
            code, short_name = self.EVENT_CODE_EXCEPTIONS[code]

            # FIRST indicates CMP registration before divisions are assigned by adding all teams
            # to Einstein. We will hack around that by not storing divisions and renaming
            # Einstein to simply "Championship" when certain sitevar flags are set
            if code in self.EINSTEIN_CODES:
                override = [item for item in event_name_override if item['event'] == "{}{}".format(self.season, code)]
                if override:
                    name = short_name.format(override[0]['name'])
                    short_name = short_name.format(override[0]['short_name'])
            else:  # Divisions
                name = '{} Division'.format(short_name)
        elif self.event_short:
            code = self.event_short

        event_key = "{}{}".format(self.season, code)
        if event_key in divisions_to_skip:
            continue

        # Allow an overriding the start date to be the beginning of the last day
        if event_key in events_to_change_dates:
            start = end.replace(hour=0, minute=0, second=0, microsecond=0)

        events.append(Event(
            id=event_key,
            name=name,
            short_name=short_name,
            event_short=code,
            event_type_enum=event_type,
            official=official,
            start_date=start,
            end_date=end,
            venue=venue,
            city=city,
            state_prov=state_prov,
            country=country,
            venue_address=address,
            year=self.season,
            event_district_enum=district_enum,
            district_key=ndb.Key(District, district_key) if district_key else None,
            website=website,
            webcast_json=json.dumps(webcasts) if webcasts else None,
        ))

        # Build District Model
        if district_key and district_key not in districts:
            districts[district_key] = District(
                id=district_key,
                year=self.season,
                abbreviation=event['districtCode'].lower(),
            )

    # Prep for division <-> parent associations
    district_champs_by_district = {}
    champ_events = []
    for event in events:
        if event.event_type_enum == EventType.DISTRICT_CMP:
            district_champs_by_district[event.district_key] = event
        elif event.event_type_enum == EventType.CMP_FINALS:
            champ_events.append(event)

    # Build district cmp division <-> parent associations based on district
    # Build cmp division <-> parent associations based on date
    for event in events:
        parent_event = None
        if event.event_type_enum == EventType.DISTRICT_CMP_DIVISION:
            parent_event = district_champs_by_district.get(event.district_key)
        elif event.event_type_enum == EventType.CMP_DIVISION:
            # Match a CMP division to the champ whose end date is within a day
            for parent_event in champ_events:
                if abs(parent_event.end_date - event.end_date) < datetime.timedelta(days=1):
                    break
            else:
                parent_event = None
        else:
            continue
        if parent_event is None:
            continue
        parent_event.divisions = sorted(parent_event.divisions + [event.key])
        event.parent_event = parent_event.key

    return events, list(districts.values())
def is_blacklisted(website):
    """Return True when *website* appears in the 'website_blacklist' sitevar."""
    blacklist_sitevar = Sitevar.get_or_insert(
        'website_blacklist', values_json=json.dumps({'websites': []}))
    blocked_sites = blacklist_sitevar.contents.get('websites', [])
    return website in blocked_sites
from models.favorite import Favorite
from models.media import Media
from models.sitevar import Sitevar
from models.subscription import Subscription
from models.mobile_api_messages import BaseResponse, FavoriteCollection, FavoriteMessage, RegistrationRequest, \
    SubscriptionCollection, SubscriptionMessage, ModelPreferenceMessage, \
    MediaSuggestionMessage, PingRequest
from models.mobile_client import MobileClient
from models.suggestion import Suggestion

# OAuth client IDs used to validate tokens from the mobile/web clients.
# Populated from the 'mobile.clientIds' sitevar at module load time.
WEB_CLIENT_ID = ""
ANDROID_AUDIENCE = ""
ANDROID_CLIENT_ID = ""
IOS_CLIENT_ID = ""

client_ids_sitevar = Sitevar.get_or_insert('mobile.clientIds')
if isinstance(client_ids_sitevar.contents, dict):
    WEB_CLIENT_ID = client_ids_sitevar.contents.get("web", "")
    ANDROID_AUDIENCE = client_ids_sitevar.contents.get("android-audience", "")
    ANDROID_CLIENT_ID = client_ids_sitevar.contents.get("android", "")
    IOS_CLIENT_ID = client_ids_sitevar.contents.get("ios", "")

# Warn loudly (at import time) about any unset client ID so a
# misconfigured deploy is visible in the logs.
if not WEB_CLIENT_ID:
    logging.error("Web client ID is not set, see /admin/authkeys")

if not ANDROID_CLIENT_ID:
    logging.error("Android client ID is not set, see /admin/authkeys")

if not ANDROID_AUDIENCE:
    logging.error("Android Audience is not set, see /admin/authkeys")

# Fix: IOS_CLIENT_ID was loaded above but, unlike the other three IDs,
# was never validated -- an unset iOS client ID went unnoticed.
if not IOS_CLIENT_ID:
    logging.error("iOS client ID is not set, see /admin/authkeys")
def post(self):
    """Accept a team media suggestion from a logged-in user.

    Creates the suggestion via SuggestionCreator; when it is a GrabCAD
    model, also posts a rich Slack notification (with accept/reject
    links) to the hook configured under 'tbablog' in the
    'slack.hookurls' sitevar. Redirects back to the form with the
    creation status.
    """
    self._require_registration()

    team_key = self.request.get("team_key")
    year_str = self.request.get("year")

    status, suggestion = SuggestionCreator.createTeamMediaSuggestion(
        author_account_key=self.user_bundle.account.key,
        media_url=self.request.get("media_url"),
        team_key=team_key,
        year_str=year_str)

    if status == 'success' and suggestion.contents.get(
            'media_type') == MediaType.GRABCAD:
        # Send an update to the frcdesigns slack
        slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
        if slack_sitevar:
            slack_url = slack_sitevar.contents.get('tbablog', '')
            if slack_url:
                model_details = json.loads(
                    suggestion.contents['details_json'])
                # team_key looks like 'frcXXXX'; [3:] strips 'frc'
                message_body = "{0} ({1}) has suggested a CAD model for team <https://www.thebluealliance.com/team/{2}/{3}|{2} in {3}>.".format(
                    self.user_bundle.account.display_name,
                    self.user_bundle.account.email,
                    team_key[3:],
                    year_str)
                image_attachment = {
                    "footer": "<https://www.thebluealliance.com/suggest/cad/review|See all suggestions> on The Blue Alliance",
                    "fallback": "CAD model",
                    "title": model_details['model_name'],
                    "title_link": "https://grabcad.com/library/{}".format(
                        suggestion.contents['foreign_key']),
                    # Swap GrabCAD's thumbnail for the large rendering
                    "image_url": model_details['model_image'].replace(
                        'card.jpg', 'large.png'),
                    "fields": [{
                        "title": "Accept",
                        "value": "<https://www.thebluealliance.com/suggest/cad/review?action=accept&id={}|Click Here>"
                        .format(suggestion.key.id()),
                        "short": True,
                    }, {
                        "title": "Reject",
                        "value": "<https://www.thebluealliance.com/suggest/cad/review?action=reject&id={}|Click Here>"
                        .format(suggestion.key.id()),
                        "short": True,
                    }],
                }

                OutgoingNotificationHelper.send_slack_alert(
                    slack_url, message_body, [image_attachment])

    self.redirect('/suggest/team/media?team_key=%s&year=%s&status=%s' % (team_key, year_str, status))
def parse(self, response):
    """Parse an FRC Events API event-list response into TBA models.

    Returns (events, districts). Earlier variant of the parser: no
    webcast parsing, no unofficial-event flag, no division <-> parent
    linking, and venue_address deliberately left to ElasticSearch.
    Championship quirks are patched via the 'cmp_registration_hacks'
    sitevar.
    """
    events = []
    districts = {}

    # Manual championship-registration workarounds, configured via sitevar.
    cmp_hack_sitevar = Sitevar.get_or_insert('cmp_registration_hacks')
    divisions_to_skip = cmp_hack_sitevar.contents.get('divisions_to_skip', []) \
        if cmp_hack_sitevar else []
    event_name_override = cmp_hack_sitevar.contents.get('event_name_override', []) \
        if cmp_hack_sitevar else []
    events_to_change_dates = cmp_hack_sitevar.contents.get('set_start_to_last_day', []) \
        if cmp_hack_sitevar else []

    for event in response['Events']:
        code = event['code'].lower()
        event_type = EventType.PRESEASON if code == 'week0' else self.EVENT_TYPES.get(event['type'].lower(), None)
        if event_type is None and not self.event_short:
            logging.warn("Event type '{}' not recognized!".format(event['type']))
            continue
        name = event['name']
        short_name = EventHelper.getShortName(name, district_code=event['districtCode'])
        district_enum = EventHelper.parseDistrictName(event['districtCode'].lower()) if event['districtCode'] else DistrictType.NO_DISTRICT
        district_key = District.renderKeyName(self.season, event['districtCode'].lower()) if event['districtCode'] else None
        venue = event['venue']
        city = event['city']
        state_prov = event['stateprov']
        country = event['country']
        start = datetime.datetime.strptime(event['dateStart'], self.DATE_FORMAT_STR)
        end = datetime.datetime.strptime(event['dateEnd'], self.DATE_FORMAT_STR)
        website = event.get('website')

        # TODO read timezone from API

        # Special cases for district championship divisions:
        # rebuild short_name as '<initials> - <division>'
        if event_type == EventType.DISTRICT_CMP_DIVISION:
            split_name = name.split('-')
            short_name = '{} - {}'.format(
                ''.join(item[0].upper() for item in split_name[0].split()),
                split_name[-1].replace('Division', '').strip())

        # Special cases for champs
        if code in self.EVENT_CODE_EXCEPTIONS:
            code, short_name = self.EVENT_CODE_EXCEPTIONS[code]

            # FIRST indicates CMP registration before divisions are assigned by adding all teams
            # to Einstein. We will hack around that by not storing divisions and renaming
            # Einstein to simply "Championship" when certain sitevar flags are set
            if code in self.EINSTEIN_CODES:
                override = [item for item in event_name_override if item['event'] == "{}{}".format(self.season, code)]
                if override:
                    name = short_name.format(override[0]['name'])
                    short_name = short_name.format(override[0]['short_name'])
            else:  # Divisions
                name = '{} Division'.format(short_name)
        elif self.event_short:
            code = self.event_short

        event_key = "{}{}".format(self.season, code)
        if event_key in divisions_to_skip:
            continue

        # Allow an overriding the start date to be the beginning of the last day
        if event_key in events_to_change_dates:
            start = end.replace(hour=0, minute=0, second=0, microsecond=0)

        events.append(Event(
            id=event_key,
            name=name,
            short_name=short_name,
            event_short=code,
            event_type_enum=event_type,
            official=True,
            start_date=start,
            end_date=end,
            venue=venue,
            city=city,
            state_prov=state_prov,
            country=country,
            venue_address=None,  # Even though FRC API provides address, ElasticSearch is more detailed
            year=self.season,
            event_district_enum=district_enum,
            district_key=ndb.Key(District, district_key) if district_key else None,
            website=website,
        ))

        # Build District Model
        if district_key and district_key not in districts:
            districts[district_key] = District(
                id=district_key,
                year=self.season,
                abbreviation=event['districtCode'].lower(),
            )

    return events, list(districts.values())
def get(self, year):
    """Fetch and store the event list for `year`, then enqueue per-event detail fetches.

    Pulls events from the FRC Events API (and optionally ElasticSearch, gated
    by the 'event_list_datafeed_config' sitevar), de-duplicates offseason
    events against existing TBA events, stores events and districts, and adds
    a taskqueue task to fetch details for each stored event.
    """
    df_config = Sitevar.get_or_insert('event_list_datafeed_config')
    df = DatafeedFMSAPI('v2.0')
    df2 = DatafeedFIRSTElasticSearch()

    fmsapi_events, event_list_districts = df.getEventList(year)
    # ElasticSearch fetch is behind a sitevar flag.
    if df_config.contents.get('enable_es') == True:
        elasticsearch_events = df2.getEventList(year)
    else:
        elasticsearch_events = []

    # All regular-season events can be inserted without any work involved.
    # We need to de-duplicate offseason events from the FRC Events API with a
    # different code than the TBA event code.
    fmsapi_events_offseason = [e for e in fmsapi_events if e.is_offseason]
    event_keys_to_put = set([e.key_name for e in fmsapi_events]) - set(
        [e.key_name for e in fmsapi_events_offseason])
    events_to_put = [
        e for e in fmsapi_events if e.key_name in event_keys_to_put
    ]

    matched_offseason_events, new_offseason_events = \
        OffseasonEventHelper.categorize_offseasons(int(year), fmsapi_events_offseason)

    # For all matched offseason events, make sure the FIRST code matches the TBA FIRST code
    for tba_event, first_event in matched_offseason_events:
        tba_event.first_code = first_event.event_short
        events_to_put.append(tba_event)  # Update TBA events - discard the FIRST event

    # For all new offseason events we can't automatically match, create suggestions
    SuggestionCreator.createDummyOffseasonSuggestions(new_offseason_events)

    merged_events = EventManipulator.mergeModels(
        list(events_to_put), elasticsearch_events
    ) if elasticsearch_events else list(events_to_put)
    events = EventManipulator.createOrUpdate(merged_events) or []

    fmsapi_districts = df.getDistrictList(year)
    merged_districts = DistrictManipulator.mergeModels(fmsapi_districts, event_list_districts)
    if merged_districts:
        districts = DistrictManipulator.createOrUpdate(merged_districts)
    else:
        districts = []

    # Fetch event details for each event
    for event in events:
        taskqueue.add(queue_name='datafeed',
                      target='backend-tasks',
                      url='/backend-tasks/get/event_details/' + event.key_name,
                      method='GET')

    template_values = {
        "events": events,
        "districts": districts,
    }

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        path = os.path.join(
            os.path.dirname(__file__),
            '../templates/datafeeds/fms_event_list_get.html')
        self.response.out.write(template.render(path, template_values))
def get(self, event_key):
    """Refresh one event's details, teams, district teams, robots, and EventTeams.

    Merges FRC Events API data with ElasticSearch data, writes the resulting
    models, and deletes EventTeams for teams no longer registered.
    """
    df = DatafeedFMSAPI('v2.0')
    df2 = DatafeedFIRSTElasticSearch()

    event = Event.get_by_id(event_key)

    # Update event
    fmsapi_events, fmsapi_districts = df.getEventDetails(event_key)
    elasticsearch_events = df2.getEventDetails(event)
    updated_event = EventManipulator.mergeModels(
        fmsapi_events,
        elasticsearch_events)
    if updated_event:
        event = EventManipulator.createOrUpdate(updated_event)
    DistrictManipulator.createOrUpdate(fmsapi_districts)

    models = df.getEventTeams(event_key)
    teams = []
    district_teams = []
    robots = []
    for group in models:
        # models is a list of tuples (team, districtTeam, robot)
        if isinstance(group[0], Team):
            teams.append(group[0])
        if isinstance(group[1], DistrictTeam):
            district_teams.append(group[1])
        if isinstance(group[2], Robot):
            robots.append(group[2])

    # Merge teams
    teams = TeamManipulator.mergeModels(teams, df2.getEventTeams(event))

    # Write new models
    if teams:
        teams = TeamManipulator.createOrUpdate(teams)
    district_teams = DistrictTeamManipulator.createOrUpdate(district_teams)
    robots = RobotManipulator.createOrUpdate(robots)

    if not teams:  # No teams found registered for this event
        teams = []
    # createOrUpdate may return a single model instead of a list; normalize.
    if type(teams) is not list:
        teams = [teams]

    # Build EventTeams, unless this event is flagged in the
    # 'cmp_registration_hacks' sitevar to skip them.
    cmp_hack_sitevar = Sitevar.get_or_insert('cmp_registration_hacks')
    events_without_eventteams = cmp_hack_sitevar.contents.get('skip_eventteams', []) \
        if cmp_hack_sitevar else []
    skip_eventteams = event_key in events_without_eventteams
    event_teams = [EventTeam(
        id=event.key_name + "_" + team.key_name,
        event=event.key,
        team=team.key,
        year=event.year)
        for team in teams] if not skip_eventteams else []

    # Delete eventteams of teams that are no longer registered.
    # Also runs when skipping eventteams, which clears out all existing ones.
    if event_teams != [] or skip_eventteams:
        existing_event_team_keys = set(EventTeam.query(EventTeam.event == event.key).fetch(1000, keys_only=True))
        event_team_keys = set([et.key for et in event_teams])
        et_keys_to_delete = existing_event_team_keys.difference(event_team_keys)
        EventTeamManipulator.delete_keys(et_keys_to_delete)

    event_teams = EventTeamManipulator.createOrUpdate(event_teams)
    if type(event_teams) is not list:
        event_teams = [event_teams]

    template_values = {
        'event': event,
        'event_teams': event_teams,
    }

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        path = os.path.join(os.path.dirname(__file__),
                            '../templates/datafeeds/usfirst_event_details_get.html')
        self.response.out.write(template.render(path, template_values))
def _default_sitevar():
    """Return the 'notifications.enable' Sitevar, creating it as `true` if missing."""
    enabled_by_default = json.dumps(True)
    return Sitevar.get_or_insert('notifications.enable', values_json=enabled_by_default)
def parse(self, response):
    """Parse an FRC Events API event-list response into TBA models.

    Iterates `response['Events']`, building an `Event` for each recognized
    entry and a `District` for each distinct district code seen.

    Returns:
        (events, districts): a list of `Event` models and a list of the
        `District` models built along the way.
    """
    events = []
    districts = {}

    # Admin-controlled hacks for Championship registration quirks.
    # Each key may be absent; fall back to an empty list.
    cmp_hack_sitevar = Sitevar.get_or_insert('cmp_registration_hacks')
    divisions_to_skip = cmp_hack_sitevar.contents.get('divisions_to_skip', []) \
        if cmp_hack_sitevar else []
    event_name_override = cmp_hack_sitevar.contents.get('event_name_override', []) \
        if cmp_hack_sitevar else []
    events_to_change_dates = cmp_hack_sitevar.contents.get('set_start_to_last_day', []) \
        if cmp_hack_sitevar else []

    for event in response['Events']:
        code = event['code'].lower()
        # 'week0' is special-cased as a preseason event; otherwise map the
        # API's type string through EVENT_TYPES. Unknown types are skipped.
        event_type = EventType.PRESEASON if code == 'week0' else self.EVENT_TYPES.get(event['type'].lower(), None)
        if event_type is None:
            logging.warn("Event type '{}' not recognized!".format(event['type']))
            continue
        name = event['name']
        short_name = EventHelper.getShortName(name)
        district_enum = EventHelper.parseDistrictName(event['districtCode'].lower()) if event['districtCode'] else DistrictType.NO_DISTRICT
        district_key = District.renderKeyName(self.season, event['districtCode'].lower()) if event['districtCode'] else None
        venue = event['venue']
        city = event['city']
        state_prov = event['stateprov']
        country = event['country']
        start = datetime.datetime.strptime(event['dateStart'], self.DATE_FORMAT_STR)
        end = datetime.datetime.strptime(event['dateEnd'], self.DATE_FORMAT_STR)
        website = event.get('website')

        # TODO read timezone from API

        # Special cases for district championship divisions:
        # build "<initials> - <division>" from the hyphen-separated name.
        if event_type == EventType.DISTRICT_CMP_DIVISION:
            split_name = name.split('-')
            short_name = '{} - {}'.format(
                ''.join(item[0].upper() for item in split_name[0].split()),
                split_name[-1].replace('Division', '').strip())

        # Special cases for champs
        if code in self.EVENT_CODE_EXCEPTIONS:
            code, short_name = self.EVENT_CODE_EXCEPTIONS[code]

            # FIRST indicates CMP registration before divisions are assigned
            # by adding all teams to Einstein. We will hack around that by not
            # storing divisions and renaming Einstein to simply "Championship"
            # when certain sitevar flags are set.
            if code in self.EINSTEIN_CODES:
                override = [item for item in event_name_override if item['event'] == "{}{}".format(self.season, code)]
                if override:
                    # NOTE(review): short_name here is the EVENT_CODE_EXCEPTIONS
                    # template being filled with the override values — confirm
                    # the templates contain '{}' placeholders.
                    name = short_name.format(override[0]['name'])
                    short_name = short_name.format(override[0]['short_name'])
            else:  # Divisions
                name = '{} Division'.format(short_name)

        event_key = "{}{}".format(self.season, code)
        if event_key in divisions_to_skip:
            continue

        # Allow overriding the start date to be the beginning of the last day
        if event_key in events_to_change_dates:
            start = end.replace(hour=0, minute=0, second=0, microsecond=0)

        events.append(Event(
            id=event_key,
            name=name,
            short_name=short_name,
            event_short=code,
            event_type_enum=event_type,
            official=True,
            start_date=start,
            end_date=end,
            venue=venue,
            city=city,
            state_prov=state_prov,
            country=country,
            venue_address=None,  # Even though FRC API provides address, ElasticSearch is more detailed
            year=self.season,
            event_district_enum=district_enum,
            district_key=ndb.Key(District, district_key) if district_key else None,
            website=website,
        ))

        # Build District Model (once per distinct district key)
        if district_key and district_key not in districts:
            districts[district_key] = District(
                id=district_key,
                year=self.season,
                abbreviation=event['districtCode'].lower(),
            )
    return events, list(districts.values())
def parse(self, response):
    """Parse an FRC Events API event-list response into TBA models.

    This variant applies Einstein renaming/date hacks driven by the
    'cmp_registration_hacks' sitevar and can skip storing CMP divisions.

    Returns:
        (events, districts): a list of `Event` models and a list of the
        `District` models built along the way.
    """
    events = []
    districts = {}

    # Admin-controlled hacks for Championship registration quirks,
    # with defaults when the sitevar or key is absent.
    cmp_hack_sitevar = Sitevar.get_or_insert('cmp_registration_hacks')
    store_cmp_division = cmp_hack_sitevar.contents.get('should_store_divisions', True) \
        if cmp_hack_sitevar else True
    einstein_name = cmp_hack_sitevar.contents.get('einstein_name', self.EINSTEIN_NAME_DEFAULT) \
        if cmp_hack_sitevar else self.EINSTEIN_NAME_DEFAULT
    einstein_short_name = cmp_hack_sitevar.contents.get('einstein_short_name', self.EINSTEIN_SHORT_NAME_DEFAULT) \
        if cmp_hack_sitevar else self.EINSTEIN_SHORT_NAME_DEFAULT
    change_einstein_dates = cmp_hack_sitevar.contents.get('should_change_einstein_dates', False) \
        if cmp_hack_sitevar else False

    for event in response['Events']:
        code = event['code'].lower()
        # 'week0' is special-cased as a preseason event; otherwise map the
        # API's type string through EVENT_TYPES. Unknown types are skipped.
        event_type = EventType.PRESEASON if code == 'week0' else self.EVENT_TYPES.get(event['type'].lower(), None)
        if event_type is None:
            logging.warn("Event type '{}' not recognized!".format(event['type']))
            continue
        name = event['name']
        short_name = EventHelper.getShortName(name)
        district_enum = EventHelper.parseDistrictName(event['districtCode'].lower()) if event['districtCode'] else DistrictType.NO_DISTRICT
        district_key = District.renderKeyName(self.season, event['districtCode'].lower()) if event['districtCode'] else None
        venue = event['venue']
        city = event['city']
        state_prov = event['stateprov']
        country = event['country']
        start = datetime.datetime.strptime(event['dateStart'], self.DATE_FORMAT_STR)
        end = datetime.datetime.strptime(event['dateEnd'], self.DATE_FORMAT_STR)
        website = event.get('website')

        # TODO read timezone from API

        # Special cases for champs
        if code in self.EVENT_CODE_EXCEPTIONS:
            code, short_name = self.EVENT_CODE_EXCEPTIONS[code]

            # FIRST indicates CMP registration before divisions are assigned
            # by adding all teams to Einstein. We will hack around that by not
            # storing divisions and renaming Einstein to simply "Championship"
            # when certain sitevar flags are set.
            if code in self.EINSTEIN_CODES:
                name = short_name.format(einstein_name)
                short_name = short_name.format(einstein_short_name)
                if change_einstein_dates:
                    # Set to beginning of last day
                    start = end.replace(hour=0, minute=0, second=0, microsecond=0)
            else:  # Divisions
                name = '{} Division'.format(short_name)
                # Allow skipping storing CMP divisions before they're announced
                if not store_cmp_division:
                    continue

        events.append(Event(
            id="{}{}".format(self.season, code),
            name=name,
            short_name=short_name,
            event_short=code,
            event_type_enum=event_type,
            official=True,
            start_date=start,
            end_date=end,
            venue=venue,
            city=city,
            state_prov=state_prov,
            country=country,
            venue_address=None,  # Even though FRC API provides address, ElasticSearch is more detailed
            year=self.season,
            event_district_enum=district_enum,
            district_key=ndb.Key(District, district_key) if district_key else None,
            website=website,
        ))

        # Build District Model (once per distinct district key)
        if district_key and district_key not in districts:
            districts[district_key] = District(
                id=district_key,
                year=self.season,
                abbreviation=event['districtCode'].lower(),
            )
    return events, list(districts.values())
def is_blacklisted(website):
    """Return True if `website` is listed in the 'website_blacklist' sitevar."""
    blacklist_sitevar = Sitevar.get_or_insert(
        'website_blacklist', values_json=json.dumps({'websites': []}))
    blocked_sites = blacklist_sitevar.contents.get('websites', [])
    return website in blocked_sites
from models.account import Account
from models.favorite import Favorite
from models.media import Media
from models.sitevar import Sitevar
from models.subscription import Subscription
from models.mobile_api_messages import BaseResponse, FavoriteCollection, FavoriteMessage, RegistrationRequest, \
    SubscriptionCollection, SubscriptionMessage, ModelPreferenceMessage, \
    MediaSuggestionMessage
from models.mobile_client import MobileClient
from models.suggestion import Suggestion

# OAuth client IDs used to authenticate mobile API requests.
# Populated at import time from the 'mobile.clientIds' sitevar below.
WEB_CLIENT_ID = ""
ANDROID_AUDIENCE = ""
ANDROID_CLIENT_ID = ""

client_ids_sitevar = Sitevar.get_or_insert('mobile.clientIds')
if isinstance(client_ids_sitevar.contents, dict):
    WEB_CLIENT_ID = client_ids_sitevar.contents.get("web", "")
    # The Android audience is the web client ID (Google sign-in convention).
    ANDROID_AUDIENCE = WEB_CLIENT_ID
    ANDROID_CLIENT_ID = client_ids_sitevar.contents.get("android", "")

if not WEB_CLIENT_ID:
    logging.error("Web client ID is not set, see /admin/authkeys")

if not ANDROID_CLIENT_ID:
    logging.error("Android client ID is not set, see /admin/authkeys")

# To enable iOS access to the API, add another variable for the iOS client ID
client_ids = [WEB_CLIENT_ID, ANDROID_CLIENT_ID]
if tba_config.DEBUG:
def get(self, event_key):
    """Refresh one event's details, teams, district teams, robots, EventTeams, and avatars.

    Merges FRC Events API data with ElasticSearch data, writes the resulting
    models (team writes gated to the latest season), prunes stale EventTeams,
    and fetches team avatars for 2018-2020 events.
    """
    df = DatafeedFMSAPI('v2.0')
    df2 = DatafeedFIRSTElasticSearch()

    event = Event.get_by_id(event_key)

    # Update event
    fmsapi_events, fmsapi_districts = df.getEventDetails(event_key)
    elasticsearch_events = df2.getEventDetails(event)
    updated_event = EventManipulator.mergeModels(fmsapi_events,
                                                 elasticsearch_events)
    if updated_event:
        event = EventManipulator.createOrUpdate(updated_event)
    DistrictManipulator.createOrUpdate(fmsapi_districts)

    models = df.getEventTeams(event_key)
    teams = []
    district_teams = []
    robots = []
    for group in models:
        # models is a list of tuples (team, districtTeam, robot)
        if isinstance(group[0], Team):
            teams.append(group[0])
        if isinstance(group[1], DistrictTeam):
            district_teams.append(group[1])
        if isinstance(group[2], Robot):
            robots.append(group[2])

    # Merge teams
    teams = TeamManipulator.mergeModels(teams, df2.getEventTeams(event))

    # Write new models
    if teams and event.year == tba_config.MAX_YEAR:  # Only update from latest year
        teams = TeamManipulator.createOrUpdate(teams)
    district_teams = DistrictTeamManipulator.createOrUpdate(district_teams)
    robots = RobotManipulator.createOrUpdate(robots)

    if not teams:  # No teams found registered for this event
        teams = []
    # createOrUpdate may return a single model instead of a list; normalize.
    if type(teams) is not list:
        teams = [teams]

    # Build EventTeams, unless this event is flagged in the
    # 'cmp_registration_hacks' sitevar to skip them.
    cmp_hack_sitevar = Sitevar.get_or_insert('cmp_registration_hacks')
    events_without_eventteams = cmp_hack_sitevar.contents.get('skip_eventteams', []) \
        if cmp_hack_sitevar else []
    skip_eventteams = event_key in events_without_eventteams
    event_teams = [
        EventTeam(id=event.key_name + "_" + team.key_name,
                  event=event.key,
                  team=team.key,
                  year=event.year) for team in teams
    ] if not skip_eventteams else []

    # Delete eventteams of teams that are no longer registered.
    # NOTE(review): unlike the sibling handler (which deletes when
    # `event_teams != [] or skip_eventteams`), this never prunes when
    # skip_eventteams is set — confirm that is intentional.
    if event_teams and not skip_eventteams:
        existing_event_team_keys = set(
            EventTeam.query(EventTeam.event == event.key).fetch(
                1000, keys_only=True))
        event_team_keys = set([et.key for et in event_teams])
        et_keys_to_delete = existing_event_team_keys.difference(event_team_keys)
        EventTeamManipulator.delete_keys(et_keys_to_delete)

    event_teams = EventTeamManipulator.createOrUpdate(event_teams)
    if type(event_teams) is not list:
        event_teams = [event_teams]

    # Team avatars only exist for the 2018-2020 seasons.
    if event.year in {2018, 2019, 2020}:
        avatars, keys_to_delete = df.getEventTeamAvatars(event.key_name)
        if avatars:
            MediaManipulator.createOrUpdate(avatars)
        MediaManipulator.delete_keys(keys_to_delete)

    template_values = {
        'event': event,
        'event_teams': event_teams,
    }

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        path = os.path.join(
            os.path.dirname(__file__),
            '../templates/datafeeds/usfirst_event_details_get.html')
        self.response.out.write(template.render(path, template_values))
def disable_continuous_push():
    """Persist `contbuild_enabled = False` in the 'apistatus' sitevar."""
    sitevar = Sitevar.get_or_insert('apistatus', values_json="{}")
    contents = sitevar.contents
    contents['contbuild_enabled'] = False
    sitevar.contents = contents
    sitevar.put()