def post(self):
    self._require_admin()
    event_key = self.request.get('event_key')
    matches_csv = self.request.get('matches_csv')
    matches = OffseasonMatchesParser.parse(matches_csv)

    event = Event.get_by_id(event_key)
    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match.get("comp_level", None),
            match.get("set_number", 0),
            match.get("match_number", 0)),
        event=event.key,
        game=Match.FRC_GAMES_BY_YEAR.get(event.year, "frc_unknown"),
        set_number=match.get("set_number", 0),
        match_number=match.get("match_number", 0),
        comp_level=match.get("comp_level", None),
        team_key_names=match.get("team_key_names", None),
        alliances_json=match.get("alliances_json", None)
    ) for match in matches]
    MatchManipulator.createOrUpdate(matches)  # persist the parsed matches

    try:
        FirebasePusher.updated_event(event.key_name)
    except:
        logging.warning("Enqueuing Firebase push failed!")

    self.redirect('/admin/event/{}'.format(event_key))
def post(self):
    self._require_admin()
    event_key = self.request.get('event_key')
    matches_csv = self.request.get('matches_csv')
    matches = OffseasonMatchesParser.parse(matches_csv)

    event = Event.get_by_id(event_key)
    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match.get("comp_level", None),
            match.get("set_number", 0),
            match.get("match_number", 0)),
        event=event.key,
        game=Match.FRC_GAMES_BY_YEAR.get(event.year, "frc_unknown"),
        set_number=match.get("set_number", 0),
        match_number=match.get("match_number", 0),
        comp_level=match.get("comp_level", None),
        team_key_names=match.get("team_key_names", None),
        alliances_json=match.get("alliances_json", None)
    ) for match in matches]
    new_matches = MatchManipulator.createOrUpdate(matches)

    try:
        last_matches = MatchHelper.recentMatches(new_matches, 1)
        upcoming_matches = MatchHelper.upcomingMatches(new_matches, 8)
    except:
        logging.warning("Computing last/upcoming matches for Firebase failed!")
    try:
        FirebasePusher.updateEvent(event, last_matches, upcoming_matches)
    except:
        logging.warning("Enqueuing Firebase push failed!")

    self.redirect('/admin/event/{}'.format(event_key))
def get(self, event_key):
    df = DatafeedUsfirst()
    event = Event.get_by_id(event_key)
    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event))

    if new_matches:
        for match in new_matches:
            if hasattr(match, 'dirty') and match.dirty:
                # Enqueue push notification
                try:
                    FirebasePusher.updated_event(event.key_name)
                except:
                    logging.warning("Enqueuing Firebase push failed!")
                # Enqueue task to calculate matchstats
                taskqueue.add(
                    url='/tasks/math/do/event_matchstats/' + event.key_name,
                    method='GET')
                break

    template_values = {
        'matches': new_matches,
    }

    path = os.path.join(os.path.dirname(__file__),
                        '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(template.render(path, template_values))
def post(self):
    self._require_admin()
    event_key = self.request.get('event_key')
    matches_csv = self.request.get('matches_csv')
    matches = OffseasonMatchesParser.parse(matches_csv)

    event = Event.get_by_id(event_key)
    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match.get("comp_level", None),
            match.get("set_number", 0),
            match.get("match_number", 0)),
        event=event.key,
        game=Match.FRC_GAMES_BY_YEAR.get(event.year, "frc_unknown"),
        set_number=match.get("set_number", 0),
        match_number=match.get("match_number", 0),
        comp_level=match.get("comp_level", None),
        team_key_names=match.get("team_key_names", None),
        alliances_json=match.get("alliances_json", None)
    ) for match in matches]
    MatchManipulator.createOrUpdate(matches)  # persist the parsed matches

    try:
        FirebasePusher.updated_event(event.key_name)
    except:
        logging.warning("Enqueuing Firebase push failed!")

    self.redirect('/admin/event/{}'.format(event_key))
def postUpdateHook(cls, matches):
    '''
    To run after the match has been updated.
    Send push notifications to subscribed users
    Only if the match is part of an active event
    '''
    for match in matches:
        if match.event.get().now:
            logging.info("Sending push notifications for " + match.key_name)
            try:
                GCMMessageHelper.send_match_score_update(match)
            except Exception, exception:
                logging.error("Error sending match updates: " + str(exception))

    '''
    Enqueue firebase push
    '''
    if matches:
        event_key = matches[0].event.id()
        try:
            FirebasePusher.updated_event(event_key)
        except Exception:
            logging.warning("Enqueuing Firebase push failed!")

        # Enqueue task to calculate matchstats
        taskqueue.add(
            url='/tasks/math/do/event_matchstats/' + event_key,
            method='GET')
def postDeleteHook(cls, matches):
    '''
    To run after the match has been deleted.
    '''
    for match in matches:
        try:
            FirebasePusher.delete_match(match)
        except Exception:
            logging.warning("Enqueuing Firebase delete failed!")
def postDeleteHook(cls, matches):
    '''
    To run after the match has been deleted.
    '''
    for match in matches:
        try:
            FirebasePusher.delete_match(match)
        except Exception:
            logging.warning("Firebase delete_match failed!")
def send(self, keys, push_firebase=True, track_call=True):
    # dict like {ClientType: [key]}; the list for webhooks is a tuple of (key, secret)
    self.keys = keys

    deferred.defer(self.render, self._supported_clients, _queue="push-notifications")

    if self._push_firebase and push_firebase:
        FirebasePusher.push_notification(self)

    if self._track_call and track_call:
        num_keys = 0
        for v in keys.values():
            # Count the number of clients receiving the notification
            num_keys += len(v)
        deferred.defer(self.track_notification, self._type, num_keys, _queue="api-track-call")
def get(self, event_key):
    event = Event.get_by_id(event_key)
    event_teams = EventTeam.query(EventTeam.event == event.key).fetch()
    for event_team in event_teams:
        status = EventTeamStatusHelper.generate_team_at_event_status(
            event_team.team.id(), event)
        event_team.status = status
        FirebasePusher.update_event_team_status(
            event_key, event_team.team.id(), status)
    EventTeamManipulator.createOrUpdate(event_teams)

    if 'X-Appengine-Taskname' not in self.request.headers:  # Only write out if not in taskqueue
        self.response.out.write(
            "Finished calculating event team statuses for: {}".format(event_key))
def updateMerge(self, new_match, old_match):
    """
    Given an "old" and a "new" Match object, replace the fields in the
    "old" match that are present in the "new" match, but keep fields from
    the "old" match that are null in the "new" match.
    """
    immutable_attrs = [
        "comp_level",
        "event",
        "set_number",
        "match_number",
    ]  # These build key_name, and cannot be changed without deleting the model.

    attrs = [
        "alliances_json",
        "game",
        "no_auto_update",
        "time",
        "time_string",
    ]

    list_attrs = [
        "team_key_names",
        "tba_videos",
        "youtube_videos",
    ]

    push_match = not old_match.has_been_played and new_match.has_been_played

    for attr in attrs:
        if getattr(new_match, attr) is not None:
            if getattr(new_match, attr) != getattr(old_match, attr):
                setattr(old_match, attr, getattr(new_match, attr))
                if attr == 'alliances_json':
                    # Necessary since 'alliances' doesn't get changed
                    # when mutating 'alliances_json'
                    old_match.clearAlliances()
                old_match.dirty = True

    for attr in list_attrs:
        if len(getattr(new_match, attr)) > 0:
            if getattr(new_match, attr) != getattr(old_match, attr):
                setattr(old_match, attr, getattr(new_match, attr))
                old_match.dirty = True

    if push_match:
        try:
            FirebasePusher.pushMatch(old_match)
        except:
            logging.warning("Enqueuing Firebase push failed!")

    return old_match
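# A minimal standalone sketch of the merge rule above: fields that are None on
# the "new" match never clobber the "old" match. FakeMatch and the field list
# are hypothetical stand-ins for illustration, not part of the real Match model.
class FakeMatch(object):
    def __init__(self, alliances_json=None, time=None):
        self.alliances_json = alliances_json
        self.time = time
        self.dirty = False

old = FakeMatch(alliances_json='{"red": []}', time='10:00')
new = FakeMatch(alliances_json='{"red": ["frc254"]}', time=None)

for attr in ['alliances_json', 'time']:
    if getattr(new, attr) is not None and getattr(new, attr) != getattr(old, attr):
        setattr(old, attr, getattr(new, attr))  # only non-None, changed fields copy over
        old.dirty = True

assert old.alliances_json == '{"red": ["frc254"]}'
assert old.time == '10:00'  # None on `new` left the old value alone
assert old.dirty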
def postUpdateHook(cls, event_details_list, updated_attr_list, is_new_list):
    """
    To run after models have been updated
    """
    for (event_details, updated_attrs) in zip(event_details_list, updated_attr_list):
        event = Event.get_by_id(event_details.key.id())
        if event.within_a_day and "alliance_selections" in updated_attrs:
            try:
                NotificationHelper.send_alliance_update(event)
            except Exception:
                logging.error("Error sending alliance update notification for {}".format(event.key_name))
                logging.error(traceback.format_exc())
            try:
                TBANSHelper.alliance_selection(event)
            except Exception:
                logging.error("Error sending alliance update notification for {}".format(event.key_name))
                logging.error(traceback.format_exc())

        # Enqueue task to calculate district points
        try:
            taskqueue.add(
                url='/tasks/math/do/district_points_calc/{}'.format(event.key.id()),
                method='GET')
        except Exception:
            logging.error("Error enqueuing district_points_calc for {}".format(event.key.id()))
            logging.error(traceback.format_exc())

        # Enqueue task to calculate event team status
        try:
            taskqueue.add(
                url='/tasks/math/do/event_team_status/{}'.format(event.key.id()),
                method='GET')
        except Exception:
            logging.error("Error enqueuing event_team_status for {}".format(event.key.id()))
            logging.error(traceback.format_exc())

        try:
            FirebasePusher.update_event_details(event_details)
        except Exception:
            logging.warning("Firebase update_event_details failed!")
def flush(self):
    flushed = []
    flushed.append(MainChampsHandler().memcacheFlush())
    flushed.append(MainCompetitionseasonHandler().memcacheFlush())
    flushed.append(MainOffseasonHandler().memcacheFlush())
    flushed.append(MainInsightsHandler().memcacheFlush())
    flushed.append(GamedayHandler().memcacheFlush())
    flushed.append(Gameday2Controller().memcacheFlush())
    flushed.append(WebcastsHandler().memcacheFlush())
    flushed.append(EventList().memcacheFlush())
    FirebasePusher.update_live_events()
    return flushed
def postUpdateHook(cls, matches):
    """
    To run after models have been updated
    """
    if matches:
        event_key = matches[0].event.id()
        try:
            FirebasePusher.updated_event(event_key)
        except Exception:
            logging.warning("Enqueuing Firebase push failed!")

        # Enqueue task to calculate matchstats
        taskqueue.add(
            url='/tasks/math/do/event_matchstats/' + event_key,
            method='GET')
def _render(self, *args, **kw):
    kickoff_datetime_est = datetime.datetime.strptime(
        self.template_values['kickoff_datetime'], "%Y-%m-%dT%H:%M:%S"
    ) if 'kickoff_datetime' in self.template_values else None
    kickoff_datetime_utc = pytz.utc.localize(
        kickoff_datetime_est + datetime.timedelta(hours=5))

    special_webcasts = FirebasePusher.get_special_webcasts()
    is_kickoff = datetime.datetime.now() >= kickoff_datetime_est - datetime.timedelta(days=1)  # turn on 1 day before
    week_events = EventHelper.getWeekEvents()

    self.template_values.update({
        'events': week_events,
        'is_kickoff': is_kickoff,
        'kickoff_datetime_est': kickoff_datetime_est,
        'kickoff_datetime_utc': kickoff_datetime_utc,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })

    path = os.path.join(os.path.dirname(__file__), "../templates/index_kickoff.html")
    return template.render(path, self.template_values)
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    year = datetime.datetime.now().year
    special_webcasts = FirebasePusher.get_special_webcasts()

    self.template_values.update({
        "events": week_events,
        "year": year,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })

    insights = ndb.get_multi([
        ndb.Key(Insight, Insight.renderKeyName(year, insight_name))
        for insight_name in Insight.INSIGHT_NAMES.values()
    ])
    for insight in insights:
        if insight:
            self.template_values[insight.name] = insight

    path = os.path.join(os.path.dirname(__file__), '../templates/index_insights.html')
    return template.render(path, self.template_values)
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    popular_teams_events = TeamHelper.getPopularTeamsEvents(week_events)

    # Only show special webcasts that aren't also hosting an event
    special_webcasts = []
    for special_webcast in FirebasePusher.get_special_webcasts():
        add = True
        for event in week_events:
            if event.now and event.webcast:
                for event_webcast in event.webcast:
                    if (special_webcast.get('type', '') == event_webcast.get('type', '') and
                            special_webcast.get('channel', '') == event_webcast.get('channel', '') and
                            special_webcast.get('file', '') == event_webcast.get('file', '')):
                        add = False
                        break
            if not add:
                break
        if add:
            special_webcasts.append(special_webcast)

    self.template_values.update({
        "events": week_events,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
        "popular_teams_events": popular_teams_events,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/index_competitionseason.html')
    return template.render(path, self.template_values)
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    special_webcasts = FirebasePusher.get_special_webcasts()
    self.template_values.update({
        "events": week_events,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })
    path = os.path.join(os.path.dirname(__file__), '../templates/index_competitionseason.html')
    return template.render(path, self.template_values)
def get(self, event_key):
    df = DatafeedUsfirst()
    event = Event.get_by_id(event_key)
    new_matches = MatchManipulator.createOrUpdate(df.getMatches(event))

    try:
        last_matches = MatchHelper.recentMatches(new_matches, 1)
        upcoming_matches = MatchHelper.upcomingMatches(new_matches, 8)
    except:
        logging.warning("Computing last/upcoming matches for Firebase failed!")
    try:
        FirebasePusher.updateEvent(event, last_matches, upcoming_matches)
    except:
        logging.warning("Enqueuing Firebase push failed!")

    template_values = {
        'matches': new_matches,
    }

    path = os.path.join(os.path.dirname(__file__),
                        '../templates/datafeeds/usfirst_matches_get.html')
    self.response.out.write(template.render(path, template_values))
def postUpdateHook(cls, event_details_list, updated_attr_list, is_new_list):
    """
    To run after models have been updated
    """
    for (event_details, updated_attrs) in zip(event_details_list, updated_attr_list):
        event = Event.get_by_id(event_details.key.id())
        try:
            if event.within_a_day and "alliance_selections" in updated_attrs:
                # Send updated alliances notification
                logging.info("Sending alliance notifications for {}".format(event.key_name))
                NotificationHelper.send_alliance_update(event)
        except Exception:
            logging.error("Error sending alliance update notification for {}".format(event.key_name))
            logging.error(traceback.format_exc())

        # Enqueue task to calculate district points
        try:
            taskqueue.add(
                url='/tasks/math/do/district_points_calc/{}'.format(event.key.id()),
                method='GET')
        except Exception:
            logging.error("Error enqueuing district_points_calc for {}".format(event.key.id()))
            logging.error(traceback.format_exc())

        # Enqueue task to calculate event team status
        try:
            taskqueue.add(
                url='/tasks/math/do/event_team_status/{}'.format(event.key.id()),
                method='GET')
        except Exception:
            logging.error("Error enqueuing event_team_status for {}".format(event.key.id()))
            logging.error(traceback.format_exc())

        try:
            FirebasePusher.update_event_details(event_details)
        except Exception:
            logging.warning("Firebase update_event_details failed!")
def _render(self, *args, **kw):
    kickoff_datetime_utc = datetime.datetime.strptime(
        self.template_values['kickoff_datetime'], "%Y-%m-%dT%H:%M:%S"
    ) if 'kickoff_datetime' in self.template_values else None

    week_events = EventHelper.getWeekEvents()
    special_webcasts = FirebasePusher.get_special_webcasts()
    self.template_values.update({
        "events": week_events,
        'kickoff_datetime_utc': kickoff_datetime_utc,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/index_offseason.html')
    return template.render(path, self.template_values)
def _render(self, *args, **kw):
    special_webcasts = FirebasePusher.get_special_webcasts()
    self.template_values.update({
        "events": EventHelper.getWeekEvents(),
        'kickoff_datetime_utc': SeasonHelper.kickoff_datetime_utc(),
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })
    return jinja2_engine.render('index/index_offseason.html', self.template_values)
def _render(self, *args, **kw):
    effective_season_year = SeasonHelper.effective_season_year()
    special_webcasts = FirebasePusher.get_special_webcasts()
    self.template_values.update({
        'seasonstart_datetime_utc': SeasonHelper.first_event_datetime_utc(effective_season_year),
        'events': EventHelper.getWeekEvents(),
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })
    return jinja2_engine.render('index/index_buildseason.html', self.template_values)
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    year = datetime.datetime.now().year
    special_webcasts = FirebasePusher.get_special_webcasts()

    self.template_values.update({
        "events": week_events,
        "year": year,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })

    insights = ndb.get_multi([
        ndb.Key(Insight, Insight.renderKeyName(year, insight_name))
        for insight_name in Insight.INSIGHT_NAMES.values()
    ])
    for insight in insights:
        if insight:
            self.template_values[insight.name] = insight

    path = os.path.join(os.path.dirname(__file__), '../templates/index_insights.html')
    return template.render(path, self.template_values)
def _render(self, *args, **kw):
    kickoff_datetime_est = datetime.datetime.strptime(
        self.template_values['kickoff_datetime'], "%Y-%m-%dT%H:%M:%S"
    ) if 'kickoff_datetime' in self.template_values else None
    kickoff_datetime_utc = pytz.utc.localize(
        kickoff_datetime_est + datetime.timedelta(hours=5))

    special_webcasts = FirebasePusher.get_special_webcasts()
    is_kickoff = datetime.datetime.now() >= kickoff_datetime_est - datetime.timedelta(days=1)  # turn on 1 day before
    week_events = EventHelper.getWeekEvents()

    self.template_values.update({
        'events': week_events,
        'is_kickoff': is_kickoff,
        'kickoff_datetime_est': kickoff_datetime_est,
        'kickoff_datetime_utc': kickoff_datetime_utc,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })

    path = os.path.join(os.path.dirname(__file__), "../templates/index_kickoff.html")
    return template.render(path, self.template_values)
def _render(self, *args, **kw):
    special_webcasts = FirebasePusher.get_special_webcasts()
    effective_season_year = SeasonHelper.effective_season_year()
    self.template_values.update({
        'events': EventHelper.getWeekEvents(),
        'is_kickoff': SeasonHelper.is_kickoff_at_least_one_day_away(year=effective_season_year),
        'kickoff_datetime_est': SeasonHelper.kickoff_datetime_est(effective_season_year),
        'kickoff_datetime_utc': SeasonHelper.kickoff_datetime_utc(effective_season_year),
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })
    return jinja2_engine.render('index/index_kickoff.html', self.template_values)
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    popular_teams_events = TeamHelper.getPopularTeamsEvents(week_events)

    # Only show special webcasts that aren't also hosting an event
    special_webcasts = []
    for special_webcast in FirebasePusher.get_special_webcasts():
        add = True
        for event in week_events:
            if event.now and event.webcast:
                for event_webcast in event.webcast:
                    if (special_webcast.get('type', '') == event_webcast.get('type', '') and
                            special_webcast.get('channel', '') == event_webcast.get('channel', '') and
                            special_webcast.get('file', '') == event_webcast.get('file', '')):
                        add = False
                        break
            if not add:
                break
        if add:
            special_webcasts.append(special_webcast)

    self.template_values.update({
        "events": week_events,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
        "popular_teams_events": popular_teams_events,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/index_competitionseason.html')
    return template.render(path, self.template_values)
class MatchManipulator(ManipulatorBase):
    """
    Handle Match database writes.
    """
    @classmethod
    def getCacheKeysAndControllers(cls, affected_refs):
        return CacheClearer.get_match_cache_keys_and_controllers(affected_refs)

    @classmethod
    def postUpdateHook(cls, matches):
        '''
        To run after the match has been updated.
        Send push notifications to subscribed users
        Only if the match is part of an active event
        '''
        for match in matches:
            if match.event.get().now:
                logging.info("Sending push notifications for " + match.key_name)
                try:
                    NotificationHelper.send_match_score_update(match)
                except Exception, exception:
                    logging.error("Error sending match updates: " + str(exception))

        '''
        Enqueue firebase push
        '''
        if matches:
            event_key = matches[0].event.id()
            try:
                FirebasePusher.updated_event(event_key)
            except Exception:
                logging.warning("Enqueuing Firebase push failed!")

            # Enqueue task to calculate matchstats
            taskqueue.add(
                url='/tasks/math/do/event_matchstats/' + event_key,
                method='GET')
            try:
                NotificationHelper.send_schedule_update(event)
            except Exception, exception:
                logging.error("Error sending schedule updates for: {}".format(event.key_name))

        '''
        Enqueue firebase push
        '''
        affected_stats_event_keys = set()
        for (match, updated_attrs, is_new) in zip(matches, updated_attr_list, is_new_list):
            # Only attrs that affect stats
            if is_new or set(['alliances_json', 'score_breakdown_json']).intersection(set(updated_attrs)) != set():
                affected_stats_event_keys.add(match.event.id())
            try:
                FirebasePusher.update_match(match)
            except Exception:
                logging.warning("Firebase update_match failed!")

        # Enqueue statistics
        for event_key in affected_stats_event_keys:
            # Enqueue task to calculate matchstats
            try:
                taskqueue.add(
                    url='/tasks/math/do/event_matchstats/' + event_key,
                    method='GET')
            except Exception:
                logging.error("Error enqueuing event_matchstats for {}".format(event_key))
                logging.error(traceback.format_exc())
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    special_webcasts = FirebasePusher.get_special_webcasts()

    events = {}
    for event in week_events:
        events[event.key.id()] = event

    # Calculate popular teams
    # Get cached team keys
    event_team_keys = memcache.get_multi(events.keys(), namespace='event-team-keys')

    # Get uncached team keys
    to_query = set(events.keys()).difference(event_team_keys.keys())
    event_teams_futures = [(
        event_key,
        EventTeam.query(EventTeam.event == ndb.Key(Event, event_key)).fetch_async(projection=[EventTeam.team])
    ) for event_key in to_query]

    # Merge cached and uncached team keys
    for event_key, event_teams in event_teams_futures:
        event_team_keys[event_key] = [et.team.id() for et in event_teams.get_result()]
    memcache.set_multi(event_team_keys, 60 * 60 * 24, namespace='event-team-keys')

    team_keys = []
    team_events = {}
    for event_key, event_team_keys in event_team_keys.items():
        team_keys += event_team_keys
        for team_key in event_team_keys:
            team_events[team_key] = events[event_key]

    # Get cached counts
    team_favorite_counts = memcache.get_multi(team_keys, namespace='team-favorite-counts')

    # Get uncached counts
    to_count = set(team_keys).difference(team_favorite_counts.keys())
    count_futures = [(
        team_key,
        Favorite.query(Favorite.model_key == team_key).count_async()
    ) for team_key in to_count]

    # Merge cached and uncached counts
    for team_key, count_future in count_futures:
        team_favorite_counts[team_key] = count_future.get_result()
    memcache.set_multi(team_favorite_counts, 60 * 60 * 24, namespace='team-favorite-counts')

    # Sort to get top popular teams
    popular_team_keys = []
    for team_key, _ in sorted(team_favorite_counts.items(), key=lambda tc: -tc[1])[:25]:
        popular_team_keys.append(ndb.Key(Team, team_key))
    popular_teams = sorted(ndb.get_multi(popular_team_keys), key=lambda team: team.team_number)

    popular_teams_events = []
    for team in popular_teams:
        popular_teams_events.append((team, team_events[team.key.id()]))

    self.template_values.update({
        "events": week_events,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
        "popular_teams_events": popular_teams_events,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/index_competitionseason.html')
    return template.render(path, self.template_values)
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    special_webcasts = FirebasePusher.get_special_webcasts()

    events = {}
    for event in week_events:
        events[event.key.id()] = event

    # Calculate popular teams
    # Get cached team keys
    event_team_keys = memcache.get_multi(events.keys(), namespace='event-team-keys')

    # Get uncached team keys
    to_query = set(events.keys()).difference(event_team_keys.keys())
    event_teams_futures = [(
        event_key,
        EventTeam.query(EventTeam.event == ndb.Key(Event, event_key)).fetch_async(projection=[EventTeam.team])
    ) for event_key in to_query]

    # Merge cached and uncached team keys
    for event_key, event_teams in event_teams_futures:
        event_team_keys[event_key] = [et.team.id() for et in event_teams.get_result()]
    memcache.set_multi(event_team_keys, 60 * 60 * 24, namespace='event-team-keys')

    team_keys = []
    team_events = {}
    for event_key, event_team_keys in event_team_keys.items():
        team_keys += event_team_keys
        for team_key in event_team_keys:
            team_events[team_key] = events[event_key]

    # Get cached counts
    team_favorite_counts = memcache.get_multi(team_keys, namespace='team-favorite-counts')

    # Get uncached counts
    to_count = set(team_keys).difference(team_favorite_counts.keys())
    count_futures = [(
        team_key,
        Favorite.query(Favorite.model_key == team_key).count_async()
    ) for team_key in to_count]

    # Merge cached and uncached counts
    for team_key, count_future in count_futures:
        team_favorite_counts[team_key] = count_future.get_result()
    memcache.set_multi(team_favorite_counts, 60 * 60 * 24, namespace='team-favorite-counts')

    # Sort to get top popular teams
    popular_team_keys = []
    for team_key, _ in sorted(team_favorite_counts.items(), key=lambda tc: -tc[1])[:25]:
        popular_team_keys.append(ndb.Key(Team, team_key))
    popular_teams = sorted(ndb.get_multi(popular_team_keys), key=lambda team: team.team_number)

    popular_teams_events = []
    for team in popular_teams:
        popular_teams_events.append((team, team_events[team.key.id()]))

    self.template_values.update({
        "events": week_events,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
        "popular_teams_events": popular_teams_events,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/index_competitionseason.html')
    return template.render(path, self.template_values)
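# The popularity calculation above is a read-through cache: memcache.get_multi
# for hits, compute only the misses, then memcache.set_multi to refill. A
# self-contained sketch of that pattern, with a plain dict standing in for
# memcache and a dummy count function standing in for the Favorite count query
# (both are illustrative, not the real APIs):
cache = {}

def count_favorites(team_key):
    return 0  # placeholder for Favorite.query(Favorite.model_key == team_key).count()

def get_favorite_counts(team_keys):
    counts = dict((k, cache[k]) for k in team_keys if k in cache)  # cached hits
    for team_key in set(team_keys).difference(counts.keys()):  # compute only the misses
        counts[team_key] = count_favorites(team_key)
    cache.update(counts)  # refill so the next request is all hits
    return counts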
            try:
                NotificationHelper.send_schedule_update(event)
            except Exception, exception:
                logging.error("Error sending schedule updates for: {}".format(event.key_name))

        '''
        Enqueue firebase push
        '''
        affected_stats_event_keys = set()
        for (match, updated_attrs, is_new) in zip(matches, updated_attr_list, is_new_list):
            # Only attrs that affect stats
            if is_new or set(['alliances_json', 'score_breakdown_json']).intersection(set(updated_attrs)) != set():
                affected_stats_event_keys.add(match.event.id())
            try:
                FirebasePusher.update_match(match, updated_attrs)
            except Exception:
                logging.warning("Firebase update_match failed!")
                logging.warning(traceback.format_exc())

        # Enqueue statistics
        for event_key in affected_stats_event_keys:
            # Enqueue task to calculate matchstats
            try:
                taskqueue.add(
                    url='/tasks/math/do/event_matchstats/' + event_key,
                    method='GET')
            except Exception:
                logging.error("Error enqueuing event_matchstats for {}".format(event_key))
                logging.error(traceback.format_exc())
def update_bluezone(cls, live_events):
    """
    Find the current best match to watch
    Currently favors showing something over nothing, is okay with switching
    TO a feed in the middle of a match, but avoids switching FROM a feed
    in the middle of a match.
    1. Get the earliest predicted unplayed match across all live events
    2. Get all matches that start within TIME_BUCKET of that match
    3. Switch to hottest match in that bucket unless MAX_TIME_PER_MATCH is
       hit (in which case blacklist for the future)
    4. Repeat
    """
    now = datetime.datetime.now()
    logging.info("[BLUEZONE] Current time: {}".format(now))
    to_log = '--------------------------------------------------\n'
    to_log += "[BLUEZONE] Current time: {}\n".format(now)

    slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
    slack_url = None
    if slack_sitevar:
        slack_url = slack_sitevar.contents.get('bluezone', '')

    bluezone_config = Sitevar.get_or_insert('bluezone')
    logging.info("[BLUEZONE] Config (updated {}): {}".format(bluezone_config.updated, bluezone_config.contents))
    to_log += "[BLUEZONE] Config (updated {}): {}\n".format(bluezone_config.updated, bluezone_config.contents)
    current_match_key = bluezone_config.contents.get('current_match')
    last_match_key = bluezone_config.contents.get('last_match')
    current_match_predicted_time = bluezone_config.contents.get('current_match_predicted')
    if current_match_predicted_time:
        current_match_predicted_time = datetime.datetime.strptime(current_match_predicted_time, cls.TIME_PATTERN)
    current_match_switch_time = bluezone_config.contents.get('current_match_switch_time')
    if current_match_switch_time:
        current_match_switch_time = datetime.datetime.strptime(current_match_switch_time, cls.TIME_PATTERN)
    else:
        current_match_switch_time = now
    blacklisted_match_keys = bluezone_config.contents.get('blacklisted_matches', set())
    if blacklisted_match_keys:
        blacklisted_match_keys = set(blacklisted_match_keys)
    blacklisted_event_keys = bluezone_config.contents.get('blacklisted_events', set())
    if blacklisted_event_keys:
        blacklisted_event_keys = set(blacklisted_event_keys)

    current_match = Match.get_by_id(current_match_key) if current_match_key else None
    last_match = Match.get_by_id(last_match_key) if last_match_key else None

    logging.info("[BLUEZONE] live_events: {}".format([le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] live_events: {}\n".format([le.key.id() for le in live_events])
    live_events = filter(lambda e: e.webcast_status != 'offline', live_events)
    for event in live_events:
        # Fetch all matches and details asynchronously
        event.prep_matches()
        event.prep_details()
    logging.info("[BLUEZONE] Online live_events: {}".format([le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] Online live_events: {}\n".format([le.key.id() for le in live_events])
    upcoming_matches = cls.get_upcoming_matches(live_events)
    upcoming_matches = filter(lambda m: m.predicted_time is not None, upcoming_matches)
    upcoming_predictions = cls.get_upcoming_match_predictions(live_events)

    # (1, 2) Find earliest predicted unplayed match and all other matches
    # that start within TIME_BUCKET of that match
    upcoming_matches.sort(key=lambda match: match.predicted_time)
    potential_matches = []
    time_cutoff = None
    logging.info("[BLUEZONE] all upcoming matches sorted by predicted time: {}".format([um.key.id() for um in upcoming_matches]))
    to_log += "[BLUEZONE] all upcoming sorted by predicted time: {}\n".format([um.key.id() for um in upcoming_matches])
    for match in upcoming_matches:
        if match.predicted_time:
            if time_cutoff is None:
                time_cutoff = match.predicted_time + cls.TIME_BUCKET
                potential_matches.append(match)
            elif match.predicted_time < time_cutoff:
                potential_matches.append(match)
            else:
                break  # Matches are sorted by predicted_time
    logging.info("[BLUEZONE] potential_matches sorted by predicted time: {}".format([pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by predicted time: {}\n".format([pm.key.id() for pm in potential_matches])

    # (3) Choose hottest match that's not blacklisted
    cls.calculate_match_hotness(potential_matches, upcoming_predictions)
    potential_matches.sort(key=lambda match: -match.hotness)
    logging.info("[BLUEZONE] potential_matches sorted by hotness: {}".format([pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by hotness: {}\n".format([pm.key.id() for pm in potential_matches])

    bluezone_matches = []
    new_blacklisted_match_keys = set()

    # If the current match hasn't finished yet, don't even bother
    cutoff_time = current_match_switch_time + cls.MAX_TIME_PER_MATCH
    logging.info("[BLUEZONE] Current match played? {}, now = {}, cutoff = {}".format(current_match.has_been_played if current_match else None, now, cutoff_time))
    to_log += "[BLUEZONE] Current match played? {}, now = {}, cutoff = {}\n".format(current_match.has_been_played if current_match else None, now, cutoff_time)
    if current_match and not current_match.has_been_played and now < cutoff_time \
            and current_match_key not in blacklisted_match_keys \
            and current_match.event_key_name not in blacklisted_event_keys:
        logging.info("[BLUEZONE] Keeping current match {}".format(current_match.key.id()))
        to_log += "[BLUEZONE] Keeping current match {}\n".format(current_match.key.id())
        bluezone_matches.append(current_match)

    for match in potential_matches:
        if len(bluezone_matches) >= 2:  # one current, one future
            break
        logging.info("[BLUEZONE] Trying potential match: {}".format(match.key.id()))
        to_log += "[BLUEZONE] Trying potential match: {}\n".format(match.key.id())
        if filter(lambda m: m.key.id() == match.key.id(), bluezone_matches):
            logging.info("[BLUEZONE] Match {} already chosen".format(match.key.id()))
            to_log += "[BLUEZONE] Match {} already chosen\n".format(match.key.id())
            continue
        if match.event_key_name in blacklisted_event_keys:
            logging.info("[BLUEZONE] Event {} is blacklisted, skipping...".format(match.event_key_name))
            to_log += "[BLUEZONE] Event {} is blacklisted, skipping...\n".format(match.event_key_name)
            continue
        if match.key.id() not in blacklisted_match_keys:
            if match.key.id() == current_match_key:
                if current_match_predicted_time and cutoff_time < now and len(potential_matches) > 1:
                    # We've been on this match too long
                    new_blacklisted_match_keys.add(match.key.id())
                    logging.info("[BLUEZONE] Adding match to blacklist: {}".format(match.key.id()))
                    to_log += "[BLUEZONE] Adding match to blacklist: {}\n".format(match.key.id())
                    logging.info("[BLUEZONE] scheduled time: {}, now: {}".format(current_match_predicted_time, now))
                    to_log += "[BLUEZONE] scheduled time: {}, now: {}\n".format(current_match_predicted_time, now)
                    OutgoingNotificationHelper.send_slack_alert(slack_url, "Blacklisting match {}. Predicted time: {}, now: {}".format(match.key.id(), current_match_predicted_time, now))
                else:
                    # We can continue to use this match
                    bluezone_matches.append(match)
                    logging.info("[BLUEZONE] Continuing to use match: {}".format(match.key.id()))
                    to_log += "[BLUEZONE] Continuing to use match: {}\n".format(match.key.id())
            else:
                # Found a new good match
                bluezone_matches.append(match)
                logging.info("[BLUEZONE] Found a good new match: {}".format(match.key.id()))
                to_log += "[BLUEZONE] Found a good new match: {}\n".format(match.key.id())
        else:
            logging.info("[BLUEZONE] Match already blacklisted: {}".format(match.key.id()))
            to_log += "[BLUEZONE] Match already blacklisted: {}\n".format(match.key.id())
            new_blacklisted_match_keys.add(match.key.id())

    if not bluezone_matches:
        logging.info("[BLUEZONE] No match selected")
        to_log += "[BLUEZONE] No match selected\n"
    logging.info("[BLUEZONE] All selected matches: {}".format([m.key.id() for m in bluezone_matches]))
    to_log += "[BLUEZONE] All selected matches: {}\n".format([m.key.id() for m in bluezone_matches])

    # (3) Switch to hottest match
    fake_event = cls.build_fake_event()
    if bluezone_matches:
        bluezone_match = bluezone_matches[0]
        real_event = filter(lambda x: x.key_name == bluezone_match.event_key_name, live_events)[0]
        # Create Fake event for return
        fake_event.webcast_json = json.dumps([real_event.current_webcasts[0]])

        if bluezone_match.key_name != current_match_key:
            current_match_switch_time = now
            logging.info("[BLUEZONE] Switching to: {}".format(bluezone_match.key.id()))
            to_log += "[BLUEZONE] Switching to: {}\n".format(bluezone_match.key.id())
            OutgoingNotificationHelper.send_slack_alert(slack_url, "It is now {}. Switching BlueZone to {}, scheduled for {} and predicted to be at {}.".format(now, bluezone_match.key.id(), bluezone_match.time, bluezone_match.predicted_time))
            if not current_match or current_match.has_been_played:
                last_match = current_match

        # Only need to update if things changed
        if bluezone_match.key_name != current_match_key or new_blacklisted_match_keys != blacklisted_match_keys:
            FirebasePusher.update_event(fake_event)
            bluezone_config.contents = {
                'current_match': bluezone_match.key.id(),
                'last_match': last_match.key.id() if last_match else '',
                'current_match_predicted': bluezone_match.predicted_time.strftime(cls.TIME_PATTERN),
                'blacklisted_matches': list(new_blacklisted_match_keys),
                'blacklisted_events': list(blacklisted_event_keys),
                'current_match_switch_time': current_match_switch_time.strftime(cls.TIME_PATTERN),
            }
            bluezone_config.put()

    # Log to cloudstorage
    log_dir = '/tbatv-prod-hrd.appspot.com/tba-logging/bluezone/'
    log_file = 'bluezone_{}.txt'.format(now.date())
    full_path = log_dir + log_file

    existing_contents = ''
    if full_path in set([f.filename for f in cloudstorage.listbucket(log_dir)]):
        with cloudstorage.open(full_path, 'r') as existing_file:
            existing_contents = existing_file.read()
    with cloudstorage.open(full_path, 'w') as new_file:
        new_file.write(existing_contents + to_log)

    bluezone_matches.insert(0, last_match)
    bluezone_matches = filter(lambda m: m is not None, bluezone_matches)
    FirebasePusher.replace_event_matches('bluezone', bluezone_matches)

    return fake_event
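# Steps (1) and (2) of the docstring above boil down to: sort by predicted
# time, anchor on the earliest unplayed match, and keep everything predicted
# within TIME_BUCKET of it. A standalone sketch of that bucketing; the
# TIME_BUCKET value here is illustrative, the real one is a class constant.
import datetime

TIME_BUCKET = datetime.timedelta(minutes=5)

def bucket_earliest(predicted_times):
    ordered = sorted(predicted_times)
    if not ordered:
        return []
    cutoff = ordered[0] + TIME_BUCKET  # anchored on the earliest match
    return [t for t in ordered if t < cutoff]

base = datetime.datetime(2017, 4, 1, 10, 0)
times = [base, base + datetime.timedelta(minutes=3), base + datetime.timedelta(minutes=20)]
assert bucket_earliest(times) == times[:2]  # the 10:20 match falls outside the bucket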
            try:
                logging.info("Sending schedule updates for: {}".format(event.key_name))
                NotificationHelper.send_schedule_update(event)
            except Exception, exception:
                logging.error("Error sending schedule updates for: {}".format(event.key_name))

        '''
        Enqueue firebase push
        '''
        affected_stats_event_keys = set()
        for (match, updated_attrs, is_new) in zip(matches, updated_attr_list, is_new_list):
            # Only attrs that affect stats
            if is_new or set(['alliances_json', 'score_breakdown_json']).intersection(set(updated_attrs)) != set():
                affected_stats_event_keys.add(match.event.id())
            try:
                FirebasePusher.update_match(match, updated_attrs)
            except Exception:
                logging.warning("Firebase update_match failed!")
                logging.warning(traceback.format_exc())

        # Enqueue statistics
        for event_key in affected_stats_event_keys:
            # Enqueue task to calculate matchstats
            try:
                taskqueue.add(
                    url='/tasks/math/do/event_matchstats/' + event_key,
                    method='GET')
            except Exception:
                logging.error("Error enqueuing event_matchstats for {}".format(event_key))
                logging.error(traceback.format_exc())
def get(self):
    FirebasePusher.update_live_events()
def update_bluezone(cls, live_events):
    """
    Find the current best match to watch
    Currently favors showing something over nothing, is okay with switching
    TO a feed in the middle of a match, but avoids switching FROM a feed
    in the middle of a match.
    1. Get the earliest predicted unplayed match across all live events
    2. Get all matches that start within TIME_BUCKET of that match
    3. Switch to hottest match in that bucket unless MAX_TIME_PER_MATCH is
       hit (in which case blacklist for the future)
    4. Repeat
    """
    now = datetime.datetime.now()
    logging.info("[BLUEZONE] Current time: {}".format(now))
    to_log = '--------------------------------------------------\n'
    to_log += "[BLUEZONE] Current time: {}\n".format(now)

    slack_sitevar = Sitevar.get_or_insert('slack.hookurls')
    slack_url = None
    if slack_sitevar:
        slack_url = slack_sitevar.contents.get('bluezone', '')

    bluezone_config = Sitevar.get_or_insert('bluezone')
    logging.info("[BLUEZONE] Config (updated {}): {}".format(bluezone_config.updated, bluezone_config.contents))
    to_log += "[BLUEZONE] Config (updated {}): {}\n".format(bluezone_config.updated, bluezone_config.contents)
    current_match_key = bluezone_config.contents.get('current_match')
    last_match_key = bluezone_config.contents.get('last_match')
    current_match_predicted_time = bluezone_config.contents.get('current_match_predicted')
    if current_match_predicted_time:
        current_match_predicted_time = datetime.datetime.strptime(current_match_predicted_time, cls.TIME_PATTERN)
    current_match_switch_time = bluezone_config.contents.get('current_match_switch_time')
    if current_match_switch_time:
        current_match_switch_time = datetime.datetime.strptime(current_match_switch_time, cls.TIME_PATTERN)
    else:
        current_match_switch_time = now
    blacklisted_match_keys = bluezone_config.contents.get('blacklisted_matches', set())
    if blacklisted_match_keys:
        blacklisted_match_keys = set(blacklisted_match_keys)
    blacklisted_event_keys = bluezone_config.contents.get('blacklisted_events', set())
    if blacklisted_event_keys:
        blacklisted_event_keys = set(blacklisted_event_keys)

    current_match = Match.get_by_id(current_match_key) if current_match_key else None
    last_match = Match.get_by_id(last_match_key) if last_match_key else None

    logging.info("[BLUEZONE] live_events: {}".format([le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] live_events: {}\n".format([le.key.id() for le in live_events])
    live_events = filter(lambda e: e.webcast_status != 'offline', live_events)
    for event in live_events:
        # Fetch all matches and details asynchronously
        event.prep_matches()
        event.prep_details()
    logging.info("[BLUEZONE] Online live_events: {}".format([le.key.id() for le in live_events]))
    to_log += "[BLUEZONE] Online live_events: {}\n".format([le.key.id() for le in live_events])
    upcoming_matches = cls.get_upcoming_matches(live_events)
    upcoming_matches = filter(lambda m: m.predicted_time is not None, upcoming_matches)
    upcoming_predictions = cls.get_upcoming_match_predictions(live_events)

    # (1, 2) Find earliest predicted unplayed match and all other matches
    # that start within TIME_BUCKET of that match
    upcoming_matches.sort(key=lambda match: match.predicted_time)
    potential_matches = []
    time_cutoff = None
    logging.info("[BLUEZONE] all upcoming matches sorted by predicted time: {}".format([um.key.id() for um in upcoming_matches]))
    to_log += "[BLUEZONE] all upcoming sorted by predicted time: {}\n".format([um.key.id() for um in upcoming_matches])
    for match in upcoming_matches:
        if match.predicted_time:
            if time_cutoff is None:
                time_cutoff = match.predicted_time + cls.TIME_BUCKET
                potential_matches.append(match)
            elif match.predicted_time < time_cutoff:
                potential_matches.append(match)
            else:
                break  # Matches are sorted by predicted_time
    logging.info("[BLUEZONE] potential_matches sorted by predicted time: {}".format([pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by predicted time: {}\n".format([pm.key.id() for pm in potential_matches])

    # (3) Choose hottest match that's not blacklisted
    cls.calculate_match_hotness(potential_matches, upcoming_predictions)
    potential_matches.sort(key=lambda match: -match.hotness)
    logging.info("[BLUEZONE] potential_matches sorted by hotness: {}".format([pm.key.id() for pm in potential_matches]))
    to_log += "[BLUEZONE] potential_matches sorted by hotness: {}\n".format([pm.key.id() for pm in potential_matches])

    bluezone_matches = []
    new_blacklisted_match_keys = set()

    # If the current match hasn't finished yet, don't even bother
    cutoff_time = current_match_switch_time + cls.MAX_TIME_PER_MATCH
    logging.info("[BLUEZONE] Current match played? {}, now = {}, cutoff = {}".format(current_match.has_been_played if current_match else None, now, cutoff_time))
    to_log += "[BLUEZONE] Current match played? {}, now = {}, cutoff = {}\n".format(current_match.has_been_played if current_match else None, now, cutoff_time)
    if current_match and not current_match.has_been_played and now < cutoff_time \
            and current_match_key not in blacklisted_match_keys \
            and current_match.event_key_name not in blacklisted_event_keys:
        logging.info("[BLUEZONE] Keeping current match {}".format(current_match.key.id()))
        to_log += "[BLUEZONE] Keeping current match {}\n".format(current_match.key.id())
        bluezone_matches.append(current_match)

    for match in potential_matches:
        if len(bluezone_matches) >= 2:  # one current, one future
            break
        logging.info("[BLUEZONE] Trying potential match: {}".format(match.key.id()))
        to_log += "[BLUEZONE] Trying potential match: {}\n".format(match.key.id())
        if filter(lambda m: m.key.id() == match.key.id(), bluezone_matches):
            logging.info("[BLUEZONE] Match {} already chosen".format(match.key.id()))
            to_log += "[BLUEZONE] Match {} already chosen\n".format(match.key.id())
            continue
        if match.event_key_name in blacklisted_event_keys:
            logging.info("[BLUEZONE] Event {} is blacklisted, skipping...".format(match.event_key_name))
            to_log += "[BLUEZONE] Event {} is blacklisted, skipping...\n".format(match.event_key_name)
            continue
        if match.key.id() not in blacklisted_match_keys:
            if match.key.id() == current_match_key:
                if current_match_predicted_time and cutoff_time < now and len(potential_matches) > 1:
                    # We've been on this match too long
                    new_blacklisted_match_keys.add(match.key.id())
                    logging.info("[BLUEZONE] Adding match to blacklist: {}".format(match.key.id()))
                    to_log += "[BLUEZONE] Adding match to blacklist: {}\n".format(match.key.id())
                    logging.info("[BLUEZONE] scheduled time: {}, now: {}".format(current_match_predicted_time, now))
                    to_log += "[BLUEZONE] scheduled time: {}, now: {}\n".format(current_match_predicted_time, now)
                    OutgoingNotificationHelper.send_slack_alert(slack_url, "Blacklisting match {}. Predicted time: {}, now: {}".format(match.key.id(), current_match_predicted_time, now))
                else:
                    # We can continue to use this match
                    bluezone_matches.append(match)
                    logging.info("[BLUEZONE] Continuing to use match: {}".format(match.key.id()))
                    to_log += "[BLUEZONE] Continuing to use match: {}\n".format(match.key.id())
            else:
                # Found a new good match
                bluezone_matches.append(match)
                logging.info("[BLUEZONE] Found a good new match: {}".format(match.key.id()))
                to_log += "[BLUEZONE] Found a good new match: {}\n".format(match.key.id())
        else:
            logging.info("[BLUEZONE] Match already blacklisted: {}".format(match.key.id()))
            to_log += "[BLUEZONE] Match already blacklisted: {}\n".format(match.key.id())
            new_blacklisted_match_keys.add(match.key.id())

    if not bluezone_matches:
        logging.info("[BLUEZONE] No match selected")
        to_log += "[BLUEZONE] No match selected\n"
    logging.info("[BLUEZONE] All selected matches: {}".format([m.key.id() for m in bluezone_matches]))
    to_log += "[BLUEZONE] All selected matches: {}\n".format([m.key.id() for m in bluezone_matches])

    # (3) Switch to hottest match
    fake_event = cls.build_fake_event()
    if bluezone_matches:
        bluezone_match = bluezone_matches[0]
        real_event = filter(lambda x: x.key_name == bluezone_match.event_key_name, live_events)[0]
        # Create Fake event for return
        fake_event.webcast_json = json.dumps([real_event.current_webcasts[0]])

        if bluezone_match.key_name != current_match_key:
            current_match_switch_time = now
            logging.info("[BLUEZONE] Switching to: {}".format(bluezone_match.key.id()))
            to_log += "[BLUEZONE] Switching to: {}\n".format(bluezone_match.key.id())
            OutgoingNotificationHelper.send_slack_alert(slack_url, "It is now {}. Switching BlueZone to {}, scheduled for {} and predicted to be at {}.".format(now, bluezone_match.key.id(), bluezone_match.time, bluezone_match.predicted_time))
            if not current_match or current_match.has_been_played:
                last_match = current_match

        # Only need to update if things changed
        if bluezone_match.key_name != current_match_key or new_blacklisted_match_keys != blacklisted_match_keys:
            FirebasePusher.update_event(fake_event)
            bluezone_config.contents = {
                'current_match': bluezone_match.key.id(),
                'last_match': last_match.key.id() if last_match else '',
                'current_match_predicted': bluezone_match.predicted_time.strftime(cls.TIME_PATTERN),
                'blacklisted_matches': list(new_blacklisted_match_keys),
                'blacklisted_events': list(blacklisted_event_keys),
                'current_match_switch_time': current_match_switch_time.strftime(cls.TIME_PATTERN),
            }
            bluezone_config.put()

    # Log to cloudstorage
    log_dir = '/tbatv-prod-hrd.appspot.com/tba-logging/'
    log_file = 'bluezone_{}.txt'.format(now.date())
    full_path = log_dir + log_file

    existing_contents = ''
    if full_path in set([f.filename for f in cloudstorage.listbucket(log_dir)]):
        with cloudstorage.open(full_path, 'r') as existing_file:
            existing_contents = existing_file.read()
    with cloudstorage.open(full_path, 'w') as new_file:
        new_file.write(existing_contents + to_log)

    bluezone_matches.insert(0, last_match)
    bluezone_matches = filter(lambda m: m is not None, bluezone_matches)
    FirebasePusher.replace_event_matches('bluezone', bluezone_matches)

    return fake_event
def flushEvent(self, event_key):
    flushed = self.flush()
    flushed.append(WebcastHandler().memcacheFlush(event_key))
    FirebasePusher.update_live_events()
    return flushed
        '''
        for event in unplayed_match_events:
            try:
                logging.info("Sending schedule updates for: {}".format(event.key_name))
                NotificationHelper.send_schedule_update(event)
            except Exception, exception:
                logging.error("Error sending schedule updates for: {}".format(event.key_name))

        '''
        Enqueue firebase push
        '''
        event_keys = set()
        for match in matches:
            event_keys.add(match.event.id())
            try:
                FirebasePusher.update_match(match)
            except Exception:
                logging.warning("Enqueuing Firebase push failed!")

        # Enqueue task to calculate matchstats
        for event_key in event_keys:
            taskqueue.add(
                url='/tasks/math/do/event_matchstats/' + event_key,
                method='GET')

    @classmethod
    def updateMerge(self, new_match, old_match, auto_union=True):
        """
        Given an "old" and a "new" Match object, replace the fields in the
        "old" match that are present in the "new" match, but keep fields from
        the "old" match that are null in the "new" match.
        """