def _render(self, year=None, explicit_year=False):
    events = event_query.EventListQuery(year).fetch()
    EventHelper.sort_events(events)
    week_events = EventHelper.groupByWeek(events)

    district_enums = set()
    for event in events:
        if event.event_district_enum is not None and event.event_district_enum != DistrictType.NO_DISTRICT:
            district_enums.add(event.event_district_enum)

    districts = []  # a list of tuples of (district abbrev, district name)
    for district_enum in district_enums:
        districts.append((DistrictType.type_abbrevs[district_enum],
                          DistrictType.type_names[district_enum]))
    districts = sorted(districts, key=lambda d: d[1])

    self.template_values.update({
        "events": events,
        "explicit_year": explicit_year,
        "selected_year": year,
        "valid_years": self.VALID_YEARS,
        "week_events": week_events,
        "districts": districts,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/event_list.html')
    return template.render(path, self.template_values)
def _process_request(self, request, event_key):
    event = Event.get_by_id(event_key)

    awards = []
    for award in JSONAwardsParser.parse(request.body, event_key):
        awards.append(Award(
            id=Award.render_key_name(event.key_name, award['award_type_enum']),
            name_str=award['name_str'],
            award_type_enum=award['award_type_enum'],
            year=event.year,
            event=event.key,
            event_type_enum=event.event_type_enum,
            team_list=[ndb.Key(Team, team_key) for team_key in award['team_key_list']],
            recipient_json_list=award['recipient_json_list']
        ))

    # it's easier to clear all awards and add new ones than try to find the difference
    old_award_keys = Award.query(Award.event == event.key).fetch(None, keys_only=True)
    AwardManipulator.delete_keys(old_award_keys)

    if event.remap_teams:
        EventHelper.remapteams_awards(awards, event.remap_teams)

    AwardManipulator.createOrUpdate(awards)

    self.response.out.write(json.dumps({'Success': "Awards successfully updated"}))
def get(self, event_key):
    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)

    if not event.remap_teams:
        return

    event.prepAwardsMatchesTeams()

    # Remap matches
    EventHelper.remapteams_matches(event.matches, event.remap_teams)
    MatchManipulator.createOrUpdate(event.matches)

    # Remap alliance selections
    if event.alliance_selections:
        EventHelper.remapteams_alliances(event.alliance_selections, event.remap_teams)

    # Remap rankings
    if event.rankings:
        EventHelper.remapteams_rankings(event.rankings, event.remap_teams)
    if event.details and event.details.rankings2:
        EventHelper.remapteams_rankings2(event.details.rankings2, event.remap_teams)
    EventDetailsManipulator.createOrUpdate(event.details)

    # Remap awards
    EventHelper.remapteams_awards(event.awards, event.remap_teams)
    AwardManipulator.createOrUpdate(event.awards, auto_union=False)
def post(self, event_key):
    self._require_admin()

    # Note, we don't actually use event_key.

    start_date = None
    if self.request.get("start_date"):
        start_date = datetime.strptime(self.request.get("start_date"), "%Y-%m-%d")

    end_date = None
    if self.request.get("end_date"):
        end_date = datetime.strptime(self.request.get("end_date"), "%Y-%m-%d")

    event = Event(
        id=str(self.request.get("year")) + str.lower(str(self.request.get("event_short"))),
        end_date=end_date,
        event_short=self.request.get("event_short"),
        event_type_enum=EventHelper.parseEventType(self.request.get("event_type_str")),
        event_district_enum=EventHelper.parseDistrictName(self.request.get("event_district_str")),
        location=self.request.get("location"),
        name=self.request.get("name"),
        short_name=self.request.get("short_name"),
        start_date=start_date,
        website=self.request.get("website"),
        year=int(self.request.get("year")),
        official={"true": True, "false": False}.get(self.request.get("official").lower()),
        facebook_eid=self.request.get("facebook_eid"),
        webcast_json=self.request.get("webcast_json"),
        rankings_json=self.request.get("rankings_json"),
    )
    event = EventManipulator.createOrUpdate(event)

    MemcacheWebcastFlusher.flushEvent(event.key_name)

    self.redirect("/admin/event/" + event.key_name)
def parse(self, response):
    events = []
    districts = {}
    for event in response['Events']:
        code = event['code'].lower()
        event_type = EventType.PRESEASON if code == 'week0' else self.EVENT_TYPES.get(event['type'].lower(), None)
        if event_type is None:
            logging.warn("Event type '{}' not recognized!".format(event['type']))
            continue

        name = event['name']
        short_name = EventHelper.getShortName(name)
        district_enum = EventHelper.parseDistrictName(event['districtCode'].lower()) if event['districtCode'] else DistrictType.NO_DISTRICT
        district_key = District.renderKeyName(self.season, event['districtCode'].lower()) if event['districtCode'] else None
        venue = event['venue']
        city = event['city']
        state_prov = event['stateprov']
        country = event['country']
        start = datetime.datetime.strptime(event['dateStart'], self.DATE_FORMAT_STR)
        end = datetime.datetime.strptime(event['dateEnd'], self.DATE_FORMAT_STR)
        website = event.get('website')
        # TODO read timezone from API

        # Special cases for champs
        if code in self.EVENT_CODE_EXCEPTIONS:
            code, short_name = self.EVENT_CODE_EXCEPTIONS[code]
            if code in self.EINSTEIN_CODES:
                name = '{} Field'.format(short_name)
                start = end.replace(hour=0, minute=0, second=0, microsecond=0)  # Set to beginning of last day
            else:  # Divisions
                name = '{} Division'.format(short_name)

        events.append(Event(
            id="{}{}".format(self.season, code),
            name=name,
            short_name=short_name,
            event_short=code,
            event_type_enum=event_type,
            official=True,
            start_date=start,
            end_date=end,
            venue=venue,
            city=city,
            state_prov=state_prov,
            country=country,
            venue_address=None,  # Even though FRC API provides address, ElasticSearch is more detailed
            year=self.season,
            event_district_enum=district_enum,
            district_key=ndb.Key(District, district_key) if district_key else None,
            website=website,
        ))

        # Build District Model
        if district_key and district_key not in districts:
            districts[district_key] = District(
                id=district_key,
                year=self.season,
                abbreviation=event['districtCode'].lower(),
            )

    return events, list(districts.values())
def get(self, event_key=None):
    self._require_login()
    self._require_registration()

    if event_key is None:
        events = EventHelper.getEventsWithinADay()
        EventHelper.sort_events(events)
        self.template_values['events'] = events
        self.response.out.write(jinja2_engine.render('mytba_add_hot_matches_base.html', self.template_values))
        return

    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)

    subscriptions_future = Subscription.query(
        Subscription.model_type == ModelType.MATCH,
        Subscription.notification_types == NotificationType.UPCOMING_MATCH,
        ancestor=self.user_bundle.account.key).fetch_async(projection=[Subscription.model_key])

    matches = []
    if event.matchstats and 'match_predictions' in event.matchstats:
        match_predictions = event.matchstats['match_predictions']
        max_hotness = 0
        min_hotness = float('inf')
        for match in event.matches:
            if not match.has_been_played and match.key.id() in match_predictions:
                prediction = match_predictions[match.key.id()]
                red_score = prediction['red']['score']
                blue_score = prediction['blue']['score']
                if red_score > blue_score:
                    winner_score = red_score
                    loser_score = blue_score
                else:
                    winner_score = blue_score
                    loser_score = red_score

                hotness = winner_score + 2.0 * loser_score  # Favor close high scoring matches
                max_hotness = max(max_hotness, hotness)
                min_hotness = min(min_hotness, hotness)
                match.hotness = hotness
                matches.append(match)

    existing_subscriptions = set()
    for sub in subscriptions_future.get_result():
        existing_subscriptions.add(sub.model_key)

    hot_matches = []
    for match in matches:
        match.hotness = 100 * (match.hotness - min_hotness) / (max_hotness - min_hotness)
        match.already_subscribed = match.key.id() in existing_subscriptions
        hot_matches.append(match)
    hot_matches = sorted(hot_matches, key=lambda match: -match.hotness)
    matches_dict = {'qm': hot_matches[:25]}

    self.template_values['event'] = event
    self.template_values['matches'] = matches_dict

    self.response.out.write(jinja2_engine.render('mytba_add_hot_matches.html', self.template_values))
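# --- Illustrative sketch (not part of the handler above) ---
# A minimal, standalone example of the "hotness" scaling used by the handler:
# raw hotness = winner_score + 2.0 * loser_score (favors close, high-scoring
# matches), then linearly rescaled to 0-100 across the event. The scores and
# helper name below are made up for illustration only.
def _scale_hotness_example():
    predictions = {'m1': (80, 75), 'm2': (120, 40), 'm3': (95, 90)}  # (winner, loser) scores
    raw = {k: w + 2.0 * l for k, (w, l) in predictions.items()}
    lo, hi = min(raw.values()), max(raw.values())
    span = (hi - lo) or 1.0  # guard against a zero range; the handler above does not do this explicitly
    return {k: 100 * (v - lo) / span for k, v in raw.items()}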
def parse(self, html): """ Parse an event's details page from USFIRST. """ # page_titles look like this: # <YEAR> <EVENT_NAME> (<EVENT_TYPE>) event_type_re = r'\((.+)\)' # locality_regions look like this: # <locality>, <region> <random string can have spaces> event_locality_region_re = r'(.*?), ([^ ]*)' result = dict() soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES) page_title = soup.find('h1', {'id': 'thepagetitle'}).text result['name'] = unicode(re.sub(r'\([^)]*\)', '', page_title[4:]).strip()) result['short_name'] = EventHelper.getShortName(result['name']) result['event_type_enum'] = EventHelper.parseEventType(unicode(re.search(event_type_re, page_title).group(1).strip())) try: event_dates = soup.find('div', {'class': 'event-dates'}).text result['start_date'], result['end_date'] = self._parseEventDates(event_dates) result['year'] = int(event_dates[-4:]) except Exception, detail: logging.error('Date Parse Failed: ' + str(detail))
def parse(self, response): events = [] for event in response["hits"]["hits"]: first_eid = event["_id"] event = event["_source"] event_type = EventHelper.parseEventType(event["event_subtype"]) if event_type in self.TYPES_TO_SKIP: continue code = event["event_code"].lower() key = "{}{}".format(self.season, code) name = event["event_name"] short_name = EventHelper.getShortName(name) if event_type in EventType.DISTRICT_EVENT_TYPES: district_enum = EventHelper.getDistrictFromEventName(name) else: district_enum = DistrictType.NO_DISTRICT city = event.get("event_city", None) state_prov = event.get("event_stateprov", None) country = event.get("event_country", None) start = datetime.datetime.strptime(event["date_start"], self.DATE_FORMAT_STR) end = datetime.datetime.strptime(event["date_end"], self.DATE_FORMAT_STR) + datetime.timedelta( hours=23, minutes=59, seconds=59 ) venue_address = event["event_venue"] if "event_address1" in event and event["event_address1"]: venue_address += "\n" + event["event_address1"] if "event_address2" in event and event["event_address2"]: venue_address += "\n" + event["event_address2"] venue_address += "\n{}, {} {}\n{}".format( event["event_city"], event["event_stateprov"], event["event_postalcode"], event["event_country"] ) raw_website = event.get("event_web_url", None) website = urlparse.urlparse(raw_website, "http").geturl() if raw_website else None events.append( Event( id=key, name=name, short_name=short_name, event_short=code, event_type_enum=event_type, official=True, start_date=start, end_date=end, venue=event["event_venue"], city=city, state_prov=state_prov, country=country, venue_address=venue_address, year=self.season, event_district_enum=district_enum, first_eid=first_eid, website=website, ) ) return events
def parse(self, response):
    events = []
    for event in response['hits']['hits']:
        first_eid = event['_id']
        event = event['_source']

        event_type = EventHelper.parseEventType(event['event_subtype'])
        if event_type in self.TYPES_TO_SKIP:
            continue

        code = event['event_code'].lower()
        key = "{}{}".format(self.season, code)
        name = event['event_name']
        short_name = EventHelper.getShortName(name)

        if event_type in EventType.DISTRICT_EVENT_TYPES:
            district_enum = EventHelper.getDistrictFromEventName(name)
        else:
            district_enum = DistrictType.NO_DISTRICT

        city = event.get('event_city', None)
        state_prov = event.get('event_stateprov', None)
        country = event.get('event_country', None)
        postalcode = event.get('event_postalcode', None)

        start = datetime.datetime.strptime(event['date_start'], self.DATE_FORMAT_STR)
        end = datetime.datetime.strptime(event['date_end'], self.DATE_FORMAT_STR) + datetime.timedelta(hours=23, minutes=59, seconds=59)

        venue_address = event['event_venue']
        if 'event_address1' in event and event['event_address1']:
            venue_address += '\n' + event['event_address1']
        if 'event_address2' in event and event['event_address2']:
            venue_address += '\n' + event['event_address2']
        venue_address += '\n{}, {} {}\n{}'.format(event['event_city'], event['event_stateprov'], event['event_postalcode'], event['event_country'])

        raw_website = event.get('event_web_url', None)
        website = urlparse.urlparse(raw_website, 'http').geturl() if raw_website else None

        events.append(Event(
            id=key,
            name=name,
            short_name=short_name,
            event_short=code,
            event_type_enum=event_type,
            official=True,
            start_date=start,
            end_date=end,
            venue=event['event_venue'],
            city=city,
            state_prov=state_prov,
            country=country,
            postalcode=postalcode,
            venue_address=venue_address,
            year=self.season,
            event_district_enum=district_enum,
            first_eid=first_eid,
            website=website
        ))

    return events
def parse(self, response): events = [] for event in response["Events"]: code = event["code"].lower() event_type = self.EVENT_TYPES.get(event["type"].lower(), None) if event_type is None: logging.warn("Event type '{}' not recognized!".format(event["type"])) continue name = event["name"] short_name = EventHelper.getShortName(name) district_enum = ( EventHelper.parseDistrictName(event["districtCode"].lower()) if event["districtCode"] else DistrictType.NO_DISTRICT ) venue = event["venue"] city = event["city"] state_prov = event["stateprov"] country = event["country"] start = datetime.datetime.strptime(event["dateStart"], self.DATE_FORMAT_STR) end = datetime.datetime.strptime(event["dateEnd"], self.DATE_FORMAT_STR) # TODO read timezone from API # Special cases for champs if code in self.EVENT_CODE_EXCEPTIONS: code, short_name = self.EVENT_CODE_EXCEPTIONS[code] if code == "cmp": # Einstein name = "{} Field".format(short_name) start = end.replace(hour=0, minute=0, second=0, microsecond=0) # Set to beginning of last day else: # Divisions name = "{} Division".format(short_name) events.append( Event( id="{}{}".format(self.season, code), name=name, short_name=short_name, event_short=code, event_type_enum=event_type, official=True, start_date=start, end_date=end, venue=venue, city=city, state_prov=state_prov, country=country, venue_address=None, # FIRST API doesn't provide detailed venue address year=self.season, event_district_enum=district_enum, ) ) return events
def _process_request(self, request, event_key):
    alliance_selections = JSONAllianceSelectionsParser.parse(request.body)

    event_details = EventDetails(
        id=event_key,
        alliance_selections=alliance_selections
    )

    if self.event.remap_teams:
        EventHelper.remapteams_alliances(event_details.alliance_selections, self.event.remap_teams)

    EventDetailsManipulator.createOrUpdate(event_details)

    self.response.out.write(json.dumps({'Success': "Alliance selections successfully updated"}))
def _render(self, district_abbrev, year=None, explicit_year=False):
    district_type = DistrictType.abbrevs[district_abbrev]

    event_keys = Event.query(Event.year == year, Event.event_district_enum == district_type).fetch(None, keys_only=True)
    if not event_keys:
        self.abort(404)

    # needed for valid_years
    all_cmp_event_keys_future = Event.query(Event.event_district_enum == district_type, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)

    # needed for valid_districts
    district_cmp_keys_future = Event.query(Event.year == year, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)  # to compute valid_districts

    event_futures = ndb.get_multi_async(event_keys)
    event_team_keys_future = EventTeam.query(EventTeam.event.IN(event_keys)).fetch_async(None, keys_only=True)

    if year == 2014:  # TODO: only 2014 has accurate rankings calculations
        team_futures = ndb.get_multi_async(set([ndb.Key(Team, et_key.id().split('_')[1]) for et_key in event_team_keys_future.get_result()]))

    events = [event_future.get_result() for event_future in event_futures]
    EventHelper.sort_events(events)

    district_cmp_futures = ndb.get_multi_async(district_cmp_keys_future.get_result())

    if year == 2014:  # TODO: only 2014 has accurate rankings calculations
        team_totals = DistrictHelper.calculate_rankings(events, team_futures, year)
    else:
        team_totals = None

    valid_districts = set()
    for district_cmp_future in district_cmp_futures:
        district_cmp = district_cmp_future.get_result()
        cmp_dis_type = district_cmp.event_district_enum
        if cmp_dis_type is None:
            logging.warning("District event {} has unknown district type!".format(district_cmp.key.id()))
        else:
            valid_districts.add((DistrictType.type_names[cmp_dis_type], DistrictType.type_abbrevs[cmp_dis_type]))
    valid_districts = sorted(valid_districts, key=lambda (name, _): name)

    self.template_values.update({
        'explicit_year': explicit_year,
        'year': year,
        'valid_years': sorted(set([int(event_key.id()[:4]) for event_key in all_cmp_event_keys_future.get_result()])),
        'valid_districts': valid_districts,
        'district_name': DistrictType.type_names[district_type],
        'district_abbrev': district_abbrev,
        'events': events,
        'team_totals': team_totals,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/district_details.html')
    return template.render(path, self.template_values)
def parse(self, html): """ Parse an event's details page from USFIRST. """ # locality_regions look like this: # <locality>, <region> <random string can have spaces> event_locality_region_re = r"(.*?), ([^ ]*)" result = dict() soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES) for tr in soup.findAll("tr"): tds = tr.findAll("td") if len(tds) > 1: field = str(tds[0].string) if field == "Event": result["name"] = unicode("".join(tds[1].findAll(text=True))).strip() result["short_name"] = EventHelper.getShortName(result["name"]) if field == "Event Subtype": result["event_type_enum"] = EventHelper.parseEventType(unicode(tds[1].string)) if field == "When": try: event_dates = str(tds[1].string).strip() result["start_date"], result["end_date"] = self._parseEventDates(event_dates) result["year"] = int(event_dates[-4:]) except Exception, detail: logging.error("Date Parse Failed: " + str(detail)) if field == "Where": address_lines_stripped = [ re.sub("\s+", " ", line.replace(u"\xa0", " ")).strip() for line in tds[1].findAll(text=True) ] result["venue_address"] = unicode("\r\n".join(address_lines_stripped)).encode("ascii", "ignore") match = re.match(event_locality_region_re, address_lines_stripped[-2]) locality, region = match.group(1), match.group(2) country = address_lines_stripped[-1] result["location"] = "%s, %s, %s" % (locality, region, country) if field == "Event Info": result["website"] = unicode(tds[1].a["href"]) if field == "Match Results": # http://www2.usfirst.org/2010comp/Events/SDC/matchresults.html m = re.match( r"http://www2\.usfirst\.org/%scomp/Events/([a-zA-Z0-9]*)/matchresults\.html" % result["year"], tds[1].a["href"], ) if m is None: # Some 2013 events are beautiful-souping tds[1].a["href"] to "http://www2.usfirst.org/2013comp/Events/FLBR" inexplicbly m = re.match( r"http://www2\.usfirst\.org/%scomp/Events/([a-zA-Z0-9]*)" % result["year"], tds[1].a["href"] ) result["event_short"] = m.group(1).lower()
def get(self):
    self._require_login()
    self._require_registration()

    user = self.user_bundle.account.key
    now = datetime.datetime.now()

    team_favorites_future = Favorite.query(Favorite.model_type == ModelType.TEAM, ancestor=user).fetch_async()

    live_events = EventHelper.getEventsWithinADay()
    favorite_team_keys = map(lambda f: ndb.Key(Team, f.model_key), team_favorites_future.get_result())
    favorite_teams_future = ndb.get_multi_async(favorite_team_keys)

    live_eventteams_futures = []
    for event in live_events:
        live_eventteams_futures.append(EventTeamsQuery(event.key_name).fetch_async())

    favorite_teams = [team_future.get_result() for team_future in favorite_teams_future]

    favorite_teams_events_futures = []
    for team in favorite_teams:
        favorite_teams_events_futures.append(TeamYearEventsQuery(team.key_name, now.year).fetch_async())

    live_events_with_teams = EventTeamStatusHelper.buildEventTeamStatus(live_events, live_eventteams_futures, favorite_teams)

    future_events_by_event = {}
    for team, events_future in zip(favorite_teams, favorite_teams_events_futures):
        events = events_future.get_result()
        if not events:
            continue
        EventHelper.sort_events(events)
        next_event = next((e for e in events if e.start_date > now and not e.within_a_day), None)
        if next_event:
            if next_event.key_name not in future_events_by_event:
                future_events_by_event[next_event.key_name] = (next_event, [])
            future_events_by_event[next_event.key_name][1].append(team)

    future_events_with_teams = []
    for event_key, data in future_events_by_event.iteritems():
        future_events_with_teams.append((data[0], TeamHelper.sortTeams(data[1])))
    future_events_with_teams.sort(key=lambda x: x[0].name)
    future_events_with_teams.sort(key=lambda x: EventHelper.distantFutureIfNoStartDate(x[0]))
    future_events_with_teams.sort(key=lambda x: EventHelper.distantFutureIfNoEndDate(x[0]))

    self.template_values.update({
        'live_events_with_teams': live_events_with_teams,
        'future_events_with_teams': future_events_with_teams,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/mytba_live.html')
    self.response.out.write(template.render(path, self.template_values))
def _render(self, district_abbrev, year=None):
    self._set_district(district_abbrev)

    if self.year < 2009:
        return json.dumps([], ensure_ascii=True)

    event_keys = Event.query(Event.year == self.year, Event.event_district_enum == self.district).fetch(
        None, keys_only=True
    )
    if not event_keys:
        return json.dumps([], ensure_ascii=True)

    events = ndb.get_multi(event_keys)

    event_futures = ndb.get_multi_async(event_keys)
    event_team_keys_future = EventTeam.query(EventTeam.event.IN(event_keys)).fetch_async(None, keys_only=True)
    team_futures = ndb.get_multi_async(
        set([ndb.Key(Team, et_key.id().split("_")[1]) for et_key in event_team_keys_future.get_result()])
    )

    events = [event_future.get_result() for event_future in event_futures]
    EventHelper.sort_events(events)

    team_totals = DistrictHelper.calculate_rankings(events, team_futures, self.year)

    rankings = []
    current_rank = 1
    for key, points in team_totals:
        point_detail = {}
        point_detail["rank"] = current_rank
        point_detail["team_key"] = key
        point_detail["event_points"] = {}
        for event in points["event_points"]:
            event_key = event[0].key_name
            point_detail["event_points"][event_key] = event[1]
            event_details = Event.get_by_id(event_key)
            point_detail["event_points"][event[0].key_name]["district_cmp"] = (
                True if event_details.event_type_enum == EventType.DISTRICT_CMP else False
            )

        if "rookie_bonus" in points:
            point_detail["rookie_bonus"] = points["rookie_bonus"]
        else:
            point_detail["rookie_bonus"] = 0
        point_detail["point_total"] = points["point_total"]
        rankings.append(point_detail)
        current_rank += 1

    return json.dumps(rankings)
def post(self, event_key): self._require_admin() # Note, we don't actually use event_key. start_date = None if self.request.get("start_date"): start_date = datetime.strptime(self.request.get("start_date"), "%Y-%m-%d") end_date = None if self.request.get("end_date"): end_date = datetime.strptime(self.request.get("end_date"), "%Y-%m-%d") event = Event( id=str(self.request.get("year")) + str.lower(str(self.request.get("event_short"))), end_date=end_date, event_short=self.request.get("event_short"), event_type_enum=EventHelper.parseEventType(self.request.get("event_type_str")), event_district_enum=EventHelper.parseDistrictName(self.request.get("event_district_str")), venue=self.request.get("venue"), venue_address=self.request.get("venue_address"), city=self.request.get("city"), state_prov=self.request.get("state_prov"), postalcode=self.request.get("postalcode"), country=self.request.get("country"), name=self.request.get("name"), short_name=self.request.get("short_name"), start_date=start_date, website=self.request.get("website"), year=int(self.request.get("year")), official={"true": True, "false": False}.get(self.request.get("official").lower()), facebook_eid=self.request.get("facebook_eid"), custom_hashtag=self.request.get("custom_hashtag"), webcast_json=self.request.get("webcast_json"), ) event = EventManipulator.createOrUpdate(event) if self.request.get("alliance_selections_json") or self.request.get("rankings_json"): event_details = EventDetails( id=event_key, alliance_selections=json.loads(self.request.get("alliance_selections_json")), rankings=json.loads(self.request.get("rankings_json")) ) EventDetailsManipulator.createOrUpdate(event_details) MemcacheWebcastFlusher.flushEvent(event.key_name) self.redirect("/admin/event/" + event.key_name)
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    popular_teams_events = TeamHelper.getPopularTeamsEvents(week_events)

    # Only show special webcasts that aren't also hosting an event
    special_webcasts = []
    for special_webcast in FirebasePusher.get_special_webcasts():
        add = True
        for event in week_events:
            if event.now and event.webcast:
                for event_webcast in event.webcast:
                    if (special_webcast.get('type', '') == event_webcast.get('type', '') and
                            special_webcast.get('channel', '') == event_webcast.get('channel', '') and
                            special_webcast.get('file', '') == event_webcast.get('file', '')):
                        add = False
                        break
            if not add:
                break
        if add:
            special_webcasts.append(special_webcast)

    self.template_values.update({
        "events": week_events,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
        "popular_teams_events": popular_teams_events,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/index_competitionseason.html')
    return template.render(path, self.template_values)
def postUpdateHook(cls, events, updated_attr_list, is_new_list): """ To run after models have been updated """ for (event, updated_attrs) in zip(events, updated_attr_list): try: if event.within_a_day and "alliance_selections_json" in updated_attrs: # Send updated alliances notification logging.info("Sending alliance notifications for {}".format(event.key_name)) NotificationHelper.send_alliance_update(event) except Exception: logging.error("Error sending alliance update notification for {}".format(event.key_name)) logging.error(traceback.format_exc()) try: event.timezone_id = EventHelper.get_timezone_id(event.location, event.key.id()) cls.createOrUpdate(event, run_post_update_hook=False) except Exception: logging.warning("Timezone update for event {} failed!".format(event.key_name)) # Enqueue task to calculate district points for event in events: taskqueue.add( url='/tasks/math/do/district_points_calc/{}'.format(event.key.id()), method='GET')
def verbose_name(self):
    from helpers.event_helper import EventHelper
    if self.comp_level == "qm" or self.comp_level == "f" or EventHelper.is_2015_playoff(self.event_key_name):
        return "%s %s" % (self.COMP_LEVELS_VERBOSE[self.comp_level], self.match_number)
    else:
        return "%s %s Match %s" % (self.COMP_LEVELS_VERBOSE[self.comp_level], self.set_number, self.match_number)
def _render(self, *args, **kw): special_webcasts_future = Sitevar.get_by_id_async('gameday.special_webcasts') special_webcasts_temp = special_webcasts_future.get_result() if special_webcasts_temp: special_webcasts_temp = special_webcasts_temp.contents.get("webcasts", []) else: special_webcasts_temp = [] special_webcasts = [] for webcast in special_webcasts_temp: toAppend = {} for key, value in webcast.items(): toAppend[str(key)] = str(value) special_webcasts.append(toAppend) ongoing_events = [] ongoing_events_w_webcasts = [] week_events = EventHelper.getWeekEvents() for event in week_events: if event.now: ongoing_events.append(ModelToDict.eventConverter(event)) if event.webcast: ongoing_events_w_webcasts.append(ModelToDict.eventConverter(event)) webcasts_json = { 'special_webcasts': special_webcasts, 'ongoing_events': ongoing_events, 'ongoing_events_w_webcasts': ongoing_events_w_webcasts } self.template_values.update({ 'webcasts_json': json.dumps(webcasts_json) }) path = os.path.join(os.path.dirname(__file__), '../templates/gameday2.html') return template.render(path, self.template_values)
def parse(self, html): """ Parse the list of events from USFIRST. This provides us with basic information about events and is how we first discover them. """ events = list() soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES) for tr in soup.findAll('tr'): # Events are in table rows event = dict() try: tds = tr.findAll('td') event["event_type_enum"] = EventHelper.parseEventType(unicode(tds[0].string)) url_get_params = urlparse.parse_qs(urlparse.urlparse(tds[1].a["href"]).query) event["first_eid"] = url_get_params["eid"][0] event["name"] = ''.join(tds[1].a.findAll(text=True)).strip() # <em>s in event names fix #event.venue = unicode(tds[2].string) #event.location = unicode(tds[3].string) # try: # event_dates = str(tds[4].string).strip() # event.start_date, event.stop_date = self.parseEventDates(event_dates) # event.year = int(event_dates[-4:]) # except Exception, detail: # logging.error('Date Parse Failed: ' + str(detail)) if event.get("event_type_enum", None) in self.REGIONAL_EVENT_TYPES: events.append(event) except Exception, detail: logging.info('Event parsing failed: ' + str(detail))
def _update_live_events_helper(cls):
    week_events = EventHelper.getWeekEvents()
    events_by_key = {}
    live_events = []
    for event in week_events:
        if event.now:
            event._webcast = event.current_webcasts  # Only show current webcasts
            for webcast in event.webcast:
                WebcastOnlineHelper.add_online_status_async(webcast)
            events_by_key[event.key.id()] = event
        if event.within_a_day:
            live_events.append(event)

    # To get Champ events to show up before they are actually going on
    forced_live_events = Sitevar.get_or_insert(
        'forced_live_events',
        values_json=json.dumps([]))
    for event in ndb.get_multi([ndb.Key('Event', ekey) for ekey in forced_live_events.contents]):
        if event.webcast:
            for webcast in event.webcast:
                WebcastOnlineHelper.add_online_status_async(webcast)
        events_by_key[event.key.id()] = event

    # # Add in the Fake TBA BlueZone event (watch for circular imports)
    # from helpers.bluezone_helper import BlueZoneHelper
    # bluezone_event = BlueZoneHelper.update_bluezone(live_events)
    # if bluezone_event:
    #     for webcast in bluezone_event.webcast:
    #         WebcastOnlineHelper.add_online_status_async(webcast)
    #     events_by_key[bluezone_event.key_name] = bluezone_event

    return events_by_key
def get(self):
    live_events = EventHelper.getEventsWithinADay()
    try:
        BlueZoneHelper.update_bluezone(live_events)
    except Exception, e:
        logging.error("BlueZone update failed")
        logging.exception(e)
def get(self):
    live_events = EventHelper.getEventsWithinADay()
    for event in live_events:
        taskqueue.add(url='/tasks/math/do/predict_match_times/{}'.format(event.key_name), method='GET')
    # taskqueue.add(url='/tasks/do/bluezone_update', method='GET')
    self.response.out.write("Enqueued time prediction for {} events".format(len(live_events)))
def doMatchInsights(self, year): """ Calculate match insights for a given year. Returns a list of Insights. """ # Only fetch from DB once official_events = Event.query(Event.year == year).order(Event.start_date).fetch(1000) events_by_week = EventHelper.groupByWeek(official_events) week_event_matches = [] # Tuples of: (week, events) where events are tuples of (event, matches) for week, events in events_by_week.items(): if week == OFFSEASON_EVENTS_LABEL: continue week_events = [] for event in events: if not event.official: continue matches = event.matches week_events.append((event, matches)) week_event_matches.append((week, week_events)) insights = [] insights += self._calculateHighscoreMatchesByWeek(week_event_matches, year) insights += self._calculateHighscoreMatches(week_event_matches, year) insights += self._calculateMatchAveragesByWeek(week_event_matches, year) insights += self._calculateScoreDistribution(week_event_matches, year) insights += self._calculateNumMatches(week_event_matches, year) return insights
def _render(self, *args, **kw): week_events = EventHelper.getWeekEvents() self.template_values.update({"events": week_events}) path = os.path.join(os.path.dirname(__file__), "../templates/index_competitionseason.html") return template.render(path, self.template_values)
def get(self):
    self._require_registration()

    current_events = filter(lambda e: e.now, EventHelper.getEventsWithinADay())
    popular_teams_events = TeamHelper.getPopularTeamsEvents(current_events)

    popular_team_keys = set()
    for team, _ in popular_teams_events:
        popular_team_keys.add(team.key.id())

    for event in current_events:
        event.prep_details()
        event.prep_matches()

    finished_matches = []
    current_matches = []
    upcoming_matches = []
    ranks = {}
    alliances = {}
    for event in current_events:
        if not event.details:
            continue

        finished_matches += MatchHelper.recentMatches(event.matches, num=1)
        for i, match in enumerate(MatchHelper.upcomingMatches(event.matches, num=3)):
            if not match.time:
                continue

            if not event.details.predictions or match.key.id() not in event.details.predictions['match_predictions']['qual' if match.comp_level == 'qm' else 'playoff']:
                match.prediction = defaultdict(lambda: defaultdict(float))
                match.bluezone_score = 0
            else:
                match.prediction = event.details.predictions['match_predictions']['qual' if match.comp_level == 'qm' else 'playoff'][match.key.id()]
                match.bluezone_score = self.get_qual_bluezone_score(match.prediction) if match.comp_level == 'qm' else self.get_elim_bluezone_score(match.prediction)
            if i == 0:
                current_matches.append(match)
            else:
                upcoming_matches.append(match)

        if event.details.rankings2:
            for rank in event.details.rankings2:
                ranks[rank['team_key']] = rank['rank']

        if event.alliance_selections:
            for i, alliance in enumerate(event.alliance_selections):
                for pick in alliance['picks']:
                    alliances[pick] = i + 1

    finished_matches = sorted(finished_matches, key=lambda m: m.actual_time if m.actual_time else m.time)
    current_matches = sorted(current_matches, key=lambda m: m.predicted_time if m.predicted_time else m.time)
    upcoming_matches = sorted(upcoming_matches, key=lambda m: m.predicted_time if m.predicted_time else m.time)

    self.template_values.update({
        'finished_matches': finished_matches,
        'current_matches': current_matches,
        'upcoming_matches': upcoming_matches,
        'ranks': ranks,
        'alliances': alliances,
        'popular_team_keys': popular_team_keys,
    })

    self.response.out.write(jinja2_engine.render('match_suggestion.html', self.template_values))
def test_event_parse_district_name(self): """ A bunch of tests from various years """ self.assertEqual(EventHelper.parseDistrictName('FIRST in Michigan'), DistrictType.MICHIGAN) self.assertEqual(EventHelper.parseDistrictName('Mid-Atlantic Robotics'), DistrictType.MID_ATLANTIC) self.assertEqual(EventHelper.parseDistrictName('New England'), DistrictType.NEW_ENGLAND) self.assertEqual(EventHelper.parseDistrictName('Pacific Northwest'), DistrictType.PACIFIC_NORTHWEST) self.assertEqual(EventHelper.parseDistrictName('IndianaFIRST'), DistrictType.INDIANA) self.assertEqual(EventHelper.parseDistrictName('Not a valid district'), DistrictType.NO_DISTRICT) self.assertEqual(EventHelper.parseDistrictName('California'), DistrictType.NO_DISTRICT) self.assertEqual(EventHelper.parseDistrictName(None), DistrictType.NO_DISTRICT) self.assertEqual(EventHelper.parseDistrictName(''), DistrictType.NO_DISTRICT)
def _render(self, *args, **kw): week_events = EventHelper.getWeekEvents() template_values = { "events": week_events, } path = os.path.join(os.path.dirname(__file__), '../templates/index_competitionseason.html') return template.render(path, template_values)
def get(self, event_key):
    event = Event.get_by_id(event_key)
    event.short_name = EventHelper.getShortName(event.name)
    EventManipulator.createOrUpdate(event)

    template_values = {'event': event}

    path = os.path.join(os.path.dirname(__file__), '../templates/math/event_short_name_calc_do.html')
    self.response.out.write(template.render(path, template_values))
def _process_request(self, request, event_key):
    awards = []
    for award in JSONAwardsParser.parse(request.body, event_key):
        awards.append(Award(
            id=Award.render_key_name(self.event.key_name, award['award_type_enum']),
            name_str=award['name_str'],
            award_type_enum=award['award_type_enum'],
            year=self.event.year,
            event=self.event.key,
            event_type_enum=self.event.event_type_enum,
            team_list=[ndb.Key(Team, team_key) for team_key in award['team_key_list']],
            recipient_json_list=award['recipient_json_list']
        ))

    # it's easier to clear all awards and add new ones than try to find the difference
    old_award_keys = Award.query(Award.event == self.event.key).fetch(None, keys_only=True)
    AwardManipulator.delete_keys(old_award_keys)

    if self.event.remap_teams:
        EventHelper.remapteams_awards(awards, self.event.remap_teams)

    AwardManipulator.createOrUpdate(awards)

    self.response.out.write(json.dumps({'Success': "Awards successfully updated"}))
def _render(self, *args, **kw):
    special_webcasts_future = Sitevar.get_by_id_async('gameday.special_webcasts')
    special_webcasts_temp = special_webcasts_future.get_result()
    if special_webcasts_temp:
        special_webcasts_temp = special_webcasts_temp.contents.get("webcasts", [])
    else:
        special_webcasts_temp = []

    special_webcasts = []
    special_webcast_keys = set()
    for webcast in special_webcasts_temp:
        toAppend = {}
        for key, value in webcast.items():
            toAppend[str(key)] = str(value)
        special_webcasts.append(toAppend)
        special_webcast_keys.add(webcast['key_name'])

    ongoing_events = []
    ongoing_events_w_webcasts = []
    week_events = EventHelper.getWeekEvents()
    for event in week_events:
        if event.now and event.key.id() not in special_webcast_keys:
            ongoing_events.append(event)
            if event.webcast:
                valid = []
                for webcast in event.webcast:
                    if 'type' in webcast and 'channel' in webcast:
                        event_webcast = {'event': event}
                        valid.append(event_webcast)
                # Add webcast numbers if more than one for an event
                if len(valid) > 1:
                    count = 1
                    for event in valid:
                        event['count'] = count
                        count += 1
                ongoing_events_w_webcasts += valid

    self.template_values.update({
        'special_webcasts': special_webcasts,
        'ongoing_events': ongoing_events,
        'ongoing_events_w_webcasts': ongoing_events_w_webcasts
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/gameday.html')
    return template.render(path, self.template_values)
def _render(self, *args, **kw):
    endbuild_datetime_est = datetime.datetime(2018, 2, 20, 23, 59)
    endbuild_datetime_utc = pytz.utc.localize(
        endbuild_datetime_est + datetime.timedelta(hours=5))
    week_events = EventHelper.getWeekEvents()

    self.template_values.update({
        'endbuild_datetime_est': endbuild_datetime_est,
        'endbuild_datetime_utc': endbuild_datetime_utc,
        'events': week_events,
    })

    path = os.path.join(os.path.dirname(__file__), "../templates/index_buildseason.html")
    return template.render(path, self.template_values)
def parse(self, response): events = [] for event in response['Events']: code = event['code'].lower() key = "{}{}".format(self.season, code) name = event['name'] short_name = EventHelper.getShortName(name) event_type = EventHelper.parseEventType(event['type']) district_enum = EventHelper.parseDistrictName(event['districtCode'].lower()) if event['districtCode'] else DistrictType.NO_DISTRICT venue = event['venue'] location = "{}, {}, {}".format(event['city'], event['stateprov'], event['country']) start = datetime.datetime.strptime(event['dateStart'], self.DATE_FORMAT_STR) end = datetime.datetime.strptime(event['dateEnd'], self.DATE_FORMAT_STR) # TODO read timezone from API # Do not read in CMP divisions, we'll add those manually if event_type in EventType.CMP_EVENT_TYPES: continue events.append(Event( id=key, name=name, short_name=short_name, event_short=code, event_type_enum=event_type, official=True, start_date=start, end_date=end, venue=venue, location=location, venue_address="{}, {}".format(venue, location), year=self.season, event_district_enum=district_enum )) return events
def _render(self, *args, **kw):
    kickoff_datetime_utc = datetime.datetime.strptime(
        self.template_values['kickoff_datetime'], "%Y-%m-%dT%H:%M:%S"
    ) if 'kickoff_datetime' in self.template_values else None

    week_events = EventHelper.getWeekEvents()
    special_webcasts = FirebasePusher.get_special_webcasts()
    self.template_values.update({
        "events": week_events,
        'kickoff_datetime_utc': kickoff_datetime_utc,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/index_offseason.html')
    return template.render(path, self.template_values)
def _render(self, *args, **kw): week_events = EventHelper.getWeekEvents() year = datetime.datetime.now().year self.template_values.update({ "events": week_events, "year": year, }) insights = ndb.get_multi([ndb.Key(Insight, Insight.renderKeyName(year, insight_name)) for insight_name in Insight.INSIGHT_NAMES.values()]) for insight in insights: if insight: self.template_values[insight.name] = insight path = os.path.join(os.path.dirname(__file__), '../templates/index_insights.html') return template.render(path, self.template_values)
def winning_alliance(self):
    from helpers.event_helper import EventHelper
    if self._winning_alliance is None:
        if EventHelper.is_2015_playoff(self.event_key_name) and self.comp_level != 'f':
            return ''  # report all 2015 non finals matches as ties

        highest_score = 0
        for alliance in self.alliances:
            if int(self.alliances[alliance]["score"]) > highest_score:
                highest_score = int(self.alliances[alliance]["score"])
                self._winning_alliance = alliance
            elif int(self.alliances[alliance]["score"]) == highest_score:
                self._winning_alliance = ""
    return self._winning_alliance
def get(self, when): if when == "now": events = EventHelper.getEventsWithinADay() events = filter(lambda e: e.official, events) elif when == "last_day_only": events = EventHelper.getEventsWithinADay() events = filter(lambda e: e.official and e.ends_today, events) else: event_keys = Event.query(Event.official == True).filter(Event.year == int(when)).fetch(500, keys_only=True) events = ndb.get_multi(event_keys) for event in events: taskqueue.add( queue_name='datafeed', url='/tasks/get/fmsapi_event_alliances/' + event.key_name, method='GET') template_values = { 'events': events } if 'X-Appengine-Taskname' not in self.request.headers: # Only write out if not in taskqueue path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_event_alliances_enqueue.html') self.response.out.write(template.render(path, template_values))
class ApiTrustedEventMatchesUpdate(ApiTrustedBaseController):
    """
    Creates/updates matches
    """
    REQUIRED_AUTH_TYPES = {AuthType.EVENT_MATCHES}

    def _process_request(self, request, event_key):
        event = Event.get_by_id(event_key)
        year = int(event_key[:4])

        matches = []
        needs_time = []
        for match in JSONMatchesParser.parse(request.body, year):
            match = Match(
                id=Match.renderKeyName(
                    event.key.id(),
                    match.get("comp_level", None),
                    match.get("set_number", 0),
                    match.get("match_number", 0)),
                event=event.key,
                year=event.year,
                set_number=match.get("set_number", 0),
                match_number=match.get("match_number", 0),
                comp_level=match.get("comp_level", None),
                team_key_names=match.get("team_key_names", None),
                alliances_json=match.get("alliances_json", None),
                score_breakdown_json=match.get("score_breakdown_json", None),
                time_string=match.get("time_string", None),
                time=match.get("time", None),
            )

            if (not match.time or match.time == "") and match.time_string:
                # We can calculate the real time from the time string
                needs_time.append(match)
            matches.append(match)

        if needs_time:
            try:
                logging.debug("Calculating time!")
                MatchHelper.add_match_times(event, needs_time)
            except Exception, e:
                logging.error("Failed to calculate match times")

        if event.remap_teams:
            EventHelper.remapteams_matches(matches, event.remap_teams)

        MatchManipulator.createOrUpdate(matches)

        self.response.out.write(json.dumps({'Success': "Matches successfully updated"}))
def winning_alliance(self):
    from helpers.event_helper import EventHelper
    from helpers.match_helper import MatchHelper
    if self._winning_alliance is None:
        if EventHelper.is_2015_playoff(self.event_key_name) and self.comp_level != 'f':
            return ''  # report all 2015 non finals matches as ties

        red_score = int(self.alliances['red']['score'])
        blue_score = int(self.alliances['blue']['score'])
        if red_score > blue_score:
            self._winning_alliance = 'red'
        elif blue_score > red_score:
            self._winning_alliance = 'blue'
        else:  # tie
            self._winning_alliance = MatchHelper.tiebreak_winner(self)
    return self._winning_alliance
def _render(self, *args, **kw):
    endbuild_datetime_est = datetime.datetime.strptime(
        self.template_values['build_season_end'], "%Y-%m-%dT%H:%M:%S"
    ) if 'build_season_end' in self.template_values else SeasonHelper.stop_build_date()
    endbuild_datetime_utc = pytz.utc.localize(
        endbuild_datetime_est + datetime.timedelta(hours=5))
    week_events = EventHelper.getWeekEvents()

    self.template_values.update({
        'endbuild_datetime_est': endbuild_datetime_est,
        'endbuild_datetime_utc': endbuild_datetime_utc,
        'events': week_events,
    })

    path = os.path.join(os.path.dirname(__file__), "../templates/index_buildseason.html")
    return template.render(path, self.template_values)
def get(self, when): if when == "now": events = EventHelper.getEventsWithinADay() else: events = Event.query(Event.year == int(when)).fetch(500) for event in events: taskqueue.add(url='/tasks/math/do/event_matchstats/' + event.key_name, method='GET') template_values = {'event_count': len(events), 'year': when} path = os.path.join(os.path.dirname(__file__), '../templates/math/event_matchstats_enqueue.html') self.response.out.write(template.render(path, template_values))
def _render(self, *args, **kw): special_webcasts = FirebasePusher.get_special_webcasts() self.template_values.update({ "events": EventHelper.getWeekEvents(), 'kickoff_datetime_utc': SeasonHelper.kickoff_datetime_utc(), "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts), "special_webcasts": special_webcasts, }) return jinja2_engine.render('index/index_offseason.html', self.template_values)
def _render(self, *args, **kw): week_events = EventHelper.getWeekEvents() special_webcasts = FirebasePusher.get_special_webcasts() self.template_values.update({ "events": week_events, "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts), "special_webcasts": special_webcasts, }) path = os.path.join(os.path.dirname(__file__), '../templates/index_competitionseason.html') return template.render(path, self.template_values)
def _render(self, *args, **kw):
    events = EventHelper.getWeekEvents()

    webcasts = []
    for event in events:
        webcasts.append({
            "key": event.key_name,
            "name": event.name,
            "webcast": event.webcast,
        })

    template_values = {'webcasts': webcasts}

    path = os.path.join(os.path.dirname(__file__), '../templates/gameday2.html')
    return template.render(path, template_values)
def _render(self, *args, **kw): week_events = EventHelper.getWeekEvents() template_values = { "events": week_events, } insights = ndb.get_multi([ ndb.Key(Insight, Insight.renderKeyName(2013, insight_name)) for insight_name in Insight.INSIGHT_NAMES.values() ]) for insight in insights: if insight: template_values[insight.name] = insight path = os.path.join(os.path.dirname(__file__), '../templates/index_insights.html') return template.render(path, template_values)
def _render(self, *args, **kw):
    kickoff_datetime_est = datetime.datetime(2018, 1, 6, 10, 00)
    kickoff_datetime_utc = pytz.utc.localize(
        kickoff_datetime_est + datetime.timedelta(hours=5))

    is_kickoff = datetime.datetime.now() >= kickoff_datetime_est - datetime.timedelta(days=1)  # turn on 1 day before
    week_events = EventHelper.getWeekEvents()

    self.template_values.update({
        'events': week_events,
        'is_kickoff': is_kickoff,
        'kickoff_datetime_est': kickoff_datetime_est,
        'kickoff_datetime_utc': kickoff_datetime_utc,
    })

    path = os.path.join(os.path.dirname(__file__), "../templates/index_kickoff.html")
    return template.render(path, self.template_values)
def parse(self, html): """ Parse the list of events from USFIRST. This provides us with basic information about events and is how we first discover them. """ events = list() soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES) for tr in soup.findAll('tr'): # Events are in table rows event = dict() try: tds = tr.findAll('td') if tds[0].string is None: # this may happen if this is a district event, in which case we can also extract the district name event_type_str = unicode( tds[0].findAll(text=True)[2].string) # for future use: # district_name_str = unicode(tds[0].findAll('em')[0].string) else: event_type_str = unicode(tds[0].string) event["event_type_enum"] = EventHelper.parseEventType( event_type_str) url_get_params = urlparse.parse_qs( urlparse.urlparse(tds[1].a["href"]).query) event["first_eid"] = url_get_params["eid"][0] event["name"] = ''.join(tds[1].a.findAll( text=True)).strip() # <em>s in event names fix #event.venue = unicode(tds[2].string) #event.location = unicode(tds[3].string) # try: # event_dates = str(tds[4].string).strip() # event.start_date, event.stop_date = self.parseEventDates(event_dates) # event.year = int(event_dates[-4:]) # except Exception, detail: # logging.error('Date Parse Failed: ' + str(detail)) if event.get("event_type_enum", None) in EventType.NON_CMP_EVENT_TYPES: events.append(event) except Exception, detail: logging.info('Event parsing failed: ' + str(detail))
def _render(self, *args, **kw):
    effective_season_year = SeasonHelper.effective_season_year()
    special_webcasts = FirebasePusher.get_special_webcasts()
    self.template_values.update({
        'seasonstart_datetime_utc': SeasonHelper.first_event_datetime_utc(effective_season_year),
        'events': EventHelper.getWeekEvents(),
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })
    return jinja2_engine.render('index/index_buildseason.html', self.template_values)
def get(self, when): if when == "now": events = EventHelper.getEventsWithinADay() else: event_keys = Event.query(Event.official == True).filter(Event.year == int(when)).fetch(500, keys_only=True) events = ndb.get_multi(event_keys) for event in events: taskqueue.add( queue_name='usfirst', url='/tasks/get/usfirst_awards/%s' % (event.key_name), method='GET') template_values = { 'events': events, } path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_awards_enqueue.html') self.response.out.write(template.render(path, template_values))
def postUpdateHook(cls, events, updated_attr_list, is_new_list): """ To run after models have been updated """ for (event, updated_attrs) in zip(events, updated_attr_list): try: event.timezone_id = EventHelper.get_timezone_id( event.location, event.key.id()) cls.createOrUpdate(event, run_post_update_hook=False) except Exception: logging.warning("Timezone update for event {} failed!".format( event.key_name)) # Enqueue task to calculate district points for event in events: taskqueue.add(url='/tasks/math/do/district_points_calc/{}'.format( event.key.id()), method='GET')
def _render(self, *args, **kw):
    week_events = EventHelper.getWeekEvents()
    year = datetime.datetime.now().year
    special_webcasts = FirebasePusher.get_special_webcasts()

    self.template_values.update({
        "events": week_events,
        "year": year,
        "any_webcast_online": any(w.get('status') == 'online' for w in special_webcasts),
        "special_webcasts": special_webcasts,
    })

    insights = ndb.get_multi([ndb.Key(Insight, Insight.renderKeyName(year, insight_name)) for insight_name in Insight.INSIGHT_NAMES.values()])
    for insight in insights:
        if insight:
            self.template_values[insight.name] = insight

    path = os.path.join(os.path.dirname(__file__), '../templates/index_insights.html')
    return template.render(path, self.template_values)
def test_event_parse_district_name(self): """ A bunch of tests from various years """ self.assertEqual(EventHelper.parseDistrictName('FIRST in Michigan'), DistrictType.MICHIGAN) self.assertEqual( EventHelper.parseDistrictName('Mid-Atlantic Robotics'), DistrictType.MID_ATLANTIC) self.assertEqual(EventHelper.parseDistrictName('New England'), DistrictType.NEW_ENGLAND) self.assertEqual(EventHelper.parseDistrictName('Pacific Northwest'), DistrictType.PACIFIC_NORTHWEST) self.assertEqual(EventHelper.parseDistrictName('Not a valid district'), DistrictType.NO_DISTRICT) self.assertEqual(EventHelper.parseDistrictName('California'), DistrictType.NO_DISTRICT) self.assertEqual(EventHelper.parseDistrictName(None), DistrictType.NO_DISTRICT) self.assertEqual(EventHelper.parseDistrictName(''), DistrictType.NO_DISTRICT)
def postUpdateHook(cls, events, updated_attr_list, is_new_list): """ To run after models have been updated """ for (event, updated_attrs) in zip(events, updated_attr_list): try: if event.within_a_day and "alliance_selections_json" in updated_attrs: # Send updated alliances notification logging.info("Sending alliance notifications for {}".format(event.key_name)) NotificationHelper.send_alliance_update(event) except Exception: logging.error("Error sending alliance update notification for {}".format(event.key_name)) logging.error(traceback.format_exc()) try: event.timezone_id = EventHelper.get_timezone_id(event.location, event.key.id()) cls.createOrUpdate(event, run_post_update_hook=False) except Exception: logging.warning("Timezone update for event {} failed!".format(event.key_name))
def _render(self, year=None, explicit_year=False):
    event_keys = Event.query(Event.year == year).fetch(1000, keys_only=True)
    events = ndb.get_multi(event_keys)
    events.sort(key=EventHelper.distantFutureIfNoStartDate)
    week_events = EventHelper.groupByWeek(events)

    template_values = {
        "events": events,
        "explicit_year": explicit_year,
        "selected_year": year,
        "valid_years": self.VALID_YEARS,
        "week_events": week_events,
    }

    path = os.path.join(os.path.dirname(__file__), '../templates/event_list.html')
    return template.render(path, template_values)
def winning_alliance(self):
    from helpers.event_helper import EventHelper
    from helpers.match_helper import MatchHelper
    if self._winning_alliance is None:
        if EventHelper.is_2015_playoff(self.event_key_name) and self.comp_level != 'f':
            return ''  # report all 2015 non finals matches as ties

        red_score = int(self.alliances['red']['score'])
        blue_score = int(self.alliances['blue']['score'])
        if red_score > blue_score:
            self._winning_alliance = 'red'
        elif blue_score > red_score:
            self._winning_alliance = 'blue'
        else:  # tie
            event = self.event.get()
            if event and event.playoff_type == PlayoffType.ROUND_ROBIN_6_TEAM and event.event_type_enum == EventType.CMP_FINALS:
                self._winning_alliance = ''
            else:
                self._winning_alliance = MatchHelper.tiebreak_winner(self)
    return self._winning_alliance
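# --- Illustrative sketch (not part of the model code above) ---
# A standalone illustration of the winner-determination order used above:
# compare alliance scores first, and only fall back to a tiebreaker for true
# ties. The function and tiebreak stand-in below are hypothetical, not
# MatchHelper.tiebreak_winner.
def _winning_alliance_example(red_score, blue_score, tiebreak=lambda: ''):
    if red_score > blue_score:
        return 'red'
    if blue_score > red_score:
        return 'blue'
    return tiebreak()  # '' means the tie stands

assert _winning_alliance_example(55, 40) == 'red'
assert _winning_alliance_example(40, 40) == ''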
def addTeamEvents(cls, team_dict, year): """ Consume a Team dict, and return it with a year's Events. """ memcache_key = "api_team_events_%s_%s" % (team_dict["key"], year) event_list = memcache.get(memcache_key) if event_list is None: team = Team.get_by_id(team_dict["key"]) events = [a.event.get() for a in EventTeam.query(EventTeam.team == team.key, EventTeam.year == int(year)).fetch(1000)] events = sorted(events, key=lambda event: event.start_date) event_list = [cls.getEventInfo(e.key_name) for e in events] for event_dict, event in zip(event_list, events): event_dict["team_wlt"] = EventHelper.getTeamWLT(team_dict["key"], event) # TODO: Reduce caching time before 2013 season. 2592000 is one month -gregmarra if tba_config.CONFIG["memcache"]: memcache.set(memcache_key, event_list, 2592000) team_dict["events"] = event_list return team_dict
def get(self):
    self._require_registration()

    current_events = filter(lambda e: e.now, EventHelper.getEventsWithinADay())
    for event in current_events:
        event.prep_details()
        event.prep_matches()

    finished_matches = []
    current_matches = []
    upcoming_matches = []
    ranks = {}
    for event in current_events:
        finished_matches += MatchHelper.recentMatches(event.matches, num=1)
        for i, match in enumerate(MatchHelper.upcomingMatches(event.matches, num=3)):
            if match.key.id() not in event.details.predictions['match_predictions']['qual' if match.comp_level == 'qm' else 'playoff']:
                match.prediction = defaultdict(lambda: defaultdict())
                match.bluezone_score = 0
                continue
            match.prediction = event.details.predictions['match_predictions']['qual' if match.comp_level == 'qm' else 'playoff'][match.key.id()]
            match.bluezone_score = self.get_qual_bluezone_score(match.prediction) if match.comp_level == 'qm' else self.get_elim_bluezone_score(match.prediction)
            if i == 0:
                current_matches.append(match)
            else:
                upcoming_matches.append(match)
        for rank in event.details.rankings2:
            ranks[rank['team_key']] = rank['rank']

    finished_matches = sorted(finished_matches, key=lambda m: m.actual_time if m.actual_time else m.time)
    current_matches = sorted(current_matches, key=lambda m: m.predicted_time if m.predicted_time else m.time)
    upcoming_matches = sorted(upcoming_matches, key=lambda m: m.predicted_time if m.predicted_time else m.time)

    self.template_values.update({
        'finished_matches': finished_matches,
        'current_matches': current_matches,
        'upcoming_matches': upcoming_matches,
        'ranks': ranks,
    })

    self.response.out.write(jinja2_engine.render('match_suggestion.html', self.template_values))
def getEventDetails(self, first_eid):
    url = self.EVENT_DETAILS_URL_PATTERN % (first_eid)
    event, _ = self.parse(url, UsfirstEventDetailsParser)
    if event is None:
        return None

    return Event(
        id=str(event["year"]) + str.lower(str(event["event_short"])),
        end_date=event.get("end_date", None),
        event_short=event.get("event_short", None),
        event_type_enum=event.get("event_type_enum", None),
        first_eid=first_eid,
        name=event.get("name", None),
        short_name=event.get("short_name", None),
        official=True,
        start_date=event.get("start_date", None),
        venue_address=event.get("venue_address", None),
        venue=event.get("venue", None),
        location=event.get("location", None),
        timezone_id=EventHelper.get_timezone_id(event),
        website=event.get("website", None),
        year=event.get("year", None)
    )
def post(self, event_key): self._require_admin() # Note, we don't actually use event_key. start_date = None if self.request.get("start_date"): start_date = datetime.strptime(self.request.get("start_date"), "%Y-%m-%d") end_date = None if self.request.get("end_date"): end_date = datetime.strptime(self.request.get("end_date"), "%Y-%m-%d") event = Event( id=str(self.request.get("year")) + str.lower(str(self.request.get("event_short"))), end_date=end_date, event_short=self.request.get("event_short"), event_type_enum=EventHelper.parseEventType( self.request.get("event_type_str")), location=self.request.get("location"), name=self.request.get("name"), short_name=self.request.get("short_name"), start_date=start_date, website=self.request.get("website"), year=int(self.request.get("year")), official={ "true": True, "false": False }.get(self.request.get("official").lower()), facebook_eid=self.request.get("facebook_eid"), webcast_json=self.request.get("webcast_json"), rankings_json=self.request.get("rankings_json"), ) event = EventManipulator.createOrUpdate(event) self.redirect("/admin/event/" + event.key_name)