def _process_request(self, request, event_key):
    """Parse matches from the request body and build Match models for an event.

    Matches that lack a concrete ``time`` but carry a ``time_string`` are
    batched so their real datetimes can be computed in a single pass.
    """
    event = Event.get_by_id(event_key)
    year = int(event_key[:4])  # event keys begin with the 4-digit season year

    matches = []      # every Match model built from the payload
    needs_time = []   # matches whose `time` must be derived from `time_string`
    for match_dict in JSONMatchesParser.parse(request.body, year):
        match = Match(
            id=Match.renderKeyName(
                event.key.id(),
                match_dict.get("comp_level", None),
                match_dict.get("set_number", 0),
                match_dict.get("match_number", 0)),
            event=event.key,
            year=event.year,
            set_number=match_dict.get("set_number", 0),
            match_number=match_dict.get("match_number", 0),
            comp_level=match_dict.get("comp_level", None),
            team_key_names=match_dict.get("team_key_names", None),
            alliances_json=match_dict.get("alliances_json", None),
            score_breakdown_json=match_dict.get("score_breakdown_json", None),
            time_string=match_dict.get("time_string", None),
            time=match_dict.get("time", None),
        )

        if (not match.time or match.time == "") and match.time_string:
            # We can calculate the real time from the time string
            needs_time.append(match)
        matches.append(match)

    if needs_time:
        try:
            logging.debug("Calculating time!")
            MatchHelper.add_match_times(event, needs_time)
        except Exception:
            # Best-effort: a failed time computation must not abort the update.
            # Fixed from the Python-2-only `except Exception, e:` form (invalid
            # in Python 3; `e` was unused anyway) and switched to
            # logging.exception so the traceback is not silently dropped.
            logging.exception("Failed to calculate match times")
    # NOTE(review): `matches` is built but never persisted in this variant —
    # presumably a MatchManipulator.createOrUpdate(matches) call was intended;
    # confirm against the other versions of this handler.
def _process_request(self, request, event_key):
    """Parse matches from the request body, compute missing match times,
    persist the matches, and write a JSON success response.
    """
    event = Event.get_by_id(event_key)
    year = int(event_key[:4])  # event keys begin with the 4-digit season year

    matches = []      # every Match model built from the payload
    needs_time = []   # matches whose `time` must be derived from `time_string`
    for match_dict in JSONMatchesParser.parse(request.body, year):
        match = Match(
            id=Match.renderKeyName(
                event.key.id(),
                match_dict.get("comp_level", None),
                match_dict.get("set_number", 0),
                match_dict.get("match_number", 0)),
            event=event.key,
            year=event.year,
            set_number=match_dict.get("set_number", 0),
            match_number=match_dict.get("match_number", 0),
            comp_level=match_dict.get("comp_level", None),
            team_key_names=match_dict.get("team_key_names", None),
            alliances_json=match_dict.get("alliances_json", None),
            score_breakdown_json=match_dict.get("score_breakdown_json", None),
            time_string=match_dict.get("time_string", None),
            time=match_dict.get("time", None),
        )

        if (not match.time or match.time == "") and match.time_string:
            # We can calculate the real time from the time string
            needs_time.append(match)
        matches.append(match)

    if needs_time:
        # Batch the time computation in one call, consistent with the sibling
        # versions of this handler — presumably add_match_times uses the whole
        # ordered set of time strings for context (TODO confirm), and one call
        # avoids redundant per-match work.
        try:
            logging.debug("Calculating time!")
            MatchHelper.add_match_times(event, needs_time)
        except Exception:
            # Best-effort, matching the sibling handlers: a failed time
            # computation should not prevent the matches from being stored.
            logging.exception("Failed to calculate match times")

    MatchManipulator.createOrUpdate(matches)
    self.response.out.write(json.dumps({'Success': "Matches successfully updated"}))
def _process_request(self, request, event_key):
    """Parse matches from the request body and build Match models for
    ``self.event``, batching any matches whose real time must be derived
    from their time string.
    """
    matches = []      # every Match model built from the payload
    needs_time = []   # matches whose `time` must be derived from `time_string`
    for match_dict in JSONMatchesParser.parse(request.body, self.event.year):
        match = Match(
            id=Match.renderKeyName(
                self.event.key.id(),
                match_dict.get("comp_level", None),
                match_dict.get("set_number", 0),
                match_dict.get("match_number", 0)),
            event=self.event.key,
            year=self.event.year,
            set_number=match_dict.get("set_number", 0),
            match_number=match_dict.get("match_number", 0),
            comp_level=match_dict.get("comp_level", None),
            team_key_names=match_dict.get("team_key_names", None),
            alliances_json=match_dict.get("alliances_json", None),
            score_breakdown_json=match_dict.get("score_breakdown_json", None),
            time_string=match_dict.get("time_string", None),
            time=match_dict.get("time", None),
        )

        if (not match.time or match.time == "") and match.time_string:
            # We can calculate the real time from the time string
            needs_time.append(match)
        matches.append(match)

    if needs_time:
        try:
            logging.debug("Calculating time!")
            MatchHelper.add_match_times(self.event, needs_time)
        except Exception:
            # Best-effort: a failed time computation must not abort the update.
            # Fixed from the Python-2-only `except Exception, e:` form (invalid
            # in Python 3; `e` was unused anyway) and switched to
            # logging.exception so the traceback is not silently dropped.
            logging.exception("Failed to calculate match times")
    # NOTE(review): `matches` is built but never persisted in this variant —
    # presumably a MatchManipulator.createOrUpdate(matches) call was intended;
    # confirm against the other versions of this handler.
def getMatches(self, event):
    """Fetch and parse matches for `event` from the USFIRST results page.

    Fallback behavior:
    - If no results exist yet (and the datastore has no matches for the
      event), try the qualification match schedule instead.
    - If every parsed match is a fully-scored qual match (and the datastore
      has no elim matches), also pull the elimination match schedule.

    Returns a list of Match models with times filled in.
    """
    matches_url = self.YEAR_MATCH_RESULTS_URL_PATTERN.get(
        event.year, self.DEFAULT_MATCH_RESULTS_URL_PATTERN) % (
        event.year, self.EVENT_SHORT_EXCEPTIONS.get(event.event_short, event.event_short))

    match_dicts, _ = self.parse(matches_url, self.YEAR_MATCH_PARSER.get(event.year, self.DEFAULT_MATCH_PARSER))

    if not match_dicts:
        # Matches have not been played, but qual match schedule may be out
        # If this is run when there are already matches in the DB, it will overwrite scores!
        # Check to make sure event has no existing matches
        if len(Match.query(Match.event == event.key).fetch(1, keys_only=True)) == 0:
            logging.warning("No matches found for {}. Trying to parse qual match schedule.".format(event.key.id()))
            qual_match_sched_url = self.MATCH_SCHEDULE_QUAL_URL_PATTERN % (
                event.year, self.EVENT_SHORT_EXCEPTIONS.get(event.event_short, event.event_short))
            match_dicts, _ = self.parse(qual_match_sched_url, self.MATCH_SCHEDULE_PARSER)

    # for/else: the `else` runs only if no match was unscored or an elim match.
    for match_dict in match_dicts:
        alliances = json.loads(match_dict['alliances_json'])
        if (alliances['red']['score'] == -1 or
                alliances['blue']['score'] == -1 or
                match_dict['comp_level'] in Match.ELIM_LEVELS):
            break
    else:
        # Only qual matches have been played and they have all been played
        # If this is run when there are already elim matches in the DB, it will overwrite scores!
        # Check to make sure event has no existing elim matches
        if len(Match.query(Match.event == event.key, Match.comp_level.IN(Match.ELIM_LEVELS)).fetch(1, keys_only=True)) == 0:
            # Reconstructed: this log string was broken across a line boundary
            # (extraction artifact); rejoined to mirror the qual-schedule message.
            logging.warning("No elim matches found for {}. Trying to parse elim match schedule.".format(event.key.id()))
            elim_match_sched_url = self.MATCH_SCHEDULE_ELIMS_URL_PATTERN % (
                event.year, self.EVENT_SHORT_EXCEPTIONS.get(event.event_short, event.event_short))
            elim_match_dicts, _ = self.parse(elim_match_sched_url, self.MATCH_SCHEDULE_PARSER)
            match_dicts += elim_match_dicts

    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match_dict.get("comp_level", None),
            match_dict.get("set_number", 0),
            match_dict.get("match_number", 0)),
        event=event.key,
        game=Match.FRC_GAMES_BY_YEAR.get(event.year, "frc_unknown"),
        set_number=match_dict.get("set_number", 0),
        match_number=match_dict.get("match_number", 0),
        comp_level=match_dict.get("comp_level", None),
        team_key_names=match_dict.get("team_key_names", None),
        time_string=match_dict.get("time_string", None),
        alliances_json=match_dict.get("alliances_json", None)
    ) for match_dict in match_dicts]

    MatchHelper.add_match_times(event, matches)

    return matches
def getMatches(self, event):
    """Fetch and parse matches for `event` from the USFIRST results page.

    NOTE(review): this is a byte-for-byte duplicate of the other getMatches
    in this file — likely two revisions concatenated; consider deleting one.

    Fallback behavior:
    - If no results exist yet (and the datastore has no matches for the
      event), try the qualification match schedule instead.
    - If every parsed match is a fully-scored qual match (and the datastore
      has no elim matches), also pull the elimination match schedule.

    Returns a list of Match models with times filled in.
    """
    matches_url = self.YEAR_MATCH_RESULTS_URL_PATTERN.get(
        event.year, self.DEFAULT_MATCH_RESULTS_URL_PATTERN) % (
        event.year, self.EVENT_SHORT_EXCEPTIONS.get(event.event_short, event.event_short))

    match_dicts, _ = self.parse(matches_url, self.YEAR_MATCH_PARSER.get(event.year, self.DEFAULT_MATCH_PARSER))

    if not match_dicts:
        # Matches have not been played, but qual match schedule may be out
        # If this is run when there are already matches in the DB, it will overwrite scores!
        # Check to make sure event has no existing matches
        if len(Match.query(Match.event == event.key).fetch(1, keys_only=True)) == 0:
            logging.warning("No matches found for {}. Trying to parse qual match schedule.".format(event.key.id()))
            qual_match_sched_url = self.MATCH_SCHEDULE_QUAL_URL_PATTERN % (
                event.year, self.EVENT_SHORT_EXCEPTIONS.get(event.event_short, event.event_short))
            match_dicts, _ = self.parse(qual_match_sched_url, self.MATCH_SCHEDULE_PARSER)

    # for/else: the `else` runs only if no match was unscored or an elim match.
    for match_dict in match_dicts:
        alliances = json.loads(match_dict['alliances_json'])
        if (alliances['red']['score'] == -1 or
                alliances['blue']['score'] == -1 or
                match_dict['comp_level'] in Match.ELIM_LEVELS):
            break
    else:
        # Only qual matches have been played and they have all been played
        # If this is run when there are already elim matches in the DB, it will overwrite scores!
        # Check to make sure event has no existing elim matches
        if len(Match.query(Match.event == event.key, Match.comp_level.IN(Match.ELIM_LEVELS)).fetch(1, keys_only=True)) == 0:
            # Reconstructed: this log string was broken across a line boundary
            # (extraction artifact); rejoined to mirror the qual-schedule message.
            logging.warning("No elim matches found for {}. Trying to parse elim match schedule.".format(event.key.id()))
            elim_match_sched_url = self.MATCH_SCHEDULE_ELIMS_URL_PATTERN % (
                event.year, self.EVENT_SHORT_EXCEPTIONS.get(event.event_short, event.event_short))
            elim_match_dicts, _ = self.parse(elim_match_sched_url, self.MATCH_SCHEDULE_PARSER)
            match_dicts += elim_match_dicts

    matches = [Match(
        id=Match.renderKeyName(
            event.key.id(),
            match_dict.get("comp_level", None),
            match_dict.get("set_number", 0),
            match_dict.get("match_number", 0)),
        event=event.key,
        game=Match.FRC_GAMES_BY_YEAR.get(event.year, "frc_unknown"),
        set_number=match_dict.get("set_number", 0),
        match_number=match_dict.get("match_number", 0),
        comp_level=match_dict.get("comp_level", None),
        team_key_names=match_dict.get("team_key_names", None),
        time_string=match_dict.get("time_string", None),
        alliances_json=match_dict.get("alliances_json", None)
    ) for match_dict in match_dicts]

    MatchHelper.add_match_times(event, matches)

    return matches
def test_match_times(self):
    """add_match_times should fill in real datetimes from parsed time strings."""
    # Matches from an arbitrary real event serve as fixture data.
    with open('test_data/usfirst_html/usfirst_event_matches_2013cama.html', 'r') as fixture:
        match_dicts, _ = UsfirstMatchesParser.parse(fixture.read())
    matches = self.matchDictToMatches(match_dicts)

    MatchHelper.add_match_times(self.event, matches)

    self.assertEqual(len(matches), 92)

    # NOTE(review): -5 is labeled PST but looks like an Eastern-time offset;
    # confirm which timezone self.event is configured with.
    PST_OFFSET = -5

    def expected(*dt_args):
        # Local wall-clock time shifted by the event's UTC offset.
        return datetime.datetime(*dt_args) - datetime.timedelta(hours=PST_OFFSET)

    self.assertEqual(matches[0].time, expected(2014, 2, 28, 9, 0))
    self.assertEqual(matches[75].time, expected(2014, 3, 1, 11, 50))
def test_match_times_dst(self):
    """Match times straddling a DST transition should get distinct UTC offsets."""
    # Matches from an arbitrary real event serve as fixture data.
    with open('test_data/usfirst_html/usfirst_event_matches_2012ct.html', 'r') as fixture:
        match_dicts, _ = UsfirstMatchesParser.parse(fixture.read())
    matches = self.matchDictToMatches(match_dicts)

    MatchHelper.add_match_times(self.event_dst, matches)

    self.assertEqual(len(matches), 125)

    PST_OFFSET = -8  # offset in effect before the spring-forward transition
    PDT_OFFSET = -7  # offset in effect after it

    def expected(offset, *dt_args):
        # Local wall-clock time shifted by the given UTC offset.
        return datetime.datetime(*dt_args) - datetime.timedelta(hours=offset)

    self.assertEqual(matches[0].time, expected(PST_OFFSET, 2014, 3, 8, 9, 0))
    self.assertEqual(matches[-1].time, expected(PDT_OFFSET, 2014, 3, 9, 16, 5))