def test_last_event(fake_events):
    """The most recently created fixture event is reported by both helpers."""
    expected = fake_events
    assert Event.last_event() == expected
    # The fixture seeds exactly three events.
    assert len(Event.query.all()) == 3
    assert Event.last_on() == expected
class TestListDistrictsController(unittest2.TestCase):
    """Tests ApiDistrictListController's /<year> district-list endpoint."""

    def setUp(self):
        # Route the year path component straight to the controller under test.
        app = webapp2.WSGIApplication([webapp2.Route(r'/<year:>', ApiDistrictListController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)

        # GAE testbed stubs required by the handler (datastore, urlfetch,
        # memcache, taskqueue).
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")

        # District championship fixture; its NEW_ENGLAND district enum is what
        # the endpoint is expected to surface for 2010.
        self.event = Event(
            id="2010sc",
            name="Palmetto Regional",
            event_type_enum=EventType.DISTRICT_CMP,
            event_district_enum=DistrictType.NEW_ENGLAND,
            short_name="Palmetto",
            event_short="sc",
            year=2010,
            end_date=datetime(2010, 03, 27),
            official=True,
            city="Clemson",
            state_prov="SC",
            country="USA",
            venue="Long Beach Arena",
            venue_address="Long Beach Arena\r\n300 East Ocean Blvd\r\nLong Beach, CA 90802\r\nUSA",
            start_date=datetime(2010, 03, 24),
            webcast_json="[{\"type\": \"twitch\", \"channel\": \"frcgamesense\"}]",
            website="http://www.firstsv.org",
        )
        self.event.put()

        # Alliance selections live on the EventDetails sibling entity.
        self.event_details = EventDetails(
            id=self.event.key.id(),
            alliance_selections=[
                {"declines": [], "picks": ["frc971", "frc254", "frc1662"]},
                {"declines": [], "picks": ["frc1678", "frc368", "frc4171"]},
                {"declines": [], "picks": ["frc2035", "frc192", "frc4990"]},
                {"declines": [], "picks": ["frc1323", "frc846", "frc2135"]},
                {"declines": [], "picks": ["frc2144", "frc1388", "frc668"]},
                {"declines": [], "picks": ["frc1280", "frc604", "frc100"]},
                {"declines": [], "picks": ["frc114", "frc852", "frc841"]},
                {"declines": [], "picks": ["frc2473", "frc3256", "frc1868"]}
            ]
        )
        self.event_details.put()

    def tearDown(self):
        self.testbed.deactivate()

    def assertDistrictKeys(self, district):
        # The returned district must match the fixture event's district enum.
        self.assertEqual(district["key"], DistrictType.type_abbrevs[DistrictType.NEW_ENGLAND])
        self.assertEqual(district["name"], DistrictType.type_names[DistrictType.NEW_ENGLAND])

    def testDistrictApi(self):
        response = self.testapp.get('/{}'.format(self.event.year), headers={"X-TBA-App-Id": "tba-tests:disstrict-controller-test:v01"})
        districts = json.loads(response.body)
        self.assertDistrictKeys(districts[0])
def get(self):
    """Cron handler: pull fresh events, purge expired ones, normalize cities.

    Fixes: corrected typos in the user-visible message ("sorces" ->
    "sources", "evenbrite" -> "eventbrite"), removed the unused local
    `Attendings()` instance, and stopped reusing the `results` name for two
    unrelated values (request_events count vs. expired-event query).
    """
    self.response.write('Welcome to attender server! Here is a cron job for pulling events from sources: meetup.com and eventbrite.com')
    ev = Event()

    logging.info("Adding new events to DataStore")
    added = self.obj.request_events(radius="25")
    logging.info("Events added: {}".format(added))

    logging.info("Deleting old events from DataStore")
    # Delete passed events, along with any Attendings row referencing them.
    all_events = ev.return_all_events()
    expired = all_events.filter(Event.date < datetime.now())
    for res in expired:
        logging.info(str(res.key.id()))
        old_attending = Attendings.query(Attendings.event_id == int(res.key.id())).get()
        logging.info("query {}".format(old_attending))
        if old_attending is not None:
            old_attending.key.delete()
        res.key.delete()

    # Update city names; persist only entities whose city actually changed.
    for q in all_events:
        changed = self.api_obj.check_city(q.city)
        if changed:
            q.city = changed
            q.put()
def setUp(self):
    # Minimal GAE testbed: datastore + memcache stubs only.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()

    # Regional event fixture (2011 Connecticut Regional).
    event = Event(
        id="2011ct",
        end_date=datetime.datetime(2011, 4, 2, 0, 0),
        event_short="ct",
        event_type_enum=EventType.REGIONAL,
        first_eid="5561",
        name="Northeast Utilities FIRST Connecticut Regional",
        start_date=datetime.datetime(2011, 3, 31, 0, 0),
        year=2011,
        venue_address="Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA",
        website="http://www.ctfirst.org/ctr"
    )
    event.put()

    team = Team(
        id="frc177",
        team_number=177,
        website="http://www.bobcatrobotics.org"
    )
    team.put()

    # EventTeam link between the fixture team and event; `year` is
    # deliberately left as None for this test's purposes.
    event_team = EventTeam(
        id="%s_%s" % (event.key.id(), team.key.id()),
        event=event.key,
        team=team.key,
        year=None)
    event_team.put()
class TestEventMatchApiController(unittest2.TestCase):
    """Tests ApiEventMatchesController's /<event_key> matches endpoint."""

    def setUp(self):
        app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventMatchesController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)

        # GAE testbed stubs required by the handler.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")

        self.event = Event(
            id="2010sc",
            name="Palmetto Regional",
            event_type_enum=EventType.REGIONAL,
            short_name="Palmetto",
            event_short="sc",
            year=2010,
            end_date=datetime(2010, 03, 27),
            official=True,
            location='Clemson, SC',
            start_date=datetime(2010, 03, 24),
        )
        self.event.put()

        # A played qualification match with videos and a fixed epoch time so
        # the serialized "time" field can be asserted exactly.
        self.match = Match(
            id="2010sc_qm1",
            alliances_json="""{"blue": {"score": 57, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": 74, "teams": ["frc281", "frc571", "frc176"]}}""",
            comp_level="qm",
            event=self.event.key,
            year=2010,
            set_number=1,
            match_number=1,
            team_key_names=[u'frc281', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
            youtube_videos=["94UGXIq6jUA"],
            tba_videos=[".mp4"],
            time=datetime.fromtimestamp(1409527874)
        )
        self.match.put()

    def tearDown(self):
        self.testbed.deactivate()

    def assertMatchJson(self, matches):
        # Compare every serialized field against the stored Match entity.
        match = matches[0]
        self.assertEqual(str(match["key"]), self.match.key.string_id())
        self.assertEqual(match["comp_level"], self.match.comp_level)
        self.assertEqual(match["event_key"], self.match.event.string_id())
        self.assertEqual(match["set_number"], self.match.set_number)
        self.assertEqual(match["match_number"], self.match.match_number)
        self.assertEqual(match["videos"], self.match.videos)
        self.assertEqual(match["time_string"], self.match.time_string)
        # Epoch seconds must round-trip through the fixture's fixed timestamp.
        self.assertEqual(match["time"], 1409527874)

    def testEventMatchApi(self):
        response = self.testapp.get('/2010sc', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        match_json = json.loads(response.body)
        self.assertMatchJson(match_json)
def _create_event(self, title, description, start_date_str, end_date_str):
    """Parse the date strings and persist a new Event.

    Args:
        title, description: passed straight through to the Event entity.
        start_date_str, end_date_str: "%Y-%m-%d %H:%M:%S" formatted strings.

    Returns:
        The stored Event entity.

    Raises:
        ValueError: if either date string does not match the format.

    Fix: removed the unused local `event_key = str(event.put())` — put()'s
    return value was captured but never used.
    """
    start_date = datetime.strptime(start_date_str, "%Y-%m-%d %H:%M:%S")
    end_date = datetime.strptime(end_date_str, "%Y-%m-%d %H:%M:%S")
    event = Event(title=title, description=description,
                  start_date=start_date, end_date=end_date)
    event.put()
    return event
def setUp(self):
    # GAE testbed with datastore + memcache stubs.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    # A registered account in good standing.
    self.account = Account.get_or_insert(
        "123",
        email="*****@*****.**",
        registered=True)
    self.account.put()

    # A registered but shadow-banned account, for ban-handling tests.
    self.account_banned = Account.get_or_insert(
        "456",
        email="*****@*****.**",
        registered=True,
        shadow_banned=True,
    )
    self.account_banned.put()

    # Offseason event plus a finals match the suggestion tests target.
    event = Event(id="2016test", name="Test Event", event_short="Test Event", year=2016, event_type_enum=EventType.OFFSEASON)
    event.put()
    self.match = Match(id="2016test_f1m1", event=ndb.Key(Event, "2016test"), year=2016, comp_level="f", set_number=1, match_number=1, alliances_json='')
    self.match.put()
def test_webcast_good_date(self):
    """A webcast suggestion carrying a valid date is accepted and stored."""
    offseason = Event(id="2016test", name="Test Event", event_short="Test Event",
                      year=2016, event_type_enum=EventType.OFFSEASON)
    offseason.put()

    result = SuggestionCreator.createEventWebcastSuggestion(
        self.account.key, "http://twitch.tv/frcgamesense", "2017-02-28", "2016test")
    self.assertEqual(result, 'success')

    # Exactly one pending suggestion should have been written.
    pending = Suggestion.query().fetch()
    self.assertIsNotNone(pending)
    self.assertEqual(len(pending), 1)

    created = pending[0]
    self.assertIsNotNone(created)
    self.assertEqual(created.target_key, "2016test")
    self.assertEqual(created.author, self.account.key)
    self.assertEqual(created.review_state, Suggestion.REVIEW_PENDING)

    # The contents payload carries the URL, a parsed webcast dict, and the date.
    contents = created.contents
    self.assertIsNotNone(contents)
    self.assertEqual(contents.get('webcast_url'), "http://twitch.tv/frcgamesense")
    self.assertIsNotNone(contents.get('webcast_dict'))
    self.assertEqual(contents.get('webcast_date'), "2017-02-28")
def setUp(self):
    # GAE testbed with datastore + memcache stubs.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests

    # A plain regional event fixture.
    self.event_nyny = Event(
        id="2016nyny",
        name="NYC Regional",
        event_type_enum=EventType.REGIONAL,
        short_name="NYC",
        event_short="nyny",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=True,
        start_date=datetime(2016, 03, 24),
        timezone_id="America/New_York"
    )
    self.event_nyny.put()

    # A district championship using the 16-alliance bracket playoff type.
    self.event_micmp = Event(
        id="2016micmp",
        name="Michigan District Champs",
        event_type_enum=EventType.DISTRICT_CMP,
        short_name="Michigan",
        event_short="micmp",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=True,
        start_date=datetime(2016, 03, 24),
        timezone_id="America/New_York",
        playoff_type=PlayoffType.BRACKET_16_TEAM
    )
    self.event_micmp.put()
def get_events(self, limit=False):
    """Return a list of dicts describing this user's activity events.

    Each dict carries the owner, description, resource identifiers, a
    resolved display name, and a URL prefix for the resource.

    Fix: `resource_name`/`url_prefix` were unbound (NameError) whenever an
    event's resource_type was neither 'user' nor 'schedule'; they now
    default to None/'' for unrecognized types.

    NOTE(review): `limit` is accepted but never used here — kept for
    interface compatibility; confirm with callers whether it should apply.
    """
    from models.event import Event
    from models.schedule import Schedule

    event = Event(uid=self.username)
    events = []
    for e in event.get_my_events('events'):
        # Resolve a human-readable name + link target for the resource.
        if e.resource_type == 'user':
            resource_name = User(id=e.resource_id).get_by_id().username
            url_prefix = '/user/' + e.resource_id
        elif e.resource_type == 'schedule':
            resource_name = Schedule(id=e.resource_id).get_by_id().subject
            url_prefix = '/show/' + e.resource_id
        else:
            resource_name = None
            url_prefix = ''
        events.append({
            'owner': e.uid,
            'description': e.description,
            'resource_type': e.resource_type,
            'resource_id': e.resource_id,
            'resource_name': resource_name,
            'created_at': e.created_at,
            'url': url_prefix,
        })
    return events
def show(id=None):
    """Render the schedule detail page with zombies, statistics, photos, links."""
    schedule = Schedule(id=id).get_by_id()

    # Zombies attached to this schedule drive most of the statistics.
    zombie = Zombie()
    zombies = zombie.get_by_schedule(schedule)

    # TODO: statics should be collected in one cycle!!
    event = Event(resource_id=id, resource_type='schedule')
    #statistics['views'] = event.get_views_by_schedule()
    statistics = {
        'stars': event.get_stars_by_schedule(),
        'links': zombie.count_links(zombies),
        'users': zombie.count_users(zombies),
        'images': zombie.count_images(zombies),
        'zombies': zombies.count(),
    }

    photos = zombie.get_photos(zombies)
    links = zombie.get_links(zombies)

    # should return schedule and zombies
    return render_template('show.html', schedule=schedule, zombies=zombies,
                           statistics=statistics, photos=photos, links=links)
class TestDatafeedUsfirstTeams(unittest2.TestCase):
    """Fixture-only test case: one regional event with one unplayed match."""

    def setUp(self):
        # GAE testbed with datastore + memcache stubs.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()

        self.event = Event(
            id="2010sc",
            name="Palmetto Regional",
            event_type_enum=EventType.REGIONAL,
            short_name="Palmetto",
            event_short="sc",
            year=2010,
            end_date=datetime.datetime(2010, 03, 27),
            official=True,
            location='Clemson, SC',
            start_date=datetime.datetime(2010, 03, 24),
        )
        self.event.put()

        # Scores of -1 mark the match as not yet played.
        self.match = Match(
            id="2010sc_qm1",
            alliances_json="""{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
            comp_level="qm",
            event=self.event.key,
            year=2010,
            set_number=1,
            match_number=1,
            team_key_names=[u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073']
        )
        self.match.put()

    def tearDown(self):
        self.testbed.deactivate()
class TestEventListApiController(unittest2.TestCase):
    """Tests ApiEventListController's /<year> event-list endpoint."""

    def setUp(self):
        app = webapp2.WSGIApplication([webapp2.Route(r'/<year:>', ApiEventListController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)

        # GAE testbed stubs required by the handler.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub()

        self.event = Event(
            id="2010sc",
            name="Palmetto Regional",
            event_type_enum=EventType.REGIONAL,
            short_name="Palmetto",
            event_short="sc",
            year=2010,
            end_date=datetime(2010, 03, 27),
            official=True,
            location='Clemson, SC',
            start_date=datetime(2010, 03, 24),
        )
        self.event.put()

    def tearDown(self):
        self.testbed.deactivate()

    def testEventListApi(self):
        # The 2010 listing should contain exactly the fixture event's key.
        response = self.testapp.get('/2010', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        event_dict = json.loads(response.body)
        self.assertEqual(event_dict[0], self.event.key_name)
def create_target_model(self, suggestion):
    """Create an offseason Event from this request's form fields.

    Returns a (status, event_key) tuple where status is one of
    'missing_key', 'bad_key', 'duplicate_key', or 'success'; event_key is
    None on any failure. On success the suggestion's author is emailed.
    """
    event_id = self.request.get("event_short", None)
    # Event keys are "<year><event_short lowercased>", e.g. "2016nyny".
    event_key = str(self.request.get("year")) + str.lower(str(self.request.get("event_short")))
    if not event_id:
        # Need to supply a key :(
        return 'missing_key', None
    if not Event.validate_key_name(event_key):
        # Bad event key generated
        return 'bad_key', None

    # Optional dates; only parsed when the form supplied them.
    start_date = None
    if self.request.get("start_date"):
        start_date = datetime.strptime(self.request.get("start_date"), "%Y-%m-%d")

    end_date = None
    if self.request.get("end_date"):
        end_date = datetime.strptime(self.request.get("end_date"), "%Y-%m-%d")

    # Refuse to clobber an event that already exists under this key.
    existing_event = Event.get_by_id(event_key)
    if existing_event:
        return 'duplicate_key', None

    first_code = self.request.get("first_code", '')
    event = Event(
        id=event_key,
        end_date=end_date,
        event_short=self.request.get("event_short"),
        event_type_enum=EventType.OFFSEASON,
        district_key=None,
        venue=self.request.get("venue"),
        venue_address=self.request.get("venue_address"),
        city=self.request.get("city"),
        state_prov=self.request.get("state"),
        country=self.request.get("country"),
        name=self.request.get("name"),
        short_name=self.request.get("short_name"),
        start_date=start_date,
        website=self.request.get("website"),
        year=int(self.request.get("year")),
        first_code=first_code,
        # Events that carry a FIRST code are treated as official.
        official=(not first_code == ''),
    )
    EventManipulator.createOrUpdate(event)

    # Notify the suggestion's author that the event is live.
    author = suggestion.author.get()
    OutgoingNotificationHelper.send_suggestion_result_email(
        to=author.email,
        subject="[TBA] Offseason Event Suggestion: {}".format(event.name),
        email_body="""Dear {}, Thank you for suggesting an offseason event to The Blue Alliance. 
Your suggestion has been approved and you can find the event at https://thebluealliance.com/event/{} If you are the event's organizer and would like to upload teams attending, match videos, or real-time match results to TBA before or during the event, you can do so using the TBA EventWizard - request auth keys here: https://www.thebluealliance.com/request/apiwrite Thanks for helping make TBA better, The Blue Alliance Admins """.format(author.nickname, event_key)
    )
    return 'success', event_key
def setUp(self):
    app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventRankingsController, methods=['GET'])], debug=True)
    self.testapp = webtest.TestApp(app)

    # GAE testbed stubs required by the handler.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    self.testbed.init_taskqueue_stub(root_path=".")

    # Rankings table in the raw row format stored on EventDetails:
    # a header row followed by one row per ranked team.
    self.rankings = [
        ["Rank", "Team", "QS", "ASSIST", "AUTO", "T&C", "TELEOP", "Record (W-L-T)", "DQ", "PLAYED"],
        ["1", "1126", "20.00", "240.00", "480.00", "230.00", "478.00", "10-2-0", "0", "12"],
        ["2", "5030", "20.00", "200.00", "290.00", "220.00", "592.00", "10-2-0", "0", "12"],
        ["3", "250", "20.00", "70.00", "415.00", "220.00", "352.00", "10-2-0", "0", "12"]
    ]

    # Event WITH rankings attached via its EventDetails.
    self.event = Event(
        id="2010sc",
        name="Palmetto Regional",
        event_type_enum=EventType.REGIONAL,
        short_name="Palmetto",
        event_short="sc",
        year=2010,
        end_date=datetime(2010, 03, 27),
        official=True,
        city="Clemson",
        state_prov="SC",
        country="USA",
        start_date=datetime(2010, 03, 24)
    )
    self.event.put()
    self.event_details = EventDetails(
        id=self.event.key.id(),
        rankings=self.rankings
    )
    self.event_details.put()

    # Event WITHOUT rankings, for the empty-rankings code path.
    self.eventNoRanks = Event(
        id="2010ct",
        name="Palmetto Regional",
        event_type_enum=EventType.REGIONAL,
        short_name="Palmetto",
        event_short="ct",
        year=2010,
        end_date=datetime(2010, 03, 27),
        official=True,
        city="Clemson",
        state_prov="SC",
        country="USA",
        start_date=datetime(2010, 03, 24),
    )
    self.eventNoRanks.put()
def _render(self, *args, **kw):
    """Render the eventwizard template, resolving a preselected event if valid."""
    template_path = os.path.join(os.path.dirname(__file__), "../templates/eventwizard.html")

    # Only honor the ?event= parameter when it is a well-formed, existing key.
    event_key = self.request.get('event', '')
    if event_key and Event.validate_key_name(event_key):
        event = Event.get_by_id(event_key)
        if event:
            self.template_values['selected_event'] = event

    return template.render(template_path, self.template_values)
def activity_log():
    """Save a marker event, then return all events created within the window."""
    marker = Event(name='bla')
    marker.save()

    # Window is configured by module-level TIME_UNIT / TIME_MEASUREMENT.
    cutoff = datetime.now() - timedelta(**{TIME_UNIT: TIME_MEASUREMENT})
    recent = Event.objects(creation_time__gte=cutoff)

    title = 'events from the last {} {}'.format(TIME_MEASUREMENT, TIME_UNIT)
    return jsonify(title=title, msg=[item.toMinimalJson() for item in recent])
def test_no_role(self):
    """API-write suggestions from users with no event affiliation are refused."""
    offseason = Event(id="2016test", name="Test Event", event_short="Test Event",
                      year=2016, event_type_enum=EventType.OFFSEASON)
    offseason.put()

    result = SuggestionCreator.createApiWriteSuggestion(
        self.account.key, "2016test", "", [1, 2, 3])
    self.assertEqual(result, 'no_affiliation')
def send_power_off():
    """Ask the arduino to power off; record an Event and report the outcome."""
    # Optional JSON body may override the default description.
    description = 'website off'
    if request.json:
        description = request.json.get('description') or description

    resp = arduino.turn_off()
    if resp.get('return_value', 0) != 1:
        return 'Error', 500

    Event.create_event(EventType.off, description)
    return 'OK', 200
def testWebcastAlreadyExists(self):
    """Suggesting a webcast the event already carries is rejected."""
    existing = Event(id="2016test", name="Test Event", event_short="Test Event",
                     year=2016, event_type_enum=EventType.OFFSEASON,
                     webcast_json="[{\"type\": \"twitch\", \"channel\": \"frcgamesense\"}]")
    existing.put()

    result = SuggestionCreator.createEventWebcastSuggestion(
        self.account.key, "http://twitch.tv/frcgamesense", "2016test")
    self.assertEqual(result, 'webcast_exists')
def save_in_db(self, event, source, category=None):
    """Persist one scraped event dict, optionally re-categorizing it first."""
    db = DAL()
    helper = Event()

    # Source timestamps arrive in milliseconds since the epoch.
    when = datetime.fromtimestamp(event['date'] / 1000)

    if category is not None:
        helper.update_category(event['id'], category)

    db.set_event_details(event['id'], event['name'], when, event['city'],
                         event['address'], event['description'], event['host'],
                         event['event_url'], event['attendees'], event['price'],
                         category, source)
class TestMatchSuggestionAccepter(unittest2.TestCase):
    """Accepting a video suggestion merges the video into the target match."""

    def setUp(self):
        # GAE testbed stubs required for datastore-backed entities.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests
        self.testbed.init_taskqueue_stub(root_path=".")

        self.account = Account(
            email="*****@*****.**",
        )
        self.account.put()

        # Suggestion proposing YouTube video "123456" for match 2012ct_qm1.
        self.suggestion = Suggestion(
            author=self.account.key,
            contents_json="{\"youtube_videos\":[\"123456\"]}",
            target_key="2012ct_qm1",
            target_model="match"
        )
        self.suggestion.put()

        self.event = Event(
            id="2012ct",
            event_short="ct",
            year=2012,
            event_type_enum=EventType.REGIONAL,
        )
        self.event.put()

        # The target match already has one video ("abcdef"); the accepted
        # suggestion's video should be added alongside it, not replace it.
        self.match = Match(
            id="2012ct_qm1",
            alliances_json="""{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
            comp_level="qm",
            event=self.event.key,
            year=2012,
            set_number=1,
            match_number=1,
            team_key_names=[u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
            youtube_videos=["abcdef"]
        )
        self.match.put()

    def tearDown(self):
        self.testbed.deactivate()

    def test_accept_suggestions(self):
        MatchSuggestionAccepter.accept_suggestion(self.match, self.suggestion)
        match = Match.get_by_id("2012ct_qm1")
        # Both the pre-existing and the suggested videos must be present.
        self.assertTrue("abcdef" in match.youtube_videos)
        self.assertTrue("123456" in match.youtube_videos)
def test_2017scmb_sequence(self):
    """Replay archived FMS API playoff responses for 2017scmb in time order
    and verify the resulting qf/sf/f match set and tiebreaker scores."""
    event = Event(
        id='2017scmb',
        event_short='scmb',
        year=2017,
        event_type_enum=0,
        timezone_id='America/New_York'
    )
    event.put()

    event_code = 'scmb'

    # Archived responses live in GCS under this prefix, one file per fetch time.
    file_prefix = 'frc-api-response/v2.0/2017/schedule/{}/playoff/hybrid/'.format(event_code)

    context = ndb.get_context()
    result = context.urlfetch('https://www.googleapis.com/storage/v1/b/bucket/o?bucket=tbatv-prod-hrd.appspot.com&prefix={}'.format(file_prefix)).get_result()

    for item in json.loads(result.content)['items']:
        # Each object's name encodes the fetch timestamp; replay the datafeed
        # as of shortly (30s) after that moment.
        filename = item['name']
        time_str = filename.replace(file_prefix, '').replace('.json', '').strip()
        file_time = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
        query_time = file_time + datetime.timedelta(seconds=30)
        MatchManipulator.createOrUpdate(DatafeedFMSAPI('v2.0', sim_time=query_time).getMatches('2017{}'.format(event_code)), run_post_update_hook=False)
        # Prune matches invalidated by each new snapshot (e.g. unneeded
        # tiebreakers).  NOTE(review): source formatting was lost; this call
        # is assumed to run once per snapshot — confirm against upstream.
        MatchHelper.deleteInvalidMatches(event.matches, event)

    # Final bracket shape: 11 quarterfinal, 4 semifinal, 3 final matches.
    qf_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'qf').fetch()
    self.assertEqual(len(qf_matches), 11)

    sf_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'sf').fetch()
    self.assertEqual(len(sf_matches), 4)

    f_matches = Match.query(Match.event == ndb.Key(Event, '2017scmb'), Match.comp_level == 'f').fetch()
    self.assertEqual(len(f_matches), 3)

    # qf4m1 ended in a 305-305 tie.
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').alliances['red']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').alliances['blue']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').score_breakdown['red']['totalPoints'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m1').score_breakdown['blue']['totalPoints'], 305)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').alliances['red']['score'], 213)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').alliances['blue']['score'], 305)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').score_breakdown['red']['totalPoints'], 213)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m2').score_breakdown['blue']['totalPoints'], 305)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').alliances['red']['score'], 312)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').alliances['blue']['score'], 255)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').score_breakdown['red']['totalPoints'], 312)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m3').score_breakdown['blue']['totalPoints'], 255)

    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').alliances['red']['score'], 310)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').alliances['blue']['score'], 306)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').score_breakdown['red']['totalPoints'], 310)
    self.assertEqual(Match.get_by_id('2017scmb_qf4m4').score_breakdown['blue']['totalPoints'], 306)
class TestDatafeedUsfirstTeams(unittest2.TestCase):
    """EventTeamUpdate should derive EventTeam rows from a match's team keys."""

    def setUp(self):
        # GAE testbed with datastore + memcache stubs.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()

        # Older-style Event fixture (string event_type rather than enum).
        self.event = Event(
            id = "2010sc",
            name = "Palmetto Regional",
            event_type = "Regional",
            short_name = "Palmetto",
            event_short = "sc",
            year = 2010,
            end_date = datetime.datetime(2010, 03, 27),
            official = True,
            location = 'Clemson, SC',
            start_date = datetime.datetime(2010, 03, 24),
        )
        self.event.put()

        # Unplayed match (scores -1); its team_key_names feed the update task.
        self.match = Match(
            id = "2010sc_qm1",
            alliances_json = """{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
            comp_level = "qm",
            event = self.event.key,
            game = "frc_2010_bkwy",
            set_number = 1,
            match_number = 1,
            team_key_names = [u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073']
        )
        self.match.put()

    def tearDown(self):
        self.testbed.deactivate()

    def test_doEventTeamUpdate(self):
        # call EventTeamUpdate with 2010sc; the handler is invoked directly
        # rather than through a WSGI app, so a Response is attached manually.
        eventteamupdate = EventTeamUpdate()
        eventteamupdate.response = Response()
        eventteamupdate.get("2010sc")

        # Teams were generated by EventTeamUpdate, make sure EventTeams
        # exist and feature Team Keys
        event_team_from_match_one = EventTeam.get_by_id("2010sc_frc69")
        self.assertEqual(event_team_from_match_one.event, self.event.key)
        self.assertEqual(event_team_from_match_one.team, ndb.Key(Team, "frc69"))

        event_team_from_match_two = EventTeam.get_by_id("2010sc_frc20")
        self.assertEqual(event_team_from_match_two.event, self.event.key)
        self.assertEqual(event_team_from_match_two.team, ndb.Key(Team, "frc20"))
def get(self, when):
    """Enqueue an eventteam_update task for every event in scope.

    `when` is either the literal "all" or a year string.
    """
    if when == "all":
        query = Event.query()
    else:
        query = Event.query(Event.year == int(when))
    event_keys = query.fetch(10000, keys_only=True)

    for key in event_keys:
        taskqueue.add(url="/tasks/math/do/eventteam_update/" + key.id(), method="GET")

    path = os.path.join(os.path.dirname(__file__), "../templates/math/eventteam_update_enqueue.html")
    self.response.out.write(template.render(path, {"event_keys": event_keys}))
def unattend(u_key, e_key):
    """Remove a user's attendance of an event.

    Returns 0 on success, 1 for an unknown user, 2 for an unknown event.
    """
    event_helper = Event()

    # Validate both keys before touching any attendance records.
    if User.get_by_id(u_key) is None:
        return 1
    if Event.get_by_id(e_key) is None:
        return 2

    attendance = Attendings().check_attend_exist(u_key, e_key)
    if attendance:
        attendance.key.delete()
        # Keep the event's attendee counter in sync.
        event_helper.update_attendees(e_key, action="sub")
    return 0
def test_event(session):
    """An Event persists and round-trips through the session unchanged.

    Fix: the original wrote `e.event_description == 'test description'` — a
    no-op comparison whose result was discarded. The intent was assignment,
    and a matching assertion now pins the field after the round trip.
    """
    e = Event()
    e.event = EventType.on
    e.event_description = 'test description'
    session.add(e)
    session.commit()

    stored = session.query(Event).first()
    assert stored is not None
    assert stored == e
    assert stored.event == EventType.on
    assert stored.event_description == 'test description'
def _render(self, district_abbrev, year=None, explicit_year=False):
    """Render the district-details page for one district/year.

    Aborts with 404 when the district has no events in the given year.
    Written in Python 2 (tuple-unpacking lambda below).
    """
    district_type = DistrictType.abbrevs[district_abbrev]

    event_keys = Event.query(Event.year == year, Event.event_district_enum == district_type).fetch(None, keys_only=True)
    if not event_keys:
        self.abort(404)

    # needed for valid_years
    all_cmp_event_keys_future = Event.query(Event.event_district_enum == district_type, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)

    # needed for valid_districts
    district_cmp_keys_future = Event.query(Event.year == year, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)  # to compute valid_districts

    # Fire off all entity fetches asynchronously before any blocking work.
    event_futures = ndb.get_multi_async(event_keys)
    event_team_keys_future = EventTeam.query(EventTeam.event.IN(event_keys)).fetch_async(None, keys_only=True)

    if year == 2014:  # TODO: only 2014 has accurate rankings calculations
        # EventTeam ids are "<event>_<team>"; take the team halves, deduped.
        team_futures = ndb.get_multi_async(set([ndb.Key(Team, et_key.id().split('_')[1]) for et_key in event_team_keys_future.get_result()]))

    events = [event_future.get_result() for event_future in event_futures]
    EventHelper.sort_events(events)

    district_cmp_futures = ndb.get_multi_async(district_cmp_keys_future.get_result())

    if year == 2014:  # TODO: only 2014 has accurate rankings calculations
        team_totals = DistrictHelper.calculate_rankings(events, team_futures, year)
    else:
        team_totals = None

    # Collect (name, abbrev) pairs for every district that held a
    # championship this year.
    valid_districts = set()
    for district_cmp_future in district_cmp_futures:
        district_cmp = district_cmp_future.get_result()
        cmp_dis_type = district_cmp.event_district_enum
        if cmp_dis_type is None:
            logging.warning("District event {} has unknown district type!".format(district_cmp.key.id()))
        else:
            valid_districts.add((DistrictType.type_names[cmp_dis_type], DistrictType.type_abbrevs[cmp_dis_type]))
    valid_districts = sorted(valid_districts, key=lambda (name, _): name)

    self.template_values.update({
        'explicit_year': explicit_year,
        'year': year,
        # Championship event keys start with the four-digit year.
        'valid_years': sorted(set([int(event_key.id()[:4]) for event_key in all_cmp_event_keys_future.get_result()])),
        'valid_districts': valid_districts,
        'district_name': DistrictType.type_names[district_type],
        'district_abbrev': district_abbrev,
        'events': events,
        'team_totals': team_totals,
    })

    path = os.path.join(os.path.dirname(__file__), '../templates/district_details.html')
    return template.render(path, self.template_values)
def create_entry_from_params(self, params):
    """Create and persist an Event or SportsEvent from a params dict.

    params["type"] selects the entity kind ("event" or "sportsevent"); the
    optional "description" key is only passed through when present.
    Returns the stored entity, or None for an unrecognized type (matching
    the original's implicit-None behavior).

    Fixes: `type` no longer shadows the builtin, and the four near-identical
    constructor calls are collapsed into a shared kwargs dict.
    """
    entry_type = params["type"]

    # Fields common to both entity kinds; "description" is optional.
    common = {
        "title": params["title"],
        "start_date": params["start_date"],
        "end_date": params["end_date"],
    }
    if "description" in params:
        common["description"] = params["description"]

    if entry_type == "event":
        new_event = Event(**common)
        new_event.put()
        return new_event

    if entry_type == "sportsevent":
        new_sports_event = SportsEvent(
            league=params["league"],
            season=params["season"],
            home_team=params["home_team"],
            away_team=params["away_team"],
            teams=[params["home_team"].key(), params["away_team"].key()],
            completed=params["completed"],
            cancelled=params["cancelled"],
            home_team_score=params["home_team_score"],
            away_team_score=params["away_team_score"],
            ref_id=params["ref_id"],
            game_kind=params["game_kind"],
            **common)
        new_sports_event.put()
        return new_sports_event
class TestEventStatsApiController(unittest2.TestCase):
    """Tests ApiEventStatsController's /<event_key> stats endpoint."""

    def setUp(self):
        app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventStatsController, methods=['GET'])], debug=True)
        self.testapp = webtest.TestApp(app)

        # GAE testbed stubs required by the handler.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests
        self.testbed.init_taskqueue_stub(root_path=".")

        # OPR/DPR/CCWM stats keyed by team number, as stored on EventDetails.
        self.matchstats = {
            "dprs": {"971": 10.52178695299036, "114": 23.7313645955704, "115": 29.559784481082044},
            "oprs": {"971": 91.42946669932006, "114": 59.27751047482864, "115": 13.285278757495144},
            "ccwms": {"971": 80.90767974632955, "114": 35.54614587925829, "115": -16.27450572358693},
        }

        self.event = Event(
            id="2010sc",
            name="Palmetto Regional",
            event_type_enum=EventType.REGIONAL,
            short_name="Palmetto",
            event_short="sc",
            year=2010,
            end_date=datetime(2010, 03, 27),
            official=True,
            city="Clemson",
            state_prov="SC",
            country="USA",
            start_date=datetime(2010, 03, 24)
        )
        self.event.put()

        self.event_details = EventDetails(
            id=self.event.key.id(),
            matchstats=self.matchstats
        )
        self.event_details.put()

    def tearDown(self):
        self.testbed.deactivate()

    def testEventStatsApi(self):
        # The endpoint should echo back exactly the stored matchstats dict.
        response = self.testapp.get('/2010sc', headers={"X-TBA-App-Id": "tba-tests:event-controller-test:v01"})
        matchstats = json.loads(response.body)
        self.assertEqual(self.matchstats, matchstats)
def _ids_and_events(cls, suggestion):
    """Resolve a suggestion into (id, event, author, existing auth pairs, suggestion)."""
    event_key = suggestion.contents['event_key']
    author = suggestion.author.get()

    # Auth entries already granting write access to this event, paired with
    # their owners (None when an entry has no owner).
    auths = ApiAuthAccess.query(ApiAuthAccess.event_list == ndb.Key(Event, event_key))
    owners = [auth.owner.get() if auth.owner else None for auth in auths]

    return suggestion.key.id(), Event.get_by_id(event_key), author, zip(auths, owners), suggestion
class TestFMSAPIEventParser(unittest2.TestCase):
    """Exercises the FMS API parsers (awards, hybrid schedule, alliances,
    rankings) against canned staging JSON fixtures under test_data/fms_api/."""

    def setUp(self):
        # Datastore + memcache stubs are enough for the parsers.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests
        # Event the awards parser attaches results to.
        self.event = Event(
            id="2015waamv",
            end_date=datetime.datetime(2015, 4, 2, 0, 0),
            event_short="waamv",
            event_type_enum=EventType.REGIONAL,
            district_key=None,
            first_eid="13467",
            name="PNW District - Auburn Mountainview Event",
            start_date=datetime.datetime(2015, 3, 31, 0, 0),
            year=2015,
            timezone_id='America/Los_Angeles'
        )
        self.event.put()

    def tearDown(self):
        self.testbed.deactivate()

    def test_parseAwards(self):
        with open('test_data/fms_api/2015waamv_staging_awards.json', 'r') as f:
            awards = FMSAPIAwardsParser(self.event).parse(json.loads(f.read()))
        self.assertEqual(len(awards), 5)
        for award in awards:
            if award.key.id() == '2015waamv_3':
                # Individual (non-team) recipient.
                self.assertEqual(award.name_str, 'Woodie Flowers Award')
                self.assertEqual(award.award_type_enum, 3)
                self.assertTrue({'team_number': None, 'awardee': 'Bob'} in award.recipient_list)
            elif award.key.id() == '2015waamv_17':
                # Team-only recipient.
                self.assertEqual(award.name_str, 'Quality Award sponsored by Motorola')
                self.assertEqual(award.award_type_enum, 17)
                self.assertTrue({'team_number': 1318, 'awardee': None} in award.recipient_list)
            elif award.key.id() == '2015waamv_4':
                # Two recipients, each with both a team number and a name.
                self.assertEqual(award.name_str, 'FIRST Dean\'s List Award')
                self.assertEqual(award.award_type_enum, 4)
                self.assertTrue({'team_number': 123, 'awardee': 'Person Name 1'} in award.recipient_list)
                self.assertTrue({'team_number': 321, 'awardee': 'Person Name 2'} in award.recipient_list)

    def test_parseMatches(self):
        with open('test_data/fms_api/2015waamv_staging_matches.json', 'r') as f:
            matches, _ = FMSAPIHybridScheduleParser(2015, 'waamv').parse(json.loads(f.read()))
        self.assertEqual(len(matches), 64)
        matches = sorted(matches, key=lambda m: m.play_order)
        # Test played match
        match = matches[0]
        self.assertEqual(match.comp_level, "qm")
        self.assertEqual(match.set_number, 1)
        self.assertEqual(match.match_number, 1)
        self.assertEqual(match.team_key_names, [u'frc4131', u'frc4469', u'frc3663', u'frc3684', u'frc5295', u'frc2976'])
        self.assertEqual(match.alliances_json, """{"blue": {"dqs": [], "surrogates": [], "score": 30, "teams": ["frc4131", "frc4469", "frc3663"]}, "red": {"dqs": [], "surrogates": [], "score": 18, "teams": ["frc3684", "frc5295", "frc2976"]}}""")
        self.assertEqual(match.time, datetime.datetime(2015, 2, 27, 0, 0))
        self.assertEqual(match.actual_time, datetime.datetime(2015, 2, 27, 0, 0))
        # Test unplayed match: null scores, no actual_time yet.
        match = matches[11]
        self.assertEqual(match.comp_level, "qm")
        self.assertEqual(match.set_number, 1)
        self.assertEqual(match.match_number, 12)
        self.assertEqual(match.team_key_names, [u'frc3663', u'frc5295', u'frc2907', u'frc2046', u'frc3218', u'frc2412'])
        self.assertEqual(match.alliances_json, """{"blue": {"dqs": [], "surrogates": [], "score": null, "teams": ["frc3663", "frc5295", "frc2907"]}, "red": {"dqs": [], "surrogates": [], "score": null, "teams": ["frc2046", "frc3218", "frc2412"]}}""")
        self.assertEqual(match.time, datetime.datetime(2015, 2, 27, 2, 17))
        self.assertEqual(match.actual_time, None)

    def test_parseEventAlliances(self):
        with open('test_data/fms_api/2015waamv_staging_alliances.json', 'r') as f:
            alliances = FMSAPIEventAlliancesParser().parse(json.loads(f.read()))
        # NOTE(review): Alliance 8 lists 'frc31' twice — presumably mirrors the
        # fixture data; confirm against the staging JSON.
        self.assertEqual(alliances, [{'declines': [], 'picks': ['frc1', 'frc2', 'frc3'], 'backup': None, 'name': 'Alliance 1'},
                                     {'declines': [], 'picks': ['frc5', 'frc6', 'frc7', 'frc8'], 'backup': None, 'name': 'Alliance 2'},
                                     {'declines': [], 'picks': ['frc9', 'frc10', 'frc11', 'frc12'], 'backup': None, 'name': 'Alliance 3'},
                                     {'declines': [], 'picks': ['frc13', 'frc14', 'frc15', 'frc16'], 'backup': None, 'name': 'Alliance 4'},
                                     {'declines': [], 'picks': ['frc17', 'frc18', 'frc19', 'frc20'], 'backup': None, 'name': 'Alliance 5'},
                                     {'declines': [], 'picks': ['frc21', 'frc22', 'frc23', 'frc24'], 'backup': None, 'name': 'Alliance 6'},
                                     {'declines': [], 'picks': ['frc25', 'frc26', 'frc27', 'frc28'], 'backup': None, 'name': 'Alliance 7'},
                                     {'declines': [], 'picks': ['frc29', 'frc30', 'frc31', 'frc31'], 'backup': None, 'name': 'Alliance 8'}])

    def test_parseEventRankings(self):
        with open('test_data/fms_api/2015waamv_staging_rankings.json', 'r') as f:
            rankings = FMSAPIEventRankingsParser(2015).parse(json.loads(f.read()))
        # First row is the 2015-format header; the rest are per-team rows.
        self.assertEqual(rankings, [['Rank', 'Team', 'Qual Avg', 'Auto', 'Container', 'Coopertition', 'Litter', 'Tote', 'Played'],
                                    [1, 2906, 76, 6, 48, 80, 6, 12, 2], [2, 4726, 74, 6, 48, 80, 6, 8, 2],
                                    [3, 2929, 70, 6, 48, 80, 6, 0, 2], [4, 2907, 59, 14, 0, 40, 0, 64, 2],
                                    [5, 2046, 58, 14, 24, 20, 4, 60, 2], [6, 1294, 54, 14, 0, 40, 0, 60, 2],
                                    [7, 360, 51, 0, 48, 20, 12, 22, 2], [8, 3218, 50, 18, 12, 40, 4, 26, 2],
                                    [9, 3237, 48, 18, 12, 0, 4, 14, 1], [10, 3781, 46, 46, 20, 20, 6, 0, 1],
                                    [11, 3393, 46, 46, 20, 20, 6, 0, 1], [12, 1983, 46, 14, 0, 0, 4, 80, 2],
                                    [13, 4579, 44, 0, 24, 60, 4, 0, 2], [14, 3220, 44, 18, 12, 40, 4, 14, 2],
                                    [15, 3049, 44, 0, 0, 40, 4, 0, 1], [16, 5295, 43, 22, 0, 0, 0, 70, 2],
                                    [17, 4131, 43, 14, 28, 0, 6, 38, 2], [18, 4911, 38, 0, 0, 40, 0, 36, 2],
                                    [19, 2605, 36, 0, 28, 0, 10, 34, 2], [20, 3586, 34, 20, 0, 0, 0, 60, 2],
                                    [21, 2976, 33, 8, 0, 40, 0, 18, 2], [22, 3684, 29, 8, 0, 40, 0, 10, 2],
                                    [23, 3223, 28, 0, 0, 40, 4, 12, 2], [24, 3588, 27, 0, 0, 0, 4, 56, 2],
                                    [25, 4450, 24, 0, 24, 20, 4, 0, 2], [26, 2927, 22, 0, 0, 40, 4, 0, 2],
                                    [27, 2412, 22, 0, 0, 40, 4, 0, 2], [28, 3221, 21, 0, 0, 0, 8, 40, 2],
                                    [29, 3663, 15, 14, 0, 0, 0, 16, 2], [30, 4469, 15, 14, 0, 0, 0, 16, 2],
                                    [31, 2557, 15, 6, 0, 0, 0, 36, 2], [32, 1318, 1, 6, 0, 0, 0, 8, 2]])

    def test_parse2017Awards(self):
        # Repoints self.event at the 2017 Einstein finals for this test only.
        self.event = Event(
            id="2017cmpmo",
            end_date=datetime.datetime(2017, 4, 29, 0, 0),
            event_short="cmpmo",
            event_type_enum=EventType.CMP_FINALS,
            district_key=None,
            first_eid="22465",
            name="Einstein Field (St. Louis)",
            start_date=datetime.datetime(2017, 4, 29, 0, 0),
            year=2017,
            timezone_id='America/Chicago'
        )
        self.event.put()
        with open('test_data/fms_api/2017cmpmo_awards.json', 'r') as f:
            awards = FMSAPIAwardsParser(self.event).parse(json.loads(f.read()))
        self.assertEqual(len(awards), 6)
        for award in awards:
            if award.key.id() == '2017cmpmo_0':
                # Winner only — finalists must NOT appear on the winner award.
                self.assertEqual(award.name_str, 'Chairman\'s Award')
                self.assertEqual(award.award_type_enum, 0)
                self.assertFalse({'team_number': 2169, 'awardee': None} in award.recipient_list)
                self.assertFalse({'team_number': 1885, 'awardee': None} in award.recipient_list)
                self.assertTrue({'team_number': 2614, 'awardee': None} in award.recipient_list)
            elif award.key.id() == '2017cmpmo_69':
                # Finalists only — the winner must NOT appear here.
                self.assertEqual(award.name_str, 'Chairman\'s Award Finalist')
                self.assertEqual(award.award_type_enum, 69)
                self.assertTrue({'team_number': 2169, 'awardee': None} in award.recipient_list)
                self.assertTrue({'team_number': 1885, 'awardee': None} in award.recipient_list)
                self.assertFalse({'team_number': 2614, 'awardee': None} in award.recipient_list)
def get_events_async():
    """Tasklet: fetch every Event, newest year first, then by name."""
    ordered_query = Event.query().order(-Event.year).order(Event.name)
    keys = yield ordered_query.fetch_async(keys_only=True)
    fetched = yield ndb.get_multi_async(keys)
    raise ndb.Return(fetched)
def _render(self, event_key):
    """Build template values for the event detail page and render it.

    Aborts with 404 for an unknown event key. Shortens the cache expiration
    while the event is live.
    """
    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)
    event.prepAwardsMatchesTeams()
    awards = AwardHelper.organizeAwards(event.awards)
    cleaned_matches = MatchHelper.deleteInvalidMatches(event.matches)
    matches = MatchHelper.organizeMatches(cleaned_matches)
    teams = TeamHelper.sortTeams(event.teams)
    # Split the team list into two columns; odd counts put the extra team
    # in the first column.  NOTE: py2 integer division — `/` floors here.
    num_teams = len(teams)
    middle_value = num_teams / 2
    if num_teams % 2 != 0:
        middle_value += 1
    teams_a, teams_b = teams[:middle_value], teams[middle_value:]
    # (team, opr) pairs, empty when no matchstats/oprs are available.
    oprs = [i for i in event.matchstats['oprs'].items()] if (event.matchstats is not None and 'oprs' in event.matchstats) else []
    oprs = sorted(oprs, key=lambda t: t[1], reverse=True)  # sort by OPR
    oprs = oprs[:15]  # get the top 15 OPRs
    if event.now:
        matches_recent = MatchHelper.recentMatches(cleaned_matches)
        matches_upcoming = MatchHelper.upcomingMatches(cleaned_matches)
    else:
        matches_recent = None
        matches_upcoming = None
    bracket_table = MatchHelper.generateBracket(matches, event.alliance_selections)
    is_2015_playoff = EventHelper.is_2015_playoff(event_key)
    if is_2015_playoff:
        # 2015 had average-score playoff advancement instead of a bracket,
        # so drop the qf/sf bracket entries.
        playoff_advancement = MatchHelper.generatePlayoffAdvancement2015(matches, event.alliance_selections)
        for comp_level in ['qf', 'sf']:
            if comp_level in bracket_table:
                del bracket_table[comp_level]
    else:
        playoff_advancement = None
    district_points_sorted = None
    if event.district_points:
        # py2-only tuple-parameter lambda; sorts teams by descending total.
        district_points_sorted = sorted(event.district_points['points'].items(), key=lambda (team, points): -points['total'])
    event_insights = EventInsightsHelper.calculate_event_insights(cleaned_matches, event.year)
    self.template_values.update({
        "event": event,
        "matches": matches,
        "matches_recent": matches_recent,
        "matches_upcoming": matches_upcoming,
        "awards": awards,
        "teams_a": teams_a,
        "teams_b": teams_b,
        "num_teams": num_teams,
        "oprs": oprs,
        "bracket_table": bracket_table,
        "playoff_advancement": playoff_advancement,
        "district_points_sorted": district_points_sorted,
        "is_2015_playoff": is_2015_playoff,
        "event_insights": event_insights
    })
    # Live events change fast — cache for less time.
    if event.within_a_day:
        self._cache_expiration = self.SHORT_CACHE_EXPIRATION
    return jinja2_engine.render('event_details.html', self.template_values)
def get(self, event_key):
    """Restore an event's alliances, awards, matches, and rankings from CSV
    backups fetched over URL. Each section is best-effort: a non-200 fetch
    is logged and skipped without aborting the rest.

    Disabled entirely in the prod environment.
    """
    if tba_config.CONFIG["env"] == "prod":  # disable in prod for now
        logging.error("Tried to restore {} from CSV in prod! No can do.".format(event_key))
        return
    event = Event.get_by_id(event_key)

    # alliances
    result = urlfetch.fetch(self.ALLIANCES_URL.format(event.year, event_key, event_key))
    if result.status_code != 200:
        # NOTE(review): "retreive" typo is preserved — it's a runtime log string.
        logging.warning('Unable to retreive url: ' + (self.ALLIANCES_URL.format(event.year, event_key, event_key)))
    else:
        # The parser expects bare team numbers, so strip the 'frc' prefix.
        data = result.content.replace('frc', '')
        alliance_selections = CSVAllianceSelectionsParser.parse(data)
        event_details = EventDetails(
            id=event_key,
            alliance_selections=alliance_selections
        )
        EventDetailsManipulator.createOrUpdate(event_details)

    # awards
    result = urlfetch.fetch(self.AWARDS_URL.format(event.year, event_key, event_key))
    if result.status_code != 200:
        logging.warning('Unable to retreive url: ' + (self.AWARDS_URL.format(event.year, event_key, event_key)))
    else:
        # convert into expected input format: re-write each CSV row as
        # (year, event_short, award name, team number, awardee).
        data = StringIO.StringIO()
        writer = csv.writer(data, delimiter=',')
        for row in csv.reader(StringIO.StringIO(result.content), delimiter=','):
            writer.writerow([event.year, event.event_short, row[1], row[2].replace('frc', ''), row[3]])
        awards = []
        for award in CSVAwardsParser.parse(data.getvalue()):
            awards.append(Award(
                id=Award.render_key_name(event.key_name, award['award_type_enum']),
                name_str=award['name_str'],
                award_type_enum=award['award_type_enum'],
                year=event.year,
                event=event.key,
                event_type_enum=event.event_type_enum,
                team_list=[ndb.Key(Team, 'frc{}'.format(team_number)) for team_number in award['team_number_list']],
                recipient_json_list=award['recipient_json_list']
            ))
        AwardManipulator.createOrUpdate(awards)

    # matches
    result = urlfetch.fetch(self.MATCHES_URL.format(event.year, event_key, event_key))
    if result.status_code != 200:
        logging.warning('Unable to retreive url: ' + (self.MATCHES_URL.format(event.year, event_key, event_key)))
    else:
        # Strip 'frc' prefixes and the '<event_key>_' match-key prefix.
        data = result.content.replace('frc', '').replace('{}_'.format(event_key), '')
        match_dicts, _ = OffseasonMatchesParser.parse(data)
        matches = [
            Match(
                id=Match.renderKeyName(
                    event.key.id(),
                    match.get("comp_level", None),
                    match.get("set_number", 0),
                    match.get("match_number", 0)),
                event=event.key,
                year=event.year,
                set_number=match.get("set_number", 0),
                match_number=match.get("match_number", 0),
                comp_level=match.get("comp_level", None),
                team_key_names=match.get("team_key_names", None),
                alliances_json=match.get("alliances_json", None)
            )
            for match in match_dicts]
        MatchManipulator.createOrUpdate(matches)

    # rankings
    result = urlfetch.fetch(self.RANKINGS_URL.format(event.year, event_key, event_key))
    if result.status_code != 200:
        logging.warning('Unable to retreive url: ' + (self.RANKINGS_URL.format(event.year, event_key, event_key)))
    else:
        # convert into expected input format: list of CSV rows as-is.
        rankings = list(csv.reader(StringIO.StringIO(result.content), delimiter=','))
        event_details = EventDetails(
            id=event_key,
            rankings=rankings
        )
        EventDetailsManipulator.createOrUpdate(event_details)

    self.response.out.write("Done restoring {}!".format(event_key))
def _query_async(self):
    """Tasklet: resolve to every Event whose year matches the first query arg."""
    target_year = self._query_args[0]
    matching_events = yield Event.query(Event.year == target_year).fetch_async()
    raise ndb.Return(matching_events)
def setUp(self):
    """Build two district championship events (one in 2016, one in the
    current year) plus empty prediction EventDetails for both, behind the
    event list/detail routes."""
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    app = webapp2.WSGIApplication([
        RedirectRoute(r'/event/<event_key>', EventDetail, 'event-detail'),
        RedirectRoute(r'/event/<event_key>/insights', EventInsights, 'event-insights'),
        RedirectRoute(r'/events/<year:[0-9]+>', EventList, 'event-list-year'),
        RedirectRoute(r'/events', EventList, 'event-list'),
    ])
    self.testapp = webtest.TestApp(app)
    self.district = District(
        id='2016ne',
        abbreviation='ne',
        year=2016,
        display_name='New England'
    )
    self.district.put()
    # NOTE: `03` literals are Python 2 int syntax — this file is py2-only.
    self.event1 = Event(
        id="2016necmp",
        name="New England District Championship",
        event_type_enum=EventType.DISTRICT_CMP,
        district_key=ndb.Key(District, '2016ne'),
        short_name="New England",
        event_short="necmp",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=True,
        city='Hartford',
        state_prov='CT',
        country='USA',
        venue="Some Venue",
        venue_address="Some Venue, Hartford, CT, USA",
        timezone_id="America/New_York",
        start_date=datetime(2016, 03, 24),
        webcast_json="[{\"type\": \"twitch\", \"channel\": \"frcgamesense\"}]",
        website="http://www.firstsv.org",
    )
    self.event1.put()
    # To test that /events defaults to current year
    this_year = datetime.now().year
    self.event2 = Event(
        id="{}necmp".format(this_year),
        name="New England District Championship",
        event_type_enum=EventType.DISTRICT_CMP,
        district_key=ndb.Key(District, '2016ne'),
        short_name="New England",
        event_short="necmp",
        year=this_year,
        end_date=datetime(this_year, 03, 27),
        official=True,
        city='Hartford',
        state_prov='CT',
        country='USA',
        venue="Some Venue",
        venue_address="Some Venue, Hartford, CT, USA",
        timezone_id="America/New_York",
        start_date=datetime(this_year, 03, 24),
        webcast_json="[{\"type\": \"twitch\", \"channel\": \"frcgamesense\"}]",
        website="http://www.firstsv.org",
    )
    self.event2.put()
    # Empty prediction shells so the detail pages have something to render.
    self.event1_details = EventDetails(id=self.event1.key.id(), predictions={
        "ranking_prediction_stats": {'qual': None, 'playoff': None},
        "match_predictions": {'qual': None, 'playoff': None},
        "ranking_predictions": None,
        "match_prediction_stats": {'qual': None, 'playoff': None}
    })
    self.event1_details.put()
    self.event2_details = EventDetails(id=self.event2.key.id(), predictions={
        "ranking_prediction_stats": {'qual': None, 'playoff': None},
        "match_predictions": {'qual': None, 'playoff': None},
        "ranking_predictions": None,
        "match_prediction_stats": {'qual': None, 'playoff': None}
    })
    self.event2_details.put()
class TestSuggestEventWebcastController(unittest2.TestCase):
    """Tests the match video suggestion review flow: accept, accept with a
    re-targeted match key, reject, and permission/login gating.

    NOTE(review): the class name says 'Webcast' but every test exercises
    SuggestMatchVideoReviewController — confirm whether the name is stale.
    """

    def loginUser(self):
        # Signs in a non-admin stub user and ensures a matching Account exists.
        self.testbed.setup_env(user_email="*****@*****.**", user_id="123", user_is_admin='0', overwrite=True)
        self.account = Account.get_or_insert("123", email="*****@*****.**", registered=True)

    def givePermission(self):
        self.account.permissions.append(AccountPermissions.REVIEW_MEDIA)
        self.account.put()

    def createSuggestion(self):
        # Creates a pending YouTube video suggestion for 2016necmp_f1m1 and
        # returns its key name.
        status = SuggestionCreator.createMatchVideoYouTubeSuggestion(
            self.account.key, "H-54KMwMKY0", "2016necmp_f1m1")
        self.assertEqual(status, 'success')
        return Suggestion.render_media_key_name(2016, 'match', '2016necmp_f1m1', 'youtube', 'H-54KMwMKY0')

    def setUp(self):
        # probability=1 forces immediate datastore consistency.
        self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
            probability=1)
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
        self.testbed.init_memcache_stub()
        self.testbed.init_user_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_taskqueue_stub(_all_queues_valid=True)
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests
        app = webapp2.WSGIApplication([
            RedirectRoute(r'/suggest/match/video/review', SuggestMatchVideoReviewController, 'suggest-video', strict_slash=True),
        ], debug=True)
        self.testapp = webtest.TestApp(app)
        # NOTE: `03` literals are Python 2 int syntax — this file is py2-only.
        self.event = Event(
            id="2016necmp",
            name="New England District Championship",
            event_type_enum=EventType.DISTRICT_CMP,
            event_district_enum=DistrictType.NEW_ENGLAND,
            short_name="New England",
            event_short="necmp",
            year=2016,
            end_date=datetime(2016, 03, 27),
            official=False,
            city='Hartford',
            state_prov='CT',
            country='USA',
            venue="Some Venue",
            venue_address="Some Venue, Hartford, CT, USA",
            timezone_id="America/New_York",
            start_date=datetime(2016, 03, 24),
            webcast_json="",
            website="http://www.firstsv.org",
        )
        self.event.put()
        # Two finals matches; suggestions target f1m1 and may be re-keyed to f1m2.
        self.match = Match(id="2016necmp_f1m1",
                           event=ndb.Key(Event, "2016necmp"),
                           year=2016,
                           comp_level="f",
                           set_number=1,
                           match_number=1,
                           team_key_names=['frc846', 'frc2135', 'frc971', 'frc254', 'frc1678', 'frc973'],
                           time=datetime.fromtimestamp(1409527874),
                           time_string="4:31 PM",
                           tba_videos=[],
                           alliances_json='{\ "blue": {\ "score": 270,\ "teams": [\ "frc846",\ "frc2135",\ "frc971"]},\ "red": {\ "score": 310,\ "teams": [\ "frc254",\ "frc1678",\ "frc973"]}}',
                           score_breakdown_json='{\ "blue": {\ "auto": 70,\ "teleop_goal+foul": 40,\ "assist": 120,\ "truss+catch": 40\ },"red": {\ "auto": 70,\ "teleop_goal+foul": 50,\ "assist": 150,\ "truss+catch": 40}}')
        self.match.put()
        self.match2 = Match(id="2016necmp_f1m2",
                            event=ndb.Key(Event, "2016necmp"),
                            year=2016,
                            comp_level="f",
                            set_number=1,
                            match_number=2,
                            team_key_names=['frc846', 'frc2135', 'frc971', 'frc254', 'frc1678', 'frc973'],
                            time=datetime.fromtimestamp(1409527874),
                            time_string="4:31 PM",
                            tba_videos=[],
                            alliances_json='{\ "blue": {\ "score": 270,\ "teams": [\ "frc846",\ "frc2135",\ "frc971"]},\ "red": {\ "score": 310,\ "teams": [\ "frc254",\ "frc1678",\ "frc973"]}}',
                            score_breakdown_json='{\ "blue": {\ "auto": 70,\ "teleop_goal+foul": 40,\ "assist": 120,\ "truss+catch": 40\ },"red": {\ "auto": 70,\ "teleop_goal+foul": 50,\ "assist": 150,\ "truss+catch": 40}}')
        self.match2.put()

    def tearDown(self):
        self.testbed.deactivate()

    def getSuggestionForm(self):
        # Loads the review page and returns its 'review_videos' form.
        response = self.testapp.get('/suggest/match/video/review')
        self.assertEqual(response.status_int, 200)
        form = response.forms.get('review_videos', None)
        self.assertIsNotNone(form)
        return form

    def testLogInRedirect(self):
        response = self.testapp.get('/suggest/match/video/review', status='3*')
        response = response.follow(expect_errors=True)
        self.assertTrue(
            response.request.path.startswith("/account/login_required"))

    def testNoPermissions(self):
        # Logged in but lacking REVIEW_MEDIA — bounced back to '/'.
        self.loginUser()
        response = self.testapp.get('/suggest/match/video/review', status='3*')
        response = response.follow(expect_errors=True)
        self.assertEqual(response.request.path, '/')

    def testNothingToReview(self):
        self.loginUser()
        self.givePermission()
        response = self.testapp.get('/suggest/match/video/review')
        self.assertEqual(response.status_int, 200)

    def testAcceptSuggestion(self):
        self.loginUser()
        self.givePermission()
        suggestion_id = self.createSuggestion()
        form = self.getSuggestionForm()
        form.set('accept_keys[]', suggestion_id)
        response = form.submit().follow()
        self.assertEqual(response.status_int, 200)
        # Make sure we mark the Suggestion as REVIEWED
        suggestion = Suggestion.get_by_id(suggestion_id)
        self.assertIsNotNone(suggestion)
        self.assertEqual(suggestion.review_state, Suggestion.REVIEW_ACCEPTED)
        # Make sure the video gets associated
        match = Match.get_by_id(self.match.key_name)
        self.assertIsNotNone(match)
        self.assertIsNotNone(match.youtube_videos)
        self.assertTrue('H-54KMwMKY0' in match.youtube_videos)

    def testAcceptNewKey(self):
        # Reviewer re-targets the suggestion from f1m1 to f1m2 before accepting.
        self.loginUser()
        self.givePermission()
        suggestion_id = self.createSuggestion()
        form = self.getSuggestionForm()
        form.set('accept_keys[]', suggestion_id)
        form.set('key-{}'.format(suggestion_id), '2016necmp_f1m2')
        response = form.submit().follow()
        self.assertEqual(response.status_int, 200)
        # Make sure we mark the Suggestion as REVIEWED
        suggestion = Suggestion.get_by_id(suggestion_id)
        self.assertIsNotNone(suggestion)
        self.assertEqual(suggestion.review_state, Suggestion.REVIEW_ACCEPTED)
        # Make sure the video gets associated
        match = Match.get_by_id(self.match2.key_name)
        self.assertIsNotNone(match)
        self.assertIsNotNone(match.youtube_videos)
        self.assertTrue('H-54KMwMKY0' in match.youtube_videos)
        # Make sure we don't add it to the first match
        match = Match.get_by_id(self.match.key_name)
        self.assertIsNotNone(match)
        self.assertIsNotNone(match.youtube_videos)
        self.assertFalse('H-54KMwMKY0' in match.youtube_videos)

    def testAcceptBadKey(self):
        self.loginUser()
        self.givePermission()
        suggestion_id = self.createSuggestion()
        form = self.getSuggestionForm()
        form.set('accept_keys[]', suggestion_id)
        form.set('key-{}'.format(suggestion_id), '2016necmp_f1m3')  # This match doesn't exist
        response = form.submit().follow()
        self.assertEqual(response.status_int, 200)
        # Make sure we don't mark the Suggestion as REVIEWED
        suggestion = Suggestion.get_by_id(suggestion_id)
        self.assertIsNotNone(suggestion)
        self.assertEqual(suggestion.review_state, Suggestion.REVIEW_PENDING)
        # Make sure the video doesn't get associated
        match = Match.get_by_id(self.match.key_name)
        self.assertIsNotNone(match)
        self.assertIsNotNone(match.youtube_videos)
        self.assertFalse('H-54KMwMKY0' in match.youtube_videos)

    def testRejectSuggestion(self):
        self.loginUser()
        self.givePermission()
        suggestion_id = self.createSuggestion()
        form = self.getSuggestionForm()
        form.set('reject_keys[]', suggestion_id)
        response = form.submit().follow()
        self.assertEqual(response.status_int, 200)
        # Make sure we mark the Suggestion as REVIEWED
        suggestion = Suggestion.get_by_id(suggestion_id)
        self.assertIsNotNone(suggestion)
        self.assertEqual(suggestion.review_state, Suggestion.REVIEW_REJECTED)
        # Make sure the video gets associated
        match = Match.get_by_id(self.match.key_name)
        self.assertIsNotNone(match)
        self.assertFalse(match.youtube_videos)
def get(self, event_key):
    """Admin event-detail page: event data, write auth keys, medias,
    registration-hack flags, and pre-rendered bracket/advancement HTML.

    Aborts with 404 for an unknown event key.
    """
    self._require_admin()
    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)
    event.prepAwardsMatchesTeams()
    reg_sitevar = Sitevar.get_by_id("cmp_registration_hacks")
    # API keys granted write access to this event.
    api_keys = ApiAuthAccess.query(
        ApiAuthAccess.event_list == ndb.Key(Event, event_key)).fetch()
    event_medias = Media.query(Media.references == event.key).fetch(500)
    playoff_template = PlayoffAdvancementHelper.getPlayoffTemplate(event)
    elim_bracket_html = jinja2_engine.render(
        "bracket_partials/bracket_table.html", {
            "bracket_table": event.playoff_bracket,
            "event": event
        })
    # Only render the advancement partial when the event has a playoff
    # template; otherwise show the literal string "None".
    advancement_html = jinja2_engine.render(
        "playoff_partials/{}.html".format(playoff_template), {
            "event": event,
            "playoff_advancement": event.playoff_advancement,
            "playoff_advancement_tiebreakers":
                PlayoffAdvancementHelper.ROUND_ROBIN_TIEBREAKERS.get(
                    event.year),
            "bracket_table": event.playoff_bracket
        }) if playoff_template else "None"
    # NOTE(review): "event_name_override" reads reg_sitevar.contents without
    # the `reg_sitevar and ...` guard the other entries use — it would raise
    # AttributeError if the sitevar is missing; confirm the sitevar always exists.
    self.template_values.update({
        "event": event,
        "medias": event_medias,
        "cache_key":
            event_controller.EventDetail('2016nyny').cache_key.format(
                event.key_name),
        "flushed": self.request.get("flushed"),
        "playoff_types": PlayoffType.type_names,
        "write_auths": api_keys,
        "event_sync_disable":
            reg_sitevar and event_key in reg_sitevar.contents.get('divisions_to_skip', []),
        "set_start_day_to_last":
            reg_sitevar and event_key in reg_sitevar.contents.get(
                'set_start_to_last_day', []),
        "skip_eventteams":
            reg_sitevar and event_key in reg_sitevar.contents.get('skip_eventteams', []),
        "event_name_override": next(
            iter(
                filter(lambda e: e.get("event") == event_key,
                       reg_sitevar.contents.get("event_name_override", []))),
            {}).get("name", ""),
        "elim_bracket_html": elim_bracket_html,
        "advancement_html": advancement_html,
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/event_details.html')
    self.response.out.write(template.render(path, self.template_values))
def setUp(self):
    """Stand up the match video review route behind stubbed GAE services,
    plus one event and two finals matches for suggestions to attach to."""
    # probability=1 forces immediate datastore consistency.
    self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
        probability=1)
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
    self.testbed.init_memcache_stub()
    self.testbed.init_user_stub()
    self.testbed.init_urlfetch_stub()
    self.testbed.init_taskqueue_stub(_all_queues_valid=True)
    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
    app = webapp2.WSGIApplication([
        RedirectRoute(r'/suggest/match/video/review', SuggestMatchVideoReviewController, 'suggest-video', strict_slash=True),
    ], debug=True)
    self.testapp = webtest.TestApp(app)
    # NOTE: `03` literals are Python 2 int syntax — this file is py2-only.
    self.event = Event(
        id="2016necmp",
        name="New England District Championship",
        event_type_enum=EventType.DISTRICT_CMP,
        event_district_enum=DistrictType.NEW_ENGLAND,
        short_name="New England",
        event_short="necmp",
        year=2016,
        end_date=datetime(2016, 03, 27),
        official=False,
        city='Hartford',
        state_prov='CT',
        country='USA',
        venue="Some Venue",
        venue_address="Some Venue, Hartford, CT, USA",
        timezone_id="America/New_York",
        start_date=datetime(2016, 03, 24),
        webcast_json="",
        website="http://www.firstsv.org",
    )
    self.event.put()
    # Two finals matches with identical rosters/scores, differing only by
    # match number, so review tests can re-target a suggestion.
    self.match = Match(id="2016necmp_f1m1",
                       event=ndb.Key(Event, "2016necmp"),
                       year=2016,
                       comp_level="f",
                       set_number=1,
                       match_number=1,
                       team_key_names=['frc846', 'frc2135', 'frc971', 'frc254', 'frc1678', 'frc973'],
                       time=datetime.fromtimestamp(1409527874),
                       time_string="4:31 PM",
                       tba_videos=[],
                       alliances_json='{\ "blue": {\ "score": 270,\ "teams": [\ "frc846",\ "frc2135",\ "frc971"]},\ "red": {\ "score": 310,\ "teams": [\ "frc254",\ "frc1678",\ "frc973"]}}',
                       score_breakdown_json='{\ "blue": {\ "auto": 70,\ "teleop_goal+foul": 40,\ "assist": 120,\ "truss+catch": 40\ },"red": {\ "auto": 70,\ "teleop_goal+foul": 50,\ "assist": 150,\ "truss+catch": 40}}')
    self.match.put()
    self.match2 = Match(id="2016necmp_f1m2",
                        event=ndb.Key(Event, "2016necmp"),
                        year=2016,
                        comp_level="f",
                        set_number=1,
                        match_number=2,
                        team_key_names=['frc846', 'frc2135', 'frc971', 'frc254', 'frc1678', 'frc973'],
                        time=datetime.fromtimestamp(1409527874),
                        time_string="4:31 PM",
                        tba_videos=[],
                        alliances_json='{\ "blue": {\ "score": 270,\ "teams": [\ "frc846",\ "frc2135",\ "frc971"]},\ "red": {\ "score": 310,\ "teams": [\ "frc254",\ "frc1678",\ "frc973"]}}',
                        score_breakdown_json='{\ "blue": {\ "auto": 70,\ "teleop_goal+foul": 40,\ "assist": 120,\ "truss+catch": 40\ },"red": {\ "auto": 70,\ "teleop_goal+foul": 50,\ "assist": 150,\ "truss+catch": 40}}')
    self.match2.put()
class TestSuggestApiWriteController(unittest2.TestCase):
    """Tests the API-write-access request form: success, missing affiliation,
    empty event key, and unknown event key."""

    def loginUser(self):
        # Signs in a non-admin stub user and ensures a matching Account exists.
        self.testbed.setup_env(user_email="*****@*****.**", user_id="123", user_is_admin='0', overwrite=True)
        Account.get_or_insert("123", email="*****@*****.**", registered=True)

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_user_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests
        app = webapp2.WSGIApplication([
            RedirectRoute(r'/request/apiwrite', SuggestApiWriteController, 'request-apiwrite', strict_slash=True),
        ], debug=True)
        self.testapp = webtest.TestApp(app)
        # Offseason event that write access can be requested for.
        # NOTE: `03` literals are Python 2 int syntax — this file is py2-only.
        self.event = Event(
            id="2016necmp",
            name="New England District Championship",
            event_type_enum=EventType.OFFSEASON,
            event_district_enum=DistrictType.NEW_ENGLAND,
            short_name="New England",
            event_short="necmp",
            year=2016,
            end_date=datetime(2016, 03, 27),
            official=False,
            city='Hartford',
            state_prov='CT',
            country='USA',
            venue="Some Venue",
            venue_address="Some Venue, Hartford, CT, USA",
            timezone_id="America/New_York",
            start_date=datetime(2016, 03, 24),
            webcast_json="[{\"type\": \"twitch\", \"channel\": \"frcgamesense\"}]",
            website="http://www.firstsv.org",
        )
        self.event.put()

    def tearDown(self):
        self.testbed.deactivate()

    def getSuggestionForm(self):
        # Loads the request page and returns its 'suggest_apiwrite' form.
        response = self.testapp.get('/request/apiwrite')
        self.assertEqual(response.status_int, 200)
        form = response.forms.get('suggest_apiwrite', None)
        self.assertIsNotNone(form)
        return form

    def testLogInRedirect(self):
        response = self.testapp.get('/request/apiwrite', status='3*')
        response = response.follow(expect_errors=True)
        self.assertTrue(
            response.request.path.startswith("/account/login_required"))

    def testSubmitEmptyForm(self):
        self.loginUser()
        form = self.getSuggestionForm()
        response = form.submit().follow()
        self.assertEqual(response.status_int, 200)
        # We should throw an error because no affiliation was set
        request = response.request
        self.assertEqual(request.GET.get('status'), 'no_affiliation')

    def testSuggestApiWrite(self):
        self.loginUser()
        form = self.getSuggestionForm()
        form['event_key'] = '2016necmp'
        form['role'] = 'Test Code'
        # Request the first two auth types offered by the form.
        form.get('auth_types', index=0).checked = True
        form.get('auth_types', index=1).checked = True
        response = form.submit().follow()
        self.assertEqual(response.status_int, 200)
        # Make sure the Suggestion gets created
        suggestion = Suggestion.query().fetch()[0]
        self.assertIsNotNone(suggestion)
        self.assertEqual(suggestion.review_state, Suggestion.REVIEW_PENDING)
        self.assertEqual(suggestion.contents['event_key'], '2016necmp')
        self.assertEqual(suggestion.contents['affiliation'], 'Test Code')
        self.assertListEqual(suggestion.contents['auth_types'], [AuthType.MATCH_VIDEO, AuthType.EVENT_TEAMS])
        # Ensure we show a success message on the page
        request = response.request
        self.assertEqual(request.GET.get('status'), 'success')

    def testNoEvent(self):
        self.loginUser()
        form = self.getSuggestionForm()
        form['event_key'] = ''
        form['role'] = 'Test Code'
        response = form.submit().follow()
        self.assertEqual(response.status_int, 200)
        request = response.request
        self.assertEqual(request.GET.get('status'), 'bad_event')

    def testNonExistentEvent(self):
        self.loginUser()
        form = self.getSuggestionForm()
        form['event_key'] = '2016foobar'
        form['role'] = 'Test Code'
        response = form.submit().follow()
        self.assertEqual(response.status_int, 200)
        request = response.request
        self.assertEqual(request.GET.get('status'), 'bad_event')
class TestEventManipulator(unittest2.TestCase):
    """Tests that merging a new Event into a stored one keeps existing
    rankings (parsed from the good fixture) and merges in new fields
    (matchstats, facebook_eid, webcast)."""

    def setUp(self):
        # Datastore/memcache stubs so Event puts/gets work in isolation.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        self.maxDiff = None

        # Parse a complete and a truncated rankings fixture; the merge is
        # expected to keep the good data rather than the bad.
        with open('test_data/usfirst_html/usfirst_event_rankings_2012ct.html',
                  'r') as f:
            good_rankings, _ = UsfirstEventRankingsParser.parse(f.read())
        with open(
                'test_data/usfirst_html/usfirst_event_rankings_2012ct_bad.html',
                'r') as f:
            bad_rankings, _ = UsfirstEventRankingsParser.parse(f.read())

        # Baseline stored event carrying the good rankings.
        self.old_event = Event(
            id="2011ct",
            end_date=datetime.datetime(2011, 4, 2, 0, 0),
            event_short="ct",
            event_type_enum=EventType.REGIONAL,
            first_eid="5561",
            name="Northeast Utilities FIRST Connecticut Regional",
            start_date=datetime.datetime(2011, 3, 31, 0, 0),
            year=2011,
            venue_address="Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA",
            website="http://www.ctfirst.org/ctr",
            rankings_json=json.dumps(good_rankings))
        # Incoming update: same event key, new matchstats/facebook/webcast,
        # but carrying the *bad* rankings payload.
        self.new_event = Event(
            id="2011ct",
            end_date=datetime.datetime(2011, 4, 2, 0, 0),
            event_short="ct",
            event_type_enum=EventType.REGIONAL,
            first_eid="5561",
            name="Northeast Utilities FIRST Connecticut Regional",
            start_date=datetime.datetime(2011, 3, 31, 0, 0),
            year=2011,
            venue_address="Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA",
            website="http://www.ctfirst.org/ctr",
            matchstats_json=json.dumps({
                'oprs': {
                    '4255': 7.4877151786460301,
                    '2643': 27.285682906835952,
                    '852': 10.452538750544525,
                    '4159': 25.820137009871139,
                    '581': 18.513816255143144
                }
            }),
            facebook_eid="7",
            webcast_json=json.dumps([{
                'type': 'ustream',
                'channel': 'foo'
            }]),
            rankings_json=json.dumps(bad_rankings))

    def tearDown(self):
        self.testbed.deactivate()

    def assertMergedEvent(self, event):
        """Assert the merged event has old fields plus the new additions,
        with rankings still equal to the good fixture's parse."""
        self.assertOldEvent(event)
        self.assertEqual(
            event.matchstats, {
                'oprs': {
                    '4255': 7.4877151786460301,
                    '2643': 27.285682906835952,
                    '852': 10.452538750544525,
                    '4159': 25.820137009871139,
                    '581': 18.513816255143144
                }
            })
        self.assertEqual(event.facebook_eid, "7")
        self.assertEqual(event.webcast[0]['type'], 'ustream')
        self.assertEqual(event.webcast[0]['channel'], 'foo')
        # Full expected rankings table (header row + 64 team rows), as
        # parsed from the good 2012ct fixture.
        self.assertEqual(event.rankings, [
            ['Rank', 'Team', 'QS', 'HP', 'BP', 'TP', 'CP', 'Record (W-L-T)', 'DQ', 'Played'],
            ['1', '2168', '32.00', '147.00', '60.00', '208.00', '14', '9-1-0', '0', '10'],
            ['2', '118', '31.00', '168.00', '90.00', '231.00', '17', '7-3-0', '0', '10'],
            ['3', '177', '30.00', '177.00', '120.00', '151.00', '14', '8-2-0', '0', '10'],
            ['4', '195', '29.00', '116.00', '70.00', '190.00', '16', '6-3-1', '0', '10'],
            ['5', '237', '28.00', '120.00', '60.00', '123.00', '14', '7-3-0', '0', '10'],
            ['6', '1071', '28.00', '115.00', '120.00', '142.00', '10', '9-1-0', '0', '10'],
            ['7', '173', '28.00', '114.00', '110.00', '108.00', '14', '7-3-0', '0', '10'],
            ['8', '1073', '28.00', '110.00', '100.00', '152.00', '11', '8-1-1', '0', '10'],
            ['9', '694', '28.00', '78.00', '100.00', '140.00', '14', '7-3-0', '0', '10'],
            ['10', '558', '27.00', '152.00', '100.00', '145.00', '13', '7-3-0', '0', '10'],
            ['11', '175', '27.00', '141.00', '160.00', '117.00', '13', '7-3-0', '0', '10'],
            ['12', '181', '26.00', '151.00', '70.00', '95.00', '14', '6-4-0', '0', '10'],
            ['13', '176', '26.00', '120.00', '60.00', '90.00', '18', '4-6-0', '0', '10'],
            ['14', '1511', '26.00', '111.00', '80.00', '164.00', '14', '6-4-0', '0', '10'],
            ['15', '126', '26.00', '108.00', '70.00', '165.00', '14', '6-4-0', '0', '10'],
            ['16', '4122', '26.00', '92.00', '100.00', '78.00', '14', '6-4-0', '0', '10'],
            ['17', '869', '25.00', '68.00', '130.00', '75.00', '12', '6-3-1', '0', '10'],
            ['18', '3464', '24.00', '135.00', '80.00', '109.00', '14', '5-5-0', '0', '10'],
            ['19', '3467', '24.00', '101.00', '80.00', '123.00', '10', '7-3-0', '0', '10'],
            ['20', '3718', '24.00', '100.00', '60.00', '106.00', '12', '6-4-0', '0', '10'],
            ['21', '3461', '24.00', '79.00', '30.00', '94.00', '14', '5-5-0', '0', '10'],
            ['22', '4055', '24.00', '78.00', '80.00', '79.00', '16', '4-6-0', '0', '10'],
            ['23', '1922', '23.00', '114.00', '110.00', '151.00', '10', '6-3-1', '0', '10'],
            ['24', '95', '22.00', '120.00', '70.00', '123.00', '14', '4-6-0', '0', '10'],
            ['25', '1991', '22.00', '113.00', '100.00', '58.00', '12', '5-5-0', '0', '10'],
            ['26', '839', '22.00', '96.00', '110.00', '136.00', '10', '6-4-0', '0', '10'],
            ['27', '1099', '21.00', '126.00', '110.00', '97.00', '8', '6-3-1', '0', '10'],
            ['28', '230', '20.00', '143.00', '80.00', '104.00', '8', '6-4-0', '0', '10'],
            ['29', '3017', '20.00', '134.00', '50.00', '88.00', '12', '4-6-0', '0', '10'],
            ['30', '2067', '20.00', '128.00', '80.00', '122.00', '10', '5-5-0', '0', '10'],
            ['31', '250', '20.00', '118.00', '40.00', '99.00', '10', '5-5-0', '0', '10'],
            ['32', '155', '20.00', '100.00', '50.00', '74.00', '12', '4-6-0', '0', '10'],
            ['33', '236', '20.00', '99.00', '20.00', '126.00', '10', '5-5-0', '0', '10'],
            ['34', '1124', '20.00', '92.00', '80.00', '109.00', '8', '6-4-0', '0', '10'],
            ['35', '3146', '20.00', '81.00', '110.00', '81.00', '6', '7-3-0', '0', '10'],
            ['36', '663', '20.00', '71.00', '90.00', '90.00', '12', '4-6-0', '0', '10'],
            ['37', '1699', '20.00', '70.00', '80.00', '139.00', '12', '4-6-0', '0', '10'],
            ['38', '1027', '20.00', '53.00', '70.00', '97.00', '12', '4-6-0', '0', '10'],
            ['39', '20', '19.00', '79.00', '70.00', '106.00', '9', '5-5-0', '0', '10'],
            ['40', '3182', '18.00', '108.00', '60.00', '147.00', '8', '5-5-0', '0', '10'],
            ['41', '229', '18.00', '97.00', '40.00', '153.00', '10', '4-6-0', '0', '10'],
            ['42', '1665', '18.00', '95.00', '120.00', '106.00', '10', '4-6-0', '0', '10'],
            ['43', '228', '18.00', '81.00', '60.00', '163.00', '10', '4-6-0', '0', '10'],
            ['44', '178', '18.00', '81.00', '50.00', '58.00', '12', '3-7-0', '0', '10'],
            ['45', '1740', '18.00', '62.00', '20.00', '99.00', '8', '5-5-0', '0', '10'],
            ['46', '3634', '18.00', '54.00', '30.00', '105.00', '10', '4-6-0', '0', '10'],
            ['47', '2791', '18.00', '53.00', '100.00', '108.00', '10', '4-6-0', '0', '10'],
            ['48', '571', '18.00', '53.00', '70.00', '109.00', '10', '4-6-0', '0', '10'],
            ['49', '2170', '17.00', '89.00', '60.00', '103.00', '9', '4-5-0', '1', '10'],
            ['50', '1493', '16.00', '150.00', '60.00', '132.00', '6', '5-5-0', '0', '10'],
            ['51', '549', '16.00', '129.00', '100.00', '91.00', '6', '5-5-0', '0', '10'],
            ['52', '743', '16.00', '70.00', '30.00', '67.00', '10', '3-7-0', '0', '10'],
            ['53', '2836', '16.00', '64.00', '80.00', '126.00', '8', '4-6-0', '0', '10'],
            ['54', '999', '14.00', '114.00', '20.00', '79.00', '10', '2-8-0', '0', '10'],
            ['55', '3525', '14.00', '109.00', '40.00', '66.00', '6', '4-6-0', '0', '10'],
            ['56', '3104', '14.00', '92.00', '20.00', '80.00', '6', '4-6-0', '0', '10'],
            ['57', '3555', '14.00', '68.00', '60.00', '68.00', '8', '3-7-0', '0', '10'],
            ['58', '4134', '13.00', '96.00', '30.00', '80.00', '6', '3-6-1', '0', '10'],
            ['59', '1559', '12.00', '110.00', '10.00', '94.00', '8', '2-8-0', '0', '10'],
            ['60', '3719', '12.00', '97.00', '60.00', '95.00', '6', '3-7-0', '0', '10'],
            ['61', '3654', '12.00', '59.00', '20.00', '57.00', '8', '2-8-0', '0', '10'],
            ['62', '2785', '12.00', '41.00', '70.00', '96.00', '8', '2-8-0', '0', '10'],
            ['63', '1880', '10.00', '57.00', '40.00', '86.00', '6', '2-8-0', '0', '10'],
            ['64', '1784', '10.00', '44.00', '40.00', '60.00', '6', '2-7-0', '1', '10']])

    def assertOldEvent(self, event):
        """Assert the fields carried by the original stored event."""
        self.assertEqual(event.key.id(), "2011ct")
        self.assertEqual(event.name,
                         "Northeast Utilities FIRST Connecticut Regional")
        self.assertEqual(event.event_type_enum, EventType.REGIONAL)
        self.assertEqual(event.start_date, datetime.datetime(2011, 3, 31, 0,
                                                             0))
        self.assertEqual(event.end_date, datetime.datetime(2011, 4, 2, 0, 0))
        self.assertEqual(event.year, 2011)
        self.assertEqual(
            event.venue_address,
            "Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA"
        )
        self.assertEqual(event.website, "http://www.ctfirst.org/ctr")
        self.assertEqual(event.event_short, "ct")

    def test_createOrUpdate(self):
        # Create, then update: the second write merges onto the first.
        EventManipulator.createOrUpdate(self.old_event)
        self.assertOldEvent(Event.get_by_id("2011ct"))
        EventManipulator.createOrUpdate(self.new_event)
        self.assertMergedEvent(Event.get_by_id("2011ct"))

    def test_findOrSpawn(self):
        # An existing stored entity is found and merged with the new one.
        self.old_event.put()
        self.assertMergedEvent(EventManipulator.findOrSpawn(self.new_event))

    def test_updateMerge(self):
        # Direct merge of new into old produces the merged event.
        self.assertMergedEvent(
            EventManipulator.updateMerge(self.new_event, self.old_event))
class TestSuggestApiWriteReviewController(unittest2.TestCase):
    """Tests for /suggest/apiwrite/review, where users holding the
    REVIEW_APIWRITE permission accept or reject API-write key requests.

    Fixes relative to the previous version:
    - test_existing_auth_keys used ``assertTrue(len(auths), 2)``, which
      passes for ANY non-empty result because the second argument of
      assertTrue is the failure *message*, not an expected value. It now
      uses ``assertEqual(len(auths), 2)``.
    - Renamed ``test_accespt_suggestion`` -> ``test_accept_suggestion``
      (typo).
    """

    def setUp(self):
        # Fully-consistent HR datastore so queries immediately see writes.
        self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
            probability=1)
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
        self.testbed.init_memcache_stub()
        self.testbed.init_user_stub()
        self.testbed.init_mail_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests

        app = webapp2.WSGIApplication([
            RedirectRoute(r'/suggest/apiwrite/review',
                          SuggestApiWriteReviewController,
                          'review-apiwrite',
                          strict_slash=True),
        ], debug=True)
        self.testapp = webtest.TestApp(app)

        # The event every suggestion in these tests targets.
        self.event = Event(
            id="2016necmp",
            name="New England District Championship",
            event_type_enum=EventType.OFFSEASON,
            event_district_enum=DistrictType.NEW_ENGLAND,
            short_name="New England",
            event_short="necmp",
            year=2016,
            end_date=datetime(2016, 3, 27),
            official=False,
            city='Hartford',
            state_prov='CT',
            country='USA',
            venue="Some Venue",
            venue_address="Some Venue, Hartford, CT, USA",
            timezone_id="America/New_York",
            start_date=datetime(2016, 3, 24),
            webcast_json="[{\"type\": \"twitch\", \"channel\": \"frcgamesense\"}]",
            website="http://www.firstsv.org")
        self.event.put()

    def tearDown(self):
        self.testbed.deactivate()

    def loginUser(self):
        """Simulate a registered, non-admin logged-in user."""
        self.testbed.setup_env(user_email="*****@*****.**",
                               user_id="123",
                               user_is_admin='0',
                               overwrite=True)
        self.account = Account.get_or_insert("123",
                                             email="*****@*****.**",
                                             registered=True)

    def givePermission(self):
        """Grant the logged-in account the REVIEW_APIWRITE permission."""
        self.account.permissions.append(AccountPermissions.REVIEW_APIWRITE)
        self.account.put()

    def createSuggestion(self):
        """Create a pending API-write suggestion and return its id."""
        status = SuggestionCreator.createApiWriteSuggestion(
            self.account.key, '2016necmp', 'Test', [AuthType.EVENT_MATCHES])
        self.assertEqual(status, 'success')
        return Suggestion.query(Suggestion.target_key == '2016necmp').fetch(
            keys_only=True)[0].id()

    def getSuggestionForm(self, suggestion_id):
        """Load the review page and return the form for one suggestion."""
        response = self.testapp.get('/suggest/apiwrite/review')
        self.assertEqual(response.status_int, 200)

        form = response.forms.get('apiwrite_review_{}'.format(suggestion_id),
                                  None)
        self.assertIsNotNone(form)
        return form

    def test_login_redirect(self):
        # Anonymous users are sent to the login-required page.
        response = self.testapp.get('/suggest/apiwrite/review', status='3*')
        response = response.follow(expect_errors=True)
        self.assertTrue(
            response.request.path.startswith("/account/login_required"))

    def test_no_permissions(self):
        # Logged-in users without REVIEW_APIWRITE are bounced to /.
        self.loginUser()
        response = self.testapp.get('/suggest/apiwrite/review', status='3*')
        response = response.follow(expect_errors=True)
        self.assertEqual(response.request.path, '/')

    def test_nothing_to_review(self):
        self.loginUser()
        self.givePermission()
        response = self.testapp.get('/suggest/apiwrite/review')
        self.assertEqual(response.status_int, 200)

        # Make sure none of the forms on the page are for suggestions
        for form_id in response.forms.keys():
            self.assertFalse(
                "{}".format(form_id).startswith('apiwrite_review_'))

    def test_accept_suggestion(self):
        self.loginUser()
        self.givePermission()
        suggestion_id = self.createSuggestion()
        form = self.getSuggestionForm(suggestion_id)
        response = form.submit('verdict', value='accept').follow()
        self.assertEqual(response.status_int, 200)

        # Make sure the ApiWrite object gets created
        auth = ApiAuthAccess.query().fetch()[0]
        self.assertIsNotNone(auth)
        self.assertEqual(auth.owner, self.account.key)
        self.assertListEqual(auth.event_list, [self.event.key])
        self.assertListEqual(auth.auth_types_enum, [AuthType.EVENT_MATCHES])
        self.assertIsNotNone(auth.secret)
        self.assertIsNotNone(auth.expiration)

        # Make sure we mark the Suggestion as REVIEWED
        suggestion = Suggestion.get_by_id(suggestion_id)
        self.assertIsNotNone(suggestion)
        self.assertEqual(suggestion.review_state, Suggestion.REVIEW_ACCEPTED)

    def test_reject_suggestion(self):
        self.loginUser()
        self.givePermission()
        suggestion_id = self.createSuggestion()
        form = self.getSuggestionForm(suggestion_id)
        response = form.submit('verdict', value='reject').follow()
        self.assertEqual(response.status_int, 200)

        # Rejecting must not create any ApiAuthAccess.
        auths = ApiAuthAccess.query().fetch()
        self.assertEqual(len(auths), 0)

        # Make sure we mark the Suggestion as REJECTED
        suggestion = Suggestion.get_by_id(suggestion_id)
        self.assertIsNotNone(suggestion)
        self.assertEqual(suggestion.review_state, Suggestion.REVIEW_REJECTED)

    def test_existing_auth_keys(self):
        self.loginUser()
        self.givePermission()

        existing_auth = ApiAuthAccess(id='tEsT_id_0',
                                      secret='321tEsTsEcReT',
                                      description='test',
                                      event_list=[ndb.Key(Event, '2016necmp')],
                                      auth_types_enum=[AuthType.EVENT_TEAMS])
        existing_auth.put()

        suggestion_id = self.createSuggestion()
        form = self.getSuggestionForm(suggestion_id)
        response = form.submit('verdict', value='accept').follow()
        self.assertEqual(response.status_int, 200)

        # Accepting adds a second auth alongside the pre-existing one.
        # (Was assertTrue(len(auths), 2), which could never fail.)
        auths = ApiAuthAccess.query().fetch()
        self.assertEqual(len(auths), 2)

    def test_accept_suggestion_with_different_auth_types(self):
        self.loginUser()
        self.givePermission()
        suggestion_id = self.createSuggestion()
        form = self.getSuggestionForm(suggestion_id)
        # The reviewer overrides the requested auth types before accepting.
        form.get('auth_types', index=0).checked = True   # MATCH_VIDEO
        form.get('auth_types', index=1).checked = True   # EVENT_TEAMS
        form.get('auth_types', index=2).checked = False  # EVENT_MATCHES
        response = form.submit('verdict', value='accept').follow()
        self.assertEqual(response.status_int, 200)

        # Make sure the ApiWrite object gets created
        auth = ApiAuthAccess.query().fetch()[0]
        self.assertIsNotNone(auth)
        self.assertEqual(auth.owner, self.account.key)
        self.assertListEqual(auth.event_list, [self.event.key])
        self.assertSetEqual(set(auth.auth_types_enum),
                            {AuthType.EVENT_TEAMS, AuthType.MATCH_VIDEO})
        self.assertIsNotNone(auth.secret)
        self.assertIsNotNone(auth.expiration)
def get(self, event_key=None):
    """Render the 'add hot matches' page.

    With no event_key, renders an event picker of events running within a
    day. With an event_key, ranks the event's unplayed, predicted matches
    by a "hotness" score and renders the top 25 for subscription.

    Fix: the hotness normalization previously divided by
    (max_hotness - min_hotness), which raises ZeroDivisionError when the
    event has exactly one predicted match or all predictions tie. The
    range is now guarded.
    """
    self._require_registration()

    if event_key is None:
        # No event chosen yet: show the picker of currently-running events.
        events = EventHelper.getEventsWithinADay()
        EventHelper.sort_events(events)
        self.template_values['events'] = events
        self.response.out.write(jinja2_engine.render('mytba_add_hot_matches_base.html', self.template_values))
        return

    event = Event.get_by_id(event_key)
    if not event:
        self.abort(404)

    # Kick off the subscription lookup while we score matches below.
    subscriptions_future = Subscription.query(
        Subscription.model_type == ModelType.MATCH,
        Subscription.notification_types == NotificationType.UPCOMING_MATCH,
        ancestor=self.user_bundle.account.key).fetch_async(projection=[Subscription.model_key])

    matches = []
    max_hotness = 0
    min_hotness = float('inf')
    if event.details and event.details.predictions and event.details.predictions['match_predictions']:
        match_predictions = dict(
            event.details.predictions['match_predictions']['qual'].items() +
            event.details.predictions['match_predictions']['playoff'].items())
        for match in event.matches:
            if not match.has_been_played and match.key.id() in match_predictions:
                prediction = match_predictions[match.key.id()]
                red_score = prediction['red']['score']
                blue_score = prediction['blue']['score']
                if red_score > blue_score:
                    winner_score = red_score
                    loser_score = blue_score
                else:
                    winner_score = blue_score
                    loser_score = red_score

                hotness = winner_score + 2.0 * loser_score  # Favor close high scoring matches
                max_hotness = max(max_hotness, hotness)
                min_hotness = min(min_hotness, hotness)
                match.hotness = hotness
                matches.append(match)

    existing_subscriptions = set()
    for sub in subscriptions_future.get_result():
        existing_subscriptions.add(sub.model_key)

    # Normalize hotness to a 0-100 scale. If every predicted match has the
    # same hotness (or there is only one), give them all the top score
    # instead of dividing by zero.
    hotness_range = max_hotness - min_hotness
    hot_matches = []
    for match in matches:
        if hotness_range:
            match.hotness = 100 * (match.hotness - min_hotness) / hotness_range
        else:
            match.hotness = 100
        match.already_subscribed = match.key.id() in existing_subscriptions
        hot_matches.append(match)
    hot_matches = sorted(hot_matches, key=lambda match: -match.hotness)
    matches_dict = {'qm': hot_matches[:25]}

    self.template_values['event'] = event
    self.template_values['matches'] = matches_dict
    self.response.out.write(jinja2_engine.render('mytba_add_hot_matches.html', self.template_values))
def post(self, event_key_id):
    """Admin action: remap team keys across an event's matches, alliance
    selections, rankings, and awards.

    Expects a POST param 'remap_teams' containing a JSON object that maps
    old team number -> new team number (both without the 'frc' prefix).
    """
    self._require_admin()
    event = Event.get_by_id(event_key_id)
    event.prepAwardsMatchesTeams()

    # Build {'frc<old>': 'frc<new>'} from the posted number->number map.
    remap_teams = {}
    for key, value in json.loads(self.request.get('remap_teams')).items():
        remap_teams['frc{}'.format(key)] = 'frc{}'.format(value)

    # Remap matches
    for match in event.matches:
        for old_team, new_team in remap_teams.items():
            # Update team key names
            # NOTE(review): new_team is always 'frc...'-prefixed here, so
            # new_team.isdigit() is always False and matching entries are
            # always deleted, never renamed. The inline comment suggests the
            # intended check was new_team[3:].isdigit() — confirm.
            # NOTE(review): del-ing from the list while enumerate()-ing it
            # shifts indices and can skip the following element — confirm.
            for i, key in enumerate(match.team_key_names):
                if key == old_team:
                    match.dirty = True
                    if new_team.isdigit():  # Only if non "B" teams
                        match.team_key_names[i] = new_team
                    else:
                        del match.team_key_names[i]
            # Update alliances
            for color in ['red', 'blue']:
                for i, key in enumerate(match.alliances[color]['teams']):
                    if key == old_team:
                        match.dirty = True
                        match.alliances[color]['teams'][i] = new_team
                        # Keep the serialized form in sync with the dict.
                        match.alliances_json = json.dumps(match.alliances)
    MatchManipulator.createOrUpdate(event.matches)

    # Remap alliance selections
    if event.alliance_selections:
        for row in event.alliance_selections:
            for choice in ['picks', 'declines']:
                for old_team, new_team in remap_teams.items():
                    for i, key in enumerate(row[choice]):
                        if key == old_team:
                            row[choice][i] = new_team

    # Remap rankings
    # Rankings store bare team numbers in column 1, so strip 'frc'.
    if event.rankings:
        for row in event.rankings:
            for old_team, new_team in remap_teams.items():
                if row[1] == old_team[3:]:
                    row[1] = new_team[3:]
    EventDetailsManipulator.createOrUpdate(event.details)

    # Remap awards
    for award in event.awards:
        for old_team, new_team in remap_teams.items():
            # Update team keys
            # NOTE(review): same always-False isdigit() and del-during-
            # enumerate concerns as the match loop above.
            for i, key in enumerate(award.team_list):
                if key.id() == old_team:
                    award.dirty = True
                    if new_team.isdigit():  # Only if non "B" teams
                        award.team_list[i] = ndb.Key(Team, new_team)
                    else:
                        del award.team_list[i]
            # Update recipient list
            for recipient in award.recipient_list:
                if str(recipient['team_number']) == old_team[3:]:
                    award.dirty = True
                    recipient['team_number'] = new_team[3:]
                    # Re-serialize recipients after mutation.
                    award.recipient_json_list = [json.dumps(r) for r in award.recipient_list]
    AwardManipulator.createOrUpdate(event.awards, auto_union=False)
    self.redirect("/admin/event/" + event.key_name)
def test_createOrUpdate(self):
    """Creating then updating an event merges the new fields into the stored entity."""
    # First write creates the event verbatim.
    EventManipulator.createOrUpdate(self.old_event)
    stored = Event.get_by_id("2011ct")
    self.assertOldEvent(stored)
    # Second write merges the update onto the existing entity.
    EventManipulator.createOrUpdate(self.new_event)
    merged = Event.get_by_id("2011ct")
    self.assertMergedEvent(merged)
def _query_async(self):
    """Asynchronously fetch all Events belonging to the district given by _query_args[0]."""
    district_key = self._query_args[0]
    query = Event.query(
        Event.district_key == ndb.Key(District, district_key))
    events = yield query.fetch_async()
    raise ndb.Return(events)
class TestEventManipulator(unittest2.TestCase):
    """Tests Event merging: new fields (facebook_eid, webcast) are merged
    onto a stored event without disturbing the original fields."""

    def setUp(self):
        # GAE testbed stubs for datastore, taskqueue and memcache.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests

        self.maxDiff = None
        # Baseline stored event.
        self.old_event = Event(
            id="2011ct",
            end_date=datetime.datetime(2011, 4, 2, 0, 0),
            event_short="ct",
            event_type_enum=EventType.REGIONAL,
            event_district_enum=DistrictType.NO_DISTRICT,
            first_eid="5561",
            name="Northeast Utilities FIRST Connecticut Regional",
            start_date=datetime.datetime(2011, 3, 31, 0, 0),
            year=2011,
            venue_address="Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA",
            website="http://www.ctfirst.org/ctr",
        )
        # Same event with extra fields to be merged in.
        self.new_event = Event(
            id="2011ct",
            end_date=datetime.datetime(2011, 4, 2, 0, 0),
            event_short="ct",
            event_type_enum=EventType.REGIONAL,
            event_district_enum=DistrictType.NO_DISTRICT,
            first_eid="5561",
            name="Northeast Utilities FIRST Connecticut Regional",
            start_date=datetime.datetime(2011, 3, 31, 0, 0),
            year=2011,
            venue_address="Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA",
            website="http://www.ctfirst.org/ctr",
            facebook_eid="7",
            webcast_json=json.dumps([{'type': 'ustream', 'channel': 'foo'}]),
        )

    def tearDown(self):
        self.testbed.deactivate()

    def assertMergedEvent(self, event):
        """Assert the event carries old fields plus the merged additions."""
        self.assertOldEvent(event)
        self.assertEqual(event.facebook_eid, "7")
        self.assertEqual(event.webcast[0]['type'], 'ustream')
        self.assertEqual(event.webcast[0]['channel'], 'foo')

    def assertOldEvent(self, event):
        """Assert the fields of the original stored event."""
        self.assertEqual(event.key.id(), "2011ct")
        self.assertEqual(event.name, "Northeast Utilities FIRST Connecticut Regional")
        self.assertEqual(event.event_type_enum, EventType.REGIONAL)
        self.assertEqual(event.event_district_enum, DistrictType.NO_DISTRICT)
        self.assertEqual(event.start_date, datetime.datetime(2011, 3, 31, 0, 0))
        self.assertEqual(event.end_date, datetime.datetime(2011, 4, 2, 0, 0))
        self.assertEqual(event.year, 2011)
        self.assertEqual(event.venue_address, "Connecticut Convention Center\r\n100 Columbus Blvd\r\nHartford, CT 06103\r\nUSA")
        self.assertEqual(event.website, "http://www.ctfirst.org/ctr")
        self.assertEqual(event.event_short, "ct")

    def test_createOrUpdate(self):
        # Create, then update: the second write merges onto the first.
        EventManipulator.createOrUpdate(self.old_event)
        self.assertOldEvent(Event.get_by_id("2011ct"))
        EventManipulator.createOrUpdate(self.new_event)
        self.assertMergedEvent(Event.get_by_id("2011ct"))

    def test_findOrSpawn(self):
        # An existing stored entity is found and merged with the new one.
        self.old_event.put()
        self.assertMergedEvent(EventManipulator.findOrSpawn(self.new_event))

    def test_updateMerge(self):
        # Direct merge of new into old produces the merged event.
        self.assertMergedEvent(EventManipulator.updateMerge(self.new_event, self.old_event))
def _query_async(self):
    """Asynchronously resolve the Event whose key id is given by _query_args[0]."""
    key_name = self._query_args[0]
    event = yield Event.get_by_id_async(key_name)
    raise ndb.Return(event)
def get(self):
    """Render the user's myTBA page: their favorites and subscriptions,
    grouped into teams, events, and matches (the latter grouped by event)."""
    self._require_registration()

    user = self.user_bundle.account.key
    favorites = Favorite.query(ancestor=user).fetch()
    subscriptions = Subscription.query(ancestor=user).fetch()

    # Bucket fav/sub records by model type, keyed on their model_key.
    team_keys = set()
    team_fav = {}
    team_subs = {}
    event_keys = set()
    event_fav = {}
    event_subs = {}
    events = []
    match_keys = set()
    match_event_keys = set()
    match_fav = {}
    match_subs = {}
    for item in favorites + subscriptions:
        if item.model_type == ModelType.TEAM:
            team_keys.add(ndb.Key(Team, item.model_key))
            if type(item) == Favorite:
                team_fav[item.model_key] = item
            elif type(item) == Subscription:
                team_subs[item.model_key] = item
        elif item.model_type == ModelType.MATCH:
            match_keys.add(ndb.Key(Match, item.model_key))
            # A match key is '<event_key>_<match id>'; we also need its event.
            match_event_keys.add(ndb.Key(Event, item.model_key.split('_')[0]))
            if type(item) == Favorite:
                match_fav[item.model_key] = item
            elif type(item) == Subscription:
                match_subs[item.model_key] = item
        elif item.model_type == ModelType.EVENT:
            if item.model_key.endswith('*'):  # All year events wildcard
                event_year = int(item.model_key[:-1])
                events.append(Event(  # add fake event for rendering
                    id=item.model_key,
                    short_name='ALL EVENTS',
                    event_short=item.model_key,
                    year=event_year,
                    start_date=datetime.datetime(event_year, 1, 1),
                    end_date=datetime.datetime(event_year, 1, 1)
                ))
            else:
                event_keys.add(ndb.Key(Event, item.model_key))
            if type(item) == Favorite:
                event_fav[item.model_key] = item
            elif type(item) == Subscription:
                event_subs[item.model_key] = item

    # Resolve all referenced entities concurrently.
    team_futures = ndb.get_multi_async(team_keys)
    event_futures = ndb.get_multi_async(event_keys)
    match_futures = ndb.get_multi_async(match_keys)
    match_event_futures = ndb.get_multi_async(match_event_keys)

    # (team, favorite-or-None, subscription-or-None) triples, by team number.
    teams = sorted([team_future.get_result() for team_future in team_futures], key=lambda x: x.team_number)
    team_fav_subs = []
    for team in teams:
        fav = team_fav.get(team.key.id(), None)
        subs = team_subs.get(team.key.id(), None)
        team_fav_subs.append((team, fav, subs))

    # Real events plus the fake wildcard placeholders, in display order.
    events += [event_future.get_result() for event_future in event_futures]
    EventHelper.sort_events(events)
    event_fav_subs = []
    for event in events:
        fav = event_fav.get(event.key.id(), None)
        subs = event_subs.get(event.key.id(), None)
        event_fav_subs.append((event, fav, subs))

    # Group match fav/subs under their owning event.
    matches = [match_future.get_result() for match_future in match_futures]
    match_events = [match_event_future.get_result() for match_event_future in match_event_futures]
    MatchHelper.natural_sort_matches(matches)

    match_fav_subs_by_event = {}
    for event in match_events:
        match_fav_subs_by_event[event.key.id()] = (event, [])

    for match in matches:
        event_key = match.key.id().split('_')[0]
        fav = match_fav.get(match.key.id(), None)
        subs = match_subs.get(match.key.id(), None)
        match_fav_subs_by_event[event_key][1].append((match, fav, subs))

    # Stable two-pass sort: by start date, then by end date.
    event_match_fav_subs = sorted(match_fav_subs_by_event.values(), key=lambda x: EventHelper.distantFutureIfNoStartDate(x[0]))
    event_match_fav_subs = sorted(event_match_fav_subs, key=lambda x: EventHelper.distantFutureIfNoEndDate(x[0]))

    self.template_values['team_fav_subs'] = team_fav_subs
    self.template_values['event_fav_subs'] = event_fav_subs
    self.template_values['event_match_fav_subs'] = event_match_fav_subs
    self.template_values['status'] = self.request.get('status')
    self.template_values['year'] = datetime.datetime.now().year

    self.response.out.write(jinja2_engine.render('mytba.html', self.template_values))
class TestApiTrustedController(unittest2.TestCase):
    """Tests for the trusted (write) API: auth handshake and each update endpoint.

    Every request must carry an 'X-TBA-Auth-Id' header naming an ApiAuthAccess
    and an 'X-TBA-Auth-Sig' header equal to md5(secret + path + body).
    """

    def setUp(self):
        self.testapp = webtest.TestApp(api_main.app)

        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")

        # One auth key per permission type, all scoped to event 2014casj
        self.teams_auth = ApiAuthAccess(
            id='tEsT_id_0',
            secret='321tEsTsEcReT',
            description='test',
            event_list=[ndb.Key(Event, '2014casj')],
            auth_types_enum=[AuthType.EVENT_TEAMS])
        self.matches_auth = ApiAuthAccess(
            id='tEsT_id_1',
            secret='321tEsTsEcReT',
            description='test',
            event_list=[ndb.Key(Event, '2014casj')],
            auth_types_enum=[AuthType.EVENT_MATCHES])
        self.rankings_auth = ApiAuthAccess(
            id='tEsT_id_2',
            secret='321tEsTsEcReT',
            description='test',
            event_list=[ndb.Key(Event, '2014casj')],
            auth_types_enum=[AuthType.EVENT_RANKINGS])
        self.alliances_auth = ApiAuthAccess(
            id='tEsT_id_3',
            secret='321tEsTsEcReT',
            description='test',
            event_list=[ndb.Key(Event, '2014casj')],
            auth_types_enum=[AuthType.EVENT_ALLIANCES])
        self.awards_auth = ApiAuthAccess(
            id='tEsT_id_4',
            secret='321tEsTsEcReT',
            description='test',
            event_list=[ndb.Key(Event, '2014casj')],
            auth_types_enum=[AuthType.EVENT_AWARDS])
        self.video_auth = ApiAuthAccess(
            id='tEsT_id_5',
            secret='321tEsTsEcReT',
            description='test',
            event_list=[ndb.Key(Event, '2014casj')],
            auth_types_enum=[AuthType.MATCH_VIDEO])

        self.event = Event(
            id='2014casj',
            event_type_enum=EventType.REGIONAL,
            event_short='casj',
            year=2014,
        )
        self.event.put()

    def tearDown(self):
        self.testbed.deactivate()

    def _sig(self, path, body):
        """Compute the expected X-TBA-Auth-Sig for a request: md5(secret + path + body)."""
        return md5.new('{}{}{}'.format('321tEsTsEcReT', path, body)).hexdigest()

    def test_auth(self):
        request_path = '/api/trusted/v1/event/2014casj/matches/update'

        # Fail; no auth headers at all
        response = self.testapp.post(request_path, expect_errors=True)
        self.assertEqual(response.status_code, 400)
        self.assertTrue('Error' in response.json)

        # Fail; auth key not in the datastore yet
        request_body = json.dumps([])
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 400)
        self.assertTrue('Error' in response.json)

        self.rankings_auth.put()
        self.matches_auth.put()

        # Pass
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        # Fail; bad X-TBA-Auth-Id
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'badTestAuthId',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 400)
        self.assertTrue('Error' in response.json)

        # Fail; bad sig
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': '123abc'
        }, expect_errors=True)
        self.assertEqual(response.status_code, 400)
        self.assertTrue('Error' in response.json)

        # Fail; bad sig due to wrong body
        body2 = json.dumps([{}])
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, body2, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 400)
        self.assertTrue('Error' in response.json)

        # Fail; bad event (auth key is not scoped to 2014cama).
        # Fixed: the POST must go to request_path2 (the sig was computed over
        # request_path2, but the request was previously sent to request_path,
        # so this case was really re-testing a bad sig).
        request_path2 = '/api/trusted/v1/event/2014cama/matches/update'
        sig = self._sig(request_path2, request_body)
        response = self.testapp.post(request_path2, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 400)
        self.assertTrue('Error' in response.json)

        # Fail; insufficient auth_types_enum (rankings key used on matches endpoint)
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_2',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 400)

    def test_alliance_selections_update(self):
        self.alliances_auth.put()

        alliances = [['frc971', 'frc254', 'frc1662'],
                     ['frc1678', 'frc368', 'frc4171'],
                     ['frc2035', 'frc192', 'frc4990'],
                     ['frc1323', 'frc846', 'frc2135'],
                     ['frc2144', 'frc1388', 'frc668'],
                     ['frc1280', 'frc604', 'frc100'],
                     ['frc114', 'frc852', 'frc841'],
                     ['frc2473', 'frc3256', 'frc1868']]
        request_body = json.dumps(alliances)

        request_path = '/api/trusted/v1/event/2014casj/alliance_selections/update'
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_3',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        for i, selection in enumerate(self.event.alliance_selections):
            self.assertEqual(alliances[i], selection['picks'])

    def test_awards_update(self):
        self.awards_auth.put()

        # Two 'Winner' awards collapse into one award with two recipients
        awards = [{
            'name_str': 'Winner',
            'team_key': 'frc254'
        }, {
            'name_str': 'Winner',
            'team_key': 'frc604'
        }, {
            'name_str': 'Volunteer Blahblah',
            'team_key': 'frc1',
            'awardee': 'Bob Bobby'
        }]
        request_body = json.dumps(awards)

        request_path = '/api/trusted/v1/event/2014casj/awards/update'
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_4',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_awards = Award.query(Award.event == self.event.key).fetch(None)
        self.assertEqual(len(db_awards), 2)
        self.assertTrue('2014casj_1' in [a.key.id() for a in db_awards])
        self.assertTrue('2014casj_5' in [a.key.id() for a in db_awards])

        # A second update replaces the whole award list for the event
        awards = [{
            'name_str': 'Winner',
            'team_key': 'frc254'
        }, {
            'name_str': 'Winner',
            'team_key': 'frc604'
        }]
        request_body = json.dumps(awards)
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_4',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_awards = Award.query(Award.event == self.event.key).fetch(None)
        self.assertEqual(len(db_awards), 1)
        self.assertTrue('2014casj_1' in [a.key.id() for a in db_awards])

    def test_matches_update(self):
        self.matches_auth.put()

        update_request_path = '/api/trusted/v1/event/2014casj/matches/update'
        delete_request_path = '/api/trusted/v1/event/2014casj/matches/delete'
        delete_all_request_path = '/api/trusted/v1/event/2014casj/matches/delete_all'

        # add one match
        matches = [{
            'comp_level': 'qm',
            'set_number': 1,
            'match_number': 1,
            'alliances': {
                'red': {'teams': ['frc1', 'frc2', 'frc3'], 'score': 25},
                'blue': {'teams': ['frc4', 'frc5', 'frc6'], 'score': 26},
            },
            'time_string': '9:00 AM',
            'time_utc': '2014-08-31T16:00:00',
        }]
        request_body = json.dumps(matches)
        sig = self._sig(update_request_path, request_body)
        response = self.testapp.post(update_request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_matches = Match.query(Match.event == self.event.key).fetch(None)
        self.assertEqual(len(db_matches), 1)
        self.assertTrue('2014casj_qm1' in [m.key.id() for m in db_matches])

        # add another match
        matches = [{
            'comp_level': 'f',
            'set_number': 1,
            'match_number': 1,
            'alliances': {
                'red': {'teams': ['frc1', 'frc2', 'frc3'], 'score': 250},
                'blue': {'teams': ['frc4', 'frc5', 'frc6'], 'score': 260},
            },
            'time_string': '10:00 AM',
            'time_utc': '2014-08-31T17:00:00',
        }]
        request_body = json.dumps(matches)
        sig = self._sig(update_request_path, request_body)
        response = self.testapp.post(update_request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_matches = Match.query(Match.event == self.event.key).fetch(None)
        self.assertEqual(len(db_matches), 2)
        self.assertTrue('2014casj_qm1' in [m.key.id() for m in db_matches])
        self.assertTrue('2014casj_f1m1' in [m.key.id() for m in db_matches])

        # add a match and delete a match
        matches = [{
            'comp_level': 'f',
            'set_number': 1,
            'match_number': 2,
            'alliances': {
                'red': {'teams': ['frc1', 'frc2', 'frc3'], 'score': 250},
                'blue': {'teams': ['frc4', 'frc5', 'frc6'], 'score': 260},
            },
            'score_breakdown': {
                'red': {'auto': 20, 'assist': 40, 'truss+catch': 20, 'teleop_goal+foul': 20},
                'blue': {'auto': 40, 'assist': 60, 'truss+catch': 10, 'teleop_goal+foul': 40},
            },
            'time_string': '11:00 AM',
            'time_utc': '2014-08-31T18:00:00',
        }]
        request_body = json.dumps(matches)
        sig = self._sig(update_request_path, request_body)
        response = self.testapp.post(update_request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        keys_to_delete = ['qm1']
        request_body = json.dumps(keys_to_delete)
        sig = self._sig(delete_request_path, request_body)
        response = self.testapp.post(delete_request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json['keys_deleted'], ['qm1'])

        # (a second, identical query/assert block that followed here was
        # accidental duplication and has been removed)
        db_matches = Match.query(Match.event == self.event.key).fetch(None)
        self.assertEqual(len(db_matches), 2)
        self.assertTrue('2014casj_f1m1' in [m.key.id() for m in db_matches])
        self.assertTrue('2014casj_f1m2' in [m.key.id() for m in db_matches])

        # verify match data
        match = Match.get_by_id('2014casj_f1m2')
        self.assertEqual(match.time, datetime.datetime(2014, 8, 31, 18, 0))
        self.assertEqual(match.time_string, '11:00 AM')
        self.assertEqual(match.alliances['red']['score'], 250)
        self.assertEqual(match.score_breakdown['red']['truss+catch'], 20)

        # test delete all matches; body must echo the event key as confirmation
        request_body = ''
        sig = self._sig(delete_all_request_path, request_body)
        response = self.testapp.post(delete_all_request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 400)

        request_body = '2014casj'
        sig = self._sig(delete_all_request_path, request_body)
        response = self.testapp.post(delete_all_request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_1',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_matches = Match.query(Match.event == self.event.key).fetch(None)
        self.assertEqual(len(db_matches), 0)

    def test_rankings_update(self):
        self.rankings_auth.put()

        rankings = {
            'breakdowns': ['QS', 'Auton', 'Teleop', 'T&C'],
            'rankings': [{
                'team_key': 'frc254',
                'rank': 1,
                'played': 10,
                'dqs': 0,
                'QS': 20,
                'Auton': 500,
                'Teleop': 500,
                'T&C': 200
            }, {
                'team_key': 'frc971',
                'rank': 2,
                'played': 10,
                'dqs': 0,
                'QS': 20,
                'Auton': 500,
                'Teleop': 500,
                'T&C': 200
            }],
        }
        request_body = json.dumps(rankings)

        request_path = '/api/trusted/v1/event/2014casj/rankings/update'
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_2',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        self.assertEqual(
            self.event.rankings[0],
            ['Rank', 'Team', 'QS', 'Auton', 'Teleop', 'T&C', 'DQ', 'Played'])
        self.assertEqual(self.event.rankings[1], [1, '254', 20, 500, 500, 200, 0, 10])

    def test_rankings_wlt_update(self):
        self.rankings_auth.put()

        # wins/losses/ties breakdowns get folded into a 'Record (W-L-T)' column
        rankings = {
            'breakdowns': ['QS', 'Auton', 'Teleop', 'T&C', 'wins', 'losses', 'ties'],
            'rankings': [{
                'team_key': 'frc254',
                'rank': 1,
                'wins': 10,
                'losses': 0,
                'ties': 0,
                'played': 10,
                'dqs': 0,
                'QS': 20,
                'Auton': 500,
                'Teleop': 500,
                'T&C': 200
            }, {
                'team_key': 'frc971',
                'rank': 2,
                'wins': 10,
                'losses': 0,
                'ties': 0,
                'played': 10,
                'dqs': 0,
                'QS': 20,
                'Auton': 500,
                'Teleop': 500,
                'T&C': 200
            }],
        }
        request_body = json.dumps(rankings)

        request_path = '/api/trusted/v1/event/2014casj/rankings/update'
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_2',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        self.assertEqual(self.event.rankings[0], [
            'Rank', 'Team', 'QS', 'Auton', 'Teleop', 'T&C', 'Record (W-L-T)', 'DQ', 'Played'
        ])
        self.assertEqual(self.event.rankings[1], [1, '254', 20, 500, 500, 200, '10-0-0', 0, 10])

    def test_eventteams_update(self):
        self.teams_auth.put()

        team_list = ['frc254', 'frc971', 'frc604']
        request_body = json.dumps(team_list)

        # Insert teams into db, otherwise they won't get added (see 072058b)
        Team(id='frc254', team_number=254).put()
        Team(id='frc971', team_number=971).put()
        Team(id='frc604', team_number=604).put()
        Team(id='frc100', team_number=100).put()

        request_path = '/api/trusted/v1/event/2014casj/team_list/update'
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_0',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_eventteams = EventTeam.query(EventTeam.event == self.event.key).fetch(None)
        self.assertEqual(len(db_eventteams), 3)
        self.assertTrue('2014casj_frc254' in [et.key.id() for et in db_eventteams])
        self.assertTrue('2014casj_frc971' in [et.key.id() for et in db_eventteams])
        self.assertTrue('2014casj_frc604' in [et.key.id() for et in db_eventteams])

        # A second update replaces the registered-team list entirely
        team_list = ['frc254', 'frc100']
        request_body = json.dumps(team_list)
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_0',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_eventteams = EventTeam.query(EventTeam.event == self.event.key).fetch(None)
        self.assertEqual(len(db_eventteams), 2)
        self.assertTrue('2014casj_frc254' in [et.key.id() for et in db_eventteams])
        self.assertTrue('2014casj_frc100' in [et.key.id() for et in db_eventteams])

    def test_eventteams_unknown(self):
        self.teams_auth.put()

        team_list = ['frc254', 'frc971', 'frc604']
        request_body = json.dumps(team_list)

        # Insert teams into db, otherwise they won't get added (see 072058b).
        # frc604 is deliberately NOT inserted, so it must be skipped.
        Team(id='frc254', team_number=254).put()
        Team(id='frc971', team_number=971).put()

        request_path = '/api/trusted/v1/event/2014casj/team_list/update'
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_0',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_eventteams = EventTeam.query(EventTeam.event == self.event.key).fetch(None)
        self.assertEqual(len(db_eventteams), 2)
        self.assertTrue('2014casj_frc254' in [et.key.id() for et in db_eventteams])
        self.assertTrue('2014casj_frc971' in [et.key.id() for et in db_eventteams])
        self.assertTrue('2014casj_frc604' not in [et.key.id() for et in db_eventteams])

        team_list = ['frc254', 'frc100']
        request_body = json.dumps(team_list)
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_0',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        db_eventteams = EventTeam.query(EventTeam.event == self.event.key).fetch(None)
        self.assertEqual(len(db_eventteams), 1)
        self.assertTrue('2014casj_frc254' in [et.key.id() for et in db_eventteams])
        self.assertTrue('2014casj_frc100' not in [et.key.id() for et in db_eventteams])

    def test_match_videos_add(self):
        self.video_auth.put()

        match1 = Match(
            id="2014casj_qm1",
            alliances_json="""{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
            comp_level="qm",
            event=ndb.Key(Event, '2014casj'),
            year=2014,
            set_number=1,
            match_number=1,
            team_key_names=[u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
            youtube_videos=["abcdef"])
        match1.put()

        match2 = Match(
            id="2014casj_sf1m1",
            alliances_json="""{"blue": {"score": -1, "teams": ["frc3464", "frc20", "frc1073"]}, "red": {"score": -1, "teams": ["frc69", "frc571", "frc176"]}}""",
            comp_level="sf",
            event=ndb.Key(Event, '2014casj'),
            year=2014,
            set_number=1,
            match_number=1,
            team_key_names=[u'frc69', u'frc571', u'frc176', u'frc3464', u'frc20', u'frc1073'],
        )
        match2.put()

        match_videos = {'qm1': 'aFZy8iibMD0', 'sf1m1': 'RpSgUrsghv4'}

        request_body = json.dumps(match_videos)

        request_path = '/api/trusted/v1/event/2014casj/match_videos/add'
        sig = self._sig(request_path, request_body)
        response = self.testapp.post(request_path, request_body, headers={
            'X-TBA-Auth-Id': 'tEsT_id_5',
            'X-TBA-Auth-Sig': sig
        }, expect_errors=True)
        self.assertEqual(response.status_code, 200)

        # Videos are merged into any pre-existing youtube_videos list
        self.assertEqual(set(Match.get_by_id('2014casj_qm1').youtube_videos), {'abcdef', 'aFZy8iibMD0'})
        self.assertEqual(set(Match.get_by_id('2014casj_sf1m1').youtube_videos), {'RpSgUrsghv4'})
def get_event(): """Event all event in time range.""" json_data = request.args or {} return make_response(jsonify({"data": Event.get_events(json_data)}))
def post(self, event_key):
    """Authenticate and dispatch a trusted-API write request for *event_key*.

    Access is granted by (in order): site admin, the OFFSEASON_EVENTWIZARD
    account permission for a current-year offseason event, an ApiAuthAccess
    owned by the logged-in account, or explicit X-TBA-Auth-Id /
    X-TBA-Auth-Sig headers where sig == md5(secret + path + body).
    Aborts 404/400/401 on failure; otherwise calls self._process_request.
    """
    event_key = event_key.lower()  # Normalize keys to lower case (TBA convention)

    # Make sure we are processing for a valid event first
    # (it's fine to do this before auth, since leaking the existence of an
    # event isn't really that big a deal)
    self.event = Event.get_by_id(event_key)
    if not self.event:
        self._errors = json.dumps({"Error": "Event {} not found".format(event_key)})
        self.abort(404)

    # Start by allowing admins to edit any event
    user_is_admin = (self._user_bundle.user and self._user_bundle.is_current_user_admin)

    # Also grant access if the user has the EVENTWIZARD permission and this
    # is a current year offseason event
    account = self._user_bundle.account
    current_year = datetime.datetime.now().year
    user_has_permission = (self.event.event_type_enum == EventType.OFFSEASON
                           and self.event.year == current_year
                           and account is not None
                           and AccountPermissions.OFFSEASON_EVENTWIZARD in account.permissions)

    user_has_auth = (user_is_admin or user_has_permission)
    if not user_has_auth and self._user_bundle.user:
        # See if this user has any auth keys granted to its account
        # (unexpired, or with no expiration, and scoped to this event)
        now = datetime.datetime.now()
        auth_tokens = ApiAuthAccess.query(
            ApiAuthAccess.owner == account.key,
            ApiAuthAccess.event_list == ndb.Key(Event, event_key),
            ndb.OR(ApiAuthAccess.expiration == None,
                   ApiAuthAccess.expiration >= now)).fetch()
        # _validate_auth returns None on success, an error string otherwise
        user_has_auth = any(self._validate_auth(auth, event_key) is None for auth in auth_tokens)

    if not user_has_auth:
        # If not, check if auth id/secret were passed as headers
        auth_id = self.request.headers.get('X-TBA-Auth-Id')
        if not auth_id:
            self._errors = json.dumps({
                "Error": "Must provide a request header parameter 'X-TBA-Auth-Id'"
            })
            self.abort(400)

        auth_sig = self.request.headers.get('X-TBA-Auth-Sig')
        if not auth_sig:
            self._errors = json.dumps({
                "Error": "Must provide a request header parameter 'X-TBA-Auth-Sig'"
            })
            self.abort(400)

        # Expected signature is md5 over secret + request path + raw body.
        # When the auth id is unknown, a sig is still computed (over the
        # string 'None') so the comparison below fails uniformly.
        auth = ApiAuthAccess.get_by_id(auth_id)
        expected_sig = md5.new('{}{}{}'.format(
            auth.secret if auth else None,
            self.request.path,
            self.request.body)).hexdigest()
        if not auth or expected_sig != auth_sig:
            logging.info("Auth sig: {}, Expected sig: {}".format(auth_sig, expected_sig))
            self._errors = json.dumps({"Error": "Invalid X-TBA-Auth-Id and/or X-TBA-Auth-Sig!"})
            self.abort(401)

        # Checks event key is valid, correct auth types, and expiration
        error = self._validate_auth(auth, event_key)
        if error:
            self._errors = json.dumps({"Error": error})
            self.abort(401)

    try:
        self._process_request(self.request, event_key)
    except ParserInputException, e:
        # Malformed request body -> client error
        self._errors = json.dumps({"Error": e.message})
        self.abort(400)
def get(self, event_key): df = DatafeedFMSAPI('v2.0') df2 = DatafeedFIRSTElasticSearch() event = Event.get_by_id(event_key) # Update event updated_event = df2.getEventDetails(event) if updated_event: event = EventManipulator.createOrUpdate(updated_event) models = df.getEventTeams(event_key) teams = [] district_teams = [] robots = [] for group in models: # models is a list of tuples (team, districtTeam, robot) if isinstance(group[0], Team): teams.append(group[0]) if isinstance(group[1], DistrictTeam): district_teams.append(group[1]) if isinstance(group[2], Robot): robots.append(group[2]) # Merge teams teams = TeamManipulator.mergeModels(teams, df2.getEventTeams(event)) # Write new models if teams: teams = TeamManipulator.createOrUpdate(teams) district_teams = DistrictTeamManipulator.createOrUpdate(district_teams) robots = RobotManipulator.createOrUpdate(robots) if not teams: # No teams found registered for this event teams = [] if type(teams) is not list: teams = [teams] # Build EventTeams event_teams = [EventTeam( id=event.key_name + "_" + team.key_name, event=event.key, team=team.key, year=event.year) for team in teams] # Delete eventteams of teams that are no longer registered if event_teams != []: existing_event_team_keys = set(EventTeam.query(EventTeam.event == event.key).fetch(1000, keys_only=True)) event_team_keys = set([et.key for et in event_teams]) et_keys_to_delete = existing_event_team_keys.difference(event_team_keys) EventTeamManipulator.delete_keys(et_keys_to_delete) event_teams = EventTeamManipulator.createOrUpdate(event_teams) if type(event_teams) is not list: event_teams = [event_teams] template_values = { 'event': event, 'event_teams': event_teams, } if 'X-Appengine-Taskname' not in self.request.headers: # Only write out if not in taskqueue path = os.path.join(os.path.dirname(__file__), '../templates/datafeeds/usfirst_event_details_get.html') self.response.out.write(template.render(path, template_values))
def generate_report(params, data): log = logging.getLogger('task') error = False term = Term.query.filter_by(hard_id=params['term_id']).first() if not term: log.error('Not found term %s' % params['term_id']) return False event = Event.get_by_key(data['event_key']) if not event: log.error('Not found event %s' % data['event_key']) return False firm_terms = FirmTerm.query.filter_by(term_id=term.id).all() payments = data['payments'] report_max_date = '' for payment in payments: report = Report() report.term_id = term.id report.event_id = event.id report.type = payment['type'] report.payment_id = None report.amount = payment['amount'] * int(term.factor) real_person = None for row in firm_terms: report.term_firm_id = row.firm_id query = Person.query query = query.filter((Person.payment_id == payment['card']) | ( Person.hard_id == int(payment['card']))) person = query.filter( Person.firm_id == row.child_firm_id).first() if not person: continue real_person = person if real_person: report.name = real_person.name report.person_id = real_person.id report.person_firm_id = real_person.firm_id report.payment_id = real_person.payment_id else: report.payment_id = payment['card'] date_pattern = '%Y-%m-%d %H:%M:%S' date_time_utc = date_helper.convert_date_to_utc( payment['date_time'], term.tz, date_pattern, date_pattern) report.creation_date = date_time_utc if report.creation_date > report_max_date: report_max_date = report.creation_date error = report.add_new() if not error: ReportSenderTask.lost_report_watcher.delay(term.id, report_max_date) return error
async def broadcast_event(self, event: Event): for websocket in self._websockets: await websocket.send_json(event.as_dict())
def _render(self, event_key): event = Event.get_by_id(event_key) if not event or event.year != 2016: self.abort(404) event.get_matches_async() match_predictions = event.details.predictions.get( 'match_predictions', None) match_prediction_stats = event.details.predictions.get( 'match_prediction_stats', None) ranking_predictions = event.details.predictions.get( 'ranking_predictions', None) ranking_prediction_stats = event.details.predictions.get( 'ranking_prediction_stats', None) cleaned_matches = MatchHelper.deleteInvalidMatches(event.matches) matches = MatchHelper.organizeMatches(cleaned_matches) # If no matches but there are match predictions, create fake matches # For cases where FIRST doesn't allow posting of match schedule fake_matches = False if not matches['qm'] and match_predictions: fake_matches = True for i in xrange(len(match_predictions.keys())): match_number = i + 1 alliances = { 'red': { 'score': -1, 'teams': ['frc?', 'frc?', 'frc?'] }, 'blue': { 'score': -1, 'teams': ['frc?', 'frc?', 'frc?'] } } matches['qm'].append( Match( id=Match.renderKeyName(event_key, 'qm', 1, match_number), event=event.key, year=event.year, set_number=1, match_number=match_number, comp_level='qm', alliances_json=json.dumps(alliances), )) last_played_match_num = None if ranking_prediction_stats: last_played_match_key = ranking_prediction_stats.get( 'last_played_match', None) if last_played_match_key: last_played_match_num = last_played_match_key.split('_qm')[1] self.template_values.update({ "event": event, "matches": matches, "fake_matches": fake_matches, "match_predictions": match_predictions, "match_prediction_stats": match_prediction_stats, "ranking_predictions": ranking_predictions, "ranking_prediction_stats": ranking_prediction_stats, "last_played_match_num": last_played_match_num, }) if event.within_a_day: self._cache_expiration = self.SHORT_CACHE_EXPIRATION return jinja2_engine.render('event_insights.html', self.template_values)
def post(self, event_key):
    """Create or update an Event from the admin edit form (admin only).

    Builds the Event from form fields, writes it through EventManipulator,
    optionally writes EventDetails (alliances/rankings JSON), flushes the
    webcast memcache, and redirects to the event's admin page.
    """
    self._require_admin()

    # Note, we don't actually use event_key.

    start_date = None
    if self.request.get("start_date"):
        start_date = datetime.strptime(self.request.get("start_date"), "%Y-%m-%d")

    end_date = None
    if self.request.get("end_date"):
        end_date = datetime.strptime(self.request.get("end_date"), "%Y-%m-%d")

    first_code = self.request.get("first_code", None)
    district_key = self.request.get("event_district_key", None)
    parent_key = self.request.get("parent_event", None)

    # Fixed: json.loads(s, '[]') passed '[]' as the second positional
    # argument (the Python 2 `encoding` parameter), not as a default.
    # The guard already supplies [] when the field is empty.
    division_key_names = json.loads(self.request.get('divisions')) if self.request.get('divisions') else []
    division_keys = [ndb.Key(Event, key) for key in division_key_names] if division_key_names else []

    website = WebsiteHelper.format_url(self.request.get("website"))

    event = Event(
        id=str(self.request.get("year")) + str.lower(str(self.request.get("event_short"))),
        end_date=end_date,
        event_short=self.request.get("event_short"),
        # 'None' (the literal string) comes from the form's empty option
        first_code=first_code if first_code and first_code != 'None' else None,
        event_type_enum=int(self.request.get("event_type")) if self.request.get('event_type') else EventType.UNLABLED,
        district_key=ndb.Key(District, self.request.get("event_district_key")) if district_key and district_key != 'None' else None,
        venue=self.request.get("venue"),
        venue_address=self.request.get("venue_address"),
        city=self.request.get("city"),
        state_prov=self.request.get("state_prov"),
        postalcode=self.request.get("postalcode"),
        country=self.request.get("country"),
        name=self.request.get("name"),
        short_name=self.request.get("short_name"),
        start_date=start_date,
        website=website,
        year=int(self.request.get("year")),
        official={
            "true": True,
            "false": False
        }.get(self.request.get("official").lower()),
        enable_predictions={
            "true": True,
            "false": False
        }.get(self.request.get("enable_predictions").lower()),
        facebook_eid=self.request.get("facebook_eid"),
        custom_hashtag=self.request.get("custom_hashtag"),
        webcast_json=self.request.get("webcast_json"),
        playoff_type=int(self.request.get("playoff_type")) if self.request.get('playoff_type') else PlayoffType.BRACKET_8_TEAM,
        parent_event=ndb.Key(Event, parent_key) if parent_key and parent_key.lower() != 'none' else None,
        divisions=division_keys,
    )
    event = EventManipulator.createOrUpdate(event)

    if self.request.get("alliance_selections_json") or self.request.get("rankings_json"):
        event_details = EventDetails(
            id=event_key,
            alliance_selections=json.loads(self.request.get("alliance_selections_json")),
            rankings=json.loads(self.request.get("rankings_json")))
        EventDetailsManipulator.createOrUpdate(event_details)

    MemcacheWebcastFlusher.flushEvent(event.key_name)

    self.redirect("/admin/event/" + event.key_name)
def test_group_by_week(self):
    """
    All events that start in the same span of Wednesday-Tuesday should be
    considered as being in the same week.

    Builds a randomized "answer key" of events per week label (seeded from
    the clock; the seed is logged on failure so runs are reproducible),
    then checks EventHelper.groupByWeek reproduces the same grouping.
    """
    events_by_week = {}  # we will use this as the "answer key"

    # Generate random regional events
    seed = int(time.time())
    state = random.Random()
    state.seed(seed)

    event_id_counter = 0
    week_start = datetime.datetime(2013, 2, 27)  # a Wednesday
    for i in range(1, 7):  # test for 6 weeks
        for _ in range(state.randint(1, 15)):  # random number of events per week
            week_label = 'Week {}'.format(i)

            # Event starts someday within the Wednesday-Tuesday span and
            # lasts 0-3 days
            start_date = week_start + datetime.timedelta(days=state.randint(0, 6))
            end_date = start_date + datetime.timedelta(days=state.randint(0, 3))

            event = Event(
                id='2013tst{}'.format(event_id_counter),
                event_short='tst{}'.format(event_id_counter),
                start_date=start_date,
                end_date=end_date,
                year=2013,
                official=True,
                event_type_enum=state.choice([EventType.REGIONAL, EventType.DISTRICT, EventType.DISTRICT_CMP])
            )

            if week_label in events_by_week:
                events_by_week[week_label].append(event)
            else:
                events_by_week[week_label] = [event]

            event_id_counter += 1
        week_start = week_start + datetime.timedelta(days=7)

    # Generate Championship events.
    # (Removed misleading no-op '...'.format(event_id_counter) calls below:
    # these id strings contain no placeholders, so .format did nothing.)
    week_start += datetime.timedelta(days=7)
    events_by_week[CHAMPIONSHIP_EVENTS_LABEL] = [
        Event(
            id='2013arc',
            event_short='arc',
            start_date=week_start,
            end_date=week_start + datetime.timedelta(days=2),
            year=2013,
            official=True,
            event_type_enum=EventType.CMP_DIVISION
        ),
        Event(
            id='2013gal',
            event_short='gal',
            start_date=week_start,
            end_date=week_start + datetime.timedelta(days=2),
            year=2013,
            official=True,
            event_type_enum=EventType.CMP_DIVISION
        ),
        Event(
            id='2013cmp',
            event_short='cmp',
            start_date=week_start + datetime.timedelta(days=2),
            end_date=week_start + datetime.timedelta(days=2),
            year=2013,
            official=True,
            event_type_enum=EventType.CMP_FINALS
        )
    ]

    # Generate official events with no dates
    events_by_week[WEEKLESS_EVENTS_LABEL] = [
        Event(
            id='2013weekless1',
            event_short='weekless1',
            year=2013,
            official=True,
            event_type_enum=state.choice([EventType.REGIONAL, EventType.DISTRICT, EventType.DISTRICT_CMP])
        ),
        Event(
            id='2013weekless2',
            event_short='weekless2',
            year=2013,
            official=True,
            event_type_enum=state.choice([EventType.REGIONAL, EventType.DISTRICT, EventType.DISTRICT_CMP])
        ),
        Event(
            id='2013weekless3',
            event_short='weekless3',
            year=2013,
            official=True,
            event_type_enum=state.choice([EventType.REGIONAL, EventType.DISTRICT, EventType.DISTRICT_CMP])
        ),
        Event(
            id='2013weekless4',
            event_short='weekless4',
            start_date=datetime.datetime(2013, 12, 31),
            end_date=datetime.datetime(2013, 12, 31),
            year=2013,
            official=True,
            event_type_enum=state.choice([EventType.REGIONAL, EventType.DISTRICT, EventType.DISTRICT_CMP])
        )
    ]

    # Generate preseason events
    events_by_week[PRESEASON_EVENTS_LABEL] = [
        Event(
            id='2013preseason1',
            event_short='preseason1',
            year=2013,
            official=False,
            event_type_enum=EventType.PRESEASON
        ),
        Event(
            id='2013preseason2',
            event_short='preseason2',
            start_date=datetime.datetime(2013, 1, 18),
            end_date=datetime.datetime(2013, 1, 20),
            year=2013,
            official=False,
            event_type_enum=EventType.PRESEASON
        ),
        Event(
            id='2013preseason3',
            event_short='preseason3',
            start_date=datetime.datetime(2013, 7, 11),
            end_date=datetime.datetime(2013, 7, 12),
            year=2013,
            official=False,
            event_type_enum=EventType.PRESEASON
        )
    ]

    # Generate offseason events. Offseason events are any event that doesn't fall under one of the above categories.
    events_by_week[OFFSEASON_EVENTS_LABEL] = [
        Event(
            id='2013offseason1',
            event_short='offseason1',
            year=2013,
            official=False,
            event_type_enum=EventType.OFFSEASON
        ),
        Event(
            id='2013offseason2',
            event_short='offseason2',
            start_date=datetime.datetime(2013, 8, 18),
            end_date=datetime.datetime(2013, 8, 20),
            year=2013,
            official=False,
            event_type_enum=EventType.OFFSEASON
        ),
        Event(
            id='2013offseason3',
            event_short='offseason3',
            start_date=datetime.datetime(2013, 12, 30),
            end_date=datetime.datetime(2013, 12, 31),
            year=2013,
            official=False,
            event_type_enum=EventType.OFFSEASON
        ),
        Event(
            id='2013offseason4',
            event_short='offseason4',
            start_date=datetime.datetime(2013, 11, 13),
            end_date=datetime.datetime(2013, 11, 14),
            year=2013,
            official=False,
            event_type_enum=EventType.REGIONAL
        )
    ]

    # Combine all events and shuffle randomly
    events = []
    for week_events in events_by_week.values():
        events.extend(week_events)
    state.shuffle(events)
    ndb.put_multi(events)

    # Begin testing
    events.sort(key=EventHelper.distantFutureIfNoStartDate)
    week_events = EventHelper.groupByWeek(events)
    for key in events_by_week.keys():
        try:
            self.assertEqual(
                set([e.key.id() for e in events_by_week[key]]),
                set([e.key.id() for e in week_events[key]]))
        except AssertionError as e:
            # Log everything needed to reproduce the failing random layout
            logging.warning("\n\nseed: {}".format(seed))
            logging.warning("\n\nkey: {}".format(key))
            logging.warning("\n\nevents_by_week: {}".format(events_by_week[key]))
            logging.warning("\n\nweek_events: {}".format(week_events[key]))
            raise e
def parse(self, response):
    """Parse an FMS API match-schedule/result payload into Match models.

    Reads ``response['Schedule']``, builds one Match per scheduled match,
    detects and re-keys tiebreaker matches, and (for 2015 only) repairs
    elimination matches whose team lists came back null from the API.

    Args:
        response: decoded FMS API JSON dict; must contain a 'Schedule' list.
            (Schema assumed from usage below — per-match keys like
            'matchNumber', 'Teams', 'startTime', 'scoreRedFinal', etc.
            TODO confirm against the FMS API docs for each season.)

    Returns:
        A tuple ``(parsed_matches, remapped_matches)`` where
        ``parsed_matches`` is a list of Match models to write, and
        ``remapped_matches`` maps an original match key name to the key
        name it was moved to because of a tiebreaker or existing match.
    """
    matches = response['Schedule']

    # Event key is '<year><event_short>', e.g. '2016casj'.
    event_key = '{}{}'.format(self.year, self.event_short)
    event = Event.get_by_id(event_key)
    if event.timezone_id:
        event_tz = pytz.timezone(event.timezone_id)
    else:
        # Without a timezone we leave timestamps as-is; they may be off.
        logging.warning(
            "Event {} has no timezone! Match times may be wrong.".format(
                event_key))
        event_tz = None

    parsed_matches = []
    remapped_matches = {}  # If a key changes due to a tiebreaker

    # NOTE(review): octofinal detection keys off the first match's
    # 'description' field — assumes the API labels octofinal matches
    # with the literal word 'Octofinal'; TODO confirm.
    is_octofinals = len(
        matches) > 0 and 'Octofinal' in matches[0]['description']

    for match in matches:
        # The field naming for the tournament level changed between API
        # seasons; support both spellings.
        if 'tournamentLevel' in match:  # 2016+
            level = match['tournamentLevel']
        else:  # 2015
            level = match['level']
        comp_level = get_comp_level(self.year, level, match['matchNumber'], is_octofinals)
        set_number, match_number = get_set_match_number(
            self.year, comp_level, match['matchNumber'], is_octofinals)

        red_teams = []
        blue_teams = []
        red_surrogates = []
        blue_surrogates = []
        team_key_names = []
        null_team = False
        sorted_teams = sorted(
            match['Teams'], key=lambda team: team['station']
        )  # Sort by station to ensure correct ordering. Kind of hacky.

        # Partition teams into alliances by station name. A None team
        # number still produces a 'frcNone' key here; such matches are
        # either skipped below (if unscored) or repaired in the 2015
        # fix-up pass at the end.
        for team in sorted_teams:
            if team['teamNumber'] is None:
                null_team = True
            team_key = 'frc{}'.format(team['teamNumber'])
            team_key_names.append(team_key)
            if 'Red' in team['station']:
                red_teams.append(team_key)
                if team['surrogate']:
                    red_surrogates.append(team_key)
            elif 'Blue' in team['station']:
                blue_teams.append(team_key)
                if team['surrogate']:
                    blue_surrogates.append(team_key)

        # Skip matches with incomplete team info that also have no score
        # on either alliance (i.e. nothing useful to store yet).
        if null_team and match['scoreRedFinal'] is None and match[
                'scoreBlueFinal'] is None:
            continue

        alliances = {
            'red': {
                'teams': red_teams,
                'surrogates': red_surrogates,
                'score': match['scoreRedFinal']
            },
            'blue': {
                'teams': blue_teams,
                'surrogates': blue_surrogates,
                'score': match['scoreBlueFinal']
            },
        }

        if not match[
                'startTime']:  # no startTime means it's an unneeded rubber match
            continue

        # Timestamps arrive as local wall-clock strings (fractional
        # seconds stripped via split('.')). Subtracting the event's UTC
        # offset converts them out of local time — presumably to UTC;
        # TODO confirm the API's timezone convention.
        time = datetime.datetime.strptime(match['startTime'].split('.')[0],
                                          TIME_PATTERN)
        if event_tz is not None:
            time = time - event_tz.utcoffset(time)

        actual_time_raw = match[
            'actualStartTime'] if 'actualStartTime' in match else None
        actual_time = None
        if actual_time_raw is not None:
            actual_time = datetime.datetime.strptime(
                actual_time_raw.split('.')[0], TIME_PATTERN)
            if event_tz is not None:
                actual_time = actual_time - event_tz.utcoffset(actual_time)

        post_result_time_raw = match.get('postResultTime')
        post_result_time = None
        if post_result_time_raw is not None:
            post_result_time = datetime.datetime.strptime(
                post_result_time_raw.split('.')[0], TIME_PATTERN)
            if event_tz is not None:
                post_result_time = post_result_time - event_tz.utcoffset(
                    post_result_time)

        key_name = Match.renderKeyName(event_key, comp_level, set_number,
                                       match_number)

        # Check for tiebreaker matches
        existing_match = Match.get_by_id(key_name)
        # Follow chain of existing matches
        # (each tiebreak_match_key points at the replay that superseded
        # the tied match, so walk to the end of the chain).
        while existing_match is not None and existing_match.tiebreak_match_key is not None:
            logging.info("Following Match {} to {}".format(
                existing_match.key.id(),
                existing_match.tiebreak_match_key.id()))
            existing_match = existing_match.tiebreak_match_key.get()

        # Check if last existing match needs to be tiebroken:
        # a played elimination match with no winner, whose actual start
        # time differs from the incoming one (so this is a new replay),
        # and which isn't a blank placeholder.
        if existing_match and existing_match.comp_level != 'qm' and \
                existing_match.has_been_played and \
                existing_match.winning_alliance == '' and \
                existing_match.actual_time != actual_time and \
                not self.is_blank_match(existing_match):
            logging.warning("Match {} is tied!".format(
                existing_match.key.id()))

            # TODO: Only query within set if set_number ever gets indexed
            # Count how many matches already exist in this comp level's
            # set by filtering key suffixes client-side.
            match_count = 0
            for match_key in Match.query(
                    Match.event == event.key,
                    Match.comp_level == comp_level).fetch(keys_only=True):
                # NOTE(review): rebinds the loop variable to the key's
                # suffix (the part after '<event>_').
                _, match_key = match_key.id().split('_')
                if match_key.startswith('{}{}'.format(
                        comp_level, set_number)):
                    match_count += 1

            # Sanity check: Tiebreakers must be played after at least 3 matches, or 6 for finals
            if match_count < 3 or (match_count < 6 and comp_level == 'f'):
                logging.warning(
                    "Match supposedly tied, but existing count is {}! Skipping match."
                    .format(match_count))
                continue

            # Re-key the incoming match as the next match number in the
            # set and record the remapping for callers.
            match_number = match_count + 1
            new_key_name = Match.renderKeyName(event_key, comp_level,
                                               set_number, match_number)
            remapped_matches[key_name] = new_key_name
            key_name = new_key_name

            # Point existing match to new tiebreaker match
            existing_match.tiebreak_match_key = ndb.Key(Match, key_name)
            parsed_matches.append(existing_match)

            logging.warning("Creating new match: {}".format(key_name))
        elif existing_match:
            # An existing (already re-keyed) match takes precedence:
            # update it in place under its stored key and match number.
            remapped_matches[key_name] = existing_match.key.id()
            key_name = existing_match.key.id()
            match_number = existing_match.match_number

        parsed_matches.append(
            Match(
                id=key_name,
                event=event.key,
                year=event.year,
                set_number=set_number,
                match_number=match_number,
                comp_level=comp_level,
                team_key_names=team_key_names,
                time=time,
                actual_time=actual_time,
                post_result_time=post_result_time,
                alliances_json=json.dumps(alliances),
            ))

    if self.year == 2015:
        # Fix null teams in elims (due to FMS API failure, some info not complete)
        # Should only happen for sf and f matches
        organized_matches = MatchHelper.organizeMatches(parsed_matches)
        for level in ['sf', 'f']:
            # Backfill each 'frcNone' elim match from the advancement of
            # the previous level (LAST_LEVEL maps sf->qf, f->sf —
            # presumably; TODO confirm against LAST_LEVEL's definition).
            playoff_advancement = MatchHelper.generatePlayoffAdvancement2015(
                organized_matches)
            if playoff_advancement[LAST_LEVEL[level]] != []:
                for match in organized_matches[level]:
                    if 'frcNone' in match.team_key_names:
                        if level == 'sf':
                            red_seed, blue_seed = QF_SF_MAP[
                                match.match_number]
                        else:
                            # Finals: top two advancing alliances.
                            red_seed = 0
                            blue_seed = 1
                        red_teams = [
                            'frc{}'.format(t)
                            for t in playoff_advancement[
                                LAST_LEVEL[level]][red_seed][0]
                        ]
                        blue_teams = [
                            'frc{}'.format(t)
                            for t in playoff_advancement[
                                LAST_LEVEL[level]][blue_seed][0]
                        ]

                        alliances = match.alliances
                        alliances['red']['teams'] = red_teams
                        alliances['blue']['teams'] = blue_teams
                        match.alliances_json = json.dumps(alliances)
                        match.team_key_names = red_teams + blue_teams

        # Drop any match still containing a null team after the fix-up.
        # NOTE(review): 'matches' here shadows the outer payload list,
        # which is no longer needed at this point.
        fixed_matches = []
        for key, matches in organized_matches.items():
            if key != 'num':
                for match in matches:
                    if 'frcNone' not in match.team_key_names:
                        fixed_matches.append(match)
        parsed_matches = fixed_matches

    return parsed_matches, remapped_matches