def get(self, page_num=0):
    """Render the paginated admin team list.

    Pages 0..MAX_PAGE-1 each cover PAGE_SIZE team numbers; the final
    page (MAX_PAGE) is unbounded above.
    """
    self._require_admin()
    page_num = int(page_num)
    if page_num < self.MAX_PAGE:
        # Bounded page: team numbers in [start, start + PAGE_SIZE)
        start = self.PAGE_SIZE * page_num
        end = start + self.PAGE_SIZE
        teams = Team.query(Team.team_number >= start, Team.team_number < end).order(
            Team.team_number).fetch()
    else:
        # Overflow page: everything at or above the last page boundary
        start = self.PAGE_SIZE * self.MAX_PAGE
        teams = Team.query(Team.team_number >= start).order(
            Team.team_number).fetch()
    # Human-readable pagination labels, e.g. "1-999", "1000's", ..., "5000+".
    # NOTE(review): labels assume PAGE_SIZE == 1000 — confirm against the class constant.
    page_labels = []
    for page in xrange(self.MAX_PAGE):
        if page == 0:
            page_labels.append('1-999')
        else:
            page_labels.append('{}\'s'.format(1000 * page))
    page_labels.append('{}+'.format(1000 * self.MAX_PAGE))
    self.template_values.update({
        "teams": teams,
        "num_teams": Team.query().count(),
        "page_num": page_num,
        "page_labels": page_labels,
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/team_list.html')
    self.response.out.write(template.render(path, self.template_values))
def team_list() -> str:
    """Render the first page (teams 1-999) of the team list, split into two columns."""
    current_page = 1
    client = ndb.Client()
    with client.context():
        labels = ['1-999']
        # Issue both range queries concurrently, then join the results in order.
        low_future = Team.query(Team.team_number >= 0, Team.team_number <= 500).fetch_async()
        high_future = Team.query(Team.team_number >= 501, Team.team_number <= 1000).fetch_async()
        all_teams = low_future.get_result() + high_future.get_result()
        total = len(all_teams)
        # Ceil-divide so the left column gets the extra team when the count is odd.
        split = (total + 1) // 2
        left, right = all_teams[:split], all_teams[split:]
        return render_template(
            "team_list.html",
            teams_a=left,
            teams_b=right,
            num_teams=total,
            page_labels=labels,
            cur_page_label=labels[0],
            current_page=current_page,
        )
def get(self, page_num=0):
    """Render the paginated admin team list.

    Pages 0..MAX_PAGE-1 each cover PAGE_SIZE team numbers; the final
    page (MAX_PAGE) is unbounded above.
    """
    self._require_admin()
    page_num = int(page_num)
    if page_num < self.MAX_PAGE:
        # Bounded page: team numbers in [start, start + PAGE_SIZE)
        start = self.PAGE_SIZE * page_num
        end = start + self.PAGE_SIZE
        teams = Team.query(Team.team_number >= start, Team.team_number < end).order(Team.team_number).fetch()
    else:
        # Overflow page: everything at or above the last page boundary
        start = self.PAGE_SIZE * self.MAX_PAGE
        teams = Team.query(Team.team_number >= start).order(Team.team_number).fetch()
    # Pagination labels, e.g. "1-999", "1000's", ..., "5000+".
    # NOTE(review): labels assume PAGE_SIZE == 1000 — confirm against the class constant.
    page_labels = []
    for page in xrange(self.MAX_PAGE):
        if page == 0:
            page_labels.append('1-999')
        else:
            page_labels.append('{}\'s'.format(1000 * page))
    page_labels.append('{}+'.format(1000 * self.MAX_PAGE))
    self.template_values.update({
        "teams": teams,
        "num_teams": Team.query().count(),
        "page_num": page_num,
        "page_labels": page_labels,
    })
    path = os.path.join(os.path.dirname(__file__), '../../templates/admin/team_list.html')
    self.response.out.write(template.render(path, self.template_values))
def get(self):
    """Enqueue team-detail update tasks for today's bucket of teams.

    Teams are split into PERIOD buckets by team number; the bucket is
    chosen from the current day so each team gets refreshed once per
    PERIOD days.
    """
    now_epoch = time.mktime(datetime.datetime.now().timetuple())
    # Day index modulo PERIOD selects which slice of teams to update today.
    bucket_num = int((now_epoch / (60 * 60 * 24)) % self.PERIOD)
    highest_team_key = Team.query().order(-Team.team_number).fetch(
        1, keys_only=True)[0]
    # Team keys look like "frcXXXX"; strip the "frc" prefix to get the number.
    highest_team_num = int(highest_team_key.id()[3:])
    bucket_size = int(highest_team_num / (self.PERIOD)) + 1
    min_team = bucket_num * bucket_size
    max_team = min_team + bucket_size
    team_keys = Team.query(Team.team_number >= min_team,
                           Team.team_number < max_team).fetch(
        1000, keys_only=True)
    teams = ndb.get_multi(team_keys)
    for team in teams:
        taskqueue.add(queue_name='usfirst',
                      url='/tasks/get/usfirst_team_details/' + team.key_name,
                      method='GET')
    # FIXME omg we're just writing out? -fangeugene 2013 Nov 6
    self.response.out.write("Bucket number {} out of {}<br>".format(
        bucket_num, self.PERIOD))
    self.response.out.write(
        "{} team gets have been enqueued in the interval [{}, {}).".format(
            len(teams), min_team, max_team))
def _render(self, model_type=None):
    """Build the JSON payload for the full team list by fanning out paged queries.

    Pages are 500 teams wide. self._last_modified is raised to the newest
    timestamp seen across pages. If model_type is given, the team dicts
    are filtered down to that model's properties.
    """
    max_team_key = Team.query().order(-Team.team_number).fetch(
        1, keys_only=True)[0]
    # Team keys look like "frcXXXX"; strip the prefix to get the highest number.
    max_team_num = int(max_team_key.id()[3:])
    max_team_page = int(max_team_num / 500)
    futures = []
    # Kick off every page query concurrently before collecting any result.
    for page_num in xrange(max_team_page + 1):
        futures.append(
            TeamListQuery(page_num).fetch_async(dict_version=3, return_updated=True))
    team_list = []
    for future in futures:
        partial_team_list, last_modified = future.get_result()
        team_list += partial_team_list
        # Track the newest modification time across all pages.
        if self._last_modified is None or last_modified > self._last_modified:
            self._last_modified = last_modified
    if model_type is not None:
        team_list = filter_team_properties(team_list, model_type)
    return json.dumps(team_list, ensure_ascii=True, indent=2, sort_keys=True)
def _render(self, page=''):
    """Render one 1000-wide page of the public team list, split into two columns.

    Fixes an off-by-one: the exclusive upper bound was ``start + 999``,
    which permanently hid the last team of every page (999, 1999, ...).
    """
    page_labels = []
    cur_page_label = None  # stays None when `page` is not in VALID_PAGES
    for cur_page in self.VALID_PAGES:
        if cur_page == 1:
            label = '1-999'
        else:
            label = "{}'s".format((cur_page - 1) * 1000)
        page_labels.append(label)
        if cur_page == page:
            cur_page_label = label
    start = (page - 1) * 1000
    stop = start + 1000  # exclusive bound; was start + 999, which dropped team `start + 999`
    teams = Team.query().order(Team.team_number).filter(
        Team.team_number >= start).filter(Team.team_number < stop).fetch(10000)
    num_teams = len(teams)
    # Ceil-divide so the left column gets the extra team when the count is odd.
    middle_value = (num_teams + 1) // 2
    teams_a, teams_b = teams[:middle_value], teams[middle_value:]
    template_values = {
        "teams_a": teams_a,
        "teams_b": teams_b,
        "num_teams": num_teams,
        "page_labels": page_labels,
        "cur_page_label": cur_page_label,
        "current_page": page
    }
    path = os.path.join(os.path.dirname(__file__), '../templates/team_list.html')
    return template.render(path, template_values)
def get(self):
    """Back up all teams (plus their social media links) to a CSV in cloud storage."""
    team_keys_future = Team.query().order(Team.team_number).fetch_async(keys_only=True)
    # Media.year == None selects year-agnostic media, i.e. social profiles.
    social_media_keys_future = Media.query(Media.year == None).fetch_async(keys_only=True)
    team_futures = ndb.get_multi_async(team_keys_future.get_result())
    social_futures = ndb.get_multi_async(social_media_keys_future.get_result())
    # Index socials as {team key id: {media type: media}} for O(1) lookup per team.
    socials_by_team = defaultdict(dict)
    for social_future in social_futures:
        social = social_future.get_result()
        for reference in social.references:
            socials_by_team[reference.id()][social.media_type_enum] = social
    if team_futures:
        with cloudstorage.open(self.TEAMS_FILENAME_PATTERN, 'w') as teams_file:
            writer = csv.writer(teams_file, delimiter=',')
            for team_future in team_futures:
                team = team_future.get_result()
                team_row = [team.key.id(), team.nickname, team.name, team.city, team.state_prov, team.country, team.website, team.rookie_year]
                # One column per social type; None (blank) when the team has no profile.
                for social_type in MediaType.social_types:
                    social = socials_by_team[team.key.id()].get(social_type, None)
                    team_row.append(social.social_profile_url if social is not None else None)
                self._writerow_unicode(writer, team_row)
    self.response.out.write("Done backing up teams!")
class UsfirstTeamsTpidsGet(webapp.RequestHandler):
    """
    A run-as-needed function that instantiates new Team objects based on
    FIRST's full team list.
    """
    def get(self, year):
        """Fetch FIRST's team list for `year`, optionally skipping the
        first ?skip=N entries, then report the resulting team count.
        """
        df = DatafeedUsfirst()
        skip = 0
        try:
            # request.get returns a string; coerce it, treating '' (and
            # garbage) as 0. The old code passed the raw string through.
            skip = int(self.request.get("skip") or 0)
        except ValueError:
            logging.error('Failed to get skip value')
            skip = 0
        logging.info("YEAR: %s", year)
        df.getTeamsTpids(int(year), skip)
        team_count = Team.query().count()
        template_values = {
            'team_count': team_count
        }
        path = os.path.join(os.path.dirname(__file__),
                            '../templates/datafeeds/usfirst_teams_tpids.html')
        self.response.out.write(template.render(path, template_values))
def _query_async(self):
    """Tasklet: fetch one PAGE_SIZE-wide page of teams by team number."""
    page_num = self._query_args[0]
    # Page n covers team numbers [n * PAGE_SIZE, (n + 1) * PAGE_SIZE).
    start = self.PAGE_SIZE * page_num
    end = start + self.PAGE_SIZE
    teams = yield Team.query(Team.team_number >= start,
                             Team.team_number < end).fetch_async()
    raise ndb.Return(teams)
def get(self):
    """Serve a CSV of every team (number, name, nickname, location, website),
    cached in memcache for one day."""
    self._validate_tba_app_id()
    cache_key = "csv_teams_all"
    csv_body = memcache.get(cache_key)
    if csv_body is None:
        keys = Team.query().order(Team.team_number).fetch(10000, keys_only=True)
        futures = ndb.get_multi_async(keys)
        buf = StringIO.StringIO()
        csv_out = csv.writer(buf, delimiter=',')
        csv_out.writerow(['team_number', 'name', 'nickname', 'location', 'website'])
        for future in futures:
            t = future.get_result()
            fields = [t.team_number, t.name, t.nickname, t.location, t.website]
            # Encode each cell as UTF-8 for the Python 2 csv module.
            csv_out.writerow([unicode(field).encode('utf-8') for field in fields])
        csv_body = buf.getvalue()
        if tba_config.CONFIG["memcache"]:
            memcache.set(cache_key, csv_body, 86400)
    self.response.headers["content-type"] = "text/csv"
    self.response.out.write(csv_body)
    self._track_call_defer('teams/list')
def get(self):
    """Enqueue a search-index rebuild task for every team."""
    for key in Team.query().fetch(keys_only=True):
        taskqueue.add(
            queue_name='search-index-update',
            url='/tasks/do/update_team_search_index/' + key.id(),
            method='GET')
def _render(self, page_num):
    """Return the JSON-encoded list of team dicts for one PAGE_SIZE-wide page."""
    page = int(page_num)
    lower = self.PAGE_SIZE * page
    upper = lower + self.PAGE_SIZE
    keys = Team.query(Team.team_number >= lower,
                      Team.team_number < upper).fetch(None, keys_only=True)
    futures = ndb.get_multi_async(keys)
    converted = []
    for future in futures:
        converted.append(ModelToDict.teamConverter(future.get_result()))
    return json.dumps(converted, ensure_ascii=True)
def get(self):
    """Render the admin team list with every team, ordered by team number."""
    self._require_admin()
    # The query object is iterated lazily by the template.
    self.template_values.update({"teams": Team.query().order(Team.team_number)})
    template_path = os.path.join(
        os.path.dirname(__file__), "../../templates/admin/team_list.html")
    self.response.out.write(template.render(template_path, self.template_values))
def createTeamMediaSuggestion(self):
    """Create a media suggestion for the first team in the datastore."""
    bundle = UserBundle()
    first_team = Team.query().fetch(1)[0]
    SuggestionCreator.createTeamMediaSuggestion(
        author_account_key=bundle.account.key,
        media_url=self.YOUTUBE_URL,
        team_key=first_team.key_name,
        year_str="2016")
def createEventTeams(self, event):
    """Create (or update) EventTeam links between `event` and the first 60 teams."""
    teams = Team.query().order(Team.team_number).fetch(60)
    event_teams = []
    for team in teams:
        event_teams.append(EventTeam(
            id=event.key.id() + "_" + team.key.id(),
            event=event.key,
            team=team.key,
            year=event.year))
    return EventTeamManipulator.createOrUpdate(event_teams)
def createEventTeams(self, event):
    """Link the first 60 teams (by number) to `event` via EventTeam models."""
    linked = []
    for team in Team.query().order(Team.team_number).fetch(60):
        # EventTeam ids are "<event id>_<team id>".
        link = EventTeam(id=event.key.id() + "_" + team.key.id(),
                         event=event.key,
                         team=team.key,
                         year=event.year)
        linked.append(link)
    return EventTeamManipulator.createOrUpdate(linked)
def get(self):
    """Render the admin team list page."""
    self._require_admin()
    all_teams = Team.query().order(Team.team_number)
    self.template_values.update({
        "teams": all_teams,
    })
    html_path = os.path.join(os.path.dirname(__file__),
                             '../../templates/admin/team_list.html')
    self.response.out.write(template.render(html_path, self.template_values))
def get(self):
    """Back up every team to a CSV file in cloud storage."""
    team_keys = Team.query().order(Team.team_number).fetch(None, keys_only=True)
    team_futures = ndb.get_multi_async(team_keys)
    if team_futures:
        with cloudstorage.open(self.TEAMS_FILENAME_PATTERN, 'w') as teams_file:
            writer = csv.writer(teams_file, delimiter=',')
            for team_future in team_futures:
                team = team_future.get_result()
                # One row per team; _writerow_unicode handles non-ASCII fields.
                self._writerow_unicode(writer, [team.key.id(), team.nickname, team.name, team.city, team.state_prov, team.country, team.website, team.rookie_year])
    self.response.out.write("Done backing up teams!")
def get(self):
    """Enqueue team-detail update tasks for today's bucket of teams.

    Teams are split into PERIOD buckets by team number; the bucket is
    chosen from the current day so each team gets refreshed once per
    PERIOD days.
    """
    now_epoch = time.mktime(datetime.datetime.now().timetuple())
    # Day index modulo PERIOD selects which slice of teams to update today.
    bucket_num = int((now_epoch / (60 * 60 * 24)) % self.PERIOD)
    highest_team_key = Team.query().order(-Team.team_number).fetch(1, keys_only=True)[0]
    # Team keys look like "frcXXXX"; strip the "frc" prefix to get the number.
    highest_team_num = int(highest_team_key.id()[3:])
    bucket_size = int(highest_team_num / (self.PERIOD)) + 1
    min_team = bucket_num * bucket_size
    max_team = min_team + bucket_size
    team_keys = Team.query(Team.team_number >= min_team, Team.team_number < max_team).fetch(1000, keys_only=True)
    teams = ndb.get_multi(team_keys)
    for team in teams:
        taskqueue.add(
            queue_name='usfirst',
            url='/tasks/get/usfirst_team_details/' + team.key_name,
            method='GET')
    # FIXME omg we're just writing out? -fangeugene 2013 Nov 6
    self.response.out.write("Bucket number {} out of {}<br>".format(bucket_num, self.PERIOD))
    self.response.out.write("{} team gets have been enqueued in the interval [{}, {}).".format(len(teams), min_team, max_team))
def get(self):
    """Enqueue detail-update tasks for a 1000-team window starting at ?offset."""
    offset = int(self.request.get("offset", 0))
    batch = Team.query().fetch(1000, offset=int(offset))
    for team in batch:
        taskqueue.add(
            queue_name='usfirst',
            url='/tasks/get/usfirst_team_details/' + team.key_name,
            method='GET')
    enqueued = len(batch)
    # FIXME omg we're just writing out? -gregmarra 2012 Aug 26
    self.response.out.write("%s team gets have been enqueued offset from %s.<br />" % (enqueued, offset))
    self.response.out.write("Reload with ?offset=%s to enqueue more." % (offset + enqueued))
def get(self):
    """
    Enqueues TeamEventsGet for teams numbers <= 999 (these teams participated
    in events from 2002 and prior, which we can't scrape normally)
    """
    keys = Team.query(Team.team_number <= 999).fetch(10000, keys_only=True)
    old_teams = ndb.get_multi(keys)
    for old_team in old_teams:
        task_url = '/tasks/get/usfirst_pre2003_team_events/{}'.format(old_team.key_name)
        taskqueue.add(queue_name='usfirst', url=task_url, method='GET')
    self.response.out.write("Pre 2003 event gets have been enqueued for %s teams." % (len(old_teams)))
def get(self):
    """Enqueue team-detail tasks for the next 1000 teams after ?offset."""
    offset = int(self.request.get("offset", 0))
    keys = Team.query().fetch(1000, offset=int(offset), keys_only=True)
    fetched = ndb.get_multi(keys)
    for team in fetched:
        taskqueue.add(
            queue_name='usfirst',
            url='/tasks/get/usfirst_team_details/' + team.key_name,
            method='GET')
    enqueued = len(fetched)
    # FIXME omg we're just writing out? -gregmarra 2012 Aug 26
    self.response.out.write("%s team gets have been enqueued offset from %s.<br />" % (enqueued, offset))
    self.response.out.write("Reload with ?offset=%s to enqueue more." % (offset + enqueued))
def _render(self):
    """Return a JSON typeahead payload of all events followed by all teams."""
    results = []
    for event in Event.query().order(-Event.year).order(Event.name):
        results.append({
            'id': event.key_name,
            'name': '%s %s [%s]' % (event.year, event.name, event.event_short.upper()),
        })
    for team in Team.query().order(Team.team_number):
        # Fall back to a generic label when the team has no nickname.
        nickname = team.nickname if team.nickname else "Team %s" % team.team_number
        results.append({
            'id': team.team_number,
            'name': '%s | %s' % (team.team_number, nickname),
        })
    return json.dumps(results)
def get(self, model_type):
    """Enqueue post-update hooks for every event or every team, by model_type."""
    if model_type == 'events':
        for event_key in Event.query().fetch(keys_only=True):
            taskqueue.add(
                queue_name='admin',
                url='/tasks/admin/do/run_event_post_update_hook/' + event_key.id(),
                method='GET')
    elif model_type == 'teams':
        for team_key in Team.query().fetch(keys_only=True):
            taskqueue.add(
                queue_name='admin',
                url='/tasks/admin/do/run_team_post_update_hook/' + team_key.id(),
                method='GET')
def get(self):
    """Serve the all-teams CSV, rendered from a template and memcached for a day."""
    cache_key = "csv_teams_all"
    rendered = memcache.get(cache_key)
    if rendered is None:
        all_teams = Team.query().order(Team.team_number).fetch(10000)
        csv_path = os.path.join(os.path.dirname(__file__),
                                '../templates/api/csv_teams_all.csv')
        rendered = template.render(csv_path, {"teams": all_teams})
        if tba_config.CONFIG["memcache"]:
            memcache.set(cache_key, rendered, 86400)
    self.response.out.write(rendered)
def _render(self, page=''):
    """Render one 1000-wide page of the team list (two columns).

    Fixes an off-by-one: the exclusive upper bound was ``start + 999``,
    which permanently hid the last team of every page (999, 1999, ...).
    """
    page_labels = []
    cur_page_label = None  # stays None when `page` is not in VALID_PAGES
    for curPage in self.VALID_PAGES:
        if curPage == 1:
            label = '1-999'
        else:
            label = "{}'s".format((curPage - 1) * 1000)
        page_labels.append(label)
        if curPage == page:
            cur_page_label = label
    start = (page - 1) * 1000
    stop = start + 1000  # exclusive bound; was start + 999 (dropped team `start + 999`)
    if start == 0:
        start = 1  # there is no team 0
    team_keys = Team.query().order(Team.team_number).filter(
        Team.team_number >= start).filter(Team.team_number < stop).fetch(
        10000, keys_only=True)
    teams = ndb.get_multi(team_keys)
    num_teams = len(teams)
    # Ceil-divide so the left column gets the extra team when the count is odd.
    middle_value = (num_teams + 1) // 2
    teams_a, teams_b = teams[:middle_value], teams[middle_value:]
    self.template_values.update({
        "teams_a": teams_a,
        "teams_b": teams_b,
        "num_teams": num_teams,
        "page_labels": page_labels,
        "cur_page_label": cur_page_label,
        "current_page": page
    })
    path = os.path.join(os.path.dirname(__file__), '../templates/team_list.html')
    return template.render(path, self.template_values)
def get(self):
    """Serve the all-teams CSV (template-rendered), memcached for 24 hours."""
    self._validate_tba_app_id()
    cache_key = "csv_teams_all"
    body = memcache.get(cache_key)
    if body is None:
        keys = Team.query().order(Team.team_number).fetch(10000, keys_only=True)
        all_teams = ndb.get_multi(keys)
        csv_path = os.path.join(os.path.dirname(__file__),
                                '../templates/api/csv_teams_all.csv')
        body = template.render(csv_path, {"teams": all_teams})
        if tba_config.CONFIG["memcache"]:
            memcache.set(cache_key, body, 86400)
    self.response.headers["content-type"] = "text/csv"
    self.response.out.write(body)
    self._track_call_defer('teams/list')
def get_teams_async():
    """Tasklet: fetch all teams ordered by team number, resolving keys in parallel."""
    team_keys = yield Team.query().order(Team.team_number).fetch_async(keys_only=True)
    teams = yield ndb.get_multi_async(team_keys)
    raise ndb.Return(teams)
def test_creates(self):
    """Creating six teams should leave exactly six teams in the datastore."""
    self.teams.extend(TeamTestCreator.createSixTeams())
    fetched = Team.query().order(Team.team_number).fetch(60)
    self.assertEqual(len(fetched), 6)