def getConferencesToAttend(self, request):
    """Return the conferences the current user has registered for.

    Resolves each websafe conference key stored on the user's Profile,
    batch-fetches the Conference entities and their organizers' Profiles,
    and returns them as ConferenceForms with organizer display names.
    """
    # get user profile
    prof = self._getProfileFromUser()
    # Rebuild ndb keys from the websafe strings stored on the profile.
    conf_keys = [ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend]
    # Batch-fetch all conferences in one round trip instead of one get() each.
    conferences = ndb.get_multi(conf_keys)

    # Batch-fetch the organizer profiles for display names.
    organisers = [ndb.Key(Profile, conf.organizerUserId) for conf in conferences]
    profiles = ndb.get_multi(organisers)

    # Map organizer id -> display name for easy lookup below.
    names = {}
    for profile in profiles:
        names[profile.key.id()] = profile.displayName

    # BUG FIX: the display names were computed above but never used — the
    # form was built with an empty string. Pass the real names through.
    return ConferenceForms(
        items=[self._copyConferenceToForm(conf, names[conf.organizerUserId])
               for conf in conferences]
    )
def get(self):
    """Return, as JSON, the last 100 messages sent to or from the current
    player, newest first, plus a lookup dict of the senders."""
    self.session = session(self.request)
    # Messages the current player sent or received.
    mlist_keys = MessageDB.query().filter(
        ndb.OR(
            MessageDB.to_player == self.session['plkey'],
            MessageDB.from_player == self.session['plkey']
        )
    ).fetch(100, keys_only=True)
    mdatas = ndb.get_multi(mlist_keys)

    # Also fetch the sender (from_player) entities; a set avoids
    # fetching the same player twice.
    playerdata_keys = set()
    for m in mdatas:
        playerdata_keys.add(m.from_player)
    pldatas = ndb.get_multi(playerdata_keys)

    # Index the players by userid so the client can resolve sender ids.
    # NOTE(review): pldict holds NDB model entities, which json.dumps
    # cannot serialize as-is — confirm the intended client contract.
    pldict = {}
    for pl in pldatas:
        pldict[pl.userid] = pl

    mdataout = []
    for m in mdatas:
        mout = m.to_dict()
        # Pass only the sender id; the client looks it up in pldict.
        mout['from_player'] = m.from_player.string_id()
        mout['key_urlsafe'] = m.key.urlsafe()
        mdataout.append(mout)

    # Newest first. key= replaces the Python-2-only cmp= form with
    # identical ordering.
    mdataout.sort(key=lambda x: x['create_date'], reverse=True)
    self.session.save()
    # BUG FIX: self.session was accidentally passed as json.dumps()'s
    # second positional argument (skipkeys); it does not belong there.
    return webapp2.Response(
        json.dumps({"result": mdataout, "pldict": pldict})
    )
def post(self):
    """Remove the requested volume subscriptions for the current user.

    Request body: JSON {"volumes": [volume_id, ...]}.
    Existing subscriptions are deleted asynchronously; ids with no
    subscription are reported back as skipped.
    """
    user_key = users.user_key(self.user, create=False)
    request = json.loads(self.request.body)
    volume_ids = request['volumes']
    logging.info('Removing subscriptions: %r', volume_ids)
    results = defaultdict(list)
    keys = [
        subscriptions.subscription_key(
            volume_id, user=user_key, create=False
        ) for volume_id in volume_ids
    ]
    # Prefetch in one batch; each key.get() below is then served from
    # ndb's local cache.
    ndb.get_multi(keys)
    candidates = []
    for key in keys:
        subscription = key.get()
        if subscription:
            candidates.append(key)
        else:
            results['skipped'].append(key.id())
    logging.info('%d candidates, %d volumes', len(candidates), len(volume_ids))
    # Fire-and-forget batch delete (the old comment wrongly called this a
    # prefetch — it is the actual deletion).
    ndb.delete_multi_async(candidates)
    response = {
        'status': 200,
        'message': 'removed %d subscriptions' % len(candidates),
        'results': [key.id() for key in candidates],
        # BUG FIX: 'skipped' was collected but never returned to the client.
        'skipped': results['skipped'],
    }
    self.response.write(json.dumps(response))
def post(self):
  """Returns dynamic data for listing alerts in response to XHR.

  Request parameters:
    sheriff: The name of a sheriff (optional).
    triaged: Whether to include triaged alerts (i.e. with a bug ID).
    improvements: Whether to include improvement anomalies.
    anomaly_cursor: Where to begin a paged query for anomalies (optional).
    stoppage_alert_cursor: Where to begin a paged query for stoppage alerts
        (optional).

  Outputs:
    JSON data for an XHR request to show a table of alerts.
  """
  sheriff_name = self.request.get('sheriff', 'Chromium Perf Sheriff')
  sheriff_key = ndb.Key('Sheriff', sheriff_name)
  if not _SheriffIsFound(sheriff_key):
    # Unknown sheriff: report the error as JSON rather than raising.
    self.response.out.write(json.dumps({
        'error': 'Sheriff "%s" not found.' % sheriff_name
    }))
    return
  include_improvements = bool(self.request.get('improvements'))
  include_triaged = bool(self.request.get('triaged'))
  # Cursors are used to fetch paged queries. If none is supplied, then the
  # first 500 alerts will be returned. If a cursor is given, the next
  # 500 alerts (starting at the given cursor) will be returned.
  anomaly_cursor = self.request.get('anomaly_cursor', None)
  stoppage_alert_cursor = self.request.get('stoppage_alert_cursor', None)
  if anomaly_cursor:
    # Decode the websafe cursor string into a datastore Cursor.
    anomaly_cursor = Cursor(urlsafe=anomaly_cursor)
  if stoppage_alert_cursor:
    stoppage_alert_cursor = Cursor(urlsafe=stoppage_alert_cursor)
  # The _Fetch* helpers return keys plus paging metadata; resolve the
  # entities themselves in one batched get each.
  anomaly_values = _FetchAnomalies(sheriff_key, include_improvements,
                                   include_triaged, anomaly_cursor)
  anomalies = ndb.get_multi(anomaly_values['anomaly_keys'])
  stoppage_alert_values = _FetchStoppageAlerts(
      sheriff_key, include_triaged, stoppage_alert_cursor)
  stoppage_alerts = ndb.get_multi(
      stoppage_alert_values['stoppage_alert_keys'])
  values = {
      'anomaly_list': AnomalyDicts(anomalies),
      'anomaly_count': anomaly_values['anomaly_count'],
      'stoppage_alert_list': StoppageAlertDicts(stoppage_alerts),
      'stoppage_alert_count': stoppage_alert_values['stoppage_alert_count'],
      'sheriff_list': _GetSheriffList(),
      # Cursors are round-tripped to the client as websafe strings so the
      # next request can resume where this page ended.
      'anomaly_cursor': (anomaly_values['anomaly_cursor'].urlsafe()
                         if anomaly_values['anomaly_cursor'] else None),
      'stoppage_alert_cursor':
          (stoppage_alert_values['stoppage_alert_cursor'].urlsafe()
           if stoppage_alert_values['stoppage_alert_cursor'] else None),
      'show_more_anomalies': anomaly_values['show_more_anomalies'],
      'show_more_stoppage_alerts':
          stoppage_alert_values['show_more_stoppage_alerts']
  }
  self.GetDynamicVariables(values)
  self.response.out.write(json.dumps(values))
def post(self):
    """Update the start dates of the current user's subscriptions.

    Request body: JSON {"updates": {subscription_id: date_string, ...}}.
    Responds with JSON listing updated / skipped / failed ids.
    """
    user_key = users.user_key(self.user, create=False)
    request = json.loads(self.request.body)
    # NOTE(review): the default here is a list, but updates.get(...) below
    # requires a dict {id: date_string} — confirm the client always sends
    # a dict.
    updates = request.get('updates', [])
    results = defaultdict(list)
    sub_keys = [
        subscriptions.subscription_key(
            key, user=user_key) for key in updates
    ]
    # bulk fetch to populate the cache
    ndb.get_multi(sub_keys)
    updated_subs = []
    for key in sub_keys:
        # Served from ndb's local cache thanks to the bulk fetch above.
        subscription = key.get()
        if subscription:
            start_date = parse_date(updates.get(key.id())).date()
            if start_date == subscription.start_date:
                # Already at the requested date: nothing to write.
                results['skipped'].append(key.id())
            else:
                subscription.start_date = start_date
                updated_subs.append(subscription)
                results['updated'].append(key.id())
        else:
            # no such subscription
            logging.debug('Not subscribed to volume %r', key)
            results['failed'].append(key.id())
    # Persist all modified subscriptions in one batch write.
    ndb.put_multi(updated_subs)
    response = {
        'status': 200,
        'results': results
    }
    self.response.write(json.dumps(response))
def get(self, year):
    """Repair final-match keys for the given year.

    Finds final ('f') matches whose key name wrongly contains set number 0
    ('_f0m'), rewrites them under set_number 1 with a freshly rendered key,
    recreates them, and deletes the old entities. Renders a summary page.
    """
    year_event_keys = Event.query(Event.year == int(year)).fetch(1000, keys_only=True)
    final_match_keys = []
    for event_key in year_event_keys:
        # Collect the keys of all final-round matches per event.
        final_match_keys.extend(Match.query(Match.event == event_key, Match.comp_level == 'f').fetch(100, keys_only=True))
    match_keys_to_repair = []
    for match_key in final_match_keys:
        key_name = match_key.id()
        if '_f0m' in key_name:
            # '_f0m' marks the malformed set number (0 instead of 1).
            match_keys_to_repair.append(match_key)
    deleted_keys = []
    matches_to_repair = ndb.get_multi(match_keys_to_repair)
    for match in matches_to_repair:
        # Remember the old (bad) key so it can be deleted after the
        # corrected copy is written.
        deleted_keys.append(match.key)
        event = ndb.get_multi([match.event])[0]
        match.set_number = 1
        # Re-render the key name with the corrected set number and rebind
        # the entity to that new key.
        match.key = ndb.Key(Match, Match.renderKeyName(
            event.key.id(),
            match.comp_level,
            match.set_number,
            match.match_number))
    MatchManipulator.createOrUpdate(matches_to_repair)
    MatchManipulator.delete_keys(deleted_keys)
    template_values = {'deleted_keys': deleted_keys,
                       'new_matches': matches_to_repair}
    path = os.path.join(os.path.dirname(__file__), '../templates/math/final_matches_repair_do.html')
    self.response.out.write(template.render(path, template_values))
def get(self):
    """Render the edit page for a Work entity; admins only.

    Redirects anonymous visitors to login, shows an access-denied page to
    non-admins, and a not-found page when the named Work does not exist.
    """
    user = users.get_current_user()
    if user is None:
        self.redirect(users.create_login_url(self.request.uri))
        return

    self.response.headers["Content-Type"] = "text/html"
    admins = ['subhasisdutta300887', 'subhasistubai', '*****@*****.**']
    if user.nickname() not in admins:
        self.response.out.write(callAccessDeniedPage())
        return

    work = ndb.Key('Work', self.request.get("name")).get()
    if work is None:
        self.response.out.write(callNoSuchWorkPage())
        return

    # Batch-load the referenced attribute and photo entities.
    attrList = ndb.get_multi(work.attributes)
    photoList = ndb.get_multi(work.photoGallery)
    template_values = {
        'pageTitle': "Edit Work",
        'work': work,
        'attrList': attrList,
        'photoList': photoList,
    }
    path = os.path.join(os.path.dirname(__file__), '../../template/editWork.html')
    self.response.out.write(template.render(path, template_values))
def get_attending(self, request):
    """
    Get Conferences the calling user is attending (i.e. registered for)
    :param request:
    :return: ConferenceForms
    """
    if not isinstance(request, message_types.VoidMessage):
        raise endpoints.BadRequestException()

    prof = ProfileApi.profile_from_user()  # get user Profile
    conf_keys = prof.conferencesToAttend  # Changed from original code
    if len(conf_keys) == 0:
        # Nothing registered: return an empty form set immediately.
        return ConferenceForms()

    conferences = ndb.get_multi(conf_keys)
    # Batch-fetch the organizer profiles and index display names by id.
    organiser_keys = [ndb.Key(Profile, c.organizerUserId) for c in conferences]
    names = {p.key.id(): p.displayName for p in ndb.get_multi(organiser_keys)}

    # One ConferenceForm per attended conference, with its organizer name.
    return ConferenceForms(
        items=[c.to_form(names[c.organizerUserId]) for c in conferences])
def getConferencesToAttend(self, request):
    """Get list of conferences that user has registered for.

    Returns a ConferenceForms message; raises NotFoundException when all
    stored keys are stale or no organizer profiles can be resolved.
    """
    prof = self._getProfileFromUser()  # get user Profile
    # Get the conferences the user is registered to attend
    conf_keys = [ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend]
    if not conf_keys or not len(conf_keys):
        # No registrations: empty result set.
        return ConferenceForms(items=[])
    else:
        conferences = ndb.get_multi(conf_keys)
        if not any(c for c in conferences):
            # Every key resolved to None: the stored keys are stale.
            wskeys = ', '.join([wkey.urlsafe() for wkey in conf_keys])
            raise endpoints.NotFoundException(
                'The conferences to which you are registered could not be found: %s' % wskeys
            )
    # get all the conference organizers (skipping keys that resolved to None)
    organisers = [ndb.Key(Profile, conf.organizerUserId) for conf in conferences if conf is not None]
    if not len(organisers):
        raise endpoints.NotFoundException('No conference organisers were found for these conferences')
    profiles = ndb.get_multi(organisers)
    # put display names in a dict for easier fetching
    names = {}
    for profile in profiles:
        names[profile.key.id()] = profile.displayName
    # return set of ConferenceForm objects per Conference
    return ConferenceForms(items=[
        self.toConferenceForm(conf, names[conf.organizerUserId])
        for conf in conferences if conf is not None
    ])
def generateReport(self, request):
    """Build and e-mail the order report for one day or a date range.

    request.dates must contain one date (single-day report) or two dates
    (range report); any other count is rejected. The report spreadsheets
    are attached to a mail sent to the configured report address.
    """
    if len(request.dates) == 1:
        # Single day: span from midnight to the last microsecond of the day.
        date = request.dates[0]
        start_date = datetime(date.year, date.month, date.day)
        end_date = datetime(date.year, date.month, date.day, 23, 59, 59, 999999)
        title = start_date.strftime('%d, %B %Y')
    elif len(request.dates) == 2:
        # Range: first date's midnight through the end of the second date.
        date1 = request.dates[0]
        date2 = request.dates[1]
        start_date = datetime(date1.year, date1.month, date1.day)
        end_date = datetime(date2.year, date2.month, date2.day, 23, 59, 59, 999999)
        title = start_date.strftime('%d-%B-%Y') + '~' + end_date.strftime('%d-%B-%Y')
    else:
        return ReportResponse(code=ERROR_INVIALID_PARAMETER)
    mail_address = DBSystem.get_config().report_email
    # NOTE(review): when no report address is configured, the method falls
    # through and returns None instead of a SimpleResponse — confirm.
    if mail_address:
        customers = ndb.get_multi([ndb.Key(urlsafe=id) for id in request.customer_ids])
        result_items, customer_order_map, date_map_for_systemreport = getOrderSummeryInternal(start_date, end_date, request.customer_ids, customers)
        # Resolve all referenced products in one batch, keyed by websafe key.
        products = ndb.get_multi([ndb.Key(urlsafe=item.product_id) for item in result_items])
        product_map = dict()
        for product in products:
            product_map[product.key.urlsafe()] = product
        message = mail.EmailMessage(sender="Mobile Ordering Server <*****@*****.**>",
                                    subject="Report")
        message.to = mail_address
        message.body = "Mobile ordering system report"
        # NOTE: 'generateReport' below is the module-level report builder,
        # not this method (they share a name) — presumably intentional.
        message.attachments = [("report(" + title + ").xlsx",
                                generateReport(title, result_items, customer_order_map, customers)),
                               ("do_head(" + title + ").xlsx",
                                generateSystemReportHead(date_map_for_systemreport)),
                               ("do_detail(" + title + ").xlsx",
                                generateSystemReportDetail(product_map, date_map_for_systemreport))]
        message.send()
        return SimpleResponse(code=SUCCESS)
def _post_put_hook(self, future):
    """After a League is saved, reconcile User.leagues membership.

    Users listed on the league but not yet pointing back at it gain the
    league key (plus a LeagueUser record); users pointing at the league
    but no longer listed lose it (and their LeagueUser record), except
    the owner, who is always kept. Finally the leaderboard cache is
    invalidated.
    """
    # Keys of users currently pointing at this league (keys-only query).
    existing_users = User.query(
        User.leagues == self.key,
        default_options=ndb.QueryOptions(keys_only=True))
    # Set difference on keys: who must be added / removed.
    to_add = ndb.get_multi(list(set(self.users) - set(existing_users)))
    to_remove = ndb.get_multi(list(set(existing_users) - set(self.users)))
    owner = self.owner.get()
    to_put = []
    to_delete = []
    for user in to_add:
        if self.key not in user.leagues:
            user.leagues.append(self.key)
            to_put.append(user)
            to_put.append(LeagueUser.from_league_user(self.key, user.key))
    # The owner is always a member, even if absent from self.users.
    if self.key not in owner.leagues:
        owner.leagues.append(self.key)
        to_put.append(owner)
        to_put.append(LeagueUser.from_league_user(self.key, owner.key))
    for user in to_remove:
        # Never strip the owner's membership.
        if self.key in user.leagues and not user == owner:
            user.leagues.remove(self.key)
            to_put.append(user)
            to_delete.append(LeagueUser.key_from_league_user(self.key, user.key))
    ndb.put_multi(to_put)
    ndb.delete_multi(to_delete)
    leaderboard.delete_leaderboard_cache()
def get(self, *args, **kwargs):
    """Render the group page: its messages (paired with their authors)
    and, for moderators, the list of pending membership requests.

    Members whose own membership is still pending only see a notice.
    """
    params = dict()
    params['group'] = self.group
    if self.group_membership.isPending == True:
        # Pending members may not see the group content yet.
        self.display_message('Dein Aufnahmeantrag fuer diese Gruppe wurde noch nicht bearbeitet')
        return
    messagesQuery = Message.query(ancestor=self.group.key).order(Message.created)
    messages = messagesQuery.fetch()
    # Batch-fetch each message's author in one round trip.
    userKeys = []
    for message in messages:
        userKeys.append(message.userKey)
    users = ndb.get_multi(userKeys)
    params['messages'] = zip(messages, users)
    params['groupmembership'] = self.group_membership
    if self.group_membership.isModerator:
        # Moderators also see who is waiting to be admitted.
        requestsQuery = GroupMembership.query(GroupMembership.groupKey == self.group.key, GroupMembership.isPending == True)
        requests = requestsQuery.fetch()
        requestingUserKeys = []
        for request in requests:
            requestingUserKeys.append(request.userKey)
        requestingUsers = ndb.get_multi(requestingUserKeys)
        # BUG FIX: removed a leftover debug print of the user list.
        params['member_requests'] = requestingUsers
    self.render_template('showgroup.html', params=params)
def get_randomized_unused_actions(self, show, interval):
    """Return the vote options (Action entities) for a show interval.

    The options are chosen once per (show, interval) — a random sample of
    the top-voted unused actions — and persisted in IntervalVoteOptions so
    every subsequent call returns the same set.
    """
    # Get the stored interval options
    interval_vote_options = IntervalVoteOptions.query(
        IntervalVoteOptions.show == show.key,
        IntervalVoteOptions.interval == interval).get()
    # If the interval options haven't been generated
    if not interval_vote_options:
        # Return un-used action keys, sorted by vote
        unused_keys = Action.query(Action.used == False,
                                   ).order(-Action.vote_value,
                                           Action.created).fetch(RANDOM_ACTION_OPTIONS, keys_only=True)
        # Get a randomized sample of the top ACTION_OPTIONS amount of action keys
        random_sample_keys = list(random.sample(set(unused_keys),
                                                min(ACTION_OPTIONS, len(unused_keys))))
        # Convert the keys into actual entities
        unused_actions = ndb.get_multi(random_sample_keys)
        ivo_create_dict = {'show': show.key, 'interval': interval}
        # Loop through the randomly select unused actions
        for i in range(0, len(unused_actions)):
            # Option fields are 1-based: option_1 .. option_N.
            ivo_option_num = i + 1
            # Add the option to the create dict
            ivo_create_dict['option_' + str(ivo_option_num)] = unused_actions[i].key
        # Store the interval options
        IntervalVoteOptions(**ivo_create_dict).put()
    else:
        iov_keys = []
        # Loop through and get the stored interval options
        for i in range(1, ACTION_OPTIONS + 1):
            # Missing/unset option slots are skipped.
            option_key = getattr(interval_vote_options, 'option_' + str(i), None)
            if option_key:
                iov_keys.append(option_key)
        # Convert the keys into actual entities
        unused_actions = ndb.get_multi(iov_keys)
    return unused_actions
def show_all_flakes(flake, bug_friendly):
    """Render the page listing every occurrence of the given flake.

    Batch-fetches the occurrences, their failure runs and patchsets,
    annotates each run with patchset info and a PST-formatted time, and
    returns the rendered template.
    """
    occurrences = ndb.get_multi(list(flake.occurrences))

    # One batched get each for the runs and their parent patchsets.
    failure_runs_keys = [occ.failure_run for occ in occurrences]
    patchsets_keys = [occ.failure_run.parent() for occ in occurrences]
    failure_runs = ndb.get_multi(failure_runs_keys)
    patchsets = ndb.get_multi(patchsets_keys)

    pst_timezone = pytz.timezone("US/Pacific")
    for run, patchset in zip(failure_runs, patchsets):
        run.patchset_url = patchset.getURL()
        run.builder = patchset.builder
        run.formatted_time = run.time_finished.replace(
            tzinfo=pytz.utc).astimezone(pst_timezone).strftime(
                '%m/%d/%y %I:%M %p')

    # Do simple sorting to make reading easier.
    failure_runs = sorted(failure_runs, key=RunsSortFunction)

    values = {
        'flake': flake,
        'failure_runs': failure_runs,
        'bug_friendly': bug_friendly,
        'time_now': datetime.datetime.now(),
    }
    return template.render('templates/all_flake_occurrences.html', values)
def getConferencesToAttend(self, request):
    """Get list of conferences that user has registered for."""
    prof = self._getProfileFromUser()  # get user Profile

    # Decode the websafe keys stored on the profile and fetch all the
    # conferences in a single batched get.
    attend_keys = [ndb.Key(urlsafe=wsck)
                   for wsck in prof.conferenceKeysToAttend]
    conferences = ndb.get_multi(attend_keys)

    # One more batched get for the organizers, then index their display
    # names by profile id.
    organiser_keys = [ndb.Key(Profile, c.organizerUserId)
                      for c in conferences]
    names = {p.key.id(): p.displayName
             for p in ndb.get_multi(organiser_keys)}

    # One ConferenceForm per conference, with its organizer's name.
    return ConferenceForms(
        items=[self._copyConferenceToForm(c, names[c.organizerUserId])
               for c in conferences]
    )
def build_session(self): ''' Build an initial session object and create an SID ''' ## build a session no matter what, yo self.session = self.get_session() if self.session.get('ukey', None) is None: u = self.api.users.get_current_user() if u is not None: ## we have an authenticated user self.session['authenticated'] = True ## sometimes this returns none (when federated identity auth is enabled), but then the email is a persistent token self.session['uid'] = u.email() user, permissions = tuple(ndb.get_multi([ndb.Key(User, self.session['uid']), ndb.Key(Permissions, 'global', parent=ndb.Key(User, self.session['uid']))])) if user is None: self.session['redirect'] = self.url_for('auth/register') self.session['returnto'] = self.request.url self.session['register'] = True else: self.user = user self.permissions = permissions self.session['ukey'] = user.key.urlsafe() self.session['email'] = u.email() self.session['nickname'] = u.nickname() if self.api.users.is_current_user_admin(): self.session['root'] = True else: self.user = ndb.Key(urlsafe=self.session['ukey']) self.permissions = ndb.Key(Permissions, 'global', parent=ndb.Key(User, self.session['uid'])) self.user, self.permissions = tuple(ndb.get_multi([self.user, self.permissions])) return self.session
def getConferencesToAttend(self, request):
    """Get list of conferences that user has registered for.

    Resolves the websafe keys on the user's Profile, batch-fetches the
    conferences and their organizers' profiles, and returns the forms
    with the organizer display names filled in.
    """
    prof = self._getProfileFromUser()
    # Rebuild ndb keys from the websafe strings stored on the profile.
    conf_keys = [ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend]
    # Use get_multi(array_of_keys) to fetch all keys at once.
    # Do not fetch them one by one!
    conferences = ndb.get_multi(conf_keys)

    # Batch-fetch the organizer profiles for their display names.
    organisers = [ndb.Key(Profile, conf.organizerUserId) for conf in conferences]
    profiles = ndb.get_multi(organisers)
    names = {}
    for profile in profiles:
        names[profile.key.id()] = profile.displayName

    # BUG FIX: an unreachable second return (passing "" as the display
    # name) was left after this one; it has been removed.
    return ConferenceForms(items=[self._copyConferenceToForm(conf, names[conf.organizerUserId])
                                  for conf in conferences])
def user_follows(handler):
    """Get the users followed by and following a certain users.

    Called via a route like:
        /api/user/<user_key>/follows
    """
    # Extract the websafe user key from the request path.
    path = handler.request.path
    user_key = path[len('/api/user/'):-len('/follows')]

    user_data = ndb.Key(urlsafe=user_key).get()
    if not user_data:
        return "User not found"

    # TODO make async
    following_data = ndb.get_multi(user_data.following)
    followers_data = ndb.get_multi(user_data.followers)

    # Finally ready to do the update
    return jsonify.jsonify({
        'user_data': user_data,
        'following': following_data,
        'followers': followers_data,
    })
def test_user_search(self):
    """Exercise the full-text 'users' search index: query once, change the
    matching user's e-mail, and query again.

    NOTE(review): this test prints result counts instead of asserting on
    them, so it only verifies the calls don't raise — consider adding
    assertions.
    """
    APIDB.create_user("own:" + "member", username="******", name="test", gender="m", avatar="..",
                      birthday=datetime.now(), country='Italy', city='TN', language='en',
                      picture='..', email='*****@*****.**', phone='2313213',
                      active_club=None, unique_properties=['email'])
    APIDB.create_user("own:" + "trainer", username="******", name="trainer", gender="m", avatar="..",
                      birthday=datetime.now(), country='Italy', city='TN', language='en',
                      picture='..', email='*****@*****.**', phone='2313213',
                      active_club=None, unique_properties=['email'])
    APIDB.create_user("own:" + "owner", username="******", name="owner", gender="m", avatar="..",
                      birthday=datetime.now(), country='Italy', city='TN', language='en',
                      picture='..', email='*****@*****.**', phone='2313213',
                      active_club=None, unique_properties=['email'])
    # dummy user used in tests to see what others can do
    to_change = APIDB.create_user("own:" + "dummy", username="******", name="dummy", gender="m", avatar="..",
                                  birthday=datetime.now(), country='Italy', city='TN', language='en',
                                  picture='..', email='*****@*****.**', phone='2313213',
                                  active_club=None, unique_properties=['email'])
    index = search.Index(name="users")
    query_string = "stefano"
    # ids_only: the search documents' ids are websafe ndb keys.
    query_options = search.QueryOptions(ids_only=True)
    query = search.Query(query_string=query_string, options=query_options)
    results = [Key(urlsafe=r.doc_id) for r in index.search(query)]
    print len(ndb.get_multi(results))
    # Change the dummy user's e-mail and re-run the same search.
    to_change.email = "*****@*****.**"
    to_change.put()
    query = search.Query(query_string=query_string, options=query_options)
    results = [Key(urlsafe=r.doc_id) for r in index.search(query)]
    print len(ndb.get_multi(results))
def get_public(self, **kwargs):
    """Render the public view of a single Trade.

    kwargs['trade_url'] is the trade's websafe key. On any failure —
    bad key, wrong kind, missing trade, render error — redirects home.
    """
    try:
        trade_url = kwargs['trade_url']
        trade_key = ndb.Key(urlsafe=trade_url)
        if trade_key.kind() != 'Trade':
            # The key decodes but names some other model kind.
            raise Exception, 'invalid key.'
        trade = trade_key.get()
        if trade is None:
            raise Exception, 'no such trade.'
        # Batch-fetch the referenced games and comments; the trade's
        # parent entity is the person offering it.
        own_games = ndb.get_multi(trade.own_keys)
        seek_games = ndb.get_multi(trade.seek_keys)
        person = trade.key.parent().get()
        comments = ndb.get_multi(trade.comment_keys)
        template_values = {
            'login': users.create_login_url(self.request.uri),
            'trade': trade,
            'own_games': own_games,
            'seek_games': seek_games,
            'person': person,
            'comments': comments,
        }
        template = JINJA_ENVIRONMENT.get_template('public/trade.html')
        self.response.write(template.render(template_values))
    except:
        # NOTE(review): bare except deliberately maps every failure to a
        # redirect, but it also hides real bugs — consider narrowing.
        self.redirect('/')
def get_user(self, user, **kwargs):
    """Render the signed-in user's paged list of recent trades.

    Each trade is annotated with its games, the trading person, and the
    minimum distance from the user's locations. Redirects to /dashboard
    on any failure.
    """
    try:
        page = kwargs.get('page', 1)
        qry = models.Trade.query().order(-models.Trade.date)
        pager = ndbpager.Pager(query=qry, page=page)
        trades, cursor, more = pager.paginate(page_size=constants.PAGE_SIZE)
        my_locations = models.Location.query(ancestor=user.key)
        # Tuple per trade: (trade, owned games, sought games, trader,
        # min distance between my locations and the trader's).
        trades = [(trade,
                   ndb.get_multi(trade.own_keys),
                   ndb.get_multi(trade.seek_keys),
                   trade.key.parent().get(),
                   utils.min_dist(my_locations,
                                  models.Location.query(ancestor=trade.key.parent()))
                   ) for trade in trades]
        template_values = {
            'user': user,
            'logout': users.create_logout_url(self.request.uri),
            'trades': trades,
            'pager': pager,
        }
        template = JINJA_ENVIRONMENT.get_template('user/trades_recent.html')
        self.response.write(template.render(template_values))
    except:
        # NOTE(review): bare except maps every failure to a redirect,
        # hiding real errors — consider narrowing.
        self.redirect('/dashboard')
def _FetchLabelsAndComponents(alert_keys):
  """Fetches a list of bug labels and components for the given Alert keys."""
  labels = set(_DEFAULT_LABELS)
  components = set()
  alerts = ndb.get_multi(alert_keys)

  def _Classify(tag):
    # 'Cr-' prefixed tags name bug components; everything else is a label.
    if tag.startswith('Cr-'):
      components.add(_ComponentFromCrLabel(tag))
    else:
      labels.add(tag)

  # Collect every tag configured on the alerts' sheriffs.
  sheriff_keys = {alert.sheriff for alert in alerts}
  for sheriff in ndb.get_multi(sheriff_keys):
    for tag in sheriff.labels:
      _Classify(tag)

  if any(a.internal_only for a in alerts):
    # This is a Chrome-specific behavior, and should ideally be made
    # more general (maybe there should be a list in datastore of bug
    # labels to add for internal bugs).
    labels.add('Restrict-View-Google')

  # Add per-test label patterns for every distinct test.
  for test in {a.GetTestMetadataKey() for a in alerts}:
    for item in bug_label_patterns.GetBugLabelsForTest(test):
      _Classify(item)

  return labels, components
def test_generate_all_keys_and_add(self):
    """TagRelation pair counts: generate_all_keys yields one relation per
    ordered pair of distinct tags; add_by_keys/add increment the counts,
    and adds under a child collection also bump the top-level pairs."""
    # Generate all keys
    keys = model.TagRelation.generate_all_keys(self.tag_list1)
    self.assertEqual(len(keys), len(self.tag_list1)*(len(self.tag_list1)-1))
    # Add by keys
    model.TagRelation.add_by_keys(keys)
    dbs = ndb.get_multi(keys)
    for db in dbs:
        self.assertEqual(db.count, 1)
    # Partially add again
    model.TagRelation.add_by_keys(keys[3:5])
    dbs = ndb.get_multi(keys)
    for db in dbs[3:5]:
        # The re-added pairs are now at 2.
        self.assertEqual(db.count, 2)
    for db in dbs[6:]:
        # NOTE(review): dbs[5] is not covered by either slice — presumably
        # an intentional gap; confirm.
        self.assertEqual(db.count, 1)
    # Add a child to the same list
    keys2 = model.TagRelation.add(self.tag_list1, collection=self.child1)
    self.assertEqual(len(keys2), len(self.tag_list1)*(len(self.tag_list1)-1))
    dbs = ndb.get_multi(keys)
    for db in dbs[7:]:
        self.assertEqual(db.count, 2)
    # add a child with a new list
    keys3 = model.TagRelation.add(self.tag_list2, collection=self.child1)
    self.assertEqual(len(keys3), len(self.tag_list2)*(len(self.tag_list2)-1))
    dbs = ndb.get_multi(keys3)
    for db in dbs:
        self.assertEqual(db.count, 1)
    # check also toplevels
    top_keys3 = model.TagRelation.generate_all_keys(self.tag_list2)
    top_dbs = ndb.get_multi(top_keys3)
    for db in top_dbs:
        self.assertEqual(db.count, 1)
def get(self):
  """Render the restricted bots-list page.

  Query parameters: limit (page size), cursor (websafe datastore cursor),
  sort_by (one of ACCEPTABLE_BOTS_SORTS; '-' prefix means descending).
  Counts and the page fetch run as concurrent ndb futures.
  """
  limit = int(self.request.get('limit', 100))
  cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
  sort_by = self.request.get('sort_by', '__key__')
  if sort_by not in self.ACCEPTABLE_BOTS_SORTS:
    self.abort(400, 'Invalid sort_by query parameter')
  # A leading '-' selects descending order on the named property.
  if sort_by[0] == '-':
    order = datastore_query.PropertyOrder(
        sort_by[1:], datastore_query.PropertyOrder.DESCENDING)
  else:
    order = datastore_query.PropertyOrder(
        sort_by, datastore_query.PropertyOrder.ASCENDING)
  now = utils.utcnow()
  # Bots not seen since the cutoff are counted as dead.
  cutoff = now - datetime.timedelta(
      seconds=config.settings().bot_death_timeout_secs)
  # Kick off all counts and the page fetch concurrently; results are
  # collected below after the page itself arrives.
  num_bots_busy_future = bot_management.BotInfo.query(
      bot_management.BotInfo.is_busy == True).count_async()
  num_bots_dead_future = bot_management.BotInfo.query(
      bot_management.BotInfo.last_seen_ts < cutoff).count_async()
  num_bots_quarantined_future = bot_management.BotInfo.query(
      bot_management.BotInfo.quarantined == True).count_async()
  num_bots_total_future = bot_management.BotInfo.query().count_async()
  fetch_future = bot_management.BotInfo.query().order(order).fetch_page_async(
      limit, start_cursor=cursor)
  # TODO(maruel): self.request.host_url should be the default AppEngine url
  # version and not the current one. It is only an issue when
  # version-dot-appid.appspot.com urls are used to access this page.
  version = bot_code.get_bot_version(self.request.host_url)
  bots, cursor, more = fetch_future.get_result()
  # Prefetch the tasks. We don't actually use the value here, it'll be
  # implicitly used by ndb local's cache when refetched by the html template.
  tasks = filter(None, (b.task for b in bots))
  ndb.get_multi(tasks)
  num_bots_busy = num_bots_busy_future.get_result()
  num_bots_dead = num_bots_dead_future.get_result()
  num_bots_quarantined = num_bots_quarantined_future.get_result()
  num_bots_total = num_bots_total_future.get_result()
  params = {
      'bots': bots,
      'current_version': version,
      # Only hand the client a cursor when there is more to page through.
      'cursor': cursor.urlsafe() if cursor and more else '',
      'is_admin': acl.is_admin(),
      'is_privileged_user': acl.is_privileged_user(),
      'limit': limit,
      'now': now,
      'num_bots_alive': num_bots_total - num_bots_dead,
      'num_bots_busy': num_bots_busy,
      'num_bots_dead': num_bots_dead,
      'num_bots_quarantined': num_bots_quarantined,
      'sort_by': sort_by,
      'sort_options': self.SORT_OPTIONS,
      'xsrf_token': self.generate_xsrf_token(),
  }
  self.response.write(
      template.render('swarming/restricted_botslist.html', params))
def get_cart_plus_offers_async(acct):
    """Fetch the account's cart items and up to 10 special offers using
    concurrent async queries, then warm the ndb cache with the inventory
    entities both reference.

    Returns a (cart, offers) tuple.
    """
    # Issue both queries concurrently.
    cart_future = CartItem.query(CartItem.account == acct.key).fetch_async()
    offers_future = SpecialOffer.query().fetch_async(10)
    cart = cart_future.get_result()
    offers = offers_future.get_result()

    # Batch-get every referenced inventory entity; later .get() calls
    # hit the ndb cache.
    inventory_keys = [item.inventory for item in cart]
    inventory_keys += [offer.inventory for offer in offers]
    ndb.get_multi(inventory_keys)

    return cart, offers
def _get_posts_helper(q, cursor_index_key, page_number=1, limit=POSTS_PER_PAGE):
    """Run a paged post query, then bulk-dereference each post's categories
    and primary image so the template does no per-post datastore gets.

    Returns (posts, cursor, more) as produced by ndb fetch_page.
    """
    # Note, this needs to be on a single line
    cursor_index = memcache.get(cursor_index_key)
    if not cursor_index:
        logging.debug('No Cursor Index found in memcache... generating it')
        # No Cursor Index in memcache - build it
        cursor_index = build_index(q)
        memcache.set(cursor_index_key, cursor_index)
    # Fetch the cursor based on the page #
    # TODO: Catch index error!
    cursor = None
    if page_number > 1:
        urlsafe_cursor = cursor_index['cursors'][(page_number - 1)]
        cursor = Cursor(urlsafe=urlsafe_cursor)
    # Run the query
    posts, cursor, more = q.fetch_page(limit, start_cursor=cursor)
    # Bulk Dereference Categories: c_map maps category key -> posts
    # referencing it, so one get_multi covers every post.
    c_map = {}
    for p in posts:
        setattr(p, 'category_entities', [])
        if p.categories:
            for c in p.categories:
                if not c_map.get(c, None):
                    c_map[c] = []
                c_map[c].append(p)
    categories = ndb.get_multi(c_map.keys())
    for category in categories:
        p_list = c_map.get(category.key, None)
        if p_list and category:
            for p in p_list:
                p.category_entities.append(category)
    # Finally, bulk dereference the primary image (image key -> posts).
    p_map = {}
    for p in posts:
        setattr(p, 'get_primary_media_image', None)
        if p.primary_media_image:
            if not p_map.get(p.primary_media_image, None):
                p_map[p.primary_media_image] = []
            p_map[p.primary_media_image].append(p)
    images = ndb.get_multi(p_map.keys())
    for image in images:
        p_list = p_map.get(image.key, None)
        if p_list and image:
            for p in p_list:
                p.get_primary_media_image = image
    return posts, cursor, more
def to_entities(results):
    """
    Transform a list of search results into ndb.Model entities by using
    the document id as the urlsafe form of the key.
    """
    def _lookup(docs):
        # Each doc_id is a websafe key; drop entries that resolve to None.
        keys = [ndb.Key(urlsafe=doc.doc_id) for doc in docs]
        return [entity for entity in ndb.get_multi(keys) if entity]

    if isinstance(results, SearchResults):
        # Preserve the wrapper's error and paging token.
        return SearchResults(items=_lookup(results.items),
                             error=results.error,
                             next_page_token=results.next_page_token)
    return _lookup(results)
def post(self):
    """Create pulls for the requested issues for the current user.

    Request body: JSON {"issues": [issue_id, ...]}.
    Responds with JSON results: added / skipped (already pulled) /
    failed (issue unknown).
    """
    user_key = users.user_key(self.user)
    request = json.loads(self.request.body)
    issue_ids = request['issues']
    results = defaultdict(list)
    query = issues.Issue.query(
        issues.Issue.identifier.IN(
            [int(identifier) for identifier in issue_ids]
        )
    )
    records = query.fetch()
    # Map issue id -> Issue entity for quick lookup below.
    # NOTE(review): keys here come from record.key.id() while lookups use
    # the raw request ids — confirm both are the same type (int vs str).
    issue_dict = {record.key.id(): record for record in records}
    candidates = []
    for issue_id in issue_ids:
        issue = issue_dict.get(issue_id)
        if issue:
            try:
                pull_key = pulls.pull_key(
                    issue, user=user_key, create=False)
                candidates.append((issue.key, pull_key))
            except pulls.NoSuchIssue as error:
                logging.info(
                    'Unable to add pull, issue %s/%r not found',
                    issue_id, issue
                )
                results['failed'].append(issue_id)
        else:
            logging.info(
                'Unable to add pull, issue %s/%r not found',
                issue_id, issue)
            results['failed'].append(issue_id)
    # prefetch for efficiency: pull_key.get() below hits the ndb cache
    ndb.get_multi(pull for issue, pull in candidates)
    new_pulls = []
    for issue_key, pull_key in candidates:
        if pull_key.get():
            logging.info(
                'Unable to add pull, issue %s already pulled',
                issue_key.id()
            )
            # Already exists
            results['skipped'].append(pull_key.id())
        else:
            new_pulls.append(pulls.Pull(
                key=pull_key,
                issue=issue_key,
                read=False,
            ))
            results['added'].append(pull_key.id())
    ndb.put_multi(new_pulls)
    response = {
        'status': 200,
        'results': results
    }
    self.response.write(json.dumps(response))
def test_delete(self):
    """DeleteCommand removes a single entity, and a batch when given
    several keys at once."""
    # Single entity round-trip: saved, visible, deleted, gone.
    single = mommy.save_one(ModelStub)
    self.assertIsNotNone(single.key.get())
    DeleteCommand(single.key).execute()
    self.assertIsNone(single.key.get())

    # Batch: three entities deleted with one command invocation.
    batch = [mommy.save_one(ModelStub) for _ in range(3)]
    batch_keys = [entity.key for entity in batch]
    self.assertListEqual(batch, ndb.get_multi(batch_keys))
    DeleteCommand(*batch_keys).execute()
    self.assertListEqual([None, None, None], ndb.get_multi(batch_keys))
def get(self, when):
    """Enqueue a ranking-fetch task for each official event.

    'now' selects today's events; otherwise `when` is interpreted as a
    year. Renders a summary page unless running inside the task queue.
    """
    if when == "now":
        # Today's events, restricted to official ones.
        events = EventHelper.getEventsWithinADay()
        events = [event for event in events if event.official]
    else:
        # All official events of the requested year, fetched in one batch.
        event_keys = Event.query(Event.official == True).filter(
            Event.year == int(when)).fetch(500, keys_only=True)
        events = ndb.get_multi(event_keys)

    for event in events:
        taskqueue.add(
            queue_name='datafeed',
            url='/tasks/get/fmsapi_event_rankings/' + event.key_name,
            method='GET')

    template_values = {
        'events': events,
    }
    # Only write out if not in taskqueue
    if 'X-Appengine-Taskname' not in self.request.headers:
        path = os.path.join(
            os.path.dirname(__file__),
            '../templates/datafeeds/usfirst_event_rankings_enqueue.html')
        self.response.out.write(template.render(path, template_values))
def getConferencesByPopularity(self, request):
    """Return all conferences ordered by attendee count, most popular first.

    Raises:
        endpoints.NotFoundException: if no conferences exist at all.
    """
    conf = Conference.query().fetch()
    if not conf:
        # Fix: the old message referenced request.websafeConferenceKey,
        # which this endpoint never uses — it queries ALL conferences.
        raise endpoints.NotFoundException('No conferences found')
    # Count registrants per conference (one count() query each).
    conf_list = []
    for c in conf:
        count = Profile.query().filter(
            Profile.conferenceKeysToAttend == c.key.urlsafe()).count()
        conf_list.append({'conf': c, 'count': count})
    conf_list.sort(key=lambda entry: entry['count'], reverse=True)
    # need to fetch organiser displayName from profiles
    # get all keys and use get_multi for speed
    organisers = [ndb.Key(Profile, c.organizerUserId) for c in conf]
    profiles = ndb.get_multi(organisers)
    # put display names in a dict for easier fetching
    names = {}
    for profile in profiles:
        if profile is not None:  # guard: organiser profile may be missing
            names[profile.key.id()] = profile.displayName
    # return individual ConferenceForm object per Conference;
    # fall back to '' when the organiser has no profile.
    return ConferenceForms(
        items=[self._copyConferenceToForm(
                   c['conf'], names.get(c['conf'].organizerUserId, ''))
               for c in conf_list])
def getConferencesToAttend(self, request): """Get list of conferences that user has registered for.""" # TODO: user = endpoints.get_current_user() if not user: raise endpoints.UnauthorizedException('Authorization required') # step 1: get user profile # make profile key prof = self._getProfileFromUser() # step 2: get conferenceKeysToAttend from profile. conf_keys = [ ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend ] conferences = ndb.get_multi(conf_keys) # Do not fetch them one by one! # return set of ConferenceForm objects per Conference return ConferenceForms(items=[ self._copyConferenceToForm(conf, "") for conf in conferences ])
def get(self):
    """Renders the team-admin redemption page for the current account."""
    self._require_registration()
    account_key = self.user_bundle.account.key
    existing_access = TeamAdminAccess.query(
        TeamAdminAccess.account == account_key).fetch()
    # Resolve all referenced teams in a single batch RPC.
    team_keys = [
        ndb.Key(Team, "frc{}".format(access.team_number))
        for access in existing_access
    ]
    team_num_to_team = {
        team.team_number: team for team in ndb.get_multi(team_keys)
    }
    self.template_values.update({
        "existing_access": existing_access,
        "status": self.request.get("status"),
        "team": self.request.get("team"),
        "teams": team_num_to_team,
    })
    self.response.out.write(
        jinja2_engine.render('team_admin_redeem.html', self.template_values))
def consolidate_blocks(identifier, type_, depth):
    """Merges groups of BLOCK_SIZE measurement blocks into larger blocks.

    Finds all blocks of size BLOCK_SIZE**depth for the given identifier/type,
    and for each full group of BLOCK_SIZE of them (ordered by first timestamp)
    writes one combined block of size BLOCK_SIZE**(depth+1), deleting the
    originals as it goes.
    """
    blocks = MeasurementBlock.query(
        MeasurementBlock.identifier == identifier,
        MeasurementBlock.type == type_,
        MeasurementBlock.count == BLOCK_SIZE ** depth).order(
            MeasurementBlock.first).fetch(keys_only=True)
    logging.info('There are %d blocks at size %d.' % (len(blocks), BLOCK_SIZE ** depth))
    i = 0  # running index into the fetched blocks across groups
    if len(blocks) >= BLOCK_SIZE:
        # Resolve the keys into entities in one batch RPC.
        blocks = ndb.get_multi(blocks)
        for blocknr in range(len(blocks) // BLOCK_SIZE):
            blockdata = []
            # Sentinels: any real timestamp is below/above these.
            first, last = datetime.datetime(2999, 12, 31), datetime.datetime(1000, 1, 1)
            for bnr in range(BLOCK_SIZE):
                block = blocks[i]
                i += 1
                first = min(first, block.first)
                last = max(last, block.last)
                blockdata.extend(json.loads(block.values))
                # Delete the source block once its data is absorbed.
                block.key.delete()
            metablock = MeasurementBlock(identifier=identifier, type=type_,
                                         count=BLOCK_SIZE ** (depth + 1),
                                         first=first, last=last,
                                         values=json.dumps(blockdata))
            metablock.put()
    return "Ok."
def get(self):
    """Renders the comment template for the image id given in 'im'.

    Computes two capped comment counts (5 and 10) used by the template.
    """
    # Fix: 'lengthofc1' was a bare expression (NameError at runtime);
    # it must be initialised like 'lengthofc'.
    n = list(Comnts.query().filter(
        Comnts.imageid == self.request.get('im')).fetch(keys_only=True))
    # Batch-resolve comment keys, then sort newest-first.
    allcom = ndb.get_multi(n)
    allcom.sort(reverse=True)
    # Cap the counts with min() instead of if/else ladders.
    lengthofc = min(len(allcom), 5)
    lengthofc1 = min(len(allcom), 10)
    template_values = {
        'lengthofc1': lengthofc1,
        'lengthofc': lengthofc,
        'allcom': allcom,
        'im': self.request.get('im')
    }
    template = JINJA_ENVIRONMENT.get_template('coment2.html')
    self.response.write(template.render(template_values))
def testTriage_GroupTriaged_InfAnomaly(self):
    """A triaged group containing an infinite anomaly mentions 'inf' in the comment."""
    # median_before_anomaly=0 produces an infinite relative change.
    anomalies = [self._AddAnomaly(median_before_anomaly=0), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff', auto_triage_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
    )
    w.Process(
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies),
            issue=self._issue_tracker.issue,
        ))
    # The comment posted to the issue must call out the infinite regression.
    self.assertIn('inf', self._issue_tracker.add_comment_args[1])
def searchSub(self, word):
    """Returns anagrams of all sub-words (length > 2) of 'word' for the current user.

    Builds the set of distinct sorted letter-combinations of the word, looks
    each up as an 'Anagram' key namespaced by user id, and concatenates the
    stored anagram lists.
    """
    user = MyUser().getCurrentUser()
    userid = user.user_id()
    o = ''.join(sorted(word.lower()))
    all_combinations = self.words(o)
    # Fix: dedup via a set (O(1) membership) instead of scanning a list
    # (O(n) per check); insertion order of the keys is preserved.
    seen = set()
    searchArray = []
    for w in all_combinations:
        if len(w) > 2:
            n = ''.join(sorted(''.join(w).lower()))
            if n not in seen:
                seen.add(n)
                searchArray.append(n)
    search = ndb.get_multi(
        [ndb.Key('Anagram', userid + item) for item in searchArray])
    result = []
    for s in search:
        if s is not None:  # key may not exist for this combination
            result.extend(s.anagrams)
    return result
def get(self):
    """Renders the UI for listing alerts.

    Request parameters:
        sheriff: The name of a sheriff (optional).
        triaged: Whether to include triaged alerts (i.e. with a bug ID).
        improvements: Whether to include improvement anomalies.

    Outputs:
        A page displaying an overview table of all alerts.
    """
    sheriff_name = self.request.get('sheriff', 'Chromium Perf Sheriff')
    sheriff_key = ndb.Key('Sheriff', sheriff_name)
    include_improvements = bool(self.request.get('improvements'))
    include_triaged = bool(self.request.get('triaged'))
    anomaly_keys = _FetchAnomalyKeys(sheriff_key, include_improvements,
                                     include_triaged)
    # Only resolve the first _MAX_ANOMALIES_TO_SHOW keys; the total count
    # below still reflects all matching keys.
    anomalies = ndb.get_multi(anomaly_keys[:_MAX_ANOMALIES_TO_SHOW])
    stoppage_alerts = _FetchStoppageAlerts(sheriff_key, include_triaged)
    self.RenderHtml(
        'alerts.html', {
            'anomaly_list': json.dumps(AnomalyDicts(anomalies)),
            'stoppage_alert_list':
                json.dumps(StoppageAlertDicts(stoppage_alerts)),
            'have_anomalies': bool(anomalies),
            'have_stoppage_alerts': bool(stoppage_alerts),
            'sheriff_list': json.dumps(_GetSheriffList()),
            'num_anomalies': len(anomaly_keys),
        })
def run():
    """Marks a task run as BOT_DIED; intended to run inside a transaction.

    Closure over run_result_key, result_summary_key, bot_id, packed,
    server_version, now and request from the enclosing scope.

    Returns:
        (run_result, None) on success, (None, error_message) on ownership
        mismatch, or (None, None) when the task is already BOT_DIED.
    """
    # Fetch both entities in one batch so the transaction sees a
    # consistent pair.
    run_result, result_summary = ndb.get_multi(
        (run_result_key, result_summary_key))
    if bot_id and run_result.bot_id != bot_id:
        # Another bot owns this task; refuse the kill.
        return None, 'Bot %s sent task kill for task %s owned by bot %s' % (
            bot_id, packed, run_result.bot_id)
    if run_result.state == task_result.State.BOT_DIED:
        # Ignore this failure.
        return None, None
    run_result.signal_server_version(server_version)
    run_result.state = task_result.State.BOT_DIED
    run_result.internal_failure = True
    run_result.abandoned_ts = now
    run_result.modified_ts = now
    # Keep the summary in sync with the updated run result.
    result_summary.set_from_run_result(run_result, None)
    # Start both puts asynchronously, enqueue the pubsub notification,
    # then block on the puts before returning.
    futures = ndb.put_multi_async((run_result, result_summary))
    _maybe_pubsub_notify_via_tq(result_summary, request)
    for f in futures:
        f.check_success()
    return run_result, None
def GetMultiEntityRecords(self):
    """Gets records store in multiple entities.

    Combines and deserializes the data stored in QuickLogPart for this log.

    Returns:
        List of Record object, or None if the key/size is invalid or the
        serialized data cannot be unpickled.
    """
    if not self.key.id():
        logging.error('Key id does not exist.')
        return None
    if self.size < 1:
        return None
    string_id = self.key.string_id()
    # Part entities are children of this log, keyed 1..size.
    log_part_keys = [ndb.Key('QuickLog', string_id, 'QuickLogPart', i+1)
                     for i in xrange(self.size)]
    log_parts = ndb.get_multi(log_part_keys)
    # Skip missing parts; concatenate the rest back into one pickle blob.
    serialized = ''.join(l.value for l in log_parts if l is not None)
    try:
        return pickle.loads(serialized)
    except ImportError:
        # A class referenced by the pickled data no longer exists.
        logging.error('Failed to load QuickLog "%s".', string_id)
        return None
def _AssociateAlertsWithBug(self, bug_id, urlsafe_keys, is_confirmed): """Sets the bug ID for a set of alerts. This is done after the user enters and submits a bug ID. Args: bug_id: Bug ID number, as a string. urlsafe_keys: Comma-separated Alert keys in urlsafe format. is_confirmed: Whether the user has confirmed that they really want to associate the alerts with a bug even if it appears that the revision ranges don't overlap. """ # Validate bug ID. try: bug_id = int(bug_id) except ValueError: self.RenderHtml('bug_result.html', {'error': 'Invalid bug ID "%s".' % str(bug_id)}) return # Get Anomaly entities and related TestMetadata entities. alert_keys = [ndb.Key(urlsafe=k) for k in urlsafe_keys.split(',')] alert_entities = ndb.get_multi(alert_keys) if not is_confirmed: warning_msg = self._VerifyAnomaliesOverlap(alert_entities, bug_id) if warning_msg: self._ShowConfirmDialog('associate_alerts', warning_msg, { 'bug_id': bug_id, 'keys': urlsafe_keys, }) return AssociateAlerts(bug_id, alert_entities) self.RenderHtml('bug_result.html', {'bug_id': bug_id})
def _ShowCommentDialog(self, urlsafe_keys): """Sends a HTML page with a form for selecting a bug number. Args: urlsafe_keys: Comma-separated Alert keys in urlsafe format. """ # Fetch metadata about recent bugs. response = urlfetch.fetch(_RECENT_BUGS_QUERY) if response.status_code == 200: bugs = json.loads(response.content) bugs = bugs.get('items', []) if bugs else [] else: logging.error( 'Couldn\'t fetch recent bugs from www.googleapis.com.') bugs = [] # Get information about Alert entities and related Test entities, # so that they can be compared with recent bugs. alert_keys = [ndb.Key(urlsafe=k) for k in urlsafe_keys.split(',')] alert_entities = ndb.get_multi(alert_keys) ranges = [(a.start_revision, a.end_revision) for a in alert_entities] # Mark bugs that have overlapping revision ranges as potentially relevant. # On the alerts page, alerts are only highlighted if the revision range # overlaps with the revision ranges for all of the selected alerts; the # same thing is done here. for bug in bugs: this_range = _RevisionRangeFromSummary(bug['summary']) bug['relevant'] = all( _RangesOverlap(this_range, r) for r in ranges) self.RenderHtml('bug_result.html', { 'bug_associate_form': True, 'keys': urlsafe_keys, 'bugs': bugs })
def get(self):
    """Drains the 'views' pull queue, tallying page views per blog post.

    Each task payload is a BlogPost id; counts are accumulated and applied
    to the posts in batches until the queue is empty. Transient queue errors
    are logged and the loop retries.
    """
    q = taskqueue.Queue('views')
    while True:
        try:
            tasks = q.lease_tasks(60, 1000)
            if not tasks:
                return
            # Count occurrences of each post id among the leased tasks.
            tallies = {}
            for t in tasks:
                tallies[t.payload] = tallies.get(t.payload, 0) + 1
            # Fix: renamed loop variable 'object', which shadowed the
            # builtin of the same name.
            posts = ndb.get_multi(
                [ndb.Key(BlogPost, int(k)) for k in tallies])
            for post in posts:
                post.views += tallies[str(post.key.id())]
            ndb.put_multi(posts)
            q.delete_tasks(tasks)
        except google.appengine.api.taskqueue.TransientError:
            # Best-effort: log and retry the lease on the next iteration.
            print("google.appengine.api.taskqueue.TransientError")
        except google.appengine.runtime.apiproxy_errors.DeadlineExceededError:
            print(
                "google.appengine.runtime.apiproxy_errors.DeadlineExceededError"
            )
def get_stats(handler, resolution, now, num_items, as_dict):
    """Wrapper calls that returns items for the specified resolution.

    Arguments:
    - handler: Instance of StatisticsFramework.
    - resolution: One of 'days', 'hours' or 'minutes'
    - now: datetime.datetime or None.
    - num_items: Maximum number of items to return ending at 'now'.
    - as_dict: When True, preprocess the entities to convert them to_dict().
               If False, returns the raw objects that needs to be handled
               manually.
    """
    key_builders = {
        'days': _get_days_keys,
        'hours': _get_hours_keys,
        'minutes': _get_minutes_keys,
    }
    keys = key_builders[resolution](handler, now, num_items)
    if not as_dict:
        # Automatically skip missing entities.
        return [entity for entity in ndb.get_multi(keys) if entity]
    return [
        future.get_result()
        for future in _get_snapshot_as_dict_future(keys)
        if future.get_result()
    ]
def test_success(self, taskqueue, users_len=3):
    """Migrating PswdUsers produces equivalent MainUsers and removes the originals."""
    # NOTE(review): the email literal below appears redacted/sanitized in
    # this copy of the source ('%' with no format specifier) — confirm
    # against the original repository.
    pswd_users_emails = [mommy.save_one(PswdUserEmail, email='*****@*****.**' % i)
                         for i in range(users_len)]
    pswd_users = [mommy.save_one(PswdUser) for i in range(users_len)]
    keys = [us.key for us in pswd_users]
    for e, u in izip(pswd_users_emails, pswd_users):
        CreateEmailUser(e, u)()

    # Replace taskqueue.add so enqueued continuation tasks run synchronously.
    def task_add_mock(url):
        cursor = router.to_handler(url)[1][0]
        tasks(cursor)

    taskqueue.add = task_add_mock
    tasks()
    # All PswdUser entities should have been consumed by the migration.
    self.assertIsNone(PswdUser.query().get())
    main_users = ndb.get_multi(keys)
    for user, pswd_user_email, pswd_user in izip(main_users,
                                                 pswd_users_emails,
                                                 pswd_users):
        self.assertIsInstance(user, MainUser)
        self.assertEqual(pswd_user_email.email, user.email)
        # Name defaults to the email address.
        self.assertEqual(pswd_user_email.email, user.name)
        # Creation timestamp is carried over from the source user.
        self.assertEqual(pswd_user.creation, user.creation)
        self.assertListEqual([''], user.groups)
def SpecialQuery1(self, request):
    """Return all sessions user has in wishlist but not registered to attend"""
    prof = self._getProfileFromUser()  # get user Profile
    # get session keys in user's wishlist
    sess_keys_wishlist = prof.sessionWishlist
    # get session keys of all conferences user is registered to
    conf_keys = [ndb.Key(urlsafe=ck) for ck in prof.conferenceKeysToAttend]
    confs = ndb.get_multi(conf_keys)
    sess_keys_registered = []
    for conf in confs:
        conf_id = conf.key.id()
        ancestor_key = ndb.Key(Conference, conf_id)
        sessions = Session.query(ancestor=ancestor_key)
        for session in sessions:
            sess_keys_registered.append(session.key.urlsafe())
    # wishlist keys not belonging to a registered conference
    sess_keys_unreg = [key for key in sess_keys_wishlist
                       if key not in sess_keys_registered]
    # convert urlsafe keys into ndb keys
    sess_keys = [ndb.Key(urlsafe=value) for value in sess_keys_unreg]
    # Fix: an IN() filter with an empty list raises on production
    # (the original comment acknowledged this) — return early instead.
    if not sess_keys:
        return SessionForms(items=[])
    sessions = Session.query(ndb.OR(Session.key.IN(sess_keys)))
    return SessionForms(
        items=[self._copySessionToForm(sess) for sess in sessions])
def requests(self, request):
    """Returns tasks requests based on the filters.

    This endpoint is slightly slower than 'list'. Use 'list' or 'count'
    when possible.
    """
    logging.debug('%s', request)
    if request.include_performance_stats:
        raise endpoints.BadRequestException(
            'Can\'t set include_performance_stats for tasks/list')
    now = utils.utcnow()
    try:
        # Get the TaskResultSummary keys, then fetch the corresponding
        # TaskRequest entities.
        keys, cursor = datastore_utils.fetch_page(
            self._query_from_request(request),
            request.limit, request.cursor, keys_only=True)
        items = ndb.get_multi(
            task_pack.result_summary_key_to_request_key(k) for k in keys)
    except ValueError as e:
        raise endpoints.BadRequestException(
            'Inappropriate filter for tasks/requests: %s' % e)
    except datastore_errors.NeedIndexError as e:
        # The requested filter combination needs a composite index.
        logging.error('%s', e)
        raise endpoints.BadRequestException(
            'Requires new index, ask admin to create one.')
    except datastore_errors.BadArgumentError as e:
        logging.error('%s', e)
        raise endpoints.BadRequestException(
            'This combination is unsupported, sorry.')
    return swarming_rpcs.TaskRequests(
        cursor=cursor,
        items=[message_conversion.task_request_to_rpc(i) for i in items],
        now=now)
def testBisect_ExplicitOptOut(self):
    """No bisect job is started when the issue carries the BisectOptOut label."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    # Opt the issue out of auto-bisection via its label.
    self._issue_tracker.issue.update({
        'state': 'open',
        'labels': self._issue_tracker.issue.get('labels') +
                  ['Chromeperf-Auto-BisectOptOut']
    })
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(name='sheriff',
                                      auto_triage_enable=True,
                                      auto_bisect_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        pinpoint=self._pinpoint,
        crrev=self._crrev,
    )
    self.assertIn('Chromeperf-Auto-BisectOptOut',
                  self._issue_tracker.issue.get('labels'))
    w.Process(update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
        now=datetime.datetime.utcnow(),
        anomalies=ndb.get_multi(anomalies),
        issue=self._issue_tracker.issue,
    ))
    # Despite auto_bisect_enable, the opt-out label suppresses the job.
    self.assertIsNone(self._pinpoint.new_job_request)
def queryConferences(self, request):
    """Query for conferences.

    Runs the query built from the request filters, resolves each
    conference's organiser display name in one batch fetch, and returns
    one ConferenceForm per conference.
    """
    # Fix: removed leftover debug print statements that dumped every
    # conference/profile to stdout on each request.
    conferences = self._getQuery(request)
    # need to fetch organiser displayName from profiles
    # get all keys and use get_multi for speed
    organisers = [ndb.Key(Profile, conf.organizerUserId)
                  for conf in conferences]
    profiles = ndb.get_multi(organisers)
    # put display names in a dict for easier fetching
    names = {}
    for profile in profiles:
        names[profile.key.id()] = profile.displayName
    # return individual ConferenceForm object per Conference
    return ConferenceForms(
        items=[self._copyConferenceToForm(conf,
                                          names[conf.organizerUserId])
               for conf in conferences])
def list_establishments_from_bounds(self, request): slippyTiles = get_tiles(ne_lat=request.bounds.ne.lat, ne_lon=request.bounds.ne.lon, sw_lat=request.bounds.sw.lat, sw_lon=request.bounds.sw.lon) # establishments_model is a Query object establishments_model = models.Establishment.query( models.Establishment.location.slippyPosition.IN(slippyTiles)) establishments = [] if establishments_model: keys = establishments_model.fetch(100, keys_only=True) if keys: # keys is not empty for establishment in ndb.get_multi(keys): establishments.append( Establishment( categories=establishment.categories, contact=Contact(formattedPhone=establishment. contact.formattedPhone, phone=establishment.contact.phone), location=Location( address=establishment.location.address, cc=establishment.location.cc, cityOrMunicipality=establishment.location. cityOrMunicipality, country=establishment.location.country, position=Position( lat=establishment.location.position.lat, lon=establishment.location.position.lon), slippyPosition=establishment.location. slippyPosition), name=establishment.name, url=establishment.url)) return ListEstablishmentsFromBoundsResponse( establishments=establishments)
def GetCounterCount(name):
    """Sums a cumulative value from all the shard counts for the given name.

    Args:
        name: The name of the counter.

    Returns:
        Integer; the cumulative count of all sharded counters for the given
        counter name.
    """
    # Serve from memcache when a cached total exists.
    cached_total = memcache.get(key=name)
    if cached_total is not None:
        return cached_total
    # Otherwise sum every shard, skipping missing entities.
    all_keys = CounterShardConfig.AllKeys(name)
    total = sum(shard.count
                for shard in ndb.get_multi(all_keys)
                if shard is not None)
    if memcache.add(key=name, value=total,
                    time=_COUNTER_MEMCACHE_EXPIRATION_S):
        return total
    raise recall_errors.MessageRecallCounterError(
        'Unexpected problem adding to memcache: %s.' % name)
def testBisect_GroupTriaged_PinpointFailed(self):
    """A failed Pinpoint request still marks the group bisected and flags the issue."""
    anomalies = [self._AddAnomaly(), self._AddAnomaly()]
    group = self._AddAlertGroup(
        anomalies[0],
        issue=self._issue_tracker.issue,
        status=alert_group.AlertGroup.Status.triaged,
    )
    self._issue_tracker.issue.update({
        'state': 'open',
    })
    # Make every new Pinpoint job request fail.
    self._pinpoint.SetFailure()
    self._sheriff_config.patterns = {
        '*': [
            subscription.Subscription(
                name='sheriff',
                auto_triage_enable=True,
                auto_bisect_enable=True)
        ],
    }
    w = alert_group_workflow.AlertGroupWorkflow(
        group.get(),
        sheriff_config=self._sheriff_config,
        issue_tracker=self._issue_tracker,
        pinpoint=self._pinpoint,
        crrev=self._crrev,
    )
    w.Process(
        update=alert_group_workflow.AlertGroupWorkflow.GroupUpdate(
            now=datetime.datetime.utcnow(),
            anomalies=ndb.get_multi(anomalies),
            issue=self._issue_tracker.issue,
        ))
    # Status advances to bisected even though no job id was recorded.
    self.assertEqual(alert_group.AlertGroup.Status.bisected,
                     group.get().status)
    self.assertEqual([], group.get().bisection_ids)
    # The failure is surfaced to humans via the NeedsAttention label.
    self.assertEqual(['Chromeperf-Auto-NeedsAttention'],
                     self._issue_tracker.add_comment_kwargs['labels'])
def attach_tags(self, package_name, instance_id, tags, caller, now=None):
    """Adds a bunch of tags to an existing package instance.

    Idempotent. Skips existing tags. Package instance must exist and must
    have all processors successfully finished.

    Args:
        package_name: name of the package, e.g. 'infra/tools/cipd'.
        instance_id: identifier of the package instance (SHA1 of package
            file).
        tags: list of strings with tags to attach.
        caller: auth.Identity that issued the request.
        now: datetime when the request was made (or None for current time).

    Returns:
        {tag: corresponding InstanceTag (just created or existing one)}.
    """
    assert tags and all(is_valid_instance_tag(tag) for tag in tags), tags
    self._assert_instance_is_ready(package_name, instance_id)

    # Grab info about existing tags, register new ones.
    now = now or utils.utcnow()
    existing = ndb.get_multi(
        instance_tag_key(package_name, instance_id, tag) for tag in tags)
    # zip pairs each requested tag with its (possibly missing) entity;
    # only missing ones are created.
    to_create = [
        InstanceTag(key=instance_tag_key(package_name, instance_id, tag),
                    tag=tag,
                    registered_by=caller,
                    registered_ts=now)
        for tag, ent in zip(tags, existing) if not ent
    ]
    ndb.put_multi(to_create)

    # Merge existing and newly created entities, keyed by tag string.
    attached = {}
    attached.update({e.tag: e for e in existing if e})
    attached.update({e.tag: e for e in to_create})
    return attached
def get_conference_speakers(self, websafe_conference_key):
    """Given a conference, get the list of all speakers.

    Args:
        websafe_conference_key (string)

    Returns:
        SpeakerForms
    """
    conference = self.get_conference(websafe_conference_key)
    # All sessions of this conference are its datastore descendants.
    sessions = Session.query(ancestor=conference.key).fetch()
    # Deduplicate speaker keys, dropping empty/None values.
    speaker_keys = {s.speakerKey for s in sessions if s.speakerKey}
    return SpeakerForms(
        items=[speaker.to_form()
               for speaker in ndb.get_multi(speaker_keys)]
    )
def get(self):
    """Renders the regional championships page for the configured year."""
    # The year we want to display championships for. We should update this
    # once we're ready to start announcing the next year's championships.
    year = 2018
    championships = Championship.query(ndb.AND(Championship.year == year,
                                               Championship.region != None)).fetch()
    # Batch-fetch all referenced competitions once...
    competitions = ndb.get_multi([c.competition for c in championships])
    # ...and index them by key so the sort below does not re-fetch each one
    # with an individual .get() (this was an N+1 on the prefetched data).
    competitions_by_key = {c.key: c for c in competitions if c}
    regions = Region.query().order(Region.name).fetch()
    championships.sort(
        key=lambda championship:
            competitions_by_key[championship.competition].start_date)
    championship_regions = [championship.region
                            for championship in championships]
    regions_missing_championships = [
        region for region in regions
        if region.key not in championship_regions]
    template = JINJA_ENVIRONMENT.get_template('regional.html')
    self.response.write(template.render({
        'c': common.Common(self),
        'year': year,
        'championships': championships,
        'regions_missing_championships': regions_missing_championships,
    }))
def to_dict(self):
    """Serializes this venue, merging in payment types and schedule
    fallbacks from the owning company."""
    company = CompanyNew.get_by_id(self.company_id)
    # Resolve all payment-type entities in one batch RPC.
    payment_types = [entity.to_dict()
                     for entity in ndb.get_multi(company.payment_types)]
    result = {
        'active': self.active,
        'venueId': self.key.id(),
        'name': self.name,
        'address': self.address,
        'latitude': self.location.lat,
        'longitude': self.location.lon,
        'logoUrl': '',
        'phone': self.phone,
        'payment_types': payment_types,
        # Venue schedule wins; company schedule is the fallback.
        'schedule': self.schedule or company.schedule,
    }
    return result
def getPlayerGames(self, request):
    """
    This returns all games a player has signed up for but has not
    completed, i.e., those in the gamesInProgress list.
    """
    # Guard clauses replace the original nested if/else pyramid.
    if request.player_name is None:
        raise endpoints.NotFoundException('You did not enter player_name')
    player = Player.query(
        Player.displayName == request.player_name).get()
    if player is None:
        raise endpoints.NotFoundException(
            'No player by the name of {}'.format(request.player_name))
    game_keys = getattr(player, 'gamesInProgress')
    key_objects = [ndb.Key(urlsafe=key) for key in game_keys]
    # Use get_multi(array_of_keys) to fetch all games at once.
    games = ndb.get_multi(key_objects)
    if not games:
        raise endpoints.NotFoundException(
            'Not a single active game is this player {}'.format(
                request.player_name))
    # Fix: _copyGameToForm was referenced without calling it, so the items
    # list contained bound method objects instead of game forms.
    return GamesForm(
        items=[game._copyGameToForm() for game in games if game])
def _render(self, *args, **kw):
    """Renders the championship landing page for the current year."""
    year = datetime.datetime.now().year
    event_keys = Event.query(
        Event.year == year,
        Event.event_type_enum.IN(EventType.CMP_EVENT_TYPES)).fetch(
            100, keys_only=True)
    # Fix: was a per-key .get() loop (one serial RPC per event);
    # batch-resolve all keys at once like the rest of this codebase.
    events = ndb.get_multi(event_keys)
    self.template_values.update({
        "events": events,
        "year": year,
    })
    # Pull every named insight for the year in one batch.
    insights = ndb.get_multi([
        ndb.Key(Insight, Insight.renderKeyName(year, insight_name))
        for insight_name in Insight.INSIGHT_NAMES.values()
    ])
    for insight in insights:
        if insight:  # missing insights come back as None
            self.template_values[insight.name] = insight
    path = os.path.join(os.path.dirname(__file__),
                        '../templates/index_champs.html')
    return template.render(path, self.template_values)
def get_snapshots_metadata(cls, model_instance_id, version_numbers, allow_deleted=False): """Gets a list of dicts, each representing a model snapshot. One dict is returned for each version number in the list of version numbers requested. If any of the version numbers does not exist, an error is raised. Args: model_instance_id: str. Id of requested model. version_numbers: list(int). List of version numbers. allow_deleted: bool. If is False, an error is raised if the current model has been deleted. Default is False. Returns: list(dict). Each dict contains metadata for a particular snapshot. It has the following keys: committer_id: str. The user_id of the user who committed the change. commit_message: str. commit_cmds: list(dict). A list of commands, describing changes made in this model, should give sufficient information to reconstruct the commit. Dict always contains: cmd: str. Unique command. And then additional arguments for that command. For example: {'cmd': 'AUTO_revert_version_number' 'version_number': 4} commit_type: str. Unique identifier of commit type. Possible values are in COMMIT_TYPE_CHOICES. version_number: int. created_on_ms: float. Snapshot creation time in milliseconds since the Epoch. Raises: Exception: There is no model instance corresponding to at least one of the given version numbers. 
""" # pylint: disable=protected-access if not allow_deleted: cls.get(model_instance_id)._require_not_marked_deleted() snapshot_ids = [ cls._get_snapshot_id(model_instance_id, version_number) for version_number in version_numbers ] # pylint: enable=protected-access metadata_keys = [ ndb.Key(cls.SNAPSHOT_METADATA_CLASS, snapshot_id) for snapshot_id in snapshot_ids ] returned_models = ndb.get_multi(metadata_keys) for ind, model in enumerate(returned_models): if model is None: raise Exception( 'Invalid version number %s for model %s with id %s' % (version_numbers[ind], cls.__name__, model_instance_id)) return [{ 'committer_id': model.committer_id, 'commit_message': model.commit_message, 'commit_cmds': model.commit_cmds, 'commit_type': model.commit_type, 'version_number': version_numbers[ind], 'created_on_ms': utils.get_time_in_millisecs(model.created_on), } for (ind, model) in enumerate(returned_models)]