def test_delete_multi_with_transactional(dispose_of):
    """Regression test for issue #271

    https://github.com/googleapis/python-ndb/issues/271
    """
    count = 10

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()

    @ndb.transactional()
    def delete_them(entities):
        ndb.delete_multi([entity.key for entity in entities])

    values = list(range(count))
    keys = ndb.put_multi([SomeKind(foo=value) for value in values])
    dispose_of(*(key._key for key in keys))

    # Sanity-check the entities round-tripped before deleting them.
    fetched = ndb.get_multi(keys)
    assert [entity.foo for entity in fetched] == values

    # The transactional delete returns nothing and removes every entity.
    assert delete_them(fetched) is None
    assert ndb.get_multi(keys) == [None] * count
def post(self):
    """Mark every entity named by a 'key' request parameter as complete."""
    urlsafe_keys = self.request.params.getall('key')
    entities = ndb.get_multi(ndb.Key(urlsafe=value) for value in urlsafe_keys)
    for entity in entities:
        entity.status = 'complete'
    # Persist the whole batch in one round trip, then bounce back to admin.
    ndb.put_multi(entities)
    self.redirect('/admin/responses')
def _get_bugs(bug_ids, to_response=bug_to_response):
    """Get bugs from bug ids."""
    keys = [ndb.Key(osv.Bug, bug_id) for bug_id in bug_ids]
    responses = []
    for bug in ndb.get_multi(keys):
        # Skip ids with no entity and bugs that never finished processing.
        if bug and bug.status == osv.BugStatus.PROCESSED:
            responses.append(to_response(bug))
    return responses
def regional():
    """Render the regional championships page for the current year.

    Lists this year's championships that have a region set, plus the regions
    that do not yet have a championship.
    """
    with client.context():
        year = datetime.date.today().year
        championships = Championship.query(
            ndb.AND(Championship.year == year,
                    Championship.region != None)).fetch()
        # Prefetch all competitions in one batch so the .get() calls inside
        # the sort key below are served from ndb's cache rather than issuing
        # one datastore round trip per championship.  (The return value was
        # previously bound to an unused local; the call is kept for its
        # cache-warming effect.  An unused `State.query().fetch()` was
        # removed outright.)
        ndb.get_multi([c.competition for c in championships])
        regions = Region.query().order(Region.name).fetch()
        championships.sort(
            key=lambda championship: championship.competition.get().start_date)
        championship_regions = [
            championship.region for championship in championships
        ]
        regions_missing_championships = [
            region for region in regions
            if region.key not in championship_regions
        ]
        return render_template(
            'regional.html',
            c=common.Common(wca_disclaimer=True),
            year=year,
            championships=championships,
            regions_missing_championships=regions_missing_championships)
def mark_complete():
    """Mark every entity named by a 'key' request value as complete."""
    urlsafe_keys = request.values.getlist('key')
    entities = ndb.get_multi(ndb.Key(urlsafe=value) for value in urlsafe_keys)
    for entity in entities:
        entity.status = 'complete'
    # One batched write, then redirect back to the admin listing.
    ndb.put_multi(entities)
    return util.redirect('/admin/responses')
def bulk_dereference(entities: List[ArticleEntity]) -> List[ArticleEntity]:
    """Resolve each article's primary image reference with one batched get."""
    # Step 1: Collect the distinct image keys referenced by the articles,
    # resetting the legacy image attribute on every entity first.
    image_by_key = {}
    return_entities = []
    for entity in entities:
        # Legacy image - it is a ndb key property
        setattr(entity, LEGACY_IMAGE_PROP, None)
        if entity.primary_media_image:
            image_by_key[entity.primary_media_image] = None

    # Step 2: Fetch every referenced image in a single round trip.
    for image in ndb.get_multi(image_by_key.keys()):
        if image:
            image_by_key[image.key] = image

    # Step 3: Attach each dereferenced image back onto its article.
    for entity in entities:
        if entity.primary_media_image:
            resolved = image_by_key.get(entity.primary_media_image, None)
            setattr(entity, LEGACY_IMAGE_PROP, resolved)
        return_entities.append(entity)
    return return_entities
def fetch_multiple_entities_by_ids_and_models(
        ids_and_models  # type: List[Tuple[Text, List[Text]]]
):
    # type: (...) -> List[List[Optional[TYPE_MODEL_SUBCLASS]]]
    """Fetches the entities from the datastore corresponding to the given
    ids and models.

    Args:
        ids_and_models: list(tuple(str, list(str))). The ids and their
            corresponding model names for which we have to fetch entities.

    Returns:
        list(list(datastore_services.Model)). The model instances
        corresponding to the ids and models. The models corresponding to
        the same tuple in the input are grouped together.
    """
    entity_keys = []  # type: List[Key]
    for (model_name, entity_ids) in ids_and_models:
        # extend() appends in place; the original rebuilt the entire list
        # with `entity_keys = entity_keys + [...]` on every iteration,
        # which is quadratic in the total number of keys.
        entity_keys.extend(
            ndb.Key(model_name, entity_id) for entity_id in entity_ids)

    all_models = ndb.get_multi(
        entity_keys)  # type: List[Optional[TYPE_MODEL_SUBCLASS]]

    # Regroup the flat result list so entities from the same input tuple
    # stay together, in input order.
    all_models_grouped_by_model_type = [
    ]  # type: List[List[Optional[TYPE_MODEL_SUBCLASS]]]
    start_index = 0
    for (_, entity_ids) in ids_and_models:
        all_models_grouped_by_model_type.append(
            all_models[start_index:start_index + len(entity_ids)])
        start_index = start_index + len(entity_ids)
    return all_models_grouped_by_model_type
def check_token_for_actor(self, actor):
    """Checks that the given actor is public and matches the request's token.

    Raises: :class:`HTTPException` with HTTP 400
    """
    if not actor:
        # Plain literal: the original used an f-string with no placeholders.
        self.abort(400, 'Missing actor!')

    if not gr_source.Source.is_public(actor):
        self.abort(
            400,
            f'Your {self.gr_source().NAME} account is private. Bridgy only supports public accounts.'
        )

    token = util.get_required_param(self, 'token')
    # Normalize the actor's profile URLs to bare domains, dropping the
    # silo's own domain.
    domains = set(
        util.domain_from_link(util.replace_test_domains_with_localhost(u))
        for u in microformats2.object_urls(actor))
    domains.discard(self.source_class().GR_CLASS.DOMAIN)

    logging.info(f'Checking token against domains {domains}')
    # The token is accepted if any registered Domain entity lists it.
    for domain in ndb.get_multi(ndb.Key(Domain, d) for d in domains):
        if domain and token in domain.tokens:
            return

    self.abort(403, f'Token {token} is not authorized for any of: {domains}')
def get_multi(keys): """Get multiple entities, working around a limitation in the NDB library with the maximum number of keys allowed.""" result = [] for chunk in _gen_chunks(keys, _GET_BATCH_SIZE): result.extend(ndb.get_multi(chunk)) return result
def get_entities(*keys):
    """Fetch the entities for *keys*, expanding SomeKind entities in place."""
    collected = []
    for entity in ndb.get_multi(keys):
        collected.append(entity)
        # SomeKind entities also pull in their related foo entities.
        if isinstance(entity, SomeKind):
            collected += get_foos(entity)
    return collected
def get_multi(keys: List[Key]) -> List[Optional[TYPE_MODEL_SUBCLASS]]:
    """Fetches models corresponding to a sequence of keys.

    Args:
        keys: list(str). The keys to look up.

    Returns:
        list(datastore_services.Model | None). List whose items are either
        a Model instance or None if the corresponding key wasn't found.
    """
    # Thin pass-through so callers depend on this module rather than ndb.
    fetched_models = ndb.get_multi(keys)
    return fetched_models
def test_multi_with_lots_of_keys(dispose_of):
    """Regression test for issue #318.

    https://github.com/googleapis/python-ndb/issues/318
    """
    # More than 1000 keys, past the datastore batch-size boundary.
    count = 1001

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()

    values = list(range(count))
    keys = ndb.put_multi([SomeKind(foo=value) for value in values])
    dispose_of(*(key._key for key in keys))
    assert len(keys) == count

    fetched = ndb.get_multi(keys)
    assert [entity.foo for entity in fetched] == values

    ndb.delete_multi(keys)
    assert ndb.get_multi(keys) == [None] * count
def test_multi_get_weirdness_with_redis(dispose_of):
    """Regression test for issue #294.

    https://github.com/googleapis/python-ndb/issues/294
    """
    class SomeKind(ndb.Model):
        foo = ndb.StringProperty()

    keys = ndb.put_multi([SomeKind(foo=str(index)) for index in range(10)])
    for key in keys:
        dispose_of(key._key)
    # Warm the cache with a batch get before mutating anything.
    ndb.get_multi(keys)

    # Change a single random entity, then re-fetch the whole batch.
    victim = random.choice(keys).get()
    victim.foo = "CHANGED"
    victim.put()

    refetched_keys = [entity.key for entity in ndb.get_multi(keys)]
    # The batch get must return exactly one entity per key, no dupes.
    assert len(refetched_keys) == len(keys)
    assert len(set(refetched_keys)) == len(set(keys))
    assert set(refetched_keys) == set(keys)
def get_count(name): """Retrieve the value for a given sharded counter. Args: name: The name of the counter. Returns: Integer; the cumulative count of all sharded counters for the given counter name. """ total = memcache.get(name) if total is None: total = 0 all_keys = GeneralCounterShardConfig.all_keys(name) for counter in ndb.get_multi(all_keys): if counter is not None: total += counter.count memcache.add(name, total, 60) return total
def fetch_multiple_entities_by_ids_and_models(
    ids_and_models: List[Tuple[str, List[str]]]
) -> List[List[Optional[TYPE_MODEL_SUBCLASS]]]:
    """Fetches the entities from the datastore corresponding to the given
    ids and models.

    Args:
        ids_and_models: list(tuple(str, list(str))). The ids and their
            corresponding model names for which we have to fetch entities.

    Raises:
        Exception. Model names should not be duplicated in input list.

    Returns:
        list(list(datastore_services.Model)). The model instances
        corresponding to the ids and models. The models corresponding to
        the same tuple in the input are grouped together.
    """
    entity_keys: List[Key] = []
    model_names = [model_name for (model_name, _) in ids_and_models]
    # A set already has one entry per distinct name; the intermediate
    # list() wrapper in the original was redundant.
    if len(model_names) != len(set(model_names)):
        raise Exception('Model names should not be duplicated in input list.')
    for (model_name, entity_ids) in ids_and_models:
        # extend() appends in place; the original rebuilt the entire list
        # with `entity_keys = entity_keys + [...]` on every iteration,
        # which is quadratic in the total number of keys.
        entity_keys.extend(
            ndb.Key(model_name, entity_id) for entity_id in entity_ids)

    all_models: List[Optional[TYPE_MODEL_SUBCLASS]] = ndb.get_multi(
        entity_keys)

    # Regroup the flat result list so entities from the same input tuple
    # stay together, in input order.
    all_models_grouped_by_model_type: List[List[
        Optional[TYPE_MODEL_SUBCLASS]]] = []
    start_index = 0
    for (_, entity_ids) in ids_and_models:
        all_models_grouped_by_model_type.append(
            all_models[start_index:start_index + len(entity_ids)])
        start_index = start_index + len(entity_ids)
    return all_models_grouped_by_model_type
def get_parts(head):
    """Fetch and return head's extra parts, sorted by part id.

    We could alternatively achieve this via an ancestor query (retrieving the
    head and its parts simultaneously) to give us strong consistency. But the
    downside of that is that it bypasses the automatic memcache layer built
    into ndb, which we want to take advantage of.
    """
    if head.numparts == 1:
        return []
    logging.info("retrieving %d extra part(s)", head.numparts - 1)
    filename = head.key.string_id()
    keys = [ndb.Key('ProcessedFilePart', filename + ':' + str(i))
            for i in range(1, head.numparts)]
    num_tries = 0
    while True:
        num_tries += 1
        if num_tries >= 10:
            logging.error("tried too many times, giving up")
            raise werkzeug.exceptions.InternalServerError()
        parts = ndb.get_multi(keys)
        # get_multi() yields None for keys that don't exist (yet) — e.g. a
        # part not visible due to eventual consistency.  The original
        # dereferenced p.etag unconditionally and would crash with
        # AttributeError; treat a missing part like a stale one and retry.
        # (logging.warn is a deprecated alias of logging.warning.)
        if any(p is None or p.etag != head.etag for p in parts):
            logging.warning("got differing etags, retrying")
        else:
            return sorted(parts, key=lambda p: p.key.string_id())
def state_rankings_table(event_id, state_id, use_average):
    """Render the rankings table for one event in one state.

    Returns the rendered template, or an error string when the state or
    event id is unknown.
    """
    with client.context():
        ranking_class = RankAverage if use_average == '1' else RankSingle
        state = State.get_by_id(state_id)
        if not state:
            # This is a plain function, not a webapp2 handler: the original
            # `self.response.write(...)` referenced an undefined `self` and
            # raised NameError.  Return the error text to the caller instead.
            return 'Unrecognized state %s' % state_id
        event = Event.get_by_id(event_id)
        if not event:
            return 'Unrecognized event %s' % event_id
        rankings = (ranking_class.query(
            ndb.AND(ranking_class.event == event.key,
                    ranking_class.state == state.key)).order(
                        ranking_class.best).fetch(100))
        # Batch-fetch the ranked people so the template doesn't issue one
        # datastore get per row.
        people = ndb.get_multi([ranking.person for ranking in rankings])
        people_by_id = {person.key.id(): person for person in people}
        return render_template('state_rankings_table.html',
                               c=common.Common(),
                               is_average=(use_average == '1'),
                               rankings=rankings,
                               people_by_id=people_by_id)
def check_token_for_actor(self, actor):
    """Checks that the given actor is public and matches the request's token.

    Returns: actor, with 'url' field removed and 'urls' set to only the
      resolved, non-blocklisted urls

    Raises: :class:`HTTPException` with HTTP 403
    """
    # Guard clauses: no actor, or a private account, is an immediate error.
    if not actor:
        self.error('Scrape error: missing actor!')
    if not gr_source.Source.is_public(actor):
        self.error(
            f'Your {self.gr_source().NAME} account is private. Bridgy only supports public accounts.'
        )

    token = request.values['token']

    # create temporary source instance to get domains from actor
    src = self.source_class().new(actor=actor)
    if not src.domains:
        self.error(
            f'No usable web sites found in your {self.gr_source().NAME} profile. Add one of your registered domains above!'
        )

    # update actor so resolved URLs can be reused
    actor.pop('url', None)
    actor['urls'] = [{'value': url} for url in src.domain_urls]

    logger.info(f'Checking token against domains {src.domains}')
    domain_entities = ndb.get_multi(
        ndb.Key(Domain, d) for d in src.domains)
    for domain in domain_entities:
        if domain and token in domain.tokens:
            return actor

    self.error(
        f'Found link(s) to {src.domains} in your {self.gr_source().NAME} profile. Add one of your registered domains above!'
    )
def operate_on_multiple_keys_at_once(list_of_entities):
    """Round-trip a batch: store the entities, read them back, delete them."""
    stored_keys = ndb.put_multi(list_of_entities)
    ndb.get_multi(stored_keys)
    ndb.delete_multi(stored_keys)
def get_foos(entity):
    """Batch-fetch every entity referenced by ``entity.foos``."""
    foo_keys = entity.foos
    return ndb.get_multi(foo_keys)