def test_order_by_with_or_filter(dispose_of):
    """Verify sort order survives the merge of an OR query's result sets.

    An OR filter runs as separate underlying queries whose results are
    merged; the merged stream must still come back ordered by ``foo``.
    """

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()
        bar = ndb.StringProperty()

    @ndb.toplevel
    def create_entities():
        created = yield (
            SomeKind(foo=0, bar="a").put_async(),
            SomeKind(foo=1, bar="b").put_async(),
            SomeKind(foo=2, bar="a").put_async(),
            SomeKind(foo=3, bar="b").put_async(),
        )
        for created_key in created:
            dispose_of(created_key._key)

    create_entities()

    query = SomeKind.query(
        ndb.OR(SomeKind.bar == "a", SomeKind.bar == "b")
    ).order(SomeKind.foo)
    results = eventually(query.fetch, _length_equals(4))

    assert [entity.foo for entity in results] == [0, 1, 2, 3]
def test_offset_and_limit_with_or_filter(dispose_of):
    """``offset``/``limit`` must apply to the merged, ordered OR-query results."""

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()
        bar = ndb.StringProperty()

    @ndb.toplevel
    def create_entities():
        created = yield (
            SomeKind(foo=0, bar="a").put_async(),
            SomeKind(foo=1, bar="b").put_async(),
            SomeKind(foo=2, bar="a").put_async(),
            SomeKind(foo=3, bar="b").put_async(),
            SomeKind(foo=4, bar="a").put_async(),
            SomeKind(foo=5, bar="b").put_async(),
        )
        for created_key in created:
            dispose_of(created_key._key)

    create_entities()
    eventually(SomeKind.query().fetch, _length_equals(6))

    query = SomeKind.query(
        ndb.OR(SomeKind.bar == "a", SomeKind.bar == "b")
    ).order(SomeKind.foo)
    page = query.fetch(offset=1, limit=2)

    assert [entity.foo for entity in page] == [1, 2]
def top_lovers_and_lovees(utc_week_start, dept=None, limit=20):
    """Synchronously return the week's love leaderboards.

    Returns a tuple of two lists — (employee key, sent love count) and
    (employee key, received love count) — each sorted in descending order
    of love sent or received, capped at *limit* entries, with zero-count
    rows skipped. When *dept* is given, only counts whose department or
    meta-department matches are considered.
    """
    query = LoveCount.query(LoveCount.week_start == utc_week_start)
    if dept:
        query = query.filter(
            ndb.OR(LoveCount.meta_department == dept,
                   LoveCount.department == dept))
    counts = query.order(-LoveCount.sent_count).fetch()

    def leaderboard(rows, value_of):
        # Take up to `limit` (employee key, value) pairs, skipping zeros.
        board = []
        for row in rows:
            if len(board) == limit:
                break
            value = value_of(row)
            if value == 0:
                continue
            board.append((row.key.parent(), value))
        return board

    lovers = leaderboard(counts, lambda row: row.sent_count)
    # Re-sort the same rows by received count for the lovees board.
    by_received = sorted(counts, key=lambda row: row.received_count,
                         reverse=True)
    lovees = leaderboard(by_received, lambda row: row.received_count)

    return (lovers, lovees)
def list_queue(status="all", return_as_string=True):
    """Return queue entries, optionally filtered by status.

    Args:
        status: "all" for every entry, a single status string, or a list of
            status strings (any length; entries matching any of them are
            returned).
        return_as_string: when True, return a dict keyed by hash_item where
            each value holds the first-seen entry's fields plus a 'count' of
            how many rows share that hash; when False, return the raw
            entities ordered by their `updated` timestamp.
    """
    if isinstance(status, list):
        # Fix: was `type(status) == list` and hard-coded exactly two
        # elements (status[0], status[1]); now accepts any number.
        query = Queue.query(ndb.OR(*[Queue.status == s for s in status]))
    elif status != "all":
        query = Queue.query(Queue.status == status)
    else:
        query = Queue.query()

    results = query.order(Queue.updated).fetch()
    if not return_as_string:
        return results

    # Fields copied verbatim from the first entity seen for each hash_item;
    # later duplicates only bump 'count'.
    fields = ('hash_item', 'description', 'method', 'url', 'payload',
              'created', 'updated', 'cache_name', 'cache_is_unique',
              'status', 'response', 'retry_count', 'backoff_duration')
    queue = {}
    for res in results:
        entry = queue.get(res.hash_item)
        if entry is None:
            entry = {field: getattr(res, field) for field in fields}
            entry['count'] = 1
            queue[res.hash_item] = entry
        else:
            entry['count'] += 1
    return queue
def any_of(*nodes: ndb.Node) -> ndb.Node:
    """Combine the given query nodes with a boolean OR.

    Args:
        *nodes: datastore_services.Node. The conditions to merge.

    Returns:
        datastore_services.Node. A node matching any of the conditions.
    """
    combined = ndb.OR(*nodes)
    return combined
def get(self, source_short_name, id):
    """Serve the user page for the source identified by *id*.

    Looks the source up by key id first; on a miss, matches *id* against
    several alternate identifier properties and 301-redirects to the
    canonical path. Sets a 404 status when no source with enabled features
    is found, then delegates rendering to the parent handler.

    Args:
        source_short_name: str, key into models.sources (e.g. a silo name).
        id: str, the source's key id or one of its alternate identifiers.
    """
    cls = models.sources[source_short_name]
    self.source = cls.lookup(id)
    if not self.source:
        # Fallback: id may be a domain, inferred username, display name,
        # or username rather than the key id. keys_only avoids fetching
        # the full entity just to build the redirect target.
        key = cls.query(ndb.OR(*[ndb.GenericProperty(prop) == id
                                 for prop in ('domains', 'inferred_username',
                                              'name', 'username')])
                        ).get(keys_only=True)
        if key:
            return self.redirect(cls(key=key).bridgy_path(), permanent=True)

    if self.source and self.source.features:
        self.source.verify()
        self.source = self.preprocess_source(self.source)
    else:
        # No match, or the source has no features enabled: report not found.
        self.response.status_int = 404

    super(UserHandler, self).get()
def edit_users_table(filter_text=''):
    """Render the admin users table, optionally filtered; admins only (403 otherwise)."""
    with client.context():
        viewer = auth.user()
        if not (viewer and viewer.HasAnyRole(Roles.AdminRoles())):
            abort(403)

        if not filter_text:
            matched_users = User.query(order_by=[User.name]).fetch(30)
        else:
            # Filter text may be a name, a city, or a WCA person id.
            matched_users = User.query(
                ndb.OR(User.name == filter_text,
                       User.city == filter_text,
                       User.wca_person == ndb.Key(Person, filter_text)),
                order_by=[User.name]).fetch(30)

        return render_template('admin/edit_users_table.html',
                               c=Common(),
                               users=matched_users)
def test_filter_or(dispose_of):
    """An OR over two property filters returns the union of both result sets."""

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()
        bar = ndb.StringProperty()

    @ndb.toplevel
    def create_entities():
        created = yield (
            SomeKind(foo=1, bar="a").put_async(),
            SomeKind(foo=2, bar="b").put_async(),
            SomeKind(foo=1, bar="c").put_async(),
        )
        for created_key in created:
            dispose_of(created_key._key)

    create_entities()
    eventually(SomeKind.query().fetch, _length_equals(3))

    matches = SomeKind.query(
        ndb.OR(SomeKind.foo == 1, SomeKind.bar == "c")).fetch()
    matches.sort(key=lambda entity: entity.bar)
    assert [entity.bar for entity in matches] == ["a", "c"]
def test_filter_or(dispose_of):
    """An OR filter matches entities satisfying either condition (tasklet setup)."""

    class SomeKind(ndb.Model):
        foo = ndb.IntegerProperty()
        bar = ndb.StringProperty()

    @ndb.tasklet
    def create_entities():
        created = yield (
            SomeKind(foo=1, bar="a").put_async(),
            SomeKind(foo=2, bar="b").put_async(),
            SomeKind(foo=1, bar="c").put_async(),
        )
        for created_key in created:
            dispose_of(created_key._key)

    create_entities().check_success()

    matches = SomeKind.query(
        ndb.OR(SomeKind.foo == 1, SomeKind.bar == "c")).fetch()
    assert len(matches) == 2
    matches.sort(key=lambda entity: entity.bar)
    assert [entity.bar for entity in matches] == ["a", "c"]
def process_request(self):
    """Check whether a user exists for the given email address and/or phone number.

    Reads ``s5t3_email_address`` and ``s5t3_phone_number`` from the request,
    validates them, and queries the users datastore. At least one of the two
    must be supplied.

    Returns:
        dict with keys RDK.success (an RC code), RDK.return_msg (str),
        RDK.debug_data (list of intermediate call results), and
        'exists' (bool).
    """
    task_id = 'json-requests:CheckIfUserExists:process_request'
    debug_data = []  # accumulates each sub-call's result for the response payload
    return_msg = task_id + ": "
    exists = False

    # input validation
    # Empty request values collapse to None so "not provided" is easy to test.
    email_address = unicode(
        self.request.get(TaskArguments.s5t3_email_address, "")) or None
    phone_number = unicode(
        self.request.get(TaskArguments.s5t3_phone_number, "")) or None

    call_result = self.ruleCheck([
        [email_address, PostDataRules.optional_name],
        [phone_number, PostDataRules.optional_name],
    ])
    debug_data.append(call_result)
    if call_result[RDK.success] != RC.success:
        return_msg += "input validation failed"
        return {
            RDK.success: RC.input_validation_failed,
            RDK.return_msg: return_msg,
            RDK.debug_data: debug_data,
            'exists': exists
        }

    if not (email_address or phone_number):
        return_msg += "Email address or phone number must be specified"
        return {
            RDK.success: RC.input_validation_failed,
            RDK.return_msg: return_msg,
            RDK.debug_data: debug_data,
            'exists': exists
        }
    #</end> input validation

    # Build the narrowest query for the identifiers supplied; a phone number
    # may be stored in either phone_1 or phone_2, so both are checked.
    if email_address and phone_number:
        user_query = Datastores.users.query(
            ndb.OR(
                Datastores.users.phone_1 == phone_number,
                Datastores.users.phone_2 == phone_number,
                Datastores.users.email_address == email_address,
            ))
    elif email_address:
        user_query = Datastores.users.query(
            Datastores.users.email_address == email_address)
    else:
        user_query = Datastores.users.query(
            ndb.OR(
                Datastores.users.phone_1 == phone_number,
                Datastores.users.phone_2 == phone_number,
            ))

    call_result = DSF.kfetch(user_query)
    debug_data.append(call_result)
    if call_result[RDK.success] != RC.success:
        return_msg += "Failed to load users from datastore"
        # NOTE(review): RC.input_validation_failed is reused here for a
        # datastore failure — looks like a copy/paste; confirm whether a
        # dedicated datastore-failure code should be returned instead.
        return {
            RDK.success: RC.input_validation_failed,
            RDK.return_msg: return_msg,
            RDK.debug_data: debug_data,
            'exists': exists,
        }

    users = call_result['fetch_result']
    if users:
        exists = True

    return {
        RDK.success: RC.success,
        RDK.return_msg: return_msg,
        RDK.debug_data: debug_data,
        'exists': exists
    }
def user(site, id):
    """View for a user page.

    Renders the per-source user page: blog-webmention promo banners, recent
    responses (with before/after paging), publishes, blog posts, and blog
    webmentions, depending on which features the source has enabled.

    Args:
        site: str, a source short name; key into models.sources.
        id: str, the source's key id or one of its alternate identifiers.

    Returns:
        A rendered template (optionally paired with a 404 status), or a
        301 redirect to the source's canonical path.
    """
    cls = models.sources.get(site)
    if not cls:
        return render_template('user_not_found.html'), 404

    source = cls.lookup(id)

    if not source:
        # Fallback: id may be a domain, inferred username, display name, or
        # username rather than the key id; redirect to the canonical path.
        key = cls.query(
            ndb.OR(*[
                ndb.GenericProperty(prop) == id
                for prop in ('domains', 'inferred_username', 'name', 'username')
            ])).get(keys_only=True)
        if key:
            return redirect(cls(key=key).bridgy_path(), code=301)

    if not source or not source.features:
        return render_template('user_not_found.html'), 404

    source.verify()
    source = util.preprocess_source(source)

    # Template context, extended feature-by-feature below.
    vars = {
        'source': source,
        'logs': logs,
        'REFETCH_HFEED_TRIGGER': models.REFETCH_HFEED_TRIGGER,
        'RECENT_PRIVATE_POSTS_THRESHOLD': RECENT_PRIVATE_POSTS_THRESHOLD,
    }

    # Blog webmention promos
    if 'webmention' not in source.features:
        if source.SHORT_NAME in ('blogger', 'medium', 'tumblr', 'wordpress'):
            vars[source.SHORT_NAME + '_promo'] = True
        else:
            # Promote connecting a blog we recognize among the source's
            # domains but that isn't registered yet.
            for domain in source.domains:
                if ('.blogspot.' in domain and  # Blogger uses country TLDs
                        not Blogger.query(Blogger.domains == domain).get()):
                    vars['blogger_promo'] = True
                elif (util.domain_or_parent_in(domain, ['tumblr.com']) and
                      not Tumblr.query(Tumblr.domains == domain).get()):
                    vars['tumblr_promo'] = True
                elif (util.domain_or_parent_in(domain, 'wordpress.com') and
                      not WordPress.query(WordPress.domains == domain).get()):
                    vars['wordpress_promo'] = True

    # Responses
    if 'listen' in source.features or 'email' in source.features:
        vars['responses'] = []
        query = Response.query().filter(Response.source == source.key)

        # if there's a paging param (responses_before or responses_after), update
        # query with it
        def get_paging_param(param):
            # Parse the named query param as ISO8601; abort the request on
            # a malformed value. Returns None when the param is absent.
            val = request.values.get(param)
            try:
                return util.parse_iso8601(val.replace(' ', '+')) if val else None
            except BaseException:
                error(f"Couldn't parse {param}, {val!r} as ISO8601")

        before = get_paging_param('responses_before')
        after = get_paging_param('responses_after')
        if before and after:
            error("can't handle both responses_before and responses_after")
        elif after:
            # Paging backward: oldest-first within the window.
            query = query.filter(Response.updated > after).order(
                Response.updated)
        elif before:
            query = query.filter(
                Response.updated < before).order(-Response.updated)
        else:
            query = query.order(-Response.updated)

        query_iter = query.iter()
        for i, r in enumerate(query_iter):
            r.response = json_loads(r.response_json)
            r.activities = [json_loads(a) for a in r.activities_json]

            # Skip anything that isn't fully public.
            if (not source.is_activity_public(r.response) or not all(
                    source.is_activity_public(a) for a in r.activities)):
                continue
            elif r.type == 'post':
                r.activities = []

            verb = r.response.get('verb')
            r.actor = (r.response.get('object') if verb == 'invite'
                       else r.response.get('author') or r.response.get('actor')
                       ) or {}

            # Fill in missing content from each activity's object.
            activity_content = ''
            for a in r.activities + [r.response]:
                if not a.get('content'):
                    obj = a.get('object', {})
                    a['content'] = activity_content = (
                        obj.get('content') or obj.get('displayName') or
                        # historical, from a Reddit bug fixed in granary@4f9df7c
                        obj.get('name') or '')

            response_content = r.response.get('content')
            phrases = {
                'like': 'liked this',
                'repost': 'reposted this',
                'rsvp-yes': 'is attending',
                'rsvp-no': 'is not attending',
                'rsvp-maybe': 'might attend',
                'rsvp-interested': 'is interested',
                'invite': 'is invited',
            }
            phrase = phrases.get(r.type) or phrases.get(verb)
            if phrase and (r.type != 'repost' or
                           activity_content.startswith(response_content)):
                r.response[
                    'content'] = f'{r.actor.get("displayName") or ""} {phrase}.'

            # convert image URL to https if we're serving over SSL
            image_url = r.actor.setdefault('image', {}).get('url')
            if image_url:
                r.actor['image']['url'] = util.update_scheme(
                    image_url, request)

            # generate original post links
            r.links = process_webmention_links(r)
            r.original_links = [
                util.pretty_link(url, new_tab=True)
                for url in r.original_posts
            ]

            vars['responses'].append(r)
            # Cap the page at 10 displayed responses and 200 scanned rows.
            if len(vars['responses']) >= 10 or i > 200:
                break

        vars['responses'].sort(key=lambda r: r.updated, reverse=True)

        # calculate new paging param(s)
        new_after = (before if before
                     else vars['responses'][0].updated
                     if vars['responses'] and query_iter.probably_has_next()
                     and (before or after)
                     else None)
        if new_after:
            vars[
                'responses_after_link'] = f'?responses_after={new_after.isoformat()}#responses'

        new_before = (after if after
                      else vars['responses'][-1].updated
                      if vars['responses'] and query_iter.probably_has_next()
                      else None)
        if new_before:
            vars[
                'responses_before_link'] = f'?responses_before={new_before.isoformat()}#responses'

        vars['next_poll'] = max(
            source.last_poll_attempt + source.poll_period(),
            # lower bound is 90 seconds from now
            util.now_fn() + datetime.timedelta(seconds=90))

    # Publishes
    if 'publish' in source.features:
        publishes = Publish.query().filter(Publish.source == source.key)\
            .order(-Publish.updated)\
            .fetch(10)
        for p in publishes:
            p.pretty_page = util.pretty_link(
                p.key.parent().id(),
                attrs={'class': 'original-post u-url u-name'},
                new_tab=True)

        vars['publishes'] = publishes

    if 'webmention' in source.features:
        # Blog posts
        blogposts = BlogPost.query().filter(BlogPost.source == source.key)\
            .order(-BlogPost.created)\
            .fetch(10)
        for b in blogposts:
            b.links = process_webmention_links(b)
            try:
                text = b.feed_item.get('title')
            except ValueError:
                # NOTE(review): feed_item access can raise ValueError —
                # presumably a JSON decode of stored data; confirm.
                text = None
            b.pretty_url = util.pretty_link(
                b.key.id(), text=text,
                attrs={'class': 'original-post u-url u-name'},
                max_length=40, new_tab=True)

        # Blog webmentions
        webmentions = BlogWebmention.query()\
            .filter(BlogWebmention.source == source.key)\
            .order(-BlogWebmention.updated)\
            .fetch(10)
        for w in webmentions:
            w.pretty_source = util.pretty_link(
                w.source_url(), attrs={'class': 'original-post'},
                new_tab=True)
            try:
                # Keep the host visible when the target is one of the
                # source's own domains.
                target_is_source = (urllib.parse.urlparse(
                    w.target_url()).netloc in source.domains)
            except BaseException:
                target_is_source = False
            w.pretty_target = util.pretty_link(
                w.target_url(), attrs={'class': 'original-post'},
                new_tab=True, keep_host=target_is_source)

        vars.update({'blogposts': blogposts, 'webmentions': webmentions})

    return render_template(f'{source.SHORT_NAME}_user.html', **vars)