def get_all_addresses(limit, paginated=False, cursor_key=None):
    """
    Get all addresses in the email book.

    :param limit: the limit on the number to return
    :param paginated: if we should return a key for pagination
    :param cursor_key: the key of the cursor if we are on a later page
    :return: json serialisable dict representing all the emails
    """
    if paginated and cursor_key:
        cursor = ndb.Cursor(urlsafe=cursor_key)
    else:
        cursor = ndb.Cursor()
    results, next_cursor, is_more = AddressEntry.query().fetch_page(
        limit, start_cursor=cursor)
    jdict = {
        "entries": [entry.to_json_dict() for entry in results],
        "is_more": is_more
    }
    if paginated:
        # BUG FIX: previously returned the *start* cursor's urlsafe string,
        # so clients kept re-fetching the first page. Return the cursor that
        # points past the page we just served (None on the last page).
        jdict["next"] = next_cursor.urlsafe() if next_cursor else None
    return jdict
def index(author_id=None):
    """Render the story index page, optionally filtered to one author."""
    author = None
    # Build the query.
    if author_id:
        if author_id not in AUTHORS:
            # Unknown author ids get the 404 page.
            return render_template('404.html'), 404
        q = Story.query(Story.author_id == author_id)
        author = AUTHORS[author_id]
    else:
        q = Story.query()
    q = q.order(-Story.date)

    # Build the cursor from the ?page= query parameter, if present.
    page = request.args.get('page', None)
    cursor = ndb.Cursor(urlsafe=page) if page else ndb.Cursor()
    posts, next_cursor, more = q.fetch_page(10, start_cursor=cursor)

    # XXX: TODO: Implement backwards cursors for "prev" buttons.
    contribs = [a for a in AUTHORS.values()]
    random.shuffle(contribs)

    return render_template('index.html',
                           posts=posts,
                           next_cursor=next_cursor,
                           prev_cursor=None,
                           more=more,
                           author=author,
                           author_id=author_id,
                           other_contribs=contribs)
def get_pending_transactions(app_user, token_type, page_size, cursor):
    # type: (users.User, unicode, long, unicode) -> tuple[list[ThreeFoldPendingTransaction], ndb.Cursor, bool]
    """Fetch one page of pending transactions for a user.

    When token_type is given it is validated and used to narrow the query;
    otherwise all of the user's pending transactions are listed.
    """
    if token_type:
        validate_token_type(token_type)
        qry = ThreeFoldPendingTransaction.list_by_user_and_token_type(
            app_user, token_type)
    else:
        qry = ThreeFoldPendingTransaction.list_by_user(app_user)
    return qry.fetch_page(page_size, start_cursor=ndb.Cursor(urlsafe=cursor))
def _fetch_page(self, query, page_size, start_cursor, predicate=None):
    """Fetch up to page_size entities matching predicate.

    Returns (entities, next_cursor_str) where next_cursor_str is a url-safe
    cursor string, or None when the iterator is exhausted. A malformed
    start_cursor raises errors.InvalidInputError.
    """
    assert query
    assert isinstance(page_size, int)
    assert start_cursor is None or isinstance(start_cursor, basestring)

    resume_from = None
    if start_cursor:
        try:
            resume_from = ndb.Cursor(urlsafe=start_cursor)
        except db.BadValueError as ex:
            msg = 'Bad cursor "%s": %s' % (start_cursor, ex)
            logging.warning(msg)
            raise errors.InvalidInputError(msg)

    it = query.iter(start_cursor=resume_from, produce_cursors=True,
                    batch_size=page_size)

    matches = []
    for candidate in it:
        # Keep everything when no predicate was supplied.
        if predicate is None or predicate(candidate):  # pragma: no branch
            matches.append(candidate)
            if len(matches) >= page_size:
                break

    next_cursor_str = it.cursor_after().urlsafe() if it.has_next() else None
    return matches, next_cursor_str
def connection_from_ndb_query(query, args=None, connection_type=None,
                              edge_type=None, pageinfo_type=None,
                              transform_edges=None, context=None, **kwargs):
    '''
    Build a GraphQL (Relay-style) connection object from an ndb Query.

    Uses the ndb QueryIterator
    (https://cloud.google.com/appengine/docs/python/ndb/queries#iterators)
    with produce_cursors=True so real datastore cursors back 'after' /
    'end_cursor' pagination.

    Recognised args/kwargs: 'first' (page size), 'after' (url-safe cursor),
    'keys_only', 'batch_size', 'page_size'.
    transform_edges, if given, is called as transform_edges(edges_page, args,
    context) on every fetched page of edges.
    '''
    args = args or {}
    connection_type = connection_type or Connection
    edge_type = edge_type or Edge
    pageinfo_type = pageinfo_type or PageInfo

    # kwargs override entries from the args dict.
    full_args = dict(args, **kwargs)
    first = full_args.get('first')
    after = full_args.get('after')
    # An 'after' cursor implies the client already saw at least one page.
    has_previous_page = bool(after)
    keys_only = full_args.get('keys_only', False)
    batch_size = full_args.get('batch_size', 20)
    # 'first' wins over 'page_size'; both default to 20.
    page_size = first if first else full_args.get('page_size', 20)
    start_cursor = ndb.Cursor(urlsafe=after) if after else None

    ndb_iter = query.iter(produce_cursors=True, start_cursor=start_cursor,
                          batch_size=batch_size, keys_only=keys_only,
                          projection=query.projection)

    # Accumulate edges one fetched page at a time until the requested page
    # size is reached or the iterator runs dry (short page returned).
    edges = []
    while len(edges) < page_size:
        missing_edges_count = page_size - len(edges)
        edges_page = generate_edges_page(ndb_iter, missing_edges_count,
                                         keys_only, edge_type)
        edges.extend(
            transform_edges(edges_page, args, context)
            if transform_edges else edges_page)
        if len(edges_page) < missing_edges_count:
            # Short page: the underlying query is exhausted.
            break

    # cursor_after() raises if the iterator never produced a result.
    try:
        end_cursor = ndb_iter.cursor_after().urlsafe()
    except BadArgumentError:
        end_cursor = None

    # Construct the connection
    return connection_type(
        edges=edges,
        page_info=pageinfo_type(
            start_cursor=start_cursor.urlsafe() if start_cursor else '',
            end_cursor=end_cursor,
            has_previous_page=has_previous_page,
            has_next_page=ndb_iter.has_next()))
def get(self):
    """Write a JSON page (10 entries) of LinkingMember records.

    Response shape: {"cursor": <urlsafe or null>, "more": bool,
    "data": [...]}; members without an id number are skipped, as before.
    """
    self.response.headers['Content-Type'] = 'application/json'
    cursor = self.request.get("cursor", None)
    if cursor:
        cursor = ndb.Cursor(urlsafe=cursor)
    members, next_cursor, more = models.LinkingMember.query().fetch_page(
        10, start_cursor=cursor)
    result = {
        "cursor": next_cursor.urlsafe() if next_cursor else None,
        "more": more,
        "data": [],
    }
    # Iterate members directly instead of the old xrange-index loop.
    for member in members:
        if not member.idnumber:
            continue
        # Fetch the referenced church entity once instead of issuing three
        # separate datastore gets for its three fields.
        church = member.church_id.get() if member.church_id else None
        result["data"].append({
            "title": member.title,
            "first_name": member.firstName,
            "last_name": member.lastname,
            "id_number": member.idnumber,
            "cellphone": member.cellPhoneNumber,
            "reference_number": member.referenceNumber,
            "reffered_by_name": member.referrerfirstname,
            "reffered_by_surname": member.referrerlastname,
            "reffered_by_contact": member.referrerContact,
            "reffered_by_pastors_name": church.pasterName if church else None,
            "reffered_by_pastors_church_name": church.churchName if church else None,
            "reffered_by_pastors_address": church.address if church else None,
        })
    self.response.out.write(json.dumps(result))
def users():
    """Back-office user listing with bidirectional cursor pagination.

    Admins only (404 otherwise). The 'cursor' query arg is the url-safe
    cursor for the requested page; 'reverse' indicates paging backwards.
    """
    if not current_user.key.id() in settings.ADMINS:
        raise flask.abort(httplib.NOT_FOUND)
    cursor = flask.request.args.get('cursor', None)
    cursor = cursor and ndb.Cursor(urlsafe=cursor) or None
    reverse = flask.request.args.get('reverse', False)
    # A reversed copy of the cursor walks the query in the opposite
    # direction; keep both orientations around.
    cursor_prev, cursor_next = cursor and cursor.reversed(), cursor
    query = User.query()
    f_order = -User.date_created  # forward: newest first
    r_order = User.date_created   # reverse: oldest first
    limit = 20
    if reverse:
        # Paging backwards: run the ascending query from the reversed
        # cursor, then flip the results back into display order.
        cursor_prev, cursor_next = cursor_next, cursor_prev
        users, cursor_prev, more = query.order(
            r_order).fetch_page(limit, start_cursor=cursor_prev)
        users.reverse()
        cursor_prev = None if not more else cursor_prev.urlsafe()
    else:
        users, cursor_next, more = query.order(
            f_order).fetch_page(limit, start_cursor=cursor_next)
        cursor_next = None if not more else cursor_next.urlsafe()
    # NOTE(review): raising ndb.Return outside an ndb tasklet is unusual —
    # presumably this view runs under an ndb toplevel/tasklet wrapper;
    # confirm before refactoring.
    raise ndb.Return(flask.render_template('backoffice_users.html',
                                           users=users,
                                           cursor_prev=cursor_prev,
                                           cursor_next=cursor_next
                                           ))
def list_flow_runs(cursor, page_size, flow_name, start_date):
    """Fetch one page of FlowRun entities.

    Filter priority: start_date (ISO string, trailing 'Z' stripped before
    parsing) wins over flow_name; with neither, all runs are listed.
    """
    if start_date:
        parsed = dateutil.parser.parse(start_date.replace('Z', ''))
        qry = FlowRun.list_by_start_date(parsed)
    elif flow_name:
        qry = FlowRun.list_by_flow_name(flow_name)
    else:
        qry = FlowRun.list()
    return qry.fetch_page(page_size, start_cursor=ndb.Cursor(urlsafe=cursor))
def paginate(cls, *args, **kwargs):
    """Build the class query and return one page of results.

    Keyword args consumed here:
        cursor: url-safe start cursor for the requested page (optional).
        per_page: page size, default 10.

    Returns a PaginateModel(results, next_cursor, start_cursor, more).
    """
    query = cls.build_query(*args, **kwargs)
    op = {}
    if 'cursor' in kwargs:
        op['start_cursor'] = ndb.Cursor(urlsafe=kwargs['cursor'])
    query_option = ndb.QueryOptions(**op)
    # Idiom fix: dict.get with a default instead of the conditional lookup.
    page_size = kwargs.get('per_page', 10)
    results, cursor, more = query.fetch_page(page_size, options=query_option)
    return PaginateModel(results, cursor, query_option.start_cursor, more)
def get(self):
    """Render the question index with per-question vote state for the
    current user and cursor-based pagination (10 questions per page)."""
    user = users.get_current_user()
    nickname = ""
    if user:
        url = users.create_logout_url(self.request.uri)
        nickname = user.nickname()
    else:
        url = users.create_login_url(self.request.uri)
    questions_query = Question.query().order(-Question.lastUpdateDate)
    cursor = ndb.Cursor(urlsafe=self.request.get('cursor'))
    questions, next_curs, more = questions_query.fetch_page(
        10, start_cursor=cursor)
    # Only expose a next-page cursor when there is actually another page.
    if more:
        next_c = next_curs.urlsafe()
    else:
        next_c = None
    for question in questions:
        # Anonymous visitors may not vote at all; logged-in users may vote
        # each direction at most once (direction is +1 for up, -1 for down).
        canVoteUp = True
        canVoteDown = True
        totalVotes = 0
        if user:
            for vote in question.votes:
                totalVotes += vote.direction
                if vote.user == user and vote.direction == 1:
                    canVoteUp = False
                if vote.user == user and vote.direction == -1:
                    canVoteDown = False
        else:
            canVoteUp = False
            canVoteDown = False
        # Attach computed display attributes directly to the entity for the
        # template (not persisted).
        question.totalVotes = totalVotes
        question.canVoteDown = canVoteDown
        question.canVoteUp = canVoteUp
    template_values = {
        'user': user,
        'nickname': nickname,
        'questions': questions,
        'url': url,
        'cursor': next_c
    }
    template = JINJA_ENVIRONMENT.get_template('index.html')
    self.response.write(template.render(template_values))
def get(self):
    """Render the public stream: shared, non-trashed bookmarks that belong
    to users other than the current one."""
    qry = Bookmarks.query(Bookmarks.shared == True,
                          Bookmarks.trashed == False)
    qry = qry.filter(Bookmarks.user != users.get_current_user())
    qry = qry.order(Bookmarks.user, -Bookmarks.data, Bookmarks._key)
    start = ndb.Cursor(urlsafe=self.request.get('c'))
    bms, next_curs, more = qry.fetch_page(10, start_cursor=start)
    next_c = next_curs.urlsafe() if more else None
    self.response.set_cookie('active-tab', 'stream')
    self.generate('public.html', {'bms': bms, 'c': next_c})
def get(self):
    """Render the current user's trashed bookmarks, newest first."""
    qry = Bookmarks.query(Bookmarks.user == users.get_current_user())
    qry = qry.filter(Bookmarks.trashed == True)
    qry = qry.order(-Bookmarks.data)
    start = ndb.Cursor(urlsafe=self.request.get('c'))
    bms, next_curs, more = qry.fetch_page(10, start_cursor=start)
    next_c = next_curs.urlsafe() if more else None
    self.response.set_cookie('active-tab', 'trash')
    self.generate('home.html', {'bms': bms, 'c': next_c})
def cursor(cursor): """Verifies if given string is valid ndb query cursor if so returns instance of it Args: cursor (string): Url encoded ndb query cursor Returns: google.appengine.datastore.datastore_query.Cursor: ndb query cursor Raises: ValueError: If captcha is incorrect """ if not cursor: return None return ndb.Cursor(urlsafe=cursor)
def build(self, bmq, page, method):
    """Fetch one page (10) of bookmarks from the query *bmq* and render it.

    For GET requests the full 'home.html' page is generated and the active
    tab cookie is set to *page*; otherwise the 'frame.html' partial is
    written directly.
    """
    start = ndb.Cursor(urlsafe=self.request.get('c'))
    bms, next_curs, more = bmq.fetch_page(10, start_cursor=start)
    next_c = next_curs.urlsafe() if more else None
    if method == 'get':
        self.response.set_cookie('active-tab', page)
        self.generate('home.html', {'bms': bms, 'c': next_c})
    else:
        values = {'bms': bms, 'c': next_c, "ui": self.ui()}
        template = jinja_environment.get_template('frame.html')
        self.response.write(template.render(values))
def get(self):
    """Return one project per request as JSON, with a cursor to the next.

    Response: {"project": <name or null>, "next": <urlsafe cursor or null>}.
    """
    cursor_value = self.request.get('cursor')
    cursor = ndb.Cursor(urlsafe=cursor_value)
    project, next_cursor, more = NDBProject.query().fetch_page(
        1, start_cursor=cursor)
    if len(project) == 1:
        output = {
            'project': project[0].name,
            # BUG FIX: fetch_page can hand back None for the cursor on the
            # final page; guard so we emit null instead of raising
            # AttributeError on None.urlsafe().
            'next': next_cursor.urlsafe() if next_cursor else None
        }
    else:
        output = {'project': None, 'next': None}
    self.response.headers['Content-Type'] = "application/json"
    self.response.out.write(json.dumps(output))
def total_comment(event_key, cursor=None, batch_size=50):
    """Count message of this event.

    Fetches one keys-only page of comment messages (message_type 'c')
    attached to *event_key*; returns the usual (keys, cursor, more) tuple.
    """
    from .message import Message
    qry = Message.query().order(Message.key)
    qry = qry.filter(Message.event == event_key,
                     Message.message_type == 'c')
    return qry.fetch_page(batch_size,
                          start_cursor=ndb.Cursor(urlsafe=cursor),
                          keys_only=True)
def list(cls, more_cursor=None, is_reverse=False):
    """Fetch one page (5) of entities ordered by creation time.

    more_cursor is the url-safe cursor of the page to resume from;
    is_reverse flips the ordering (pagination "previous"), reversing the
    cursor to match.
    """
    cursor = ndb.Cursor(urlsafe=more_cursor)
    if is_reverse:
        # Pagination previous: ascending order with a reversed cursor.
        qry = cls.query().order(cls.created)
        cursor = cursor.reversed()
    else:
        qry = cls.query().order(-cls.created)
    # Only resume from the cursor when one was actually supplied.
    return qry.fetch_page(5, start_cursor=cursor if more_cursor else None)
def get_testruns(testname=None, statuses=None, cursorWS=None):
    """Return one page (5) of TestRun results, newest first.

    :param testname: optional exact test name filter
    :param statuses: optional list of statuses to match
    :param cursorWS: url-safe cursor of the page to resume from
    :return: dict with serialized "results" and a "cursor" (None when no
        further pages exist)
    """
    lqry = TestRun.query()
    if testname:
        lqry = lqry.filter(TestRun.testname == testname)
    if statuses:
        # BUG FIX: `TestRun.status in statuses` evaluated Python's `in`
        # operator (yielding a plain bool, not a query filter). ndb's IN()
        # builds the intended membership filter.
        lqry = lqry.filter(TestRun.status.IN(statuses))
    lqry = lqry.order(-TestRun.started)
    lcursor = ndb.Cursor(urlsafe=cursorWS) if cursorWS else None
    lresults, lcursor, lmore = lqry.fetch_page(5, start_cursor=lcursor)
    return {
        "results": [_to_json(ltestRun) for ltestRun in lresults],
        "cursor": lcursor.urlsafe() if lmore else None
    }
def fetch_page_async(query, page_size, start_cursor, predicate=None):
    """Fetches a page of Build entities.

    ndb tasklet-style generator: yields the async fetch and finishes via
    raise ndb.Return((entities, next_cursor_str)). Entities rejected by
    *predicate* are skipped but still consumed from the query, so the
    returned cursor never skips unseen matches.

    Raises errors.InvalidInputError for a malformed start_cursor.
    """
    assert query
    assert isinstance(page_size, int)
    assert start_cursor is None or isinstance(start_cursor, basestring)
    curs = None
    if start_cursor:
        try:
            curs = ndb.Cursor(urlsafe=start_cursor)
        except db.BadValueError as ex:
            msg = 'Bad cursor "%s": %s' % (start_cursor, ex)
            logging.warning(msg)
            raise errors.InvalidInputError(msg)
    entities = []
    skipped = 0
    pages = 0
    started = utils.utcnow()
    # Keep fetching datastore pages until we have page_size accepted
    # entities or the query is exhausted.
    while len(entities) < page_size:
        # It is important not to request more than needed in query.fetch_page,
        # otherwise the cursor we return to the user skips fetched, but not returned
        # entities, and the user will never see them.
        to_fetch = page_size - len(entities)

        logging.debug('fetch_page: ds query: %s', query)
        page, curs, more = yield query.fetch_page_async(
            to_fetch, start_cursor=curs)
        pages += 1

        for entity in page:
            if predicate and not predicate(entity):  # pragma: no cover
                skipped += 1
                continue
            entities.append(entity)
            if len(entities) >= page_size:
                break
        if not more:
            break
    logging.debug(
        'fetch_page: %dms ellapsed ',
        (utils.utcnow() - started).total_seconds() * 1000,
    )
    # Only hand back a cursor when the datastore reported more results.
    next_cursor_str = None
    if query_iter.has_next():
        next_cursor_str = query_iter.cursor_after().urlsafe()
    return entities, next_cursor_str
def get(self):
    """Render the blob browse page: validate the request's browse
    arguments, run the generated GQL query with cursor pagination, and
    pass the active filter/sort state through to the template."""
    args = BrowseArgs(self.request.GET)
    try:
        args.validate()
    except ValueError as e:
        # Invalid browse arguments: reply 400 with the message as the body.
        self.response.status = 400
        logging.info(e.message)
        self.response.write(e.message)
        return
    query = args.build_gql_query()
    blobs, cursor, more = ndb.gql(query).fetch_page(
        page_size=PAGE_SIZE,
        start_cursor=ndb.Cursor(urlsafe=self.request.get('start')))
    if cursor:
        # Serialize for embedding in the next-page link.
        cursor = cursor.urlsafe()
    context = {
        'blobs': blobs,
        'cursor': cursor,
        'more': more,
    }
    if args.has_filter:
        # Echo every active filter parameter back into the template so the
        # form stays populated.
        context['filter'] = args.filter
        if args.has_filename_prefix:
            context['filename_prefix'] = args.filename_prefix
        if args.has_content_type:
            context['content_type'] = args.content_type
        if args.has_size_op:
            context['size_op'] = args.size_op
        if args.has_size_unit:
            context['size_unit'] = args.size_unit
        if args.has_size:
            context['size'] = args.size
        if args.has_creation_op:
            context['creation_op'] = args.creation_op
        if args.has_creation_start:
            context['creation_start'] = args.creation_start
        if args.has_creation_end:
            context['creation_end'] = args.creation_end
    else:
        # No filter active: expose sort column/direction (and its opposite,
        # for the column-header toggle links) instead.
        opp_sort_dir = args.sort_dir == 'asc' and 'desc' or 'asc'
        context['sort_col'] = args.sort_col
        context['sort_dir'] = args.sort_dir
        context['opp_sort_dir'] = opp_sort_dir
    self.render_response('index.html', **context)
def get(self):
    """Render bookmarks carrying BOTH the 'tag' and 'refine' tags of the
    current user, with cursor pagination (10 per page)."""
    tag_name = self.request.get('tag')
    refine = self.request.get('refine')
    tagq = Tags.query(Tags.user == users.get_current_user())
    tag1 = tagq.filter(Tags.name == tag_name).get()
    tag2 = tagq.filter(Tags.name == refine).get()
    # ROBUSTNESS FIX: either tag may not exist for this user; previously
    # tag1.key / tag2.key raised AttributeError on None. Redirect home
    # instead (same recovery as the single-tag filter handler).
    if not tag1 or not tag2:
        self.redirect('/')
        return
    bmq = Bookmarks.query(Bookmarks.user == users.get_current_user())
    bmq = bmq.filter(Bookmarks.tags == tag1.key)
    bmq = bmq.filter(Bookmarks.tags == tag2.key)
    bmq = bmq.order(-Bookmarks.data)
    c = ndb.Cursor(urlsafe=self.request.get('c'))
    bms, next_curs, more = bmq.fetch_page(10, start_cursor=c)
    if more:
        next_c = next_curs.urlsafe()
    else:
        next_c = None
    self.response.set_cookie('active-tab', 'refine')
    self.generate('home.html', {'bms': bms, 'c': next_c})
def default_index(self):
    """Generic listing endpoint for Model.

    Reserved query params (camelCase in the request, decamelized here):
    'paged' (JSON bool, default true), 'sort_by' (leading '-' = descending),
    'limit' (default 20) and 'cursor'. Every remaining param is treated as
    an equality filter on a Model property.

    Returns (meta, items): meta may carry a 'cursor' for the next page,
    items maps entity id -> dict of visible fields.
    """
    params = {decamelize(k): v for (k, v) in self.request.GET.iteritems()}
    paged = json.loads(params.pop('paged', 'true'))
    sort_by = params.pop('sort_by', None)
    limit = int(params.pop('limit', 20))
    cursor = ndb.Cursor(urlsafe=params.pop('cursor', None))
    # Optional whitelist of fields exposed by to_dict().
    visible_fields = getattr(Model, 'VISIBLE_FIELDS', None)
    query = Model.query()
    meta = {}
    if sort_by:
        # A '-' prefix on the sort key requests descending order.
        descending = False
        if sort_by.startswith('-'):
            descending = True
            sort_by = sort_by[1:]
        if not hasattr(Model, decamelize(sort_by)):
            raise UserFacingError(
                "Couldn't sort by non-existent property %s" % sort_by)
        prop = getattr(Model, decamelize(sort_by))
        if descending:
            query = query.order(-prop)
        else:
            query = query.order(prop)
    # Everything left in params becomes an equality filter.
    for (k, v) in params.iteritems():
        if not hasattr(Model, k):
            raise UserFacingError(
                "Couldn't query on non-existent field %s" % k)
        query = query.filter(getattr(Model, k) == v)
    if not paged:
        results = query.fetch()
    else:
        results, next_cursor, has_more = query.fetch_page(
            limit, start_cursor=cursor)
        if has_more:
            meta.update({'cursor': next_cursor.urlsafe()})
    items = {
        entity.key.id(): entity.to_dict(include=visible_fields)
        for entity in results
    }
    return (meta, items)
def clean_db(self):
    """Delete all test ("tale") entities whose id starts with self.PREFIX,
    batch by batch, for each of the listed entity kinds."""
    MAX_PER_BATCH = 300
    entities = [Transacao, Conta, Dispositivo]
    for entity in entities:
        print "Deleting all tale related instances of ", entity.__name__
        cursor = ndb.Cursor()
        has_more = True
        while has_more:
            # Key-range query: everything at or after Key(kind, PREFIX).
            instances, cursor, has_more = entity.query(
                entity.key >= ndb.Key(entity, self.PREFIX)).fetch_page(
                MAX_PER_BATCH, start_cursor=cursor)
            next_batch = [
                instance.key for instance in instances
                if unicode(instance.id).startswith(self.PREFIX)
            ]
            if next_batch:
                ndb.delete_multi(next_batch)
            else:
                # NOTE(review): stops at the first page with no prefix
                # matches — assumes prefixed ids are contiguous in key
                # order; confirm, otherwise later matches are skipped.
                break
    print "Done deleting all tale related instances"
def query(self, query_definition):
    """Execute *query_definition* against the datastore.

    Applies the optional ancestor, the field filters, and the optional
    ordering, then fetches one page of at most max_results entities.
    Returns a Results(entities, pagination_token) where the token is None
    when no further pages exist.
    """
    self.validate_query(query_definition)

    # Root the query at the ancestor when one is given.
    if query_definition.ancestor_id:
        ancestor_key = ndb.Key(self.ancestor_type,
                               query_definition.ancestor_id)
        query = self.model_type.query(ancestor=ancestor_key)
    else:
        query = self.model_type.query()

    # Map each supported operator to the ndb comparison it produces.
    comparisons = {
        Operator.EQUALS: lambda prop, val: prop == val,
        Operator.LESS_THAN: lambda prop, val: prop < val,
        Operator.GREATER_THAN: lambda prop, val: prop > val,
        Operator.LESS_THAN_OR_EQUALS: lambda prop, val: prop <= val,
        Operator.GREATER_THAN_OR_EQUALS: lambda prop, val: prop >= val,
    }
    for field_filter in query_definition.field_filters:
        (search_property, search_value) = self.get_search_property_and_value(
            field_filter.field_name, field_filter.value)
        operator = field_filter.operator
        assert operator in comparisons, "Invalid operator: {}".format(operator)
        query = query.filter(comparisons[operator](search_property,
                                                   search_value))

    if query_definition.order_by:
        order_property = self.get_search_property_and_value(
            query_definition.order_by.field_name, None)[0]
        if not query_definition.order_by.ascending:
            order_property = -order_property
        query = query.order(order_property)

    cursor = None
    if query_definition.pagination_token:
        cursor = ndb.Cursor(urlsafe=query_definition.pagination_token)
    entities, next_cursor, has_more = query.fetch_page(
        query_definition.max_results, start_cursor=cursor)

    result_token = next_cursor.urlsafe() if has_more else None
    return Results(entities, result_token)
def get(self):
    """Render the current user's bookmarks carrying the requested tag,
    with cursor pagination; redirect home when the tag does not exist."""
    tag_name = self.request.get('tag')
    tag_obj = Tags.query(Tags.user == users.get_current_user())
    tag_obj = tag_obj.filter(Tags.name == tag_name).get()
    # BUG FIX: the query previously dereferenced tag_obj.key BEFORE the
    # `if tag_obj` guard, so a missing tag raised AttributeError instead of
    # redirecting. Build the query only once we know the tag exists.
    if tag_obj:
        bmq = Bookmarks.query(Bookmarks.user == users.get_current_user())
        bmq = bmq.filter(Bookmarks.tags == tag_obj.key)
        bmq = bmq.order(-Bookmarks.data)
        c = ndb.Cursor(urlsafe=self.request.get('c'))
        bms, next_curs, more = bmq.fetch_page(10, start_cursor=c)
        if more:
            next_c = next_curs.urlsafe()
        else:
            next_c = None
        # Related tags to offer as refinements (excluding the active one).
        tagset = utils.tag_set(bmq)
        tagset.remove(tag_obj.key)
        self.response.set_cookie('active-tab', 'filter')
        self.generate('home.html', {'tag_obj': tag_obj, 'bms': bms,
                                    'tags': tagset, 'c': next_c})
    else:
        self.redirect('/')
def api_get_transactions(asset_id, transaction_type, cursor=None):
    """Return one page (10) of payment transactions for an asset.

    transaction_type selects the query: u"confirmed" or u"pending"; any
    other value yields an empty response with a null cursor. The response
    cursor is only set when more pages exist.
    """
    app_user = get_app_user_from_asset_id(asset_id)
    rto = GetPaymentTransactionsResponseTO()
    rto.transactions = []
    if transaction_type == u"confirmed":
        qry = get_transactions(app_user, get_token_from_asset_id(asset_id))
    elif transaction_type == u"pending":
        qry = get_transaction_of_type_pending(app_user, get_token_from_asset_id(asset_id))
    else:
        # Unknown type: empty result, nothing to page through.
        rto.cursor = None
        return rto
    transaction_models, new_cursor, has_more = qry.fetch_page(10, start_cursor=ndb.Cursor(
        urlsafe=cursor) if cursor else None)
    for t in transaction_models:
        # Confirmed transactions are identified by block height, pending
        # ones by their own id.
        if transaction_type == u"confirmed":
            trans_id = unicode(t.height)
        else:
            trans_id = unicode(t.id)
        to = PaymentProviderTransactionTO()
        to.id = trans_id
        to.type = u'transfer'
        to.name = u'Transfer %s' % trans_id
        to.amount = t.amount
        to.currency = t.token
        to.memo = t.memo
        to.timestamp = t.timestamp
        # from_user is absent e.g. for minted/incoming-only transactions —
        # TODO confirm against the transaction model.
        to.from_asset_id = get_asset_id_from_token(t.from_user, t.token) if t.from_user else None
        to.to_asset_id = get_asset_id_from_token(t.to_user, t.token)
        rto.transactions.append(to)
    rto.cursor = unicode(new_cursor.urlsafe()) if has_more and new_cursor else None
    return rto
def list_flow_runs_by_user(username, cursor, page_size):
    """Fetch one page of FlowRun entities belonging to *username*."""
    start = ndb.Cursor(urlsafe=cursor)
    qry = FlowRun.list_by_user(username)
    return qry.fetch_page(page_size, start_cursor=start)
def get(self, model, action): item = None items = None items_count = 0 cursor = self.request.get('cursor') next_c = None per_page = getattr(settings, "PER_PAGE", 10) item_id = self.request.GET.get("id", None) msg = self.request.GET.get("msg", None) if not model in MODELS: raise Exception("Model `%s` not registered in `models_admin.py`" % model) m = eval(model) # model template for given action path = "/admin/%s/" % model.lower() template = m.Meta.__dict__.get(action, getattr(self, action)) fields = m.Meta().fields # if no template - fallback to default one if not os.path.isfile('./templates' + path + template): path = "/admin/" # item if item_id: item = m.get_by_id(int(item_id)) if action == "u": for f in fields: f.initial = getattr(item, f.field) # list if action == "r": items = m.query() if hasattr(m.Meta(), "order_by"): items = items.order(m.Meta().order_by) items_count = items.count() # -- PAGINATE RESULTS -- cursor = ndb.Cursor(urlsafe=cursor) items, next_curs, more = items.fetch_page(per_page, start_cursor=cursor) if more: next_c = next_curs.urlsafe() else: next_c = None content = { "model": model, "action": actions_map[action], "fields": fields, "item": item, "items": items, "items_count": items_count, "cursor": next_c, "per_page": per_page, "msg": msg } self.render_template(path + template, **content)
def post(self):
    """Admin trip query: render one page of trips within a pickup-date
    range, optionally restricted to one driver, with forward/backward
    cursor pagination."""
    TRIPS_PER_PAGE = int(self.request.get("TRIPS_PER_PAGE"))
    # IF prev BUTTON WAS PRESSED
    is_prev = self.request.get("prev", "") != ""
    # DRIVER QUERY (url-safe key of the driver entity, optional)
    driverId = self.request.get("driverId", "")
    # DATE FROM QUERY
    query_date_from = datetime.strptime(
        self.request.get("query_date_from"), "%Y-%m-%d")
    # DATE TO QUERY (inclusive; one day is added below)
    query_date_to = datetime.strptime(self.request.get("query_date_to"),
                                      "%Y-%m-%d")
    # CURSORS carried over from the previous render
    prev_cursor_before = self.request.get('prev_cursor', default_value="")
    next_cursor_before = self.request.get('next_cursor', default_value="")

    # if a driver_id was input, use it to filter (as an ancestor)
    if driverId == "":
        query = Trip.query(
            ndb.AND(
                Trip.pickup_datetime >= query_date_from,
                Trip.pickup_datetime < query_date_to + timedelta(days=1)))
    else:
        # BUG FIX: `ancestor=` requires a Key; the original passed the
        # entity (ndb.Key(...).get()), which ndb rejects.
        # TODO: use memcache here
        driver_key = ndb.Key(urlsafe=driverId)
        query = Trip.query(ndb.AND(
            Trip.pickup_datetime >= query_date_from,
            Trip.pickup_datetime < query_date_to + timedelta(days=1)),
            ancestor=driver_key)

    # --------- PAGE MANAGEMENT
    # Forward pages walk ascending pickup time; "prev" pages run the
    # descending query from the reversed cursor.
    query_forward = query.order(Trip.pickup_datetime)
    query_reverse = query.order(-Trip.pickup_datetime)
    if is_prev:
        qry = query_reverse
        cursor = ndb.Cursor(urlsafe=prev_cursor_before).reversed(
        ) if prev_cursor_before != "" else None
    else:
        qry = query_forward
        cursor = ndb.Cursor(urlsafe=next_cursor_before
                            ) if next_cursor_before != "" else None

    # trips in page
    query_results, cursor, more = qry.fetch_page(TRIPS_PER_PAGE,
                                                 start_cursor=cursor)

    # get driver info for each trip, memoized per-request in `drivers`
    # and cross-request in memcache
    results = []
    drivers = {}
    for result in query_results:
        taxiKey = result.key.parent()
        taxi_id = taxiKey.urlsafe()
        if taxi_id not in drivers:
            taxi = memcache.get(taxi_id)
            if taxi is None:
                taxi = taxiKey.get()
                if taxi is None:
                    # BUG FIX: the original logged an undefined `trip_id`
                    # (NameError) and stored a plain dict whose
                    # `.driver_name` attribute access crashed later.
                    logging.error(
                        "Received trip exists for taxiKey: %s, but taxi doesn't exist",
                        taxi_id)
                    drivers[taxi_id] = "MISSING"
                else:
                    memcache.add(taxi_id, taxi)
                    drivers[taxi_id] = taxi.driver_name
            else:
                drivers[taxi_id] = taxi.driver_name
        total_result = {
            "driver": drivers[taxi_id],
            "trip": result
        }
        results.append(total_result)

    # Build the cursors handed to the template: after a backwards page the
    # fetched cursor points "up" the list, so reverse it again.
    if is_prev:
        prev_cursor_url = cursor.reversed().urlsafe() if more else ""
        next_cursor_url = prev_cursor_before
    else:
        prev_cursor_url = next_cursor_before
        next_cursor_url = cursor.urlsafe() if more else ""
    # ---------
    template_values = {
        'results': results,
        'query_date_from': query_date_from,
        'query_date_to': query_date_to,
        'driverId': driverId,
        'prev_cursor': prev_cursor_url,
        'next_cursor': next_cursor_url,
        'TRIPS_PER_PAGE': TRIPS_PER_PAGE
    }
    template = JINJA_ENVIRONMENT.get_template('admin-query-index.html')
    self.response.write(template.render(template_values))
def post(self):
    """Create GitHub issues for every stored-but-unsent Report of a period.

    The period comes either directly from the request or, when absent, from
    the StatsRun entity (task-chained invocation). Pages through the
    matching Reports, calling self.send_issue() on each; on
    DeadlineExceededError it re-enqueues itself with the current cursor.
    """
    # Create instance variable to track if parameters came from a direct
    # request or if they came through the Period entity.
    self.params_from_request = None
    params = None

    s = "Version: %s\n" % __version__
    s += "Arguments from POST:"
    for arg in self.request.arguments():
        s += '\n%s:%s' % (arg, self.request.get(arg))
    logging.info(s)

    # Try to get period from the request in case GitHubStore was called directly
    self.period = self.request.get("period", None)

    # If real period not in request, try to get parameters from StatsRun entity
    # in case GetEvents was called from a previous task.
    if self.period is None or len(self.period) == 0:
        run_key = ndb.Key("StatsRun", 5759180434571264)
        run_entity = run_key.get()
        self.period = run_entity.period
        self.params_from_request = False
        s = "Version: %s\n" % __version__
        s += "Period %s determined from StatsRun entity: %s" % (self.period, params)
        logging.info(s)
    else:
        self.params_from_request = True
        s = "Version: %s\n" % __version__
        s += "Period %s determined from request: %s" % (self.period, self.request)
        logging.info(s)

    # Still no period: nothing to do.
    if self.period is None or len(self.period) == 0:
        self.error(400)
        resp = {
            "status": "error",
            "message": "Period parameter was not provided."
        }
        s = "Version: %s\n" % __version__
        s += "%s" % resp
        logging.error(s)
        self.response.write(json.dumps(resp) + "\n")
        return

    # If Period not already stored, halt
    period_key = ndb.Key("Period", self.period)
    period_entity = period_key.get()
    if not period_entity:
        self.error(400)
        resp = {
            "status": "error",
            "message": "Provided period does not exist in datastore",
            "data": {
                "period": self.period
            }
        }
        logging.error(resp)
        self.response.write(json.dumps(resp) + "\n")
        return

    # Get the remaining parameters based on the parameter source
    if self.params_from_request == True:
        # Get parameters from request
        # 'testing' parameter
        try:
            self.testing = self.request.get('testing').lower() == 'true'
        except Exception:
            # default value for 'testing' if not provided is False
            self.testing = False
        # 'gbifdatasetid' parameter
        try:
            self.gbifdatasetid = self.request.get('gbifdatasetid').lower()
        except Exception:
            # default value for 'gbifdatasetid' if not provided is None
            self.gbifdatasetid = None
    else:
        # Get parameters from Period entity
        # 'testing' parameter
        try:
            self.testing = period_entity.testing
        except Exception:
            self.testing = False
        # 'gbifdatasetid' parameter can't be used when called from another
        # task. BUG FIX: it was never assigned on this path, so the check
        # below raised AttributeError.
        self.gbifdatasetid = None

    # Prepare list of reports to create issues for
    # Base query
    reports_q = Report.query()
    # Only Reports for current Period
    reports_q = reports_q.filter(Report.reported_period == period_key)
    # Only those with 'issue_sent' property set to False
    reports_q = reports_q.filter(Report.issue_sent == False)
    # Only those with 'report_stored' property set to True
    reports_q = reports_q.filter(Report.stored == True)
    # And if there is a gbifdatasetid, filter on that too
    if self.gbifdatasetid is not None and len(self.gbifdatasetid) > 0:
        dataset_key = ndb.Key("Dataset", self.gbifdatasetid)
        # NOTE(review): ndb.Key construction never returns None, so this
        # branch looks unreachable; an existence check would need
        # dataset_key.get(). Preserved as-is.
        if dataset_key is None:
            s = "Version: %s\n" % __version__
            s += "gbifdatasetid %s not found in data store." % self.gbifdatasetid
            logging.error(s)
            return
        else:
            reports_q = reports_q.filter(
                Report.reported_resource == dataset_key)
    # Store final query
    reports_query = reports_q

    s = "Version: %s\n" % __version__
    s += "Found %d Reports to send issues for " % reports_query.count()
    s += "from query %s" % reports_query
    logging.info(s)

    # Get cursor from request, if any
    cursor_str = self.request.get("cursor", None)
    cursor = None
    if cursor_str:
        cursor = ndb.Cursor(urlsafe=cursor_str)
        s = "Version: %s\n" % __version__
        s += "Cursor built: %s" % cursor
        logging.info(s)

    # Initialize loop. BUG FIX: the original compared the bound method
    # (`reports_query.count == 0`, always False) instead of calling it.
    more = reports_query.count() != 0

    # Loop until DeadlineExceededError
    # or until there are no more reports left to store
    try:
        # Keep track of datasets for which Reports have been stored in this run
        datasets = []
        while more is True:
            s = "Version: %s\n" % __version__
            s += "Issuing query: %s" % reports_query
            logging.info(s)
            # Get next (or first) round of results
            report, new_cursor, more = reports_query.fetch_page(
                PAGE_SIZE, start_cursor=cursor
            )
            # Check to see if there is actually another report
            if report is not None and len(report) != 0:
                # Send issue
                self.send_issue(report[0])
                gbifdatasetid = report[0].reported_resource.id()
                datasets.append(gbifdatasetid)
            if more is True:
                cursor = new_cursor

        s = "Version: %s\n" % __version__
        s += "Finished creating all %d issues" % len(datasets)
        logging.info(s)
        resp = {
            "status": "success",
            "message": s,
        }
        period_entity.status = "done"
        mail.send_mail(
            sender=EMAIL_SENDER,
            to=EMAIL_ADMINS,
            subject="Usage reports for period %s" % self.period,
            body="""
Hey there!

Just a note to let you know the GitHubIssue process for period %s stats
has successfully finished. Reports have been stored in their respective
GitHub repositories and issues have been created.

Issues submitted for (%d) datasets:
%s

Code version: %s
""" % (self.period, len(datasets), datasets, __version__)
        )
        # In any case, store period data, show message and finish
        period_entity.put()
        s = "Version: %s\n" % __version__
        s += "Response: %s" % resp
        logging.info(s)
        self.response.write(json.dumps(resp) + "\n")
        return
    except DeadlineExceededError:
        # Launch new instance with current (failed) cursor
        taskqueue.add(url=URI_GITHUB_ISSUE,
                      params={"cursor": cursor.urlsafe()},
                      queue_name=QUEUENAME)
        s = "Version: %s\n" % __version__
        s += "Caught a DeadlineExceededError. Relaunching."
        logging.info(s)
        resp = {
            "status": "in progress",
            "message": s,
            "data": {
                "period": self.period,
                "cursor": cursor.urlsafe()
            }
        }
        logging.info(resp)
        self.response.write(json.dumps(resp) + "\n")
        return